1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
22 /*@@ This file should be rewritten to use an arbitrary precision
23 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
24 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
25 @@ The routines that translate from the ap rep should
26 @@ warn if precision et. al. is lost.
27 @@ This would also make life easier when this technology is used
28 @@ for cross-compilers. */
30 /* The entry points in this file are fold, size_int_wide, size_binop
33 fold takes a tree as argument and returns a simplified tree.
35 size_binop takes a tree code for an arithmetic operation
36 and two operands that are trees, and produces a tree for the
37 result, assuming the type comes from `sizetype'.
39 size_int takes an integer value, and creates a tree constant
40 with type from `sizetype'.
42 force_fit_type takes a constant, an overflowable flag and prior
43 overflow indicators. It forces the value to fit the type and sets
44 TREE_OVERFLOW and TREE_CONSTANT_OVERFLOW as appropriate. */
48 #include "coretypes.h"
59 #include "langhooks.h"
62 /* Non-zero if we are folding constants inside an initializer; zero
64 int folding_initializer = 0;
66 /* The following constants represent a bit based encoding of GCC's
67 comparison operators. This encoding simplifies transformations
68 on relational comparison operators, such as AND and OR. */
69 enum comparison_code {
88 static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
89 static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *);
90 static bool negate_mathfn_p (enum built_in_function);
91 static bool negate_expr_p (tree);
92 static tree negate_expr (tree);
93 static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
94 static tree associate_trees (tree, tree, enum tree_code, tree);
95 static tree const_binop (enum tree_code, tree, tree, int);
96 static enum comparison_code comparison_to_compcode (enum tree_code);
97 static enum tree_code compcode_to_comparison (enum comparison_code);
98 static tree combine_comparisons (enum tree_code, enum tree_code,
99 enum tree_code, tree, tree, tree);
100 static int truth_value_p (enum tree_code);
101 static int operand_equal_for_comparison_p (tree, tree, tree);
102 static int twoval_comparison_p (tree, tree *, tree *, int *);
103 static tree eval_subst (tree, tree, tree, tree, tree);
104 static tree pedantic_omit_one_operand (tree, tree, tree);
105 static tree distribute_bit_expr (enum tree_code, tree, tree, tree);
106 static tree make_bit_field_ref (tree, tree, int, int, int);
107 static tree optimize_bit_field_compare (enum tree_code, tree, tree, tree);
108 static tree decode_field_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
109 enum machine_mode *, int *, int *,
111 static int all_ones_mask_p (tree, int);
112 static tree sign_bit_p (tree, tree);
113 static int simple_operand_p (tree);
114 static tree range_binop (enum tree_code, tree, tree, int, tree, int);
115 static tree range_predecessor (tree);
116 static tree range_successor (tree);
117 static tree make_range (tree, int *, tree *, tree *);
118 static tree build_range_check (tree, tree, int, tree, tree);
119 static int merge_ranges (int *, tree *, tree *, int, tree, tree, int, tree,
121 static tree fold_range_test (enum tree_code, tree, tree, tree);
122 static tree fold_cond_expr_with_comparison (tree, tree, tree, tree);
123 static tree unextend (tree, int, int, tree);
124 static tree fold_truthop (enum tree_code, tree, tree, tree);
125 static tree optimize_minmax_comparison (enum tree_code, tree, tree, tree);
126 static tree extract_muldiv (tree, tree, enum tree_code, tree);
127 static tree extract_muldiv_1 (tree, tree, enum tree_code, tree);
128 static int multiple_of_p (tree, tree, tree);
129 static tree fold_binary_op_with_conditional_arg (enum tree_code, tree,
132 static bool fold_real_zero_addition_p (tree, tree, int);
133 static tree fold_mathfn_compare (enum built_in_function, enum tree_code,
135 static tree fold_inf_compare (enum tree_code, tree, tree, tree);
136 static tree fold_div_compare (enum tree_code, tree, tree, tree);
137 static bool reorder_operands_p (tree, tree);
138 static tree fold_negate_const (tree, tree);
139 static tree fold_not_const (tree, tree);
140 static tree fold_relational_const (enum tree_code, tree, tree, tree);
141 static int native_encode_expr (tree, unsigned char *, int);
142 static tree native_interpret_expr (tree, unsigned char *, int);
145 /* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
146 overflow. Suppose A, B and SUM have the same respective signs as A1, B1,
147 and SUM1. Then this yields nonzero if overflow occurred during the
150 Overflow occurs if A and B have the same sign, but A and SUM differ in
151 sign. Use `^' to test whether signs differ, and `< 0' to isolate the
153 #define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
155 /* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
156 We do that by representing the two-word integer in 4 words, with only
157 HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
158 number. The value of the word is LOWPART + HIGHPART * BASE. */
161 ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
162 #define HIGHPART(x) \
163 ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
164 #define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)
166 /* Unpack a two-word integer into 4 words.
167 LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
168 WORDS points to the array of HOST_WIDE_INTs. */
171 encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
173 words[0] = LOWPART (low);
174 words[1] = HIGHPART (low);
175 words[2] = LOWPART (hi);
176 words[3] = HIGHPART (hi);
179 /* Pack an array of 4 words into a two-word integer.
180 WORDS points to the array of words.
181 The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces. */
184 decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
187 *low = words[0] + words[1] * BASE;
188 *hi = words[2] + words[3] * BASE;
191 /* T is an INT_CST node. OVERFLOWABLE indicates if we are interested
192 in overflow of the value, when >0 we are only interested in signed
193 overflow, for <0 we are interested in any overflow. OVERFLOWED
194 indicates whether overflow has already occurred. CONST_OVERFLOWED
195 indicates whether constant overflow has already occurred. We force
196 T's value to be within range of T's type (by setting to 0 or 1 all
197 the bits outside the type's range). We set TREE_OVERFLOWED if,
198 OVERFLOWED is nonzero,
199 or OVERFLOWABLE is >0 and signed overflow occurs
200 or OVERFLOWABLE is <0 and any overflow occurs
201 We set TREE_CONSTANT_OVERFLOWED if,
202 CONST_OVERFLOWED is nonzero
203 or we set TREE_OVERFLOWED.
204 We return either the original T, or a copy. */
207 force_fit_type (tree t, int overflowable,
208 bool overflowed, bool overflowed_const)
210 unsigned HOST_WIDE_INT low;
213 int sign_extended_type;
215 gcc_assert (TREE_CODE (t) == INTEGER_CST);
217 low = TREE_INT_CST_LOW (t);
218 high = TREE_INT_CST_HIGH (t);
220 if (POINTER_TYPE_P (TREE_TYPE (t))
221 || TREE_CODE (TREE_TYPE (t)) == OFFSET_TYPE)
224 prec = TYPE_PRECISION (TREE_TYPE (t));
225 /* Size types *are* sign extended. */
226 sign_extended_type = (!TYPE_UNSIGNED (TREE_TYPE (t))
227 || (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
228 && TYPE_IS_SIZETYPE (TREE_TYPE (t))));
230 /* First clear all bits that are beyond the type's precision. */
232 if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
234 else if (prec > HOST_BITS_PER_WIDE_INT)
235 high &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
239 if (prec < HOST_BITS_PER_WIDE_INT)
240 low &= ~((HOST_WIDE_INT) (-1) << prec);
243 if (!sign_extended_type)
244 /* No sign extension */;
245 else if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
246 /* Correct width already. */;
247 else if (prec > HOST_BITS_PER_WIDE_INT)
249 /* Sign extend top half? */
250 if (high & ((unsigned HOST_WIDE_INT)1
251 << (prec - HOST_BITS_PER_WIDE_INT - 1)))
252 high |= (HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT);
254 else if (prec == HOST_BITS_PER_WIDE_INT)
256 if ((HOST_WIDE_INT)low < 0)
261 /* Sign extend bottom half? */
262 if (low & ((unsigned HOST_WIDE_INT)1 << (prec - 1)))
265 low |= (HOST_WIDE_INT)(-1) << prec;
269 /* If the value changed, return a new node. */
270 if (overflowed || overflowed_const
271 || low != TREE_INT_CST_LOW (t) || high != TREE_INT_CST_HIGH (t))
273 t = build_int_cst_wide (TREE_TYPE (t), low, high);
277 || (overflowable > 0 && sign_extended_type))
280 TREE_OVERFLOW (t) = 1;
281 TREE_CONSTANT_OVERFLOW (t) = 1;
283 else if (overflowed_const)
286 TREE_CONSTANT_OVERFLOW (t) = 1;
293 /* Add two doubleword integers with doubleword result.
294 Each argument is given as two `HOST_WIDE_INT' pieces.
295 One argument is L1 and H1; the other, L2 and H2.
296 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
299 add_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
300 unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
301 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
303 unsigned HOST_WIDE_INT l;
307 h = h1 + h2 + (l < l1);
311 return OVERFLOW_SUM_SIGN (h1, h2, h);
314 /* Negate a doubleword integer with doubleword result.
315 Return nonzero if the operation overflows, assuming it's signed.
316 The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
317 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
320 neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
321 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
327 return (*hv & h1) < 0;
337 /* Multiply two doubleword integers with doubleword result.
338 Return nonzero if the operation overflows, assuming it's signed.
339 Each argument is given as two `HOST_WIDE_INT' pieces.
340 One argument is L1 and H1; the other, L2 and H2.
341 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
344 mul_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
345 unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
346 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
348 HOST_WIDE_INT arg1[4];
349 HOST_WIDE_INT arg2[4];
350 HOST_WIDE_INT prod[4 * 2];
351 unsigned HOST_WIDE_INT carry;
353 unsigned HOST_WIDE_INT toplow, neglow;
354 HOST_WIDE_INT tophigh, neghigh;
356 encode (arg1, l1, h1);
357 encode (arg2, l2, h2);
359 memset (prod, 0, sizeof prod);
361 for (i = 0; i < 4; i++)
364 for (j = 0; j < 4; j++)
367 /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000. */
368 carry += arg1[i] * arg2[j];
369 /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF. */
371 prod[k] = LOWPART (carry);
372 carry = HIGHPART (carry);
377 decode (prod, lv, hv); /* This ignores prod[4] through prod[4*2-1] */
379 /* Check for overflow by calculating the top half of the answer in full;
380 it should agree with the low half's sign bit. */
381 decode (prod + 4, &toplow, &tophigh);
384 neg_double (l2, h2, &neglow, &neghigh);
385 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
389 neg_double (l1, h1, &neglow, &neghigh);
390 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
392 return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
395 /* Shift the doubleword integer in L1, H1 left by COUNT places
396 keeping only PREC bits of result.
397 Shift right if COUNT is negative.
398 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
399 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
402 lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
403 HOST_WIDE_INT count, unsigned int prec,
404 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
406 unsigned HOST_WIDE_INT signmask;
410 rshift_double (l1, h1, -count, prec, lv, hv, arith);
414 if (SHIFT_COUNT_TRUNCATED)
417 if (count >= 2 * HOST_BITS_PER_WIDE_INT)
419 /* Shifting by the host word size is undefined according to the
420 ANSI standard, so we must handle this as a special case. */
424 else if (count >= HOST_BITS_PER_WIDE_INT)
426 *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
431 *hv = (((unsigned HOST_WIDE_INT) h1 << count)
432 | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
436 /* Sign extend all bits that are beyond the precision. */
438 signmask = -((prec > HOST_BITS_PER_WIDE_INT
439 ? ((unsigned HOST_WIDE_INT) *hv
440 >> (prec - HOST_BITS_PER_WIDE_INT - 1))
441 : (*lv >> (prec - 1))) & 1);
443 if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
445 else if (prec >= HOST_BITS_PER_WIDE_INT)
447 *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
448 *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
453 *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
454 *lv |= signmask << prec;
458 /* Shift the doubleword integer in L1, H1 right by COUNT places
459 keeping only PREC bits of result. COUNT must be positive.
460 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
461 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
464 rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
465 HOST_WIDE_INT count, unsigned int prec,
466 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
469 unsigned HOST_WIDE_INT signmask;
472 ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
475 if (SHIFT_COUNT_TRUNCATED)
478 if (count >= 2 * HOST_BITS_PER_WIDE_INT)
480 /* Shifting by the host word size is undefined according to the
481 ANSI standard, so we must handle this as a special case. */
485 else if (count >= HOST_BITS_PER_WIDE_INT)
488 *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
492 *hv = (unsigned HOST_WIDE_INT) h1 >> count;
494 | ((unsigned HOST_WIDE_INT) h1 << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
497 /* Zero / sign extend all bits that are beyond the precision. */
499 if (count >= (HOST_WIDE_INT)prec)
504 else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
506 else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
508 *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
509 *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
514 *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
515 *lv |= signmask << (prec - count);
519 /* Rotate the doubleword integer in L1, H1 left by COUNT places
520 keeping only PREC bits of result.
521 Rotate right if COUNT is negative.
522 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
525 lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
526 HOST_WIDE_INT count, unsigned int prec,
527 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
529 unsigned HOST_WIDE_INT s1l, s2l;
530 HOST_WIDE_INT s1h, s2h;
536 lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
537 rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
542 /* Rotate the doubleword integer in L1, H1 left by COUNT places
543 keeping only PREC bits of result. COUNT must be positive.
544 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
547 rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
548 HOST_WIDE_INT count, unsigned int prec,
549 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
551 unsigned HOST_WIDE_INT s1l, s2l;
552 HOST_WIDE_INT s1h, s2h;
558 rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
559 lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
564 /* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
565 for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
566 CODE is a tree code for a kind of division, one of
567 TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
569 It controls how the quotient is rounded to an integer.
570 Return nonzero if the operation overflows.
571 UNS nonzero says do unsigned division. */
574 div_and_round_double (enum tree_code code, int uns,
575 unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
576 HOST_WIDE_INT hnum_orig,
577 unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
578 HOST_WIDE_INT hden_orig,
579 unsigned HOST_WIDE_INT *lquo,
580 HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
584 HOST_WIDE_INT num[4 + 1]; /* extra element for scaling. */
585 HOST_WIDE_INT den[4], quo[4];
587 unsigned HOST_WIDE_INT work;
588 unsigned HOST_WIDE_INT carry = 0;
589 unsigned HOST_WIDE_INT lnum = lnum_orig;
590 HOST_WIDE_INT hnum = hnum_orig;
591 unsigned HOST_WIDE_INT lden = lden_orig;
592 HOST_WIDE_INT hden = hden_orig;
595 if (hden == 0 && lden == 0)
596 overflow = 1, lden = 1;
598 /* Calculate quotient sign and convert operands to unsigned. */
604 /* (minimum integer) / (-1) is the only overflow case. */
605 if (neg_double (lnum, hnum, &lnum, &hnum)
606 && ((HOST_WIDE_INT) lden & hden) == -1)
612 neg_double (lden, hden, &lden, &hden);
616 if (hnum == 0 && hden == 0)
617 { /* single precision */
619 /* This unsigned division rounds toward zero. */
625 { /* trivial case: dividend < divisor */
626 /* hden != 0 already checked. */
633 memset (quo, 0, sizeof quo);
635 memset (num, 0, sizeof num); /* to zero 9th element */
636 memset (den, 0, sizeof den);
638 encode (num, lnum, hnum);
639 encode (den, lden, hden);
641 /* Special code for when the divisor < BASE. */
642 if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
644 /* hnum != 0 already checked. */
645 for (i = 4 - 1; i >= 0; i--)
647 work = num[i] + carry * BASE;
648 quo[i] = work / lden;
654 /* Full double precision division,
655 with thanks to Don Knuth's "Seminumerical Algorithms". */
656 int num_hi_sig, den_hi_sig;
657 unsigned HOST_WIDE_INT quo_est, scale;
659 /* Find the highest nonzero divisor digit. */
660 for (i = 4 - 1;; i--)
667 /* Insure that the first digit of the divisor is at least BASE/2.
668 This is required by the quotient digit estimation algorithm. */
670 scale = BASE / (den[den_hi_sig] + 1);
672 { /* scale divisor and dividend */
674 for (i = 0; i <= 4 - 1; i++)
676 work = (num[i] * scale) + carry;
677 num[i] = LOWPART (work);
678 carry = HIGHPART (work);
683 for (i = 0; i <= 4 - 1; i++)
685 work = (den[i] * scale) + carry;
686 den[i] = LOWPART (work);
687 carry = HIGHPART (work);
688 if (den[i] != 0) den_hi_sig = i;
695 for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
697 /* Guess the next quotient digit, quo_est, by dividing the first
698 two remaining dividend digits by the high order quotient digit.
699 quo_est is never low and is at most 2 high. */
700 unsigned HOST_WIDE_INT tmp;
702 num_hi_sig = i + den_hi_sig + 1;
703 work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
704 if (num[num_hi_sig] != den[den_hi_sig])
705 quo_est = work / den[den_hi_sig];
709 /* Refine quo_est so it's usually correct, and at most one high. */
710 tmp = work - quo_est * den[den_hi_sig];
712 && (den[den_hi_sig - 1] * quo_est
713 > (tmp * BASE + num[num_hi_sig - 2])))
716 /* Try QUO_EST as the quotient digit, by multiplying the
717 divisor by QUO_EST and subtracting from the remaining dividend.
718 Keep in mind that QUO_EST is the I - 1st digit. */
721 for (j = 0; j <= den_hi_sig; j++)
723 work = quo_est * den[j] + carry;
724 carry = HIGHPART (work);
725 work = num[i + j] - LOWPART (work);
726 num[i + j] = LOWPART (work);
727 carry += HIGHPART (work) != 0;
730 /* If quo_est was high by one, then num[i] went negative and
731 we need to correct things. */
732 if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
735 carry = 0; /* add divisor back in */
736 for (j = 0; j <= den_hi_sig; j++)
738 work = num[i + j] + den[j] + carry;
739 carry = HIGHPART (work);
740 num[i + j] = LOWPART (work);
743 num [num_hi_sig] += carry;
746 /* Store the quotient digit. */
751 decode (quo, lquo, hquo);
754 /* If result is negative, make it so. */
756 neg_double (*lquo, *hquo, lquo, hquo);
758 /* Compute trial remainder: rem = num - (quo * den) */
759 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
760 neg_double (*lrem, *hrem, lrem, hrem);
761 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
766 case TRUNC_MOD_EXPR: /* round toward zero */
767 case EXACT_DIV_EXPR: /* for this one, it shouldn't matter */
771 case FLOOR_MOD_EXPR: /* round toward negative infinity */
772 if (quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio < 0 && rem != 0 */
775 add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
783 case CEIL_MOD_EXPR: /* round toward positive infinity */
784 if (!quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio > 0 && rem != 0 */
786 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
794 case ROUND_MOD_EXPR: /* round to closest integer */
796 unsigned HOST_WIDE_INT labs_rem = *lrem;
797 HOST_WIDE_INT habs_rem = *hrem;
798 unsigned HOST_WIDE_INT labs_den = lden, ltwice;
799 HOST_WIDE_INT habs_den = hden, htwice;
801 /* Get absolute values. */
803 neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
805 neg_double (lden, hden, &labs_den, &habs_den);
807 /* If (2 * abs (lrem) >= abs (lden)) */
808 mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
809 labs_rem, habs_rem, <wice, &htwice);
811 if (((unsigned HOST_WIDE_INT) habs_den
812 < (unsigned HOST_WIDE_INT) htwice)
813 || (((unsigned HOST_WIDE_INT) habs_den
814 == (unsigned HOST_WIDE_INT) htwice)
815 && (labs_den < ltwice)))
819 add_double (*lquo, *hquo,
820 (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
823 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
835 /* Compute true remainder: rem = num - (quo * den) */
836 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
837 neg_double (*lrem, *hrem, lrem, hrem);
838 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
842 /* If ARG2 divides ARG1 with zero remainder, carries out the division
843 of type CODE and returns the quotient.
844 Otherwise returns NULL_TREE. */
847 div_if_zero_remainder (enum tree_code code, tree arg1, tree arg2)
849 unsigned HOST_WIDE_INT int1l, int2l;
850 HOST_WIDE_INT int1h, int2h;
851 unsigned HOST_WIDE_INT quol, reml;
852 HOST_WIDE_INT quoh, remh;
853 tree type = TREE_TYPE (arg1);
854 int uns = TYPE_UNSIGNED (type);
856 int1l = TREE_INT_CST_LOW (arg1);
857 int1h = TREE_INT_CST_HIGH (arg1);
858 int2l = TREE_INT_CST_LOW (arg2);
859 int2h = TREE_INT_CST_HIGH (arg2);
861 div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
862 &quol, &quoh, &reml, &remh);
863 if (remh != 0 || reml != 0)
866 return build_int_cst_wide (type, quol, quoh);
869 /* Return true if the built-in mathematical function specified by CODE
870 is odd, i.e. -f(x) == f(-x). */
873 negate_mathfn_p (enum built_in_function code)
877 CASE_FLT_FN (BUILT_IN_ASIN):
878 CASE_FLT_FN (BUILT_IN_ASINH):
879 CASE_FLT_FN (BUILT_IN_ATAN):
880 CASE_FLT_FN (BUILT_IN_ATANH):
881 CASE_FLT_FN (BUILT_IN_CBRT):
882 CASE_FLT_FN (BUILT_IN_SIN):
883 CASE_FLT_FN (BUILT_IN_SINH):
884 CASE_FLT_FN (BUILT_IN_TAN):
885 CASE_FLT_FN (BUILT_IN_TANH):
894 /* Check whether we may negate an integer constant T without causing
898 may_negate_without_overflow_p (tree t)
900 unsigned HOST_WIDE_INT val;
904 gcc_assert (TREE_CODE (t) == INTEGER_CST);
906 type = TREE_TYPE (t);
907 if (TYPE_UNSIGNED (type))
910 prec = TYPE_PRECISION (type);
911 if (prec > HOST_BITS_PER_WIDE_INT)
913 if (TREE_INT_CST_LOW (t) != 0)
915 prec -= HOST_BITS_PER_WIDE_INT;
916 val = TREE_INT_CST_HIGH (t);
919 val = TREE_INT_CST_LOW (t);
920 if (prec < HOST_BITS_PER_WIDE_INT)
921 val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
922 return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
925 /* Determine whether an expression T can be cheaply negated using
926 the function negate_expr. */
929 negate_expr_p (tree t)
936 type = TREE_TYPE (t);
939 switch (TREE_CODE (t))
942 if (TYPE_UNSIGNED (type) || ! flag_trapv)
945 /* Check that -CST will not overflow type. */
946 return may_negate_without_overflow_p (t);
948 return INTEGRAL_TYPE_P (type)
949 && (TYPE_UNSIGNED (type)
950 || (flag_wrapv && !flag_trapv));
957 return negate_expr_p (TREE_REALPART (t))
958 && negate_expr_p (TREE_IMAGPART (t));
961 if (FLOAT_TYPE_P (type) && !flag_unsafe_math_optimizations)
963 /* -(A + B) -> (-B) - A. */
964 if (negate_expr_p (TREE_OPERAND (t, 1))
965 && reorder_operands_p (TREE_OPERAND (t, 0),
966 TREE_OPERAND (t, 1)))
968 /* -(A + B) -> (-A) - B. */
969 return negate_expr_p (TREE_OPERAND (t, 0));
972 /* We can't turn -(A-B) into B-A when we honor signed zeros. */
973 return (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
974 && reorder_operands_p (TREE_OPERAND (t, 0),
975 TREE_OPERAND (t, 1));
978 if (TYPE_UNSIGNED (TREE_TYPE (t)))
984 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
985 return negate_expr_p (TREE_OPERAND (t, 1))
986 || negate_expr_p (TREE_OPERAND (t, 0));
994 if (TYPE_UNSIGNED (TREE_TYPE (t)) || flag_wrapv)
996 return negate_expr_p (TREE_OPERAND (t, 1))
997 || negate_expr_p (TREE_OPERAND (t, 0));
1000 /* Negate -((double)float) as (double)(-float). */
1001 if (TREE_CODE (type) == REAL_TYPE)
1003 tree tem = strip_float_extensions (t);
1005 return negate_expr_p (tem);
1010 /* Negate -f(x) as f(-x). */
1011 if (negate_mathfn_p (builtin_mathfn_code (t)))
1012 return negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1)));
1016 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
1017 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
1019 tree op1 = TREE_OPERAND (t, 1);
1020 if (TREE_INT_CST_HIGH (op1) == 0
1021 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
1022 == TREE_INT_CST_LOW (op1))
1033 /* Given T, an expression, return the negation of T. Allow for T to be
1034 null, in which case return null. */
1037 negate_expr (tree t)
1045 type = TREE_TYPE (t);
1046 STRIP_SIGN_NOPS (t);
1048 switch (TREE_CODE (t))
1050 /* Convert - (~A) to A + 1. */
1052 if (INTEGRAL_TYPE_P (type)
1053 && (TYPE_UNSIGNED (type)
1054 || (flag_wrapv && !flag_trapv)))
1055 return fold_build2 (PLUS_EXPR, type, TREE_OPERAND (t, 0),
1056 build_int_cst (type, 1));
1060 tem = fold_negate_const (t, type);
1061 if (! TREE_OVERFLOW (tem)
1062 || TYPE_UNSIGNED (type)
1068 tem = fold_negate_const (t, type);
1069 /* Two's complement FP formats, such as c4x, may overflow. */
1070 if (! TREE_OVERFLOW (tem) || ! flag_trapping_math)
1071 return fold_convert (type, tem);
1076 tree rpart = negate_expr (TREE_REALPART (t));
1077 tree ipart = negate_expr (TREE_IMAGPART (t));
1079 if ((TREE_CODE (rpart) == REAL_CST
1080 && TREE_CODE (ipart) == REAL_CST)
1081 || (TREE_CODE (rpart) == INTEGER_CST
1082 && TREE_CODE (ipart) == INTEGER_CST))
1083 return build_complex (type, rpart, ipart);
1088 return fold_convert (type, TREE_OPERAND (t, 0));
1091 if (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
1093 /* -(A + B) -> (-B) - A. */
1094 if (negate_expr_p (TREE_OPERAND (t, 1))
1095 && reorder_operands_p (TREE_OPERAND (t, 0),
1096 TREE_OPERAND (t, 1)))
1098 tem = negate_expr (TREE_OPERAND (t, 1));
1099 tem = fold_build2 (MINUS_EXPR, TREE_TYPE (t),
1100 tem, TREE_OPERAND (t, 0));
1101 return fold_convert (type, tem);
1104 /* -(A + B) -> (-A) - B. */
1105 if (negate_expr_p (TREE_OPERAND (t, 0)))
1107 tem = negate_expr (TREE_OPERAND (t, 0));
1108 tem = fold_build2 (MINUS_EXPR, TREE_TYPE (t),
1109 tem, TREE_OPERAND (t, 1));
1110 return fold_convert (type, tem);
1116 /* - (A - B) -> B - A */
1117 if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
1118 && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
1119 return fold_convert (type,
1120 fold_build2 (MINUS_EXPR, TREE_TYPE (t),
1121 TREE_OPERAND (t, 1),
1122 TREE_OPERAND (t, 0)));
1126 if (TYPE_UNSIGNED (TREE_TYPE (t)))
1132 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
1134 tem = TREE_OPERAND (t, 1);
1135 if (negate_expr_p (tem))
1136 return fold_convert (type,
1137 fold_build2 (TREE_CODE (t), TREE_TYPE (t),
1138 TREE_OPERAND (t, 0),
1139 negate_expr (tem)));
1140 tem = TREE_OPERAND (t, 0);
1141 if (negate_expr_p (tem))
1142 return fold_convert (type,
1143 fold_build2 (TREE_CODE (t), TREE_TYPE (t),
1145 TREE_OPERAND (t, 1)));
1149 case TRUNC_DIV_EXPR:
1150 case ROUND_DIV_EXPR:
1151 case FLOOR_DIV_EXPR:
1153 case EXACT_DIV_EXPR:
1154 if (!TYPE_UNSIGNED (TREE_TYPE (t)) && !flag_wrapv)
1156 tem = TREE_OPERAND (t, 1);
1157 if (negate_expr_p (tem))
1158 return fold_convert (type,
1159 fold_build2 (TREE_CODE (t), TREE_TYPE (t),
1160 TREE_OPERAND (t, 0),
1161 negate_expr (tem)));
1162 tem = TREE_OPERAND (t, 0);
1163 if (negate_expr_p (tem))
1164 return fold_convert (type,
1165 fold_build2 (TREE_CODE (t), TREE_TYPE (t),
1167 TREE_OPERAND (t, 1)));
1172 /* Convert -((double)float) into (double)(-float). */
1173 if (TREE_CODE (type) == REAL_TYPE)
1175 tem = strip_float_extensions (t);
1176 if (tem != t && negate_expr_p (tem))
1177 return fold_convert (type, negate_expr (tem));
1182 /* Negate -f(x) as f(-x). */
1183 if (negate_mathfn_p (builtin_mathfn_code (t))
1184 && negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1))))
1186 tree fndecl, arg, arglist;
1188 fndecl = get_callee_fndecl (t);
1189 arg = negate_expr (TREE_VALUE (TREE_OPERAND (t, 1)));
1190 arglist = build_tree_list (NULL_TREE, arg);
1191 return build_function_call_expr (fndecl, arglist);
1196 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
1197 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
1199 tree op1 = TREE_OPERAND (t, 1);
1200 if (TREE_INT_CST_HIGH (op1) == 0
1201 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
1202 == TREE_INT_CST_LOW (op1))
1204 tree ntype = TYPE_UNSIGNED (type)
1205 ? lang_hooks.types.signed_type (type)
1206 : lang_hooks.types.unsigned_type (type);
1207 tree temp = fold_convert (ntype, TREE_OPERAND (t, 0));
1208 temp = fold_build2 (RSHIFT_EXPR, ntype, temp, op1);
1209 return fold_convert (type, temp);
1218 tem = fold_build1 (NEGATE_EXPR, TREE_TYPE (t), t);
1219 return fold_convert (type, tem);
1222 /* Split a tree IN into a constant, literal and variable parts that could be
1223 combined with CODE to make IN. "constant" means an expression with
1224 TREE_CONSTANT but that isn't an actual constant. CODE must be a
1225 commutative arithmetic operation. Store the constant part into *CONP,
1226 the literal in *LITP and return the variable part. If a part isn't
1227 present, set it to null. If the tree does not decompose in this way,
1228 return the entire tree as the variable part and the other parts as null.
1230 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR. In that
1231 case, we negate an operand that was subtracted. Except if it is a
1232 literal for which we use *MINUS_LITP instead.
1234 If NEGATE_P is true, we are negating all of IN, again except a literal
1235 for which we use *MINUS_LITP instead.
1237 If IN is itself a literal or constant, return it as appropriate.
1239 Note that we do not guarantee that any of the three values will be the
1240 same type as IN, but they will have the same signedness and mode. */
1243 split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
1244 tree *minus_litp, int negate_p)
1252 /* Strip any conversions that don't change the machine mode or signedness. */
1253 STRIP_SIGN_NOPS (in);
1255 if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST)
1257 else if (TREE_CODE (in) == code
1258 || (! FLOAT_TYPE_P (TREE_TYPE (in))
1259 /* We can associate addition and subtraction together (even
1260 though the C standard doesn't say so) for integers because
1261 the value is not affected. For reals, the value might be
1262 affected, so we can't. */
1263 && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
1264 || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
1266 tree op0 = TREE_OPERAND (in, 0);
1267 tree op1 = TREE_OPERAND (in, 1);
1268 int neg1_p = TREE_CODE (in) == MINUS_EXPR;
1269 int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;
1271 /* First see if either of the operands is a literal, then a constant. */
1272 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
1273 *litp = op0, op0 = 0;
1274 else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST)
1275 *litp = op1, neg_litp_p = neg1_p, op1 = 0;
1277 if (op0 != 0 && TREE_CONSTANT (op0))
1278 *conp = op0, op0 = 0;
1279 else if (op1 != 0 && TREE_CONSTANT (op1))
1280 *conp = op1, neg_conp_p = neg1_p, op1 = 0;
1282 /* If we haven't dealt with either operand, this is not a case we can
1283 decompose. Otherwise, VAR is either of the ones remaining, if any. */
1284 if (op0 != 0 && op1 != 0)
1289 var = op1, neg_var_p = neg1_p;
1291 /* Now do any needed negations. */
1293 *minus_litp = *litp, *litp = 0;
1295 *conp = negate_expr (*conp);
1297 var = negate_expr (var);
1299 else if (TREE_CONSTANT (in))
1307 *minus_litp = *litp, *litp = 0;
1308 else if (*minus_litp)
1309 *litp = *minus_litp, *minus_litp = 0;
1310 *conp = negate_expr (*conp);
1311 var = negate_expr (var);
1317 /* Re-associate trees split by the above function. T1 and T2 are either
1318 expressions to associate or null. Return the new expression, if any. If
1319 we build an operation, do it in TYPE and with CODE. */
1322 associate_trees (tree t1, tree t2, enum tree_code code, tree type)
1329 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
1330 try to fold this since we will have infinite recursion. But do
1331 deal with any NEGATE_EXPRs. */
1332 if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
1333 || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
1335 if (code == PLUS_EXPR)
1337 if (TREE_CODE (t1) == NEGATE_EXPR)
1338 return build2 (MINUS_EXPR, type, fold_convert (type, t2),
1339 fold_convert (type, TREE_OPERAND (t1, 0)));
1340 else if (TREE_CODE (t2) == NEGATE_EXPR)
1341 return build2 (MINUS_EXPR, type, fold_convert (type, t1),
1342 fold_convert (type, TREE_OPERAND (t2, 0)));
1343 else if (integer_zerop (t2))
1344 return fold_convert (type, t1);
1346 else if (code == MINUS_EXPR)
1348 if (integer_zerop (t2))
1349 return fold_convert (type, t1);
1352 return build2 (code, type, fold_convert (type, t1),
1353 fold_convert (type, t2));
1356 return fold_build2 (code, type, fold_convert (type, t1),
1357 fold_convert (type, t2));
1360 /* Combine two integer constants ARG1 and ARG2 under operation CODE
1361 to produce a new constant. Return NULL_TREE if we don't know how
1362 to evaluate CODE at compile-time.
1364 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
1367 int_const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
1369 unsigned HOST_WIDE_INT int1l, int2l;
1370 HOST_WIDE_INT int1h, int2h;
1371 unsigned HOST_WIDE_INT low;
1373 unsigned HOST_WIDE_INT garbagel;
1374 HOST_WIDE_INT garbageh;
1376 tree type = TREE_TYPE (arg1);
1377 int uns = TYPE_UNSIGNED (type);
1379 = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
1382 int1l = TREE_INT_CST_LOW (arg1);
1383 int1h = TREE_INT_CST_HIGH (arg1);
1384 int2l = TREE_INT_CST_LOW (arg2);
1385 int2h = TREE_INT_CST_HIGH (arg2);
1390 low = int1l | int2l, hi = int1h | int2h;
1394 low = int1l ^ int2l, hi = int1h ^ int2h;
1398 low = int1l & int2l, hi = int1h & int2h;
1404 /* It's unclear from the C standard whether shifts can overflow.
1405 The following code ignores overflow; perhaps a C standard
1406 interpretation ruling is needed. */
1407 lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
1414 lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
1419 overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
1423 neg_double (int2l, int2h, &low, &hi);
1424 add_double (int1l, int1h, low, hi, &low, &hi);
1425 overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
1429 overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
1432 case TRUNC_DIV_EXPR:
1433 case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
1434 case EXACT_DIV_EXPR:
1435 /* This is a shortcut for a common special case. */
1436 if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1437 && ! TREE_CONSTANT_OVERFLOW (arg1)
1438 && ! TREE_CONSTANT_OVERFLOW (arg2)
1439 && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1441 if (code == CEIL_DIV_EXPR)
1444 low = int1l / int2l, hi = 0;
1448 /* ... fall through ... */
1450 case ROUND_DIV_EXPR:
1451 if (int2h == 0 && int2l == 0)
1453 if (int2h == 0 && int2l == 1)
1455 low = int1l, hi = int1h;
1458 if (int1l == int2l && int1h == int2h
1459 && ! (int1l == 0 && int1h == 0))
1464 overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
1465 &low, &hi, &garbagel, &garbageh);
1468 case TRUNC_MOD_EXPR:
1469 case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
1470 /* This is a shortcut for a common special case. */
1471 if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1472 && ! TREE_CONSTANT_OVERFLOW (arg1)
1473 && ! TREE_CONSTANT_OVERFLOW (arg2)
1474 && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1476 if (code == CEIL_MOD_EXPR)
1478 low = int1l % int2l, hi = 0;
1482 /* ... fall through ... */
1484 case ROUND_MOD_EXPR:
1485 if (int2h == 0 && int2l == 0)
1487 overflow = div_and_round_double (code, uns,
1488 int1l, int1h, int2l, int2h,
1489 &garbagel, &garbageh, &low, &hi);
1495 low = (((unsigned HOST_WIDE_INT) int1h
1496 < (unsigned HOST_WIDE_INT) int2h)
1497 || (((unsigned HOST_WIDE_INT) int1h
1498 == (unsigned HOST_WIDE_INT) int2h)
1501 low = (int1h < int2h
1502 || (int1h == int2h && int1l < int2l));
1504 if (low == (code == MIN_EXPR))
1505 low = int1l, hi = int1h;
1507 low = int2l, hi = int2h;
1514 t = build_int_cst_wide (TREE_TYPE (arg1), low, hi);
1518 /* Propagate overflow flags ourselves. */
1519 if (((!uns || is_sizetype) && overflow)
1520 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
1523 TREE_OVERFLOW (t) = 1;
1524 TREE_CONSTANT_OVERFLOW (t) = 1;
1526 else if (TREE_CONSTANT_OVERFLOW (arg1) | TREE_CONSTANT_OVERFLOW (arg2))
1529 TREE_CONSTANT_OVERFLOW (t) = 1;
1533 t = force_fit_type (t, 1,
1534 ((!uns || is_sizetype) && overflow)
1535 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2),
1536 TREE_CONSTANT_OVERFLOW (arg1)
1537 | TREE_CONSTANT_OVERFLOW (arg2));
1542 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1543 constant. We assume ARG1 and ARG2 have the same data type, or at least
1544 are the same kind of constant and the same machine mode.
1546 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
1549 const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
1554 if (TREE_CODE (arg1) == INTEGER_CST)
1555 return int_const_binop (code, arg1, arg2, notrunc);
1557 if (TREE_CODE (arg1) == REAL_CST)
1559 enum machine_mode mode;
1562 REAL_VALUE_TYPE value;
1563 REAL_VALUE_TYPE result;
1567 /* The following codes are handled by real_arithmetic. */
1582 d1 = TREE_REAL_CST (arg1);
1583 d2 = TREE_REAL_CST (arg2);
1585 type = TREE_TYPE (arg1);
1586 mode = TYPE_MODE (type);
1588 /* Don't perform operation if we honor signaling NaNs and
1589 either operand is a NaN. */
1590 if (HONOR_SNANS (mode)
1591 && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
1594 /* Don't perform operation if it would raise a division
1595 by zero exception. */
1596 if (code == RDIV_EXPR
1597 && REAL_VALUES_EQUAL (d2, dconst0)
1598 && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
1601 /* If either operand is a NaN, just return it. Otherwise, set up
1602 for floating-point trap; we return an overflow. */
1603 if (REAL_VALUE_ISNAN (d1))
1605 else if (REAL_VALUE_ISNAN (d2))
1608 inexact = real_arithmetic (&value, code, &d1, &d2);
1609 real_convert (&result, mode, &value);
1611 /* Don't constant fold this floating point operation if
1612 the result has overflowed and flag_trapping_math. */
1614 if (flag_trapping_math
1615 && MODE_HAS_INFINITIES (mode)
1616 && REAL_VALUE_ISINF (result)
1617 && !REAL_VALUE_ISINF (d1)
1618 && !REAL_VALUE_ISINF (d2))
1621 /* Don't constant fold this floating point operation if the
1622 result may dependent upon the run-time rounding mode and
1623 flag_rounding_math is set, or if GCC's software emulation
1624 is unable to accurately represent the result. */
1626 if ((flag_rounding_math
1627 || (REAL_MODE_FORMAT_COMPOSITE_P (mode)
1628 && !flag_unsafe_math_optimizations))
1629 && (inexact || !real_identical (&result, &value)))
1632 t = build_real (type, result);
1634 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
1635 TREE_CONSTANT_OVERFLOW (t)
1637 | TREE_CONSTANT_OVERFLOW (arg1)
1638 | TREE_CONSTANT_OVERFLOW (arg2);
1642 if (TREE_CODE (arg1) == COMPLEX_CST)
1644 tree type = TREE_TYPE (arg1);
1645 tree r1 = TREE_REALPART (arg1);
1646 tree i1 = TREE_IMAGPART (arg1);
1647 tree r2 = TREE_REALPART (arg2);
1648 tree i2 = TREE_IMAGPART (arg2);
1654 t = build_complex (type,
1655 const_binop (PLUS_EXPR, r1, r2, notrunc),
1656 const_binop (PLUS_EXPR, i1, i2, notrunc));
1660 t = build_complex (type,
1661 const_binop (MINUS_EXPR, r1, r2, notrunc),
1662 const_binop (MINUS_EXPR, i1, i2, notrunc));
1666 t = build_complex (type,
1667 const_binop (MINUS_EXPR,
1668 const_binop (MULT_EXPR,
1670 const_binop (MULT_EXPR,
1673 const_binop (PLUS_EXPR,
1674 const_binop (MULT_EXPR,
1676 const_binop (MULT_EXPR,
1683 tree t1, t2, real, imag;
1685 = const_binop (PLUS_EXPR,
1686 const_binop (MULT_EXPR, r2, r2, notrunc),
1687 const_binop (MULT_EXPR, i2, i2, notrunc),
1690 t1 = const_binop (PLUS_EXPR,
1691 const_binop (MULT_EXPR, r1, r2, notrunc),
1692 const_binop (MULT_EXPR, i1, i2, notrunc),
1694 t2 = const_binop (MINUS_EXPR,
1695 const_binop (MULT_EXPR, i1, r2, notrunc),
1696 const_binop (MULT_EXPR, r1, i2, notrunc),
1699 if (INTEGRAL_TYPE_P (TREE_TYPE (r1)))
1701 real = const_binop (TRUNC_DIV_EXPR, t1, magsquared, notrunc);
1702 imag = const_binop (TRUNC_DIV_EXPR, t2, magsquared, notrunc);
1706 real = const_binop (RDIV_EXPR, t1, magsquared, notrunc);
1707 imag = const_binop (RDIV_EXPR, t2, magsquared, notrunc);
1712 t = build_complex (type, real, imag);
1724 /* Create a size type INT_CST node with NUMBER sign extended. KIND
1725 indicates which particular sizetype to create. */
1728 size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
1730 return build_int_cst (sizetype_tab[(int) kind], number);
1733 /* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE
1734 is a tree code. The type of the result is taken from the operands.
1735 Both must be the same type integer type and it must be a size type.
1736 If the operands are constant, so is the result. */
1739 size_binop (enum tree_code code, tree arg0, tree arg1)
1741 tree type = TREE_TYPE (arg0);
1743 if (arg0 == error_mark_node || arg1 == error_mark_node)
1744 return error_mark_node;
1746 gcc_assert (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
1747 && type == TREE_TYPE (arg1));
1749 /* Handle the special case of two integer constants faster. */
1750 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
1752 /* And some specific cases even faster than that. */
1753 if (code == PLUS_EXPR && integer_zerop (arg0))
1755 else if ((code == MINUS_EXPR || code == PLUS_EXPR)
1756 && integer_zerop (arg1))
1758 else if (code == MULT_EXPR && integer_onep (arg0))
1761 /* Handle general case of two integer constants. */
1762 return int_const_binop (code, arg0, arg1, 0);
1765 return fold_build2 (code, type, arg0, arg1);
1768 /* Given two values, either both of sizetype or both of bitsizetype,
1769 compute the difference between the two values. Return the value
1770 in signed type corresponding to the type of the operands. */
1773 size_diffop (tree arg0, tree arg1)
1775 tree type = TREE_TYPE (arg0);
1778 gcc_assert (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
1779 && type == TREE_TYPE (arg1));
1781 /* If the type is already signed, just do the simple thing. */
1782 if (!TYPE_UNSIGNED (type))
1783 return size_binop (MINUS_EXPR, arg0, arg1);
1785 ctype = type == bitsizetype ? sbitsizetype : ssizetype;
1787 /* If either operand is not a constant, do the conversions to the signed
1788 type and subtract. The hardware will do the right thing with any
1789 overflow in the subtraction. */
1790 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
1791 return size_binop (MINUS_EXPR, fold_convert (ctype, arg0),
1792 fold_convert (ctype, arg1));
1794 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
1795 Otherwise, subtract the other way, convert to CTYPE (we know that can't
1796 overflow) and negate (which can't either). Special-case a result
1797 of zero while we're here. */
1798 if (tree_int_cst_equal (arg0, arg1))
1799 return build_int_cst (ctype, 0);
1800 else if (tree_int_cst_lt (arg1, arg0))
1801 return fold_convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
1803 return size_binop (MINUS_EXPR, build_int_cst (ctype, 0),
1804 fold_convert (ctype, size_binop (MINUS_EXPR,
1808 /* A subroutine of fold_convert_const handling conversions of an
1809 INTEGER_CST to another integer type. */
1812 fold_convert_const_int_from_int (tree type, tree arg1)
1816 /* Given an integer constant, make new constant with new type,
1817 appropriately sign-extended or truncated. */
1818 t = build_int_cst_wide (type, TREE_INT_CST_LOW (arg1),
1819 TREE_INT_CST_HIGH (arg1));
1821 t = force_fit_type (t,
1822 /* Don't set the overflow when
1823 converting a pointer */
1824 !POINTER_TYPE_P (TREE_TYPE (arg1)),
1825 (TREE_INT_CST_HIGH (arg1) < 0
1826 && (TYPE_UNSIGNED (type)
1827 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
1828 | TREE_OVERFLOW (arg1),
1829 TREE_CONSTANT_OVERFLOW (arg1));
1834 /* A subroutine of fold_convert_const handling conversions a REAL_CST
1835 to an integer type. */
1838 fold_convert_const_int_from_real (enum tree_code code, tree type, tree arg1)
1843 /* The following code implements the floating point to integer
1844 conversion rules required by the Java Language Specification,
1845 that IEEE NaNs are mapped to zero and values that overflow
1846 the target precision saturate, i.e. values greater than
1847 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
1848 are mapped to INT_MIN. These semantics are allowed by the
1849 C and C++ standards that simply state that the behavior of
1850 FP-to-integer conversion is unspecified upon overflow. */
1852 HOST_WIDE_INT high, low;
1854 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
1858 case FIX_TRUNC_EXPR:
1859 real_trunc (&r, VOIDmode, &x);
1863 real_ceil (&r, VOIDmode, &x);
1866 case FIX_FLOOR_EXPR:
1867 real_floor (&r, VOIDmode, &x);
1870 case FIX_ROUND_EXPR:
1871 real_round (&r, VOIDmode, &x);
1878 /* If R is NaN, return zero and show we have an overflow. */
1879 if (REAL_VALUE_ISNAN (r))
1886 /* See if R is less than the lower bound or greater than the
1891 tree lt = TYPE_MIN_VALUE (type);
1892 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
1893 if (REAL_VALUES_LESS (r, l))
1896 high = TREE_INT_CST_HIGH (lt);
1897 low = TREE_INT_CST_LOW (lt);
1903 tree ut = TYPE_MAX_VALUE (type);
1906 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
1907 if (REAL_VALUES_LESS (u, r))
1910 high = TREE_INT_CST_HIGH (ut);
1911 low = TREE_INT_CST_LOW (ut);
1917 REAL_VALUE_TO_INT (&low, &high, r);
1919 t = build_int_cst_wide (type, low, high);
1921 t = force_fit_type (t, -1, overflow | TREE_OVERFLOW (arg1),
1922 TREE_CONSTANT_OVERFLOW (arg1));
1926 /* A subroutine of fold_convert_const handling conversions a REAL_CST
1927 to another floating point type. */
1930 fold_convert_const_real_from_real (tree type, tree arg1)
1932 REAL_VALUE_TYPE value;
1935 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
1936 t = build_real (type, value);
1938 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
1939 TREE_CONSTANT_OVERFLOW (t)
1940 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
1944 /* Attempt to fold type conversion operation CODE of expression ARG1 to
1945 type TYPE. If no simplification can be done return NULL_TREE. */
1948 fold_convert_const (enum tree_code code, tree type, tree arg1)
1950 if (TREE_TYPE (arg1) == type)
1953 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
1955 if (TREE_CODE (arg1) == INTEGER_CST)
1956 return fold_convert_const_int_from_int (type, arg1);
1957 else if (TREE_CODE (arg1) == REAL_CST)
1958 return fold_convert_const_int_from_real (code, type, arg1);
1960 else if (TREE_CODE (type) == REAL_TYPE)
1962 if (TREE_CODE (arg1) == INTEGER_CST)
1963 return build_real_from_int_cst (type, arg1);
1964 if (TREE_CODE (arg1) == REAL_CST)
1965 return fold_convert_const_real_from_real (type, arg1);
1970 /* Construct a vector of zero elements of vector type TYPE. */
1973 build_zero_vector (tree type)
1978 elem = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
1979 units = TYPE_VECTOR_SUBPARTS (type);
1982 for (i = 0; i < units; i++)
1983 list = tree_cons (NULL_TREE, elem, list);
1984 return build_vector (type, list);
1987 /* Convert expression ARG to type TYPE. Used by the middle-end for
1988 simple conversions in preference to calling the front-end's convert. */
1991 fold_convert (tree type, tree arg)
1993 tree orig = TREE_TYPE (arg);
1999 if (TREE_CODE (arg) == ERROR_MARK
2000 || TREE_CODE (type) == ERROR_MARK
2001 || TREE_CODE (orig) == ERROR_MARK)
2002 return error_mark_node;
2004 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig)
2005 || lang_hooks.types_compatible_p (TYPE_MAIN_VARIANT (type),
2006 TYPE_MAIN_VARIANT (orig)))
2007 return fold_build1 (NOP_EXPR, type, arg);
2009 switch (TREE_CODE (type))
2011 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2012 case POINTER_TYPE: case REFERENCE_TYPE:
2014 if (TREE_CODE (arg) == INTEGER_CST)
2016 tem = fold_convert_const (NOP_EXPR, type, arg);
2017 if (tem != NULL_TREE)
2020 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2021 || TREE_CODE (orig) == OFFSET_TYPE)
2022 return fold_build1 (NOP_EXPR, type, arg);
2023 if (TREE_CODE (orig) == COMPLEX_TYPE)
2025 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2026 return fold_convert (type, tem);
2028 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
2029 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2030 return fold_build1 (NOP_EXPR, type, arg);
2033 if (TREE_CODE (arg) == INTEGER_CST)
2035 tem = fold_convert_const (FLOAT_EXPR, type, arg);
2036 if (tem != NULL_TREE)
2039 else if (TREE_CODE (arg) == REAL_CST)
2041 tem = fold_convert_const (NOP_EXPR, type, arg);
2042 if (tem != NULL_TREE)
2046 switch (TREE_CODE (orig))
2049 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2050 case POINTER_TYPE: case REFERENCE_TYPE:
2051 return fold_build1 (FLOAT_EXPR, type, arg);
2054 return fold_build1 (NOP_EXPR, type, arg);
2057 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2058 return fold_convert (type, tem);
2065 switch (TREE_CODE (orig))
2068 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2069 case POINTER_TYPE: case REFERENCE_TYPE:
2071 return build2 (COMPLEX_EXPR, type,
2072 fold_convert (TREE_TYPE (type), arg),
2073 fold_convert (TREE_TYPE (type), integer_zero_node));
2078 if (TREE_CODE (arg) == COMPLEX_EXPR)
2080 rpart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 0));
2081 ipart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 1));
2082 return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
2085 arg = save_expr (arg);
2086 rpart = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2087 ipart = fold_build1 (IMAGPART_EXPR, TREE_TYPE (orig), arg);
2088 rpart = fold_convert (TREE_TYPE (type), rpart);
2089 ipart = fold_convert (TREE_TYPE (type), ipart);
2090 return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
2098 if (integer_zerop (arg))
2099 return build_zero_vector (type);
2100 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2101 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2102 || TREE_CODE (orig) == VECTOR_TYPE);
2103 return fold_build1 (VIEW_CONVERT_EXPR, type, arg);
2106 return fold_build1 (NOP_EXPR, type, fold_ignored_result (arg));
2113 /* Return false if expr can be assumed not to be an lvalue, true
2117 maybe_lvalue_p (tree x)
2119 /* We only need to wrap lvalue tree codes. */
2120 switch (TREE_CODE (x))
2131 case ALIGN_INDIRECT_REF:
2132 case MISALIGNED_INDIRECT_REF:
2134 case ARRAY_RANGE_REF:
2140 case PREINCREMENT_EXPR:
2141 case PREDECREMENT_EXPR:
2143 case TRY_CATCH_EXPR:
2144 case WITH_CLEANUP_EXPR:
2155 /* Assume the worst for front-end tree codes. */
2156 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2164 /* Return an expr equal to X but certainly not valid as an lvalue. */
2169 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2174 if (! maybe_lvalue_p (x))
2176 return build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
2179 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2180 Zero means allow extended lvalues. */
2182 int pedantic_lvalues;
2184 /* When pedantic, return an expr equal to X but certainly not valid as a
2185 pedantic lvalue. Otherwise, return X. */
2188 pedantic_non_lvalue (tree x)
2190 if (pedantic_lvalues)
2191 return non_lvalue (x);
2196 /* Given a tree comparison code, return the code that is the logical inverse
2197 of the given code. It is not safe to do this for floating-point
2198 comparisons, except for NE_EXPR and EQ_EXPR, so we receive a machine mode
2199 as well: if reversing the comparison is unsafe, return ERROR_MARK. */
2202 invert_tree_comparison (enum tree_code code, bool honor_nans)
2204 if (honor_nans && flag_trapping_math)
2214 return honor_nans ? UNLE_EXPR : LE_EXPR;
2216 return honor_nans ? UNLT_EXPR : LT_EXPR;
2218 return honor_nans ? UNGE_EXPR : GE_EXPR;
2220 return honor_nans ? UNGT_EXPR : GT_EXPR;
2234 return UNORDERED_EXPR;
2235 case UNORDERED_EXPR:
2236 return ORDERED_EXPR;
2242 /* Similar, but return the comparison that results if the operands are
2243 swapped. This is safe for floating-point. */
2246 swap_tree_comparison (enum tree_code code)
2253 case UNORDERED_EXPR:
2279 /* Convert a comparison tree code from an enum tree_code representation
2280 into a compcode bit-based encoding. This function is the inverse of
2281 compcode_to_comparison. */
2283 static enum comparison_code
2284 comparison_to_compcode (enum tree_code code)
2301 return COMPCODE_ORD;
2302 case UNORDERED_EXPR:
2303 return COMPCODE_UNORD;
2305 return COMPCODE_UNLT;
2307 return COMPCODE_UNEQ;
2309 return COMPCODE_UNLE;
2311 return COMPCODE_UNGT;
2313 return COMPCODE_LTGT;
2315 return COMPCODE_UNGE;
2321 /* Convert a compcode bit-based encoding of a comparison operator back
2322 to GCC's enum tree_code representation. This function is the
2323 inverse of comparison_to_compcode. */
2325 static enum tree_code
2326 compcode_to_comparison (enum comparison_code code)
2343 return ORDERED_EXPR;
2344 case COMPCODE_UNORD:
2345 return UNORDERED_EXPR;
2363 /* Return a tree for the comparison which is the combination of
2364 doing the AND or OR (depending on CODE) of the two operations LCODE
2365 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2366 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2367 if this makes the transformation invalid. */
2370 combine_comparisons (enum tree_code code, enum tree_code lcode,
2371 enum tree_code rcode, tree truth_type,
2372 tree ll_arg, tree lr_arg)
2374 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2375 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2376 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2377 enum comparison_code compcode;
2381 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2382 compcode = lcompcode & rcompcode;
2385 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2386 compcode = lcompcode | rcompcode;
2395 /* Eliminate unordered comparisons, as well as LTGT and ORD
2396 which are not used unless the mode has NaNs. */
2397 compcode &= ~COMPCODE_UNORD;
2398 if (compcode == COMPCODE_LTGT)
2399 compcode = COMPCODE_NE;
2400 else if (compcode == COMPCODE_ORD)
2401 compcode = COMPCODE_TRUE;
2403 else if (flag_trapping_math)
2405 /* Check that the original operation and the optimized ones will trap
2406 under the same condition. */
2407 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2408 && (lcompcode != COMPCODE_EQ)
2409 && (lcompcode != COMPCODE_ORD);
2410 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2411 && (rcompcode != COMPCODE_EQ)
2412 && (rcompcode != COMPCODE_ORD);
2413 bool trap = (compcode & COMPCODE_UNORD) == 0
2414 && (compcode != COMPCODE_EQ)
2415 && (compcode != COMPCODE_ORD);
2417 /* In a short-circuited boolean expression the LHS might be
2418 such that the RHS, if evaluated, will never trap. For
2419 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2420 if neither x nor y is NaN. (This is a mixed blessing: for
2421 example, the expression above will never trap, hence
2422 optimizing it to x < y would be invalid). */
2423 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2424 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2427 /* If the comparison was short-circuited, and only the RHS
2428 trapped, we may now generate a spurious trap. */
2430 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2433 /* If we changed the conditions that cause a trap, we lose. */
2434 if ((ltrap || rtrap) != trap)
2438 if (compcode == COMPCODE_TRUE)
2439 return constant_boolean_node (true, truth_type);
2440 else if (compcode == COMPCODE_FALSE)
2441 return constant_boolean_node (false, truth_type);
2443 return fold_build2 (compcode_to_comparison (compcode),
2444 truth_type, ll_arg, lr_arg);
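/* Worked example: for "a < b && a == b" the bitwise AND of COMPCODE_LT
   and COMPCODE_EQ is empty, so the expression folds to false; for
   "a < b || a == b" the OR yields COMPCODE_LE and the pair collapses
   to the single test "a <= b", provided the NaN and trapping checks
   above permit the transformation.  */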
2447 /* Return nonzero if CODE is a tree code that represents a truth value. */
2450 truth_value_p (enum tree_code code)
2452 return (TREE_CODE_CLASS (code) == tcc_comparison
2453 || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
2454 || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
2455 || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
2458 /* Return nonzero if two operands (typically of the same tree node)
2459 are necessarily equal. If either argument has side-effects this
2460 function returns zero. FLAGS modifies behavior as follows:
2462 If OEP_ONLY_CONST is set, only return nonzero for constants.
2463 This function tests whether the operands are indistinguishable;
2464 it does not test whether they are equal using C's == operation.
2465 The distinction is important for IEEE floating point, because
2466 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2467 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2469 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2470 even though it may hold multiple values during a function.
2471 This is because a GCC tree node guarantees that nothing else is
2472 executed between the evaluation of its "operands" (which may often
2473 be evaluated in arbitrary order). Hence if the operands themselves
2474 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2475 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2476 unset means assuming isochronic (or instantaneous) tree equivalence.
2477 Unless comparing arbitrary expression trees, such as from different
2478 statements, this flag can usually be left unset.
2480 If OEP_PURE_SAME is set, then pure functions with identical arguments
2481 are considered the same. It is used when the caller has other ways
2482 to ensure that global memory is unchanged in between. */
2485 operand_equal_p (tree arg0, tree arg1, unsigned int flags)
2487 /* If either is ERROR_MARK, they aren't equal. */
2488 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK)
2491 /* If both types don't have the same signedness, then we can't consider
2492 them equal. We must check this before the STRIP_NOPS calls
2493 because they may change the signedness of the arguments. */
2494 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2500 /* In case both args are comparisons but with different comparison
2501 code, try to swap the comparison operands of one arg to produce
2502 a match and compare that variant. */
2503 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2504 && COMPARISON_CLASS_P (arg0)
2505 && COMPARISON_CLASS_P (arg1))
2507 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2509 if (TREE_CODE (arg0) == swap_code)
2510 return operand_equal_p (TREE_OPERAND (arg0, 0),
2511 TREE_OPERAND (arg1, 1), flags)
2512 && operand_equal_p (TREE_OPERAND (arg0, 1),
2513 TREE_OPERAND (arg1, 0), flags);
2516 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2517 /* This is needed for conversions and for COMPONENT_REF.
2518 Might as well play it safe and always test this. */
2519 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2520 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2521 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2524 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2525 We don't care about side effects in that case because the SAVE_EXPR
2526 takes care of that for us. In all other cases, two expressions are
2527 equal if they have no side effects. If we have two identical
2528 expressions with side effects that should be treated the same due
2529 to the only side effects being identical SAVE_EXPR's, that will
2530 be detected in the recursive calls below. */
2531 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2532 && (TREE_CODE (arg0) == SAVE_EXPR
2533 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2536 /* Next handle constant cases, those for which we can return 1 even
2537 if ONLY_CONST is set. */
2538 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2539 switch (TREE_CODE (arg0))
2542 return (! TREE_CONSTANT_OVERFLOW (arg0)
2543 && ! TREE_CONSTANT_OVERFLOW (arg1)
2544 && tree_int_cst_equal (arg0, arg1));
2547 return (! TREE_CONSTANT_OVERFLOW (arg0)
2548 && ! TREE_CONSTANT_OVERFLOW (arg1)
2549 && REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2550 TREE_REAL_CST (arg1)));
2556 if (TREE_CONSTANT_OVERFLOW (arg0)
2557 || TREE_CONSTANT_OVERFLOW (arg1))
2560 v1 = TREE_VECTOR_CST_ELTS (arg0);
2561 v2 = TREE_VECTOR_CST_ELTS (arg1);
2564 if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
2567 v1 = TREE_CHAIN (v1);
2568 v2 = TREE_CHAIN (v2);
2575 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2577 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2581 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2582 && ! memcmp (TREE_STRING_POINTER (arg0),
2583 TREE_STRING_POINTER (arg1),
2584 TREE_STRING_LENGTH (arg0)));
2587 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2593 if (flags & OEP_ONLY_CONST)
2596 /* Define macros to test an operand from arg0 and arg1 for equality and a
2597 variant that allows null and views null as being different from any
2598 non-null value. In the latter case, if either is null, then both
2599 must be; otherwise, do the normal comparison. */
2600 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2601 TREE_OPERAND (arg1, N), flags)
2603 #define OP_SAME_WITH_NULL(N) \
2604 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2605 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2607 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2610 /* Two conversions are equal only if signedness and modes match. */
2611 switch (TREE_CODE (arg0))
2616 case FIX_TRUNC_EXPR:
2617 case FIX_FLOOR_EXPR:
2618 case FIX_ROUND_EXPR:
2619 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2620 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2630 case tcc_comparison:
2632 if (OP_SAME (0) && OP_SAME (1))
2635 /* For commutative ops, allow the other order. */
2636 return (commutative_tree_code (TREE_CODE (arg0))
2637 && operand_equal_p (TREE_OPERAND (arg0, 0),
2638 TREE_OPERAND (arg1, 1), flags)
2639 && operand_equal_p (TREE_OPERAND (arg0, 1),
2640 TREE_OPERAND (arg1, 0), flags));
2643 /* If either of the pointer (or reference) expressions we are
2644 dereferencing contain a side effect, these cannot be equal. */
2645 if (TREE_SIDE_EFFECTS (arg0)
2646 || TREE_SIDE_EFFECTS (arg1))
2649 switch (TREE_CODE (arg0))
2652 case ALIGN_INDIRECT_REF:
2653 case MISALIGNED_INDIRECT_REF:
2659 case ARRAY_RANGE_REF:
2660 /* Operands 2 and 3 may be null. */
2663 && OP_SAME_WITH_NULL (2)
2664 && OP_SAME_WITH_NULL (3));
2667 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
2668 may be NULL when we're called to compare MEM_EXPRs. */
2669 return OP_SAME_WITH_NULL (0)
2671 && OP_SAME_WITH_NULL (2);
2674 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2680 case tcc_expression:
2681 switch (TREE_CODE (arg0))
2684 case TRUTH_NOT_EXPR:
2687 case TRUTH_ANDIF_EXPR:
2688 case TRUTH_ORIF_EXPR:
2689 return OP_SAME (0) && OP_SAME (1);
2691 case TRUTH_AND_EXPR:
2693 case TRUTH_XOR_EXPR:
2694 if (OP_SAME (0) && OP_SAME (1))
2697 /* Otherwise take into account this is a commutative operation. */
2698 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2699 TREE_OPERAND (arg1, 1), flags)
2700 && operand_equal_p (TREE_OPERAND (arg0, 1),
2701 TREE_OPERAND (arg1, 0), flags));
2704 /* If the CALL_EXPRs call different functions, then they
2705 clearly can not be equal. */
2710 unsigned int cef = call_expr_flags (arg0);
2711 if (flags & OEP_PURE_SAME)
2712 cef &= ECF_CONST | ECF_PURE;
2719 /* Now see if all the arguments are the same. operand_equal_p
2720 does not handle TREE_LIST, so we walk the operands here
2721 feeding them to operand_equal_p. */
2722 arg0 = TREE_OPERAND (arg0, 1);
2723 arg1 = TREE_OPERAND (arg1, 1);
2724 while (arg0 && arg1)
2726 if (! operand_equal_p (TREE_VALUE (arg0), TREE_VALUE (arg1),
2730 arg0 = TREE_CHAIN (arg0);
2731 arg1 = TREE_CHAIN (arg1);
2734 /* If we get here and both argument lists are exhausted
2735 then the CALL_EXPRs are equal. */
2736 return ! (arg0 || arg1);
2742 case tcc_declaration:
2743 /* Consider __builtin_sqrt equal to sqrt. */
2744 return (TREE_CODE (arg0) == FUNCTION_DECL
2745 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2746 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2747 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
2754 #undef OP_SAME_WITH_NULL
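/* Worked example: the REAL_CSTs -0.0 and 0.0 are not operand-equal,
   because REAL_VALUES_IDENTICAL distinguishes the sign of zero, even
   though the C expression -0.0 == 0.0 is true at run time.  */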
2757 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2758 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2760 When in doubt, return 0. */
2763 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2765 int unsignedp1, unsignedpo;
2766 tree primarg0, primarg1, primother;
2767 unsigned int correct_width;
2769 if (operand_equal_p (arg0, arg1, 0))
2772 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2773 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2776 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2777 and see if the inner values are the same. This removes any
2778 signedness comparison, which doesn't matter here. */
2779 primarg0 = arg0, primarg1 = arg1;
2780 STRIP_NOPS (primarg0);
2781 STRIP_NOPS (primarg1);
2782 if (operand_equal_p (primarg0, primarg1, 0))
2785 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2786 actual comparison operand, ARG0.
2788 First throw away any conversions to wider types
2789 already present in the operands. */
2791 primarg1 = get_narrower (arg1, &unsignedp1);
2792 primother = get_narrower (other, &unsignedpo);
2794 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2795 if (unsignedp1 == unsignedpo
2796 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2797 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2799 tree type = TREE_TYPE (arg0);
2801 /* Make sure shorter operand is extended the right way
2802 to match the longer operand. */
2803 primarg1 = fold_convert (lang_hooks.types.signed_or_unsigned_type
2804 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2806 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
2813 /* See if ARG is an expression that is either a comparison or is performing
2814 arithmetic on comparisons. The comparisons must only be comparing
2815 two different values, which will be stored in *CVAL1 and *CVAL2; if
2816 they are nonzero it means that some operands have already been found.
2817 No variables may be used anywhere else in the expression except in the
2818 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2819 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2821 If this is true, return 1. Otherwise, return zero. */
2824 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2826 enum tree_code code = TREE_CODE (arg);
2827 enum tree_code_class class = TREE_CODE_CLASS (code);
2829 /* We can handle some of the tcc_expression cases here. */
2830 if (class == tcc_expression && code == TRUTH_NOT_EXPR)
2832 else if (class == tcc_expression
2833 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2834 || code == COMPOUND_EXPR))
2837 else if (class == tcc_expression && code == SAVE_EXPR
2838 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2840 /* If we've already found a CVAL1 or CVAL2, this expression is
2841 too complex to handle. */
2842 if (*cval1 || *cval2)
2852 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2855 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2856 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2857 cval1, cval2, save_p));
2862 case tcc_expression:
2863 if (code == COND_EXPR)
2864 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2865 cval1, cval2, save_p)
2866 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2867 cval1, cval2, save_p)
2868 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2869 cval1, cval2, save_p));
2872 case tcc_comparison:
2873 /* First see if we can handle the first operand, then the second. For
2874 the second operand, we know *CVAL1 can't be zero. It must be that
2875 one side of the comparison is each of the values; test for the
2876 case where this isn't true by failing if the two operands
2877 are the same. */
2879 if (operand_equal_p (TREE_OPERAND (arg, 0),
2880 TREE_OPERAND (arg, 1), 0))
2884 *cval1 = TREE_OPERAND (arg, 0);
2885 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2887 else if (*cval2 == 0)
2888 *cval2 = TREE_OPERAND (arg, 0);
2889 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2894 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2896 else if (*cval2 == 0)
2897 *cval2 = TREE_OPERAND (arg, 1);
2898 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
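/* Worked example: for ARG = (a < b) | (a == b) the walk above records
   *CVAL1 = a and *CVAL2 = b; every comparison in the tree mentions
   only those two values, so twoval_comparison_p returns 1.  */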
2910 /* ARG is a tree that is known to contain just arithmetic operations and
2911 comparisons. Evaluate the operations in the tree substituting NEW0 for
2912 any occurrence of OLD0 as an operand of a comparison and likewise for
2913 NEW1 and OLD1. */
2916 eval_subst (tree arg, tree old0, tree new0, tree old1, tree new1)
2918 tree type = TREE_TYPE (arg);
2919 enum tree_code code = TREE_CODE (arg);
2920 enum tree_code_class class = TREE_CODE_CLASS (code);
2922 /* We can handle some of the tcc_expression cases here. */
2923 if (class == tcc_expression && code == TRUTH_NOT_EXPR)
2925 else if (class == tcc_expression
2926 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2932 return fold_build1 (code, type,
2933 eval_subst (TREE_OPERAND (arg, 0),
2934 old0, new0, old1, new1));
2937 return fold_build2 (code, type,
2938 eval_subst (TREE_OPERAND (arg, 0),
2939 old0, new0, old1, new1),
2940 eval_subst (TREE_OPERAND (arg, 1),
2941 old0, new0, old1, new1));
2943 case tcc_expression:
2947 return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);
2950 return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);
2953 return fold_build3 (code, type,
2954 eval_subst (TREE_OPERAND (arg, 0),
2955 old0, new0, old1, new1),
2956 eval_subst (TREE_OPERAND (arg, 1),
2957 old0, new0, old1, new1),
2958 eval_subst (TREE_OPERAND (arg, 2),
2959 old0, new0, old1, new1));
2963 /* Fall through - ??? */
2965 case tcc_comparison:
2967 tree arg0 = TREE_OPERAND (arg, 0);
2968 tree arg1 = TREE_OPERAND (arg, 1);
2970 /* We need to check both for exact equality and tree equality. The
2971 former will be true if the operand has a side-effect. In that
2972 case, we know the operand occurred exactly once. */
2974 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
2976 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
2979 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
2981 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
2984 return fold_build2 (code, type, arg0, arg1);
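/* Worked example: continuing the case above, eval_subst on
   (a < b) | (a == b) with OLD0 = a, NEW0 = 0, OLD1 = b, NEW1 = 1
   rebuilds the tree as (0 < 1) | (0 == 1), which the fold_build2
   calls reduce to a constant.  */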
2992 /* Return a tree for the case when the result of an expression is RESULT
2993 converted to TYPE and OMITTED was previously an operand of the expression
2994 but is now not needed (e.g., we folded OMITTED * 0).
2996 If OMITTED has side effects, we must evaluate it. Otherwise, just do
2997 the conversion of RESULT to TYPE. */
3000 omit_one_operand (tree type, tree result, tree omitted)
3002 tree t = fold_convert (type, result);
3004 if (TREE_SIDE_EFFECTS (omitted))
3005 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
3007 return non_lvalue (t);
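/* Worked example: folding "f () * 0" can call
   omit_one_operand (type, integer_zero_node, f ()); because the call
   has side effects, the result is the COMPOUND_EXPR "(f (), 0)"
   rather than plain 0, so the call is still evaluated.  */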
3010 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
3013 pedantic_omit_one_operand (tree type, tree result, tree omitted)
3015 tree t = fold_convert (type, result);
3017 if (TREE_SIDE_EFFECTS (omitted))
3018 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
3020 return pedantic_non_lvalue (t);
3023 /* Return a tree for the case when the result of an expression is RESULT
3024 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3025 of the expression but are now not needed.
3027 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3028 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3029 evaluated before OMITTED2. Otherwise, if neither has side effects,
3030 just do the conversion of RESULT to TYPE. */
3033 omit_two_operands (tree type, tree result, tree omitted1, tree omitted2)
3035 tree t = fold_convert (type, result);
3037 if (TREE_SIDE_EFFECTS (omitted2))
3038 t = build2 (COMPOUND_EXPR, type, omitted2, t);
3039 if (TREE_SIDE_EFFECTS (omitted1))
3040 t = build2 (COMPOUND_EXPR, type, omitted1, t);
3042 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue (t) : t;
3046 /* Return a simplified tree node for the truth-negation of ARG. This
3047 never alters ARG itself. We assume that ARG is an operation that
3048 returns a truth value (0 or 1).
3050 FIXME: one would think we would fold the result, but it causes
3051 problems with the dominator optimizer. */
3053 invert_truthvalue (tree arg)
3055 tree type = TREE_TYPE (arg);
3056 enum tree_code code = TREE_CODE (arg);
3058 if (code == ERROR_MARK)
3061 /* If this is a comparison, we can simply invert it, except for
3062 floating-point non-equality comparisons, in which case we just
3063 enclose a TRUTH_NOT_EXPR around what we have. */
3065 if (TREE_CODE_CLASS (code) == tcc_comparison)
3067 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3068 if (FLOAT_TYPE_P (op_type)
3069 && flag_trapping_math
3070 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3071 && code != NE_EXPR && code != EQ_EXPR)
3072 return build1 (TRUTH_NOT_EXPR, type, arg);
3075 code = invert_tree_comparison (code,
3076 HONOR_NANS (TYPE_MODE (op_type)));
3077 if (code == ERROR_MARK)
3078 return build1 (TRUTH_NOT_EXPR, type, arg);
3080 return build2 (code, type,
3081 TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
3088 return constant_boolean_node (integer_zerop (arg), type);
3090 case TRUTH_AND_EXPR:
3091 return build2 (TRUTH_OR_EXPR, type,
3092 invert_truthvalue (TREE_OPERAND (arg, 0)),
3093 invert_truthvalue (TREE_OPERAND (arg, 1)));
3096 return build2 (TRUTH_AND_EXPR, type,
3097 invert_truthvalue (TREE_OPERAND (arg, 0)),
3098 invert_truthvalue (TREE_OPERAND (arg, 1)));
3100 case TRUTH_XOR_EXPR:
3101 /* Here we can invert either operand. We invert the first operand
3102 unless the second operand is a TRUTH_NOT_EXPR in which case our
3103 result is the XOR of the first operand with the inside of the
3104 negation of the second operand. */
3106 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3107 return build2 (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3108 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3110 return build2 (TRUTH_XOR_EXPR, type,
3111 invert_truthvalue (TREE_OPERAND (arg, 0)),
3112 TREE_OPERAND (arg, 1));
3114 case TRUTH_ANDIF_EXPR:
3115 return build2 (TRUTH_ORIF_EXPR, type,
3116 invert_truthvalue (TREE_OPERAND (arg, 0)),
3117 invert_truthvalue (TREE_OPERAND (arg, 1)));
3119 case TRUTH_ORIF_EXPR:
3120 return build2 (TRUTH_ANDIF_EXPR, type,
3121 invert_truthvalue (TREE_OPERAND (arg, 0)),
3122 invert_truthvalue (TREE_OPERAND (arg, 1)));
3124 case TRUTH_NOT_EXPR:
3125 return TREE_OPERAND (arg, 0);
3129 tree arg1 = TREE_OPERAND (arg, 1);
3130 tree arg2 = TREE_OPERAND (arg, 2);
3131 /* A COND_EXPR may have a throw as one operand, which
3132 then has void type. Just leave void operands
3133 as they are. */
3134 return build3 (COND_EXPR, type, TREE_OPERAND (arg, 0),
3135 VOID_TYPE_P (TREE_TYPE (arg1))
3136 ? arg1 : invert_truthvalue (arg1),
3137 VOID_TYPE_P (TREE_TYPE (arg2))
3138 ? arg2 : invert_truthvalue (arg2));
3142 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
3143 invert_truthvalue (TREE_OPERAND (arg, 1)));
3145 case NON_LVALUE_EXPR:
3146 return invert_truthvalue (TREE_OPERAND (arg, 0));
3149 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3154 return build1 (TREE_CODE (arg), type,
3155 invert_truthvalue (TREE_OPERAND (arg, 0)));
3158 if (!integer_onep (TREE_OPERAND (arg, 1)))
3160 return build2 (EQ_EXPR, type, arg,
3161 build_int_cst (type, 0));
3164 return build1 (TRUTH_NOT_EXPR, type, arg);
3166 case CLEANUP_POINT_EXPR:
3167 return build1 (CLEANUP_POINT_EXPR, type,
3168 invert_truthvalue (TREE_OPERAND (arg, 0)));
3173 gcc_assert (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE);
3174 return build1 (TRUTH_NOT_EXPR, type, arg);
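/* Worked examples: the TRUTH_AND/TRUTH_OR cases above implement De
   Morgan's laws, e.g. !(a && b) becomes !a || !b; a comparison is
   inverted directly, e.g. !(a < b) becomes a >= b, except for
   floating point under -ftrapping-math, where the safe fallback is a
   TRUTH_NOT_EXPR wrapper.  */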
3177 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3178 operands are another bit-wise operation with a common input. If so,
3179 distribute the bit operations to save an operation and possibly two if
3180 constants are involved. For example, convert
3181 (A | B) & (A | C) into A | (B & C)
3182 Further simplification will occur if B and C are constants.
3184 If this optimization cannot be done, 0 will be returned. */
3187 distribute_bit_expr (enum tree_code code, tree type, tree arg0, tree arg1)
3192 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3193 || TREE_CODE (arg0) == code
3194 || (TREE_CODE (arg0) != BIT_AND_EXPR
3195 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3198 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3200 common = TREE_OPERAND (arg0, 0);
3201 left = TREE_OPERAND (arg0, 1);
3202 right = TREE_OPERAND (arg1, 1);
3204 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3206 common = TREE_OPERAND (arg0, 0);
3207 left = TREE_OPERAND (arg0, 1);
3208 right = TREE_OPERAND (arg1, 0);
3210 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3212 common = TREE_OPERAND (arg0, 1);
3213 left = TREE_OPERAND (arg0, 0);
3214 right = TREE_OPERAND (arg1, 1);
3216 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3218 common = TREE_OPERAND (arg0, 1);
3219 left = TREE_OPERAND (arg0, 0);
3220 right = TREE_OPERAND (arg1, 0);
3225 return fold_build2 (TREE_CODE (arg0), type, common,
3226 fold_build2 (code, type, left, right));
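/* Worked example: (x | 0xf0) & (x | 0x0f) distributes to
   x | (0xf0 & 0x0f); the inner fold_build2 reduces the constant part
   to 0 and the whole expression simplifies to x.  */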
3229 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3230 with code CODE. This optimization is unsafe. */
3232 distribute_real_division (enum tree_code code, tree type, tree arg0, tree arg1)
3234 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3235 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3237 /* (A / C) +- (B / C) -> (A +- B) / C. */
3239 && operand_equal_p (TREE_OPERAND (arg0, 1),
3240 TREE_OPERAND (arg1, 1), 0))
3241 return fold_build2 (mul0 ? MULT_EXPR : RDIV_EXPR, type,
3242 fold_build2 (code, type,
3243 TREE_OPERAND (arg0, 0),
3244 TREE_OPERAND (arg1, 0)),
3245 TREE_OPERAND (arg0, 1));
3247 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3248 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3249 TREE_OPERAND (arg1, 0), 0)
3250 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3251 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3253 REAL_VALUE_TYPE r0, r1;
3254 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3255 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3257 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3259 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3260 real_arithmetic (&r0, code, &r0, &r1);
3261 return fold_build2 (MULT_EXPR, type,
3262 TREE_OPERAND (arg0, 0),
3263 build_real (type, r0));
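/* Worked example (only valid under unsafe math, as noted above):
   a/4.0 + a/8.0 becomes a * (1/4.0 + 1/8.0), i.e. a * 0.375, trading
   two divisions for one multiplication; these particular constants
   are exact in binary floating point, so no rounding even occurs.  */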
3269 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3270 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3273 make_bit_field_ref (tree inner, tree type, int bitsize, int bitpos,
3280 tree size = TYPE_SIZE (TREE_TYPE (inner));
3281 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3282 || POINTER_TYPE_P (TREE_TYPE (inner)))
3283 && host_integerp (size, 0)
3284 && tree_low_cst (size, 0) == bitsize)
3285 return fold_convert (type, inner);
3288 result = build3 (BIT_FIELD_REF, type, inner,
3289 size_int (bitsize), bitsize_int (bitpos));
3291 BIT_FIELD_REF_UNSIGNED (result) = unsignedp;
3296 /* Optimize a bit-field compare.
3298 There are two cases: First is a compare against a constant and the
3299 second is a comparison of two items where the fields are at the same
3300 bit position relative to the start of a chunk (byte, halfword, word)
3301 large enough to contain it. In these cases we can avoid the shift
3302 implicit in bitfield extractions.
3304 For constants, we emit a compare of the shifted constant with the
3305 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3306 compared. For two fields at the same position, we do the ANDs with the
3307 similar mask and compare the result of the ANDs.
3309 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3310 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3311 are the left and right operands of the comparison, respectively.
3313 If the optimization described above can be done, we return the resulting
3314 tree. Otherwise we return zero. */
3317 optimize_bit_field_compare (enum tree_code code, tree compare_type,
3320 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3321 tree type = TREE_TYPE (lhs);
3322 tree signed_type, unsigned_type;
3323 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3324 enum machine_mode lmode, rmode, nmode;
3325 int lunsignedp, runsignedp;
3326 int lvolatilep = 0, rvolatilep = 0;
3327 tree linner, rinner = NULL_TREE;
3331 /* Get all the information about the extractions being done. If the bit size
3332 is the same as the size of the underlying object, we aren't doing an
3333 extraction at all and so can do nothing. We also don't want to
3334 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3335 then will no longer be able to replace it. */
3336 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3337 &lunsignedp, &lvolatilep, false);
3338 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3339 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
3344 /* If this is not a constant, we can only do something if bit positions,
3345 sizes, and signedness are the same. */
3346 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3347 &runsignedp, &rvolatilep, false);
3349 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3350 || lunsignedp != runsignedp || offset != 0
3351 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
3355 /* See if we can find a mode to refer to this field. We should be able to,
3356 but fail if we can't. */
3357 nmode = get_best_mode (lbitsize, lbitpos,
3358 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3359 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3360 TYPE_ALIGN (TREE_TYPE (rinner))),
3361 word_mode, lvolatilep || rvolatilep);
3362 if (nmode == VOIDmode)
3365 /* Set signed and unsigned types of the precision of this mode for the
3366 shifts below. */
3367 signed_type = lang_hooks.types.type_for_mode (nmode, 0);
3368 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3370 /* Compute the bit position and size for the new reference and our offset
3371 within it. If the new reference is the same size as the original, we
3372 won't optimize anything, so return zero. */
3373 nbitsize = GET_MODE_BITSIZE (nmode);
3374 nbitpos = lbitpos & ~ (nbitsize - 1);
3376 if (nbitsize == lbitsize)
3379 if (BYTES_BIG_ENDIAN)
3380 lbitpos = nbitsize - lbitsize - lbitpos;
3382 /* Make the mask to be used against the extracted field. */
3383 mask = build_int_cst (unsigned_type, -1);
3384 mask = force_fit_type (mask, 0, false, false);
3385 mask = fold_convert (unsigned_type, mask);
3386 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
3387 mask = const_binop (RSHIFT_EXPR, mask,
3388 size_int (nbitsize - lbitsize - lbitpos), 0);
3391 /* If not comparing with constant, just rework the comparison
3392 and return. */
3393 return build2 (code, compare_type,
3394 build2 (BIT_AND_EXPR, unsigned_type,
3395 make_bit_field_ref (linner, unsigned_type,
3396 nbitsize, nbitpos, 1),
3398 build2 (BIT_AND_EXPR, unsigned_type,
3399 make_bit_field_ref (rinner, unsigned_type,
3400 nbitsize, nbitpos, 1),
3403 /* Otherwise, we are handling the constant case. See if the constant is too
3404 big for the field. Warn and return a tree for 0 (false) if so. We do
3405 this not only for its own sake, but to avoid having to test for this
3406 error case below. If we didn't, we might generate wrong code.
3408 For unsigned fields, the constant shifted right by the field length should
3409 be all zero. For signed fields, the high-order bits should agree with
3410 the sign bit. */
3414 if (! integer_zerop (const_binop (RSHIFT_EXPR,
3415 fold_convert (unsigned_type, rhs),
3416 size_int (lbitsize), 0)))
3418 warning (0, "comparison is always %d due to width of bit-field",
3420 return constant_boolean_node (code == NE_EXPR, compare_type);
3425 tree tem = const_binop (RSHIFT_EXPR, fold_convert (signed_type, rhs),
3426 size_int (lbitsize - 1), 0);
3427 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
3429 warning (0, "comparison is always %d due to width of bit-field",
3431 return constant_boolean_node (code == NE_EXPR, compare_type);
3435 /* Single-bit compares should always be against zero. */
3436 if (lbitsize == 1 && ! integer_zerop (rhs))
3438 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3439 rhs = build_int_cst (type, 0);
3442 /* Make a new bitfield reference, shift the constant over the
3443 appropriate number of bits and mask it with the computed mask
3444 (in case this was a signed field). If we changed it, make a new one. */
3445 lhs = make_bit_field_ref (linner, unsigned_type, nbitsize, nbitpos, 1);
3448 TREE_SIDE_EFFECTS (lhs) = 1;
3449 TREE_THIS_VOLATILE (lhs) = 1;
3452 rhs = const_binop (BIT_AND_EXPR,
3453 const_binop (LSHIFT_EXPR,
3454 fold_convert (unsigned_type, rhs),
3455 size_int (lbitpos), 0),
3458 return build2 (code, compare_type,
3459 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask),
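/* Illustrative sketch: given "struct { int f : 3; } s;", a test like
   "s.f == 2" can be rewritten along the lines of
   (word & mask) == (2 << shift), where "word" is a mode-sized load
   covering the bit-field, so the shift implicit in extracting s.f is
   avoided.  */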
3463 /* Subroutine for fold_truthop: decode a field reference.
3465 If EXP is a comparison reference, we return the innermost reference.
3467 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3468 set to the starting bit number.
3470 If the innermost field can be completely contained in a mode-sized
3471 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3473 *PVOLATILEP is set to 1 if any expression encountered is volatile;
3474 otherwise it is not changed.
3476 *PUNSIGNEDP is set to the signedness of the field.
3478 *PMASK is set to the mask used. This is either contained in a
3479 BIT_AND_EXPR or derived from the width of the field.
3481 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3483 Return 0 if this is not a component reference or is one that we can't
3484 do anything with. */
3487 decode_field_reference (tree exp, HOST_WIDE_INT *pbitsize,
3488 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3489 int *punsignedp, int *pvolatilep,
3490 tree *pmask, tree *pand_mask)
3492 tree outer_type = 0;
3494 tree mask, inner, offset;
3496 unsigned int precision;
3498 /* All the optimizations using this function assume integer fields.
3499 There are problems with FP fields since the type_for_size call
3500 below can fail for, e.g., XFmode. */
3501 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3504 /* We are interested in the bare arrangement of bits, so strip everything
3505 that doesn't affect the machine mode. However, record the type of the
3506 outermost expression if it may matter below. */
3507 if (TREE_CODE (exp) == NOP_EXPR
3508 || TREE_CODE (exp) == CONVERT_EXPR
3509 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3510 outer_type = TREE_TYPE (exp);
3513 if (TREE_CODE (exp) == BIT_AND_EXPR)
3515 and_mask = TREE_OPERAND (exp, 1);
3516 exp = TREE_OPERAND (exp, 0);
3517 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3518 if (TREE_CODE (and_mask) != INTEGER_CST)
3522 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3523 punsignedp, pvolatilep, false);
3524 if ((inner == exp && and_mask == 0)
3525 || *pbitsize < 0 || offset != 0
3526 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3529 /* If the number of bits in the reference is the same as the bitsize of
3530 the outer type, then the outer type gives the signedness. Otherwise
3531 (in case of a small bitfield) the signedness is unchanged. */
3532 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3533 *punsignedp = TYPE_UNSIGNED (outer_type);
3535 /* Compute the mask to access the bitfield. */
3536 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3537 precision = TYPE_PRECISION (unsigned_type);
3539 mask = build_int_cst (unsigned_type, -1);
3540 mask = force_fit_type (mask, 0, false, false);
3542 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3543 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3545 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3547 mask = fold_build2 (BIT_AND_EXPR, unsigned_type,
3548 fold_convert (unsigned_type, and_mask), mask);
3551 *pand_mask = and_mask;
3555 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3556 bit positions. */
3559 all_ones_mask_p (tree mask, int size)
3561 tree type = TREE_TYPE (mask);
3562 unsigned int precision = TYPE_PRECISION (type);
3565 tmask = build_int_cst (lang_hooks.types.signed_type (type), -1);
3566 tmask = force_fit_type (tmask, 0, false, false);
3569 tree_int_cst_equal (mask,
3570 const_binop (RSHIFT_EXPR,
3571 const_binop (LSHIFT_EXPR, tmask,
3572 size_int (precision - size),
3574 size_int (precision - size), 0));
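/* Worked example: with SIZE == 8, a MASK with value 0xff satisfies
   all_ones_mask_p; shifting the all-ones constant left and then right
   by precision - 8 leaves exactly the low eight bits set.  */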
3577 /* Subroutine for fold: determine if VAL is the INTEGER_CST that
3578 represents the sign bit of EXP's type. If EXP represents a sign
3579 or zero extension, also test VAL against the unextended type.
3580 The return value is the (sub)expression whose sign bit is VAL,
3581 or NULL_TREE otherwise. */
3584 sign_bit_p (tree exp, tree val)
3586 unsigned HOST_WIDE_INT mask_lo, lo;
3587 HOST_WIDE_INT mask_hi, hi;
3591 /* Tree EXP must have an integral type. */
3592 t = TREE_TYPE (exp);
3593 if (! INTEGRAL_TYPE_P (t))
3596 /* Tree VAL must be an integer constant. */
3597 if (TREE_CODE (val) != INTEGER_CST
3598 || TREE_CONSTANT_OVERFLOW (val))
3601 width = TYPE_PRECISION (t);
3602 if (width > HOST_BITS_PER_WIDE_INT)
3604 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3607 mask_hi = ((unsigned HOST_WIDE_INT) -1
3608 >> (2 * HOST_BITS_PER_WIDE_INT - width));
3614 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3617 mask_lo = ((unsigned HOST_WIDE_INT) -1
3618 >> (HOST_BITS_PER_WIDE_INT - width));
3621 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3622 treat VAL as if it were unsigned. */
3623 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
3624 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
3627 /* Handle extension from a narrower type. */
3628 if (TREE_CODE (exp) == NOP_EXPR
3629 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3630 return sign_bit_p (TREE_OPERAND (exp, 0), val);
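/* Worked example: for a 32-bit int EXP, sign_bit_p returns EXP when
   VAL is 0x80000000; for EXP = (int) c with c a signed char, the
   recursion above also accepts 0x80, the sign bit of the narrower
   type.  */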
3635 /* Subroutine for fold_truthop: determine if an operand is simple enough
3636 to be evaluated unconditionally. */
3639 simple_operand_p (tree exp)
3641 /* Strip any conversions that don't change the machine mode. */
3644 return (CONSTANT_CLASS_P (exp)
3645 || TREE_CODE (exp) == SSA_NAME
3647 && ! TREE_ADDRESSABLE (exp)
3648 && ! TREE_THIS_VOLATILE (exp)
3649 && ! DECL_NONLOCAL (exp)
3650 /* Don't regard global variables as simple. They may be
3651 allocated in ways unknown to the compiler (shared memory,
3652 #pragma weak, etc). */
3653 && ! TREE_PUBLIC (exp)
3654 && ! DECL_EXTERNAL (exp)
3655 /* Loading a static variable is unduly expensive, but global
3656 registers aren't expensive. */
3657 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3660 /* The following functions are subroutines to fold_range_test and allow it to
3661 try to change a logical combination of comparisons into a range test.
3664 X == 2 || X == 3 || X == 4 || X == 5
3667 is converted to
3668 (unsigned) (X - 2) <= 3
3670 We describe each set of comparisons as being either inside or outside
3671 a range, using a variable named like IN_P, and then describe the
3672 range with a lower and upper bound. If one of the bounds is omitted,
3673 it represents either the highest or lowest value of the type.
3675 In the comments below, we represent a range by two numbers in brackets
3676 preceded by a "+" to designate being inside that range, or a "-" to
3677 designate being outside that range, so the condition can be inverted by
3678 flipping the prefix. An omitted bound is represented by a "-". For
3679 example, "- [-, 10]" means being outside the range starting at the lowest
3680 possible value and ending at 10, in other words, being greater than 10.
3681 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3682 always false.
3684 We set up things so that the missing bounds are handled in a consistent
3685 manner so neither a missing bound nor "true" and "false" need to be
3686 handled using a special case. */
3688 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3689 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3690 and UPPER1_P are nonzero if the respective argument is an upper bound
3691 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3692 must be specified for a comparison. ARG1 will be converted to ARG0's
3693 type if both are specified. */
3696 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3697 tree arg1, int upper1_p)
3703 /* If neither arg represents infinity, do the normal operation.
3704 Else, if not a comparison, return infinity. Else handle the special
3705 comparison rules. Note that most of the cases below won't occur, but
3706 are handled for consistency. */
3708 if (arg0 != 0 && arg1 != 0)
3710 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3711 arg0, fold_convert (TREE_TYPE (arg0), arg1));
3713 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3716 if (TREE_CODE_CLASS (code) != tcc_comparison)
3719 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3720 for neither. In real maths, we cannot assume open ended ranges are
3721 the same. But, this is computer arithmetic, where numbers are finite.
3722 We can therefore make the transformation of any unbounded range with
3723 the value Z, Z being greater than any representable number. This permits
3724 us to treat unbounded ranges as equal. */
3725 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3726 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3730 result = sgn0 == sgn1;
3733 result = sgn0 != sgn1;
3736 result = sgn0 < sgn1;
3739 result = sgn0 <= sgn1;
3742 result = sgn0 > sgn1;
3745 result = sgn0 >= sgn1;
3751 return constant_boolean_node (result, type);
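/* Note: a missing bound acts as an infinity of the appropriate sign;
   e.g. comparing an absent lower bound (SGN -1) against any
   representable value (SGN 0) with LT_EXPR yields true, which is what
   lets open-ended ranges compose uniformly.  */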
3754 /* Given EXP, a logical expression, set the range it is testing into
3755 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
3756 actually being tested. *PLOW and *PHIGH will be made of the same type
3757 as the returned expression. If EXP is not a comparison, we will most
3758 likely not be returning a useful value and range. */
3761 make_range (tree exp, int *pin_p, tree *plow, tree *phigh)
3763 enum tree_code code;
3764 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
3765 tree exp_type = NULL_TREE, arg0_type = NULL_TREE;
3767 tree low, high, n_low, n_high;
3769 /* Start with simply saying "EXP != 0" and then look at the code of EXP
3770 and see if we can refine the range. Some of the cases below may not
3771 happen, but it doesn't seem worth worrying about this. We "continue"
3772 the outer loop when we've changed something; otherwise we "break"
3773 the switch, which will "break" the while. */
3776 low = high = build_int_cst (TREE_TYPE (exp), 0);
3780 code = TREE_CODE (exp);
3781 exp_type = TREE_TYPE (exp);
3783 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
3785 if (TREE_CODE_LENGTH (code) > 0)
3786 arg0 = TREE_OPERAND (exp, 0);
3787 if (TREE_CODE_CLASS (code) == tcc_comparison
3788 || TREE_CODE_CLASS (code) == tcc_unary
3789 || TREE_CODE_CLASS (code) == tcc_binary)
3790 arg0_type = TREE_TYPE (arg0);
3791 if (TREE_CODE_CLASS (code) == tcc_binary
3792 || TREE_CODE_CLASS (code) == tcc_comparison
3793 || (TREE_CODE_CLASS (code) == tcc_expression
3794 && TREE_CODE_LENGTH (code) > 1))
3795 arg1 = TREE_OPERAND (exp, 1);
3800 case TRUTH_NOT_EXPR:
3801 in_p = ! in_p, exp = arg0;
3804 case EQ_EXPR: case NE_EXPR:
3805 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3806 /* We can only do something if the range is testing for zero
3807 and if the second operand is an integer constant. Note that
3808 saying something is "in" the range we make is done by
3809 complementing IN_P since it will set in the initial case of
3810 being not equal to zero; "out" is leaving it alone. */
3811 if (low == 0 || high == 0
3812 || ! integer_zerop (low) || ! integer_zerop (high)
3813 || TREE_CODE (arg1) != INTEGER_CST)
3818 case NE_EXPR: /* - [c, c] */
3821 case EQ_EXPR: /* + [c, c] */
3822 in_p = ! in_p, low = high = arg1;
3824 case GT_EXPR: /* - [-, c] */
3825 low = 0, high = arg1;
3827 case GE_EXPR: /* + [c, -] */
3828 in_p = ! in_p, low = arg1, high = 0;
3830 case LT_EXPR: /* - [c, -] */
3831 low = arg1, high = 0;
3833 case LE_EXPR: /* + [-, c] */
3834 in_p = ! in_p, low = 0, high = arg1;
3840 /* If this is an unsigned comparison, we also know that EXP is
3841 greater than or equal to zero. We base the range tests we make
3842 on that fact, so we record it here so we can parse existing
3843 range tests. We test arg0_type since often the return type
3844 of, e.g. EQ_EXPR, is boolean. */
3845 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
3847 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3849 build_int_cst (arg0_type, 0),
3853 in_p = n_in_p, low = n_low, high = n_high;
3855 /* If the high bound is missing, but we have a nonzero low
3856 bound, reverse the range so it goes from zero to the low bound
3857 minus 1. */
3858 if (high == 0 && low && ! integer_zerop (low))
3861 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
3862 integer_one_node, 0);
3863 low = build_int_cst (arg0_type, 0);
3871 /* (-x) IN [a,b] -> x in [-b, -a] */
3872 n_low = range_binop (MINUS_EXPR, exp_type,
3873 build_int_cst (exp_type, 0),
3875 n_high = range_binop (MINUS_EXPR, exp_type,
3876 build_int_cst (exp_type, 0),
3878 low = n_low, high = n_high;
3884 exp = build2 (MINUS_EXPR, exp_type, negate_expr (arg0),
3885 build_int_cst (exp_type, 1));
3888 case PLUS_EXPR: case MINUS_EXPR:
3889 if (TREE_CODE (arg1) != INTEGER_CST)
3892 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
3893 move a constant to the other side. */
3894 if (flag_wrapv && !TYPE_UNSIGNED (arg0_type))
3897 /* If EXP is signed, any overflow in the computation is undefined,
3898 so we don't worry about it so long as our computations on
3899 the bounds don't overflow. For unsigned, overflow is defined
3900 and this is exactly the right thing. */
3901 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3902 arg0_type, low, 0, arg1, 0);
3903 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3904 arg0_type, high, 1, arg1, 0);
3905 if ((n_low != 0 && TREE_OVERFLOW (n_low))
3906 || (n_high != 0 && TREE_OVERFLOW (n_high)))
3909 /* Check for an unsigned range which has wrapped around the maximum
3910 value thus making n_high < n_low, and normalize it. */
3911 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
3913 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
3914 integer_one_node, 0);
3915 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
3916 integer_one_node, 0);
3918 /* If the range is of the form +/- [ x+1, x ], we won't
3919 be able to normalize it. But then, it represents the
3920 whole range or the empty set, so make it
3921 +/- [ -, - ]. */
3922 if (tree_int_cst_equal (n_low, low)
3923 && tree_int_cst_equal (n_high, high))
3929 low = n_low, high = n_high;
3934 case NOP_EXPR: case NON_LVALUE_EXPR: case CONVERT_EXPR:
3935 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
3938 if (! INTEGRAL_TYPE_P (arg0_type)
3939 || (low != 0 && ! int_fits_type_p (low, arg0_type))
3940 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
3943 n_low = low, n_high = high;
3946 n_low = fold_convert (arg0_type, n_low);
3949 n_high = fold_convert (arg0_type, n_high);
3952 /* If we're converting arg0 from an unsigned type, to exp,
3953 a signed type, we will be doing the comparison as unsigned.
3954 The tests above have already verified that LOW and HIGH
3955 are both positive.
3957 So we have to ensure that we will handle large unsigned
3958 values the same way that the current signed bounds treat
3959 negative values. */
3961 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
3964 tree equiv_type = lang_hooks.types.type_for_mode
3965 (TYPE_MODE (arg0_type), 1);
3967 /* A range without an upper bound is, naturally, unbounded.
3968 Since convert would have cropped a very large value, use
3969 the max value for the destination type. */
3971 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
3972 : TYPE_MAX_VALUE (arg0_type);
3974 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
3975 high_positive = fold_build2 (RSHIFT_EXPR, arg0_type,
3976 fold_convert (arg0_type,
3978 fold_convert (arg0_type,
3981 /* If the low bound is specified, "and" the range with the
3982 range for which the original unsigned value will be
3983 positive. */
3986 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3987 1, n_low, n_high, 1,
3988 fold_convert (arg0_type,
3993 in_p = (n_in_p == in_p);
3997 /* Otherwise, "or" the range with the range of the input
3998 that will be interpreted as negative. */
3999 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4000 0, n_low, n_high, 1,
4001 fold_convert (arg0_type,
4006 in_p = (in_p != n_in_p);
4011 low = n_low, high = n_high;
4021 /* If EXP is a constant, we can evaluate whether this is true or false. */
4022 if (TREE_CODE (exp) == INTEGER_CST)
4024 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4026 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4032 *pin_p = in_p, *plow = low, *phigh = high;
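/* Worked example: make_range on "x > 10" returns x with *PIN_P == 0,
   *PLOW missing and *PHIGH == 10, the "- [-, 10]" of the notation
   above, i.e. x lies outside [minimum, 10].  */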
4036 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4037 type, TYPE, return an expression to test if EXP is in (or out of, depending
4038 on IN_P) the range. Return 0 if the test couldn't be created. */
4041 build_range_check (tree type, tree exp, int in_p, tree low, tree high)
4043 tree etype = TREE_TYPE (exp);
4046 #ifdef HAVE_canonicalize_funcptr_for_compare
4047 /* Disable this optimization for function pointer expressions
4048 on targets that require function pointer canonicalization. */
4049 if (HAVE_canonicalize_funcptr_for_compare
4050 && TREE_CODE (etype) == POINTER_TYPE
4051 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4057 value = build_range_check (type, exp, 1, low, high);
4059 return invert_truthvalue (value);
4064 if (low == 0 && high == 0)
4065 return build_int_cst (type, 1);
4068 return fold_build2 (LE_EXPR, type, exp,
4069 fold_convert (etype, high));
4072 return fold_build2 (GE_EXPR, type, exp,
4073 fold_convert (etype, low));
4075 if (operand_equal_p (low, high, 0))
4076 return fold_build2 (EQ_EXPR, type, exp,
4077 fold_convert (etype, low));
4079 if (integer_zerop (low))
4081 if (! TYPE_UNSIGNED (etype))
4083 etype = lang_hooks.types.unsigned_type (etype);
4084 high = fold_convert (etype, high);
4085 exp = fold_convert (etype, exp);
4087 return build_range_check (type, exp, 1, 0, high);
4090 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4091 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4093 unsigned HOST_WIDE_INT lo;
4097 prec = TYPE_PRECISION (etype);
4098 if (prec <= HOST_BITS_PER_WIDE_INT)
4101 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
4105 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
4106 lo = (unsigned HOST_WIDE_INT) -1;
4109 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
4111 if (TYPE_UNSIGNED (etype))
4113 etype = lang_hooks.types.signed_type (etype);
4114 exp = fold_convert (etype, exp);
4116 return fold_build2 (GT_EXPR, type, exp,
4117 build_int_cst (etype, 0));
4121 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4122 This requires wrap-around arithmetic for the type of the expression. */
4123 switch (TREE_CODE (etype))
4126 /* There is no requirement that LOW be within the range of ETYPE
4127 if the latter is a subtype. It must, however, be within the base
4128 type of ETYPE. So be sure we do the subtraction in that type. */
4129 if (TREE_TYPE (etype))
4130 etype = TREE_TYPE (etype);
4135 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4136 TYPE_UNSIGNED (etype));
4143 /* If we don't have wrap-around arithmetic upfront, try to force it. */
4144 if (TREE_CODE (etype) == INTEGER_TYPE
4145 && !TYPE_UNSIGNED (etype) && !flag_wrapv)
4147 tree utype, minv, maxv;
4149 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4150 for the type in question, as we rely on this here. */
4151 utype = lang_hooks.types.unsigned_type (etype);
4152 maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
4153 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4154 integer_one_node, 1);
4155 minv = fold_convert (utype, TYPE_MIN_VALUE (etype));
4157 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4164 high = fold_convert (etype, high);
4165 low = fold_convert (etype, low);
4166 exp = fold_convert (etype, exp);
4168 value = const_binop (MINUS_EXPR, high, low, 0);
4170 if (value != 0 && !TREE_OVERFLOW (value))
4171 return build_range_check (type,
4172 fold_build2 (MINUS_EXPR, etype, exp, low),
4173 1, build_int_cst (etype, 0), value);
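/* Worked example: a check for 2 <= x <= 5 is built as the single
   unsigned test (unsigned type) (x - 2) <= 3, following the
   wrap-around rewrite above.  */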
4178 /* Return the predecessor of VAL in its type, handling the infinite case. */
4181 range_predecessor (tree val)
4183 tree type = TREE_TYPE (val);
4185 if (INTEGRAL_TYPE_P (type)
4186 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4189 return range_binop (MINUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4192 /* Return the successor of VAL in its type, handling the infinite case. */
4195 range_successor (tree val)
4197 tree type = TREE_TYPE (val);
4199 if (INTEGRAL_TYPE_P (type)
4200 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4203 return range_binop (PLUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4206 /* Given two ranges, see if we can merge them into one. Return 1 if we
4207 can, 0 if we can't. Set the output range into the specified parameters. */
4210 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4211 tree high0, int in1_p, tree low1, tree high1)
4219 int lowequal = ((low0 == 0 && low1 == 0)
4220 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4221 low0, 0, low1, 0)));
4222 int highequal = ((high0 == 0 && high1 == 0)
4223 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4224 high0, 1, high1, 1)));
4226 /* Make range 0 be the range that starts first, or ends last if they
4227 start at the same value. Swap them if it isn't. */
4228 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4231 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4232 high1, 1, high0, 1))))
4234 temp = in0_p, in0_p = in1_p, in1_p = temp;
4235 tem = low0, low0 = low1, low1 = tem;
4236 tem = high0, high0 = high1, high1 = tem;
4239 /* Now flag two cases, whether the ranges are disjoint or whether the
4240 second range is totally subsumed in the first. Note that the tests
4241 below are simplified by the ones above. */
4242 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4243 high0, 1, low1, 0));
4244 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4245 high1, 1, high0, 1));
4247 /* We now have four cases, depending on whether we are including or
4248 excluding the two ranges. */
4251 /* If they don't overlap, the result is false. If the second range
4252 is a subset it is the result. Otherwise, the range is from the start
4253 of the second to the end of the first. */
4255 in_p = 0, low = high = 0;
4257 in_p = 1, low = low1, high = high1;
4259 in_p = 1, low = low1, high = high0;
4262 else if (in0_p && ! in1_p)
4264 /* If they don't overlap, the result is the first range. If they are
4265 equal, the result is false. If the second range is a subset of the
4266 first, and the ranges begin at the same place, we go from just after
4267 the end of the second range to the end of the first. If the second
4268 range is not a subset of the first, or if it is a subset and both
4269 ranges end at the same place, the range starts at the start of the
4270 first range and ends just before the second range.
4271 Otherwise, we can't describe this as a single range. */
4273 in_p = 1, low = low0, high = high0;
4274 else if (lowequal && highequal)
4275 in_p = 0, low = high = 0;
4276 else if (subset && lowequal)
4278 low = range_successor (high1);
4282 else if (! subset || highequal)
4285 high = range_predecessor (low1);
4292 else if (! in0_p && in1_p)
4294 /* If they don't overlap, the result is the second range. If the second
4295 is a subset of the first, the result is false. Otherwise,
4296 the range starts just after the first range and ends at the
4297 end of the second. */
4299 in_p = 1, low = low1, high = high1;
4300 else if (subset || highequal)
4301 in_p = 0, low = high = 0;
4304 low = range_successor (high0);
4312 /* The case where we are excluding both ranges. Here the complex case
4313 is if they don't overlap. In that case, the only time we have a
4314 range is if they are adjacent. If the second is a subset of the
4315 first, the result is the first. Otherwise, the range to exclude
4316 starts at the beginning of the first range and ends at the end of the
4317 second. */
4320 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4321 range_successor (high0),
4323 in_p = 0, low = low0, high = high1;
4326 /* Canonicalize - [min, x] into - [-, x]. */
4327 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4328 switch (TREE_CODE (TREE_TYPE (low0)))
4331 if (TYPE_PRECISION (TREE_TYPE (low0))
4332 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4336 if (tree_int_cst_equal (low0,
4337 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4341 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4342 && integer_zerop (low0))
4349 /* Canonicalize - [x, max] into - [x, -]. */
4350 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4351 switch (TREE_CODE (TREE_TYPE (high1)))
4354 if (TYPE_PRECISION (TREE_TYPE (high1))
4355 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4359 if (tree_int_cst_equal (high1,
4360 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4364 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4365 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4367 integer_one_node, 1)))
4374 /* The ranges might also be adjacent between the maximum and
4375 minimum values of the given type. For
4376 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4377 return + [x + 1, y - 1]. */
4378 if (low0 == 0 && high1 == 0)
4380 low = range_successor (high0);
4381 high = range_predecessor (low1);
4382 if (low == 0 || high == 0)
4392 in_p = 0, low = low0, high = high0;
4394 in_p = 0, low = low0, high = high1;
4397 *pin_p = in_p, *plow = low, *phigh = high;
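/* Worked example: merging "+ [2, 9]" with "+ [4, 12]" (both IN_P)
   yields "+ [4, 9]": the ranges overlap and neither contains the
   other, so the result runs from the start of the second range to the
   end of the first.  */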
4402 /* Subroutine of fold, looking inside expressions of the form
4403 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4404 of the COND_EXPR. This function is being used also to optimize
4405 A op B ? C : A, by reversing the comparison first.
4407 Return a folded expression whose code is not a COND_EXPR
4408 anymore, or NULL_TREE if no folding opportunity is found. */
4411 fold_cond_expr_with_comparison (tree type, tree arg0, tree arg1, tree arg2)
4413 enum tree_code comp_code = TREE_CODE (arg0);
4414 tree arg00 = TREE_OPERAND (arg0, 0);
4415 tree arg01 = TREE_OPERAND (arg0, 1);
4416 tree arg1_type = TREE_TYPE (arg1);
4422 /* If we have A op 0 ? A : -A, consider applying the following
4423 transformations:
4425 A == 0? A : -A same as -A
4426 A != 0? A : -A same as A
4427 A >= 0? A : -A same as abs (A)
4428 A > 0? A : -A same as abs (A)
4429 A <= 0? A : -A same as -abs (A)
4430 A < 0? A : -A same as -abs (A)
4432 None of these transformations work for modes with signed
4433 zeros. If A is +/-0, the first two transformations will
4434 change the sign of the result (from +0 to -0, or vice
4435 versa). The last four will fix the sign of the result,
4436 even though the original expressions could be positive or
4437 negative, depending on the sign of A.
4439 Note that all these transformations are correct if A is
4440 NaN, since the two alternatives (A and -A) are also NaNs. */
4441 if ((FLOAT_TYPE_P (TREE_TYPE (arg01))
4442 ? real_zerop (arg01)
4443 : integer_zerop (arg01))
4444 && ((TREE_CODE (arg2) == NEGATE_EXPR
4445 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4446 /* In the case that A is of the form X-Y, '-A' (arg2) may
4447 have already been folded to Y-X, check for that. */
4448 || (TREE_CODE (arg1) == MINUS_EXPR
4449 && TREE_CODE (arg2) == MINUS_EXPR
4450 && operand_equal_p (TREE_OPERAND (arg1, 0),
4451 TREE_OPERAND (arg2, 1), 0)
4452 && operand_equal_p (TREE_OPERAND (arg1, 1),
4453 TREE_OPERAND (arg2, 0), 0))))
4458 tem = fold_convert (arg1_type, arg1);
4459 return pedantic_non_lvalue (fold_convert (type, negate_expr (tem)));
4462 return pedantic_non_lvalue (fold_convert (type, arg1));
4465 if (flag_trapping_math)
4470 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4471 arg1 = fold_convert (lang_hooks.types.signed_type
4472 (TREE_TYPE (arg1)), arg1);
4473 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
4474 return pedantic_non_lvalue (fold_convert (type, tem));
4477 if (flag_trapping_math)
4481 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4482 arg1 = fold_convert (lang_hooks.types.signed_type
4483 (TREE_TYPE (arg1)), arg1);
4484 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
4485 return negate_expr (fold_convert (type, tem));
4487 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4491 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4492 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4493 both transformations are correct when A is NaN: A != 0
4494 is then true, and A == 0 is false. */
4496 if (integer_zerop (arg01) && integer_zerop (arg2))
4498 if (comp_code == NE_EXPR)
4499 return pedantic_non_lvalue (fold_convert (type, arg1));
4500 else if (comp_code == EQ_EXPR)
4501 return build_int_cst (type, 0);
4504 /* Try some transformations of A op B ? A : B.
4506 A == B? A : B same as B
4507 A != B? A : B same as A
4508 A >= B? A : B same as max (A, B)
4509 A > B? A : B same as max (B, A)
4510 A <= B? A : B same as min (A, B)
4511 A < B? A : B same as min (B, A)
4513 As above, these transformations don't work in the presence
4514 of signed zeros. For example, if A and B are zeros of
4515 opposite sign, the first two transformations will change
4516 the sign of the result. In the last four, the original
4517 expressions give different results for (A=+0, B=-0) and
4518 (A=-0, B=+0), but the transformed expressions do not.
4520 The first two transformations are correct if either A or B
4521 is a NaN. In the first transformation, the condition will
4522 be false, and B will indeed be chosen. In the case of the
4523 second transformation, the condition A != B will be true,
4524 and A will be chosen.
4526 The conversions to max() and min() are not correct if B is
4527 a number and A is not. The conditions in the original
4528 expressions will be false, so all four give B. The min()
4529 and max() versions would give a NaN instead. */
4530 if (operand_equal_for_comparison_p (arg01, arg2, arg00)
4531 /* Avoid these transformations if the COND_EXPR may be used
4532 as an lvalue in the C++ front-end. PR c++/19199. */
4534 || strcmp (lang_hooks.name, "GNU C++") != 0
4535 || ! maybe_lvalue_p (arg1)
4536 || ! maybe_lvalue_p (arg2)))
4538 tree comp_op0 = arg00;
4539 tree comp_op1 = arg01;
4540 tree comp_type = TREE_TYPE (comp_op0);
4542 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4543 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4553 return pedantic_non_lvalue (fold_convert (type, arg2));
4555 return pedantic_non_lvalue (fold_convert (type, arg1));
4560 /* In C++ a ?: expression can be an lvalue, so put the
4561 operand which will be used if they are equal first
4562 so that we can convert this back to the
4563 corresponding COND_EXPR. */
4564 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4566 comp_op0 = fold_convert (comp_type, comp_op0);
4567 comp_op1 = fold_convert (comp_type, comp_op1);
4568 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
4569 ? fold_build2 (MIN_EXPR, comp_type, comp_op0, comp_op1)
4570 : fold_build2 (MIN_EXPR, comp_type, comp_op1, comp_op0);
4571 return pedantic_non_lvalue (fold_convert (type, tem));
4578 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4580 comp_op0 = fold_convert (comp_type, comp_op0);
4581 comp_op1 = fold_convert (comp_type, comp_op1);
4582 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
4583 ? fold_build2 (MAX_EXPR, comp_type, comp_op0, comp_op1)
4584 : fold_build2 (MAX_EXPR, comp_type, comp_op1, comp_op0);
4585 return pedantic_non_lvalue (fold_convert (type, tem));
4589 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4590 return pedantic_non_lvalue (fold_convert (type, arg2));
4593 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4594 return pedantic_non_lvalue (fold_convert (type, arg1));
4597 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4602 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4603 we might still be able to simplify this. For example,
4604 if C1 is one less or one more than C2, this might have started
4605 out as a MIN or MAX and been transformed by this function.
4606 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
4608 if (INTEGRAL_TYPE_P (type)
4609 && TREE_CODE (arg01) == INTEGER_CST
4610 && TREE_CODE (arg2) == INTEGER_CST)
4614 /* We can replace A with C1 in this case. */
4615 arg1 = fold_convert (type, arg01);
4616 return fold_build3 (COND_EXPR, type, arg0, arg1, arg2);
4619 /* If C1 is C2 + 1, this is min(A, C2). */
4620 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4622 && operand_equal_p (arg01,
4623 const_binop (PLUS_EXPR, arg2,
4624 integer_one_node, 0),
4626 return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
4631 /* If C1 is C2 - 1, this is min(A, C2). */
4632 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4634 && operand_equal_p (arg01,
4635 const_binop (MINUS_EXPR, arg2,
4636 integer_one_node, 0),
4638 return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
4643 /* If C1 is C2 - 1, this is max(A, C2). */
4644 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4646 && operand_equal_p (arg01,
4647 const_binop (MINUS_EXPR, arg2,
4648 integer_one_node, 0),
4650 return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
4655 /* If C1 is C2 + 1, this is max(A, C2). */
4656 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4658 && operand_equal_p (arg01,
4659 const_binop (PLUS_EXPR, arg2,
4660 integer_one_node, 0),
4662 return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
4676 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
4677 #define LOGICAL_OP_NON_SHORT_CIRCUIT (BRANCH_COST >= 2)
#endif
4680 /* EXP is some logical combination of boolean tests. See if we can
4681 merge it into some range test. Return the new tree if so. */
4684 fold_range_test (enum tree_code code, tree type, tree op0, tree op1)
4686 int or_op = (code == TRUTH_ORIF_EXPR
4687 || code == TRUTH_OR_EXPR);
4688 int in0_p, in1_p, in_p;
4689 tree low0, low1, low, high0, high1, high;
4690 tree lhs = make_range (op0, &in0_p, &low0, &high0);
4691 tree rhs = make_range (op1, &in1_p, &low1, &high1);
4694 /* If this is an OR operation, invert both sides; we will invert
4695 again at the end. */
4697 in0_p = ! in0_p, in1_p = ! in1_p;
4699 /* If both expressions are the same, if we can merge the ranges, and we
4700 can build the range test, return it or it inverted. If one of the
4701 ranges is always true or always false, consider it to be the same
4702 expression as the other. */
4703 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
4704 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
4706 && 0 != (tem = (build_range_check (type,
4708 : rhs != 0 ? rhs : integer_zero_node,
4710 return or_op ? invert_truthvalue (tem) : tem;
4712 /* On machines where the branch cost is high, if this is a
4713 short-circuited branch and the underlying object on both sides
4714 is the same, make a non-short-circuit operation. */
4715 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
4716 && lhs != 0 && rhs != 0
4717 && (code == TRUTH_ANDIF_EXPR
4718 || code == TRUTH_ORIF_EXPR)
4719 && operand_equal_p (lhs, rhs, 0))
4721 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
4722 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
4723 which cases we can't do this. */
4724 if (simple_operand_p (lhs))
4725 return build2 (code == TRUTH_ANDIF_EXPR
4726 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4729 else if (lang_hooks.decls.global_bindings_p () == 0
4730 && ! CONTAINS_PLACEHOLDER_P (lhs))
4732 tree common = save_expr (lhs);
4734 if (0 != (lhs = build_range_check (type, common,
4735 or_op ? ! in0_p : in0_p,
4737 && (0 != (rhs = build_range_check (type, common,
4738 or_op ? ! in1_p : in1_p,
4740 return build2 (code == TRUTH_ANDIF_EXPR
4741 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4749 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
4750 bit value. Arrange things so the extra bits will be set to zero if and
4751 only if C is sign-extended to its full width.  If MASK is nonzero,
4752 it is an INTEGER_CST that should be AND'ed with the extra bits. */
4755 unextend (tree c, int p, int unsignedp, tree mask)
4757 tree type = TREE_TYPE (c);
4758 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
4761 if (p == modesize || unsignedp)
4764 /* We work by getting just the sign bit into the low-order bit, then
4765 into the high-order bit, then sign-extend.  We then XOR that value with C.  */
4767 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
4768 temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
4770 /* We must use a signed type in order to get an arithmetic right shift.
4771 However, we must also avoid introducing accidental overflows, so that
4772 a subsequent call to integer_zerop will work. Hence we must
4773 do the type conversion here. At this point, the constant is either
4774 zero or one, and the conversion to a signed type can never overflow.
4775 We could get an overflow if this conversion is done anywhere else. */
4776 if (TYPE_UNSIGNED (type))
4777 temp = fold_convert (lang_hooks.types.signed_type (type), temp);
4779 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
4780 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
4782 temp = const_binop (BIT_AND_EXPR, temp,
4783 fold_convert (TREE_TYPE (c), mask), 0);
4784 /* If necessary, convert the type back to match the type of C. */
4785 if (TYPE_UNSIGNED (type))
4786 temp = fold_convert (type, temp);
4788 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
4791 /* Find ways of folding logical expressions of LHS and RHS:
4792 Try to merge two comparisons to the same innermost item.
4793 Look for range tests like "ch >= '0' && ch <= '9'".
4794 Look for combinations of simple terms on machines with expensive branches
4795 and evaluate the RHS unconditionally.
4797 For example, if we have p->a == 2 && p->b == 4 and we can make an
4798 object large enough to span both A and B, we can do this with a comparison
4799 against the object ANDed with a mask.
4801 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
4802 operations to do this with one comparison.
4804 We check for both normal comparisons and the BIT_AND_EXPRs made by
4805 this function and the one above.
4807 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
4808 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
4810 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its two operands.
4813 We return the simplified tree or 0 if no optimization is possible. */
4816 fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs)
4818 /* If this is the "or" of two comparisons, we can do something if
4819 the comparisons are NE_EXPR. If this is the "and", we can do something
4820 if the comparisons are EQ_EXPR. I.e.,
4821 (a->b == 2 && a->c == 4) can become (a->new == NEW).
4823 WANTED_CODE is this operation code. For single bit fields, we can
4824 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
4825 comparison for one-bit fields. */
4827 enum tree_code wanted_code;
4828 enum tree_code lcode, rcode;
4829 tree ll_arg, lr_arg, rl_arg, rr_arg;
4830 tree ll_inner, lr_inner, rl_inner, rr_inner;
4831 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
4832 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
4833 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
4834 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
4835 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
4836 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
4837 enum machine_mode lnmode, rnmode;
4838 tree ll_mask, lr_mask, rl_mask, rr_mask;
4839 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
4840 tree l_const, r_const;
4841 tree lntype, rntype, result;
4842 int first_bit, end_bit;
4845 /* Start by getting the comparison codes. Fail if anything is volatile.
4846 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
4847 it were surrounded with a NE_EXPR. */
4849 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
4852 lcode = TREE_CODE (lhs);
4853 rcode = TREE_CODE (rhs);
4855 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
4857 lhs = build2 (NE_EXPR, truth_type, lhs,
4858 build_int_cst (TREE_TYPE (lhs), 0));
4862 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
4864 rhs = build2 (NE_EXPR, truth_type, rhs,
4865 build_int_cst (TREE_TYPE (rhs), 0));
4869 if (TREE_CODE_CLASS (lcode) != tcc_comparison
4870 || TREE_CODE_CLASS (rcode) != tcc_comparison)
4873 ll_arg = TREE_OPERAND (lhs, 0);
4874 lr_arg = TREE_OPERAND (lhs, 1);
4875 rl_arg = TREE_OPERAND (rhs, 0);
4876 rr_arg = TREE_OPERAND (rhs, 1);
4878 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
4879 if (simple_operand_p (ll_arg)
4880 && simple_operand_p (lr_arg))
4883 if (operand_equal_p (ll_arg, rl_arg, 0)
4884 && operand_equal_p (lr_arg, rr_arg, 0))
4886 result = combine_comparisons (code, lcode, rcode,
4887 truth_type, ll_arg, lr_arg);
4891 else if (operand_equal_p (ll_arg, rr_arg, 0)
4892 && operand_equal_p (lr_arg, rl_arg, 0))
4894 result = combine_comparisons (code, lcode,
4895 swap_tree_comparison (rcode),
4896 truth_type, ll_arg, lr_arg);
4902 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
4903 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
4905 /* If the RHS can be evaluated unconditionally and its operands are
4906 simple, it wins to evaluate the RHS unconditionally on machines
4907 with expensive branches. In this case, this isn't a comparison
4908 that can be merged. Avoid doing this if the RHS is a floating-point
4909 comparison since those can trap. */
4911 if (BRANCH_COST >= 2
4912 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
4913 && simple_operand_p (rl_arg)
4914 && simple_operand_p (rr_arg))
4916 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
4917 if (code == TRUTH_OR_EXPR
4918 && lcode == NE_EXPR && integer_zerop (lr_arg)
4919 && rcode == NE_EXPR && integer_zerop (rr_arg)
4920 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
4921 return build2 (NE_EXPR, truth_type,
4922 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
4924 build_int_cst (TREE_TYPE (ll_arg), 0));
4926 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
4927 if (code == TRUTH_AND_EXPR
4928 && lcode == EQ_EXPR && integer_zerop (lr_arg)
4929 && rcode == EQ_EXPR && integer_zerop (rr_arg)
4930 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
4931 return build2 (EQ_EXPR, truth_type,
4932 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
4934 build_int_cst (TREE_TYPE (ll_arg), 0));
4936 if (LOGICAL_OP_NON_SHORT_CIRCUIT)
4937 return build2 (code, truth_type, lhs, rhs);
4940 /* See if the comparisons can be merged.  Then get all the parameters for each side.  */
4943 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
4944 || (rcode != EQ_EXPR && rcode != NE_EXPR))
4948 ll_inner = decode_field_reference (ll_arg,
4949 &ll_bitsize, &ll_bitpos, &ll_mode,
4950 &ll_unsignedp, &volatilep, &ll_mask,
4952 lr_inner = decode_field_reference (lr_arg,
4953 &lr_bitsize, &lr_bitpos, &lr_mode,
4954 &lr_unsignedp, &volatilep, &lr_mask,
4956 rl_inner = decode_field_reference (rl_arg,
4957 &rl_bitsize, &rl_bitpos, &rl_mode,
4958 &rl_unsignedp, &volatilep, &rl_mask,
4960 rr_inner = decode_field_reference (rr_arg,
4961 &rr_bitsize, &rr_bitpos, &rr_mode,
4962 &rr_unsignedp, &volatilep, &rr_mask,
4965 /* The inner operation on the lhs of each comparison must be the same
4966 if we are to be able to do anything.  Then see if we have constants.
4967 If not, the same must be true for the rhs.  */
4969 if (volatilep || ll_inner == 0 || rl_inner == 0
4970 || ! operand_equal_p (ll_inner, rl_inner, 0))
4973 if (TREE_CODE (lr_arg) == INTEGER_CST
4974 && TREE_CODE (rr_arg) == INTEGER_CST)
4975 l_const = lr_arg, r_const = rr_arg;
4976 else if (lr_inner == 0 || rr_inner == 0
4977 || ! operand_equal_p (lr_inner, rr_inner, 0))
4980 l_const = r_const = 0;
4982 /* If either comparison code is not correct for our logical operation,
4983 fail. However, we can convert a one-bit comparison against zero into
4984 the opposite comparison against that bit being set in the field. */
4986 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
4987 if (lcode != wanted_code)
4989 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
4991 /* Make the left operand unsigned, since we are only interested
4992 in the value of one bit.  Otherwise we are doing the wrong thing below.  */
5001 /* This is analogous to the code for l_const above. */
5002 if (rcode != wanted_code)
5004 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5013 /* After this point all optimizations will generate bit-field
5014 references, which we might not want. */
5015 if (! lang_hooks.can_use_bit_fields_p ())
5018 /* See if we can find a mode that contains both fields being compared on
5019 the left. If we can't, fail. Otherwise, update all constants and masks
5020 to be relative to a field of that size. */
5021 first_bit = MIN (ll_bitpos, rl_bitpos);
5022 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5023 lnmode = get_best_mode (end_bit - first_bit, first_bit,
5024 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5026 if (lnmode == VOIDmode)
5029 lnbitsize = GET_MODE_BITSIZE (lnmode);
5030 lnbitpos = first_bit & ~ (lnbitsize - 1);
5031 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5032 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5034 if (BYTES_BIG_ENDIAN)
5036 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5037 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5040 ll_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, ll_mask),
5041 size_int (xll_bitpos), 0);
5042 rl_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, rl_mask),
5043 size_int (xrl_bitpos), 0);
5047 l_const = fold_convert (lntype, l_const);
5048 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5049 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
5050 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5051 fold_build1 (BIT_NOT_EXPR,
5055 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5057 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5062 r_const = fold_convert (lntype, r_const);
5063 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5064 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
5065 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5066 fold_build1 (BIT_NOT_EXPR,
5070 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5072 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5076 /* If the right sides are not constant, do the same for them.  Also,
5077 disallow this optimization if a size or signedness mismatch occurs
5078 between the left and right sides. */
5081 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5082 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5083 /* Make sure the two fields on the right
5084 correspond to the left without being swapped. */
5085 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5088 first_bit = MIN (lr_bitpos, rr_bitpos);
5089 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5090 rnmode = get_best_mode (end_bit - first_bit, first_bit,
5091 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5093 if (rnmode == VOIDmode)
5096 rnbitsize = GET_MODE_BITSIZE (rnmode);
5097 rnbitpos = first_bit & ~ (rnbitsize - 1);
5098 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5099 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5101 if (BYTES_BIG_ENDIAN)
5103 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5104 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5107 lr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, lr_mask),
5108 size_int (xlr_bitpos), 0);
5109 rr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, rr_mask),
5110 size_int (xrr_bitpos), 0);
5112 /* Make a mask that corresponds to both fields being compared.
5113 Do this for both items being compared. If the operands are the
5114 same size and the bits being compared are in the same position
5115 then we can do this by masking both and comparing the masked results.  */
5117 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
5118 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
5119 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5121 lhs = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
5122 ll_unsignedp || rl_unsignedp);
5123 if (! all_ones_mask_p (ll_mask, lnbitsize))
5124 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5126 rhs = make_bit_field_ref (lr_inner, rntype, rnbitsize, rnbitpos,
5127 lr_unsignedp || rr_unsignedp);
5128 if (! all_ones_mask_p (lr_mask, rnbitsize))
5129 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5131 return build2 (wanted_code, truth_type, lhs, rhs);
5134 /* There is still another way we can do something: If both pairs of
5135 fields being compared are adjacent, we may be able to make a wider
5136 field containing them both.
5138 Note that we still must mask the lhs/rhs expressions. Furthermore,
5139 the mask must be shifted to account for the shift done by
5140 make_bit_field_ref. */
5141 if ((ll_bitsize + ll_bitpos == rl_bitpos
5142 && lr_bitsize + lr_bitpos == rr_bitpos)
5143 || (ll_bitpos == rl_bitpos + rl_bitsize
5144 && lr_bitpos == rr_bitpos + rr_bitsize))
5148 lhs = make_bit_field_ref (ll_inner, lntype, ll_bitsize + rl_bitsize,
5149 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5150 rhs = make_bit_field_ref (lr_inner, rntype, lr_bitsize + rr_bitsize,
5151 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5153 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5154 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
5155 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5156 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);
5158 /* Convert to the smaller type before masking out unwanted bits. */
5160 if (lntype != rntype)
5162 if (lnbitsize > rnbitsize)
5164 lhs = fold_convert (rntype, lhs);
5165 ll_mask = fold_convert (rntype, ll_mask);
5168 else if (lnbitsize < rnbitsize)
5170 rhs = fold_convert (lntype, rhs);
5171 lr_mask = fold_convert (lntype, lr_mask);
5176 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5177 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5179 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5180 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5182 return build2 (wanted_code, truth_type, lhs, rhs);
5188 /* Handle the case of comparisons with constants. If there is something in
5189 common between the masks, those bits of the constants must be the same.
5190 If not, the condition is always false. Test for this to avoid generating
5191 incorrect code below. */
5192 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
5193 if (! integer_zerop (result)
5194 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
5195 const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
5197 if (wanted_code == NE_EXPR)
5199 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5200 return constant_boolean_node (true, truth_type);
5204 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5205 return constant_boolean_node (false, truth_type);
5209 /* Construct the expression we will return. First get the component
5210 reference we will make. Unless the mask is all ones the width of
5211 that field, perform the mask operation.  Then compare with the merged constant.  */
5213 result = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
5214 ll_unsignedp || rl_unsignedp);
5216 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
5217 if (! all_ones_mask_p (ll_mask, lnbitsize))
5218 result = build2 (BIT_AND_EXPR, lntype, result, ll_mask);
5220 return build2 (wanted_code, truth_type, result,
5221 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
5224 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a constant.  */
5228 optimize_minmax_comparison (enum tree_code code, tree type, tree op0, tree op1)
5231 enum tree_code op_code;
5232 tree comp_const = op1;
5234 int consts_equal, consts_lt;
5237 STRIP_SIGN_NOPS (arg0);
5239 op_code = TREE_CODE (arg0);
5240 minmax_const = TREE_OPERAND (arg0, 1);
5241 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5242 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5243 inner = TREE_OPERAND (arg0, 0);
5245 /* If something does not permit us to optimize, return the original tree. */
5246 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5247 || TREE_CODE (comp_const) != INTEGER_CST
5248 || TREE_CONSTANT_OVERFLOW (comp_const)
5249 || TREE_CODE (minmax_const) != INTEGER_CST
5250 || TREE_CONSTANT_OVERFLOW (minmax_const))
5253 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5254 and GT_EXPR, doing the rest with recursive calls using logical simplifications.  */
5258 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5260 /* FIXME: We should be able to invert code without building a
5261 scratch tree node, but doing so would require us to
5262 duplicate a part of invert_truthvalue here. */
5263 tree tem = invert_truthvalue (build2 (code, type, op0, op1));
5264 tem = optimize_minmax_comparison (TREE_CODE (tem),
5266 TREE_OPERAND (tem, 0),
5267 TREE_OPERAND (tem, 1));
5268 return invert_truthvalue (tem);
5273 fold_build2 (TRUTH_ORIF_EXPR, type,
5274 optimize_minmax_comparison
5275 (EQ_EXPR, type, arg0, comp_const),
5276 optimize_minmax_comparison
5277 (GT_EXPR, type, arg0, comp_const));
5280 if (op_code == MAX_EXPR && consts_equal)
5281 /* MAX (X, 0) == 0 -> X <= 0 */
5282 return fold_build2 (LE_EXPR, type, inner, comp_const);
5284 else if (op_code == MAX_EXPR && consts_lt)
5285 /* MAX (X, 0) == 5 -> X == 5 */
5286 return fold_build2 (EQ_EXPR, type, inner, comp_const);
5288 else if (op_code == MAX_EXPR)
5289 /* MAX (X, 0) == -1 -> false */
5290 return omit_one_operand (type, integer_zero_node, inner);
5292 else if (consts_equal)
5293 /* MIN (X, 0) == 0 -> X >= 0 */
5294 return fold_build2 (GE_EXPR, type, inner, comp_const);
5297 /* MIN (X, 0) == 5 -> false */
5298 return omit_one_operand (type, integer_zero_node, inner);
5301 /* MIN (X, 0) == -1 -> X == -1 */
5302 return fold_build2 (EQ_EXPR, type, inner, comp_const);
5305 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5306 /* MAX (X, 0) > 0 -> X > 0
5307 MAX (X, 0) > 5 -> X > 5 */
5308 return fold_build2 (GT_EXPR, type, inner, comp_const);
5310 else if (op_code == MAX_EXPR)
5311 /* MAX (X, 0) > -1 -> true */
5312 return omit_one_operand (type, integer_one_node, inner);
5314 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5315 /* MIN (X, 0) > 0 -> false
5316 MIN (X, 0) > 5 -> false */
5317 return omit_one_operand (type, integer_zero_node, inner);
5320 /* MIN (X, 0) > -1 -> X > -1 */
5321 return fold_build2 (GT_EXPR, type, inner, comp_const);
5328 /* T is an integer expression that is being multiplied or divided by, or
5329 taken modulo, a constant C (CODE says which operation and what kind of
5330 divide or modulus).  See if we can eliminate that operation by folding it with
5331 other operations already in T. WIDE_TYPE, if non-null, is a type that
5332 should be used for the computation if wider than our type.
5334 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5335 (X * 2) + (Y * 4). We must, however, be assured that either the original
5336 expression would not overflow or that overflow is undefined for the type
5337 in the language in question.
5339 We also canonicalize (X + 7) * 4 into X * 4 + 28 in the hope that either
5340 the machine has a multiply-accumulate insn or that this is part of an
5341 addressing calculation.
5343 If we return a non-null expression, it is an equivalent form of the
5344 original computation, but need not be in the original type. */
5347 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type)
5349 /* To avoid exponential search depth, refuse to allow recursion past
5350 three levels. Beyond that (1) it's highly unlikely that we'll find
5351 something interesting and (2) we've probably processed it before
5352 when we built the inner expression. */
5361 ret = extract_muldiv_1 (t, c, code, wide_type);
5368 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type)
5370 tree type = TREE_TYPE (t);
5371 enum tree_code tcode = TREE_CODE (t);
5372 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5373 > GET_MODE_SIZE (TYPE_MODE (type)))
5374 ? wide_type : type);
5376 int same_p = tcode == code;
5377 tree op0 = NULL_TREE, op1 = NULL_TREE;
5379 /* Don't deal with constants of zero here; they confuse the code below. */
5380 if (integer_zerop (c))
5383 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5384 op0 = TREE_OPERAND (t, 0);
5386 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5387 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5389 /* Note that we need not handle conditional operations here since fold
5390 already handles those cases. So just do arithmetic here. */
5394 /* For a constant, we can always simplify if we are a multiply
5395 or (for divide and modulus) if it is a multiple of our constant. */
5396 if (code == MULT_EXPR
5397 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
5398 return const_binop (code, fold_convert (ctype, t),
5399 fold_convert (ctype, c), 0);
5402 case CONVERT_EXPR: case NON_LVALUE_EXPR: case NOP_EXPR:
5403 /* If op0 is an expression ... */
5404 if ((COMPARISON_CLASS_P (op0)
5405 || UNARY_CLASS_P (op0)
5406 || BINARY_CLASS_P (op0)
5407 || EXPRESSION_CLASS_P (op0))
5408 /* ... and is unsigned, and its type is smaller than ctype,
5409 then we cannot pass through as widening. */
5410 && ((TYPE_UNSIGNED (TREE_TYPE (op0))
5411 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
5412 && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
5413 && (GET_MODE_SIZE (TYPE_MODE (ctype))
5414 > GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0)))))
5415 /* ... or this is a truncation (t is narrower than op0),
5416 then we cannot pass through this narrowing. */
5417 || (GET_MODE_SIZE (TYPE_MODE (type))
5418 < GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0))))
5419 /* ... or signedness changes for division or modulus,
5420 then we cannot pass through this conversion. */
5421 || (code != MULT_EXPR
5422 && (TYPE_UNSIGNED (ctype)
5423 != TYPE_UNSIGNED (TREE_TYPE (op0))))))
5426 /* Pass the constant down and see if we can make a simplification. If
5427 we can, replace this expression with the inner simplification for
5428 possible later conversion to our or some other type. */
5429 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5430 && TREE_CODE (t2) == INTEGER_CST
5431 && ! TREE_CONSTANT_OVERFLOW (t2)
5432 && (0 != (t1 = extract_muldiv (op0, t2, code,
5434 ? ctype : NULL_TREE))))
5439 /* If widening the type changes it from signed to unsigned, then we
5440 must avoid building ABS_EXPR itself as unsigned. */
5441 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
5443 tree cstype = (*lang_hooks.types.signed_type) (ctype);
5444 if ((t1 = extract_muldiv (op0, c, code, cstype)) != 0)
5446 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
5447 return fold_convert (ctype, t1);
5453 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
5454 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
5457 case MIN_EXPR: case MAX_EXPR:
5458 /* If widening the type changes the signedness, then we can't perform
5459 this optimization as that changes the result. */
5460 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5463 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5464 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0
5465 && (t2 = extract_muldiv (op1, c, code, wide_type)) != 0)
5467 if (tree_int_cst_sgn (c) < 0)
5468 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5470 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5471 fold_convert (ctype, t2));
5475 case LSHIFT_EXPR: case RSHIFT_EXPR:
5476 /* If the second operand is constant, this is a multiplication
5477 or floor division by a power of two, so we can treat it that
5478 way unless the multiplier or divisor overflows. Signed
5479 left-shift overflow is implementation-defined rather than
5480 undefined in C90, so do not convert signed left shift into multiplication.  */
5482 if (TREE_CODE (op1) == INTEGER_CST
5483 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
5484 /* const_binop may not detect overflow correctly,
5485 so check for it explicitly here. */
5486 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
5487 && TREE_INT_CST_HIGH (op1) == 0
5488 && 0 != (t1 = fold_convert (ctype,
5489 const_binop (LSHIFT_EXPR,
5492 && ! TREE_OVERFLOW (t1))
5493 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5494 ? MULT_EXPR : FLOOR_DIV_EXPR,
5495 ctype, fold_convert (ctype, op0), t1),
5496 c, code, wide_type);
5499 case PLUS_EXPR: case MINUS_EXPR:
5500 /* See if we can eliminate the operation on both sides. If we can, we
5501 can return a new PLUS or MINUS. If we can't, the only remaining
5502 cases where we can do anything are if the second operand is a constant.  */
5504 t1 = extract_muldiv (op0, c, code, wide_type);
5505 t2 = extract_muldiv (op1, c, code, wide_type);
5506 if (t1 != 0 && t2 != 0
5507 && (code == MULT_EXPR
5508 /* If not multiplication, we can only do this if both operands
5509 are divisible by c. */
5510 || (multiple_of_p (ctype, op0, c)
5511 && multiple_of_p (ctype, op1, c))))
5512 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5513 fold_convert (ctype, t2));
5515 /* If this was a subtraction, negate OP1 and set it to be an addition.
5516 This simplifies the logic below. */
5517 if (tcode == MINUS_EXPR)
5518 tcode = PLUS_EXPR, op1 = negate_expr (op1);
5520 if (TREE_CODE (op1) != INTEGER_CST)
5523 /* If either OP1 or C is negative, this optimization is not safe for
5524 some of the division and remainder types while for others we need
5525 to change the code. */
5526 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5528 if (code == CEIL_DIV_EXPR)
5529 code = FLOOR_DIV_EXPR;
5530 else if (code == FLOOR_DIV_EXPR)
5531 code = CEIL_DIV_EXPR;
5532 else if (code != MULT_EXPR
5533 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5537 /* If it's a multiply or a division/modulus operation of a multiple
5538 of our constant, do the operation and verify it doesn't overflow. */
5539 if (code == MULT_EXPR
5540 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5542 op1 = const_binop (code, fold_convert (ctype, op1),
5543 fold_convert (ctype, c), 0);
5544 /* We allow the constant to overflow with wrapping semantics. */
5546 || (TREE_OVERFLOW (op1) && ! flag_wrapv))
5552 /* If we have an unsigned type that is not a sizetype, we cannot widen
5553 the operation since it will change the result if the original
5554 computation overflowed. */
5555 if (TYPE_UNSIGNED (ctype)
5556 && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
5560 /* If we were able to eliminate our operation from the first side,
5561 apply our operation to the second side and reform the PLUS. */
5562 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5563 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
5565 /* The last case is if we are a multiply. In that case, we can
5566 apply the distributive law to commute the multiply and addition
5567 if the multiplication of the constants doesn't overflow. */
5568 if (code == MULT_EXPR)
5569 return fold_build2 (tcode, ctype,
5570 fold_build2 (code, ctype,
5571 fold_convert (ctype, op0),
5572 fold_convert (ctype, c)),
5578 /* We have a special case here if we are doing something like
5579 (C * 8) % 4 since we know that's zero. */
5580 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
5581 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
5582 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
5583 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5584 return omit_one_operand (type, integer_zero_node, op0);
5586 /* ... fall through ... */
5588 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
5589 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
5590 /* If we can extract our operation from the LHS, do so and return a
5591 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
5592 do something only if the second operand is a constant. */
5594 && (t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
5595 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5596 fold_convert (ctype, op1));
5597 else if (tcode == MULT_EXPR && code == MULT_EXPR
5598 && (t1 = extract_muldiv (op1, c, code, wide_type)) != 0)
5599 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5600 fold_convert (ctype, t1));
5601 else if (TREE_CODE (op1) != INTEGER_CST)
5604 /* If these are the same operation types, we can associate them
5605 assuming no overflow. */
5607 && 0 != (t1 = const_binop (MULT_EXPR, fold_convert (ctype, op1),
5608 fold_convert (ctype, c), 0))
5609 && ! TREE_OVERFLOW (t1))
5610 return fold_build2 (tcode, ctype, fold_convert (ctype, op0), t1);
5612 /* If these operations "cancel" each other, we have the main
5613 optimizations of this pass, which occur when either constant is a
5614 multiple of the other, in which case we replace this with either an
5615 operation of CODE or TCODE.
5617 If we have an unsigned type that is not a sizetype, we cannot do
5618 this since it will change the result if the original computation overflowed.  */
5620 if ((! TYPE_UNSIGNED (ctype)
5621 || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
5623 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
5624 || (tcode == MULT_EXPR
5625 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
5626 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR)))
5628 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5629 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5630 fold_convert (ctype,
5631 const_binop (TRUNC_DIV_EXPR,
5633 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
5634 return fold_build2 (code, ctype, fold_convert (ctype, op0),
5635 fold_convert (ctype,
5636 const_binop (TRUNC_DIV_EXPR,
5648 /* Return a node which has the indicated constant VALUE (either 0 or
5649 1), and is of the indicated TYPE. */
5652 constant_boolean_node (int value, tree type)
5654 if (type == integer_type_node)
5655 return value ? integer_one_node : integer_zero_node;
5656 else if (type == boolean_type_node)
5657 return value ? boolean_true_node : boolean_false_node;
5659 return build_int_cst (type, value);
5663 /* Return true if expr looks like an ARRAY_REF and set base and
5664 offset to the appropriate trees. If there is no offset,
5665 offset is set to NULL_TREE. Base will be canonicalized to
5666 something you can get the element type from using
5667 TREE_TYPE (TREE_TYPE (base)). Offset will be the offset
5668 in bytes relative to the base.  */
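/* For example, for &a[i] this sets *base to the array a and
   *offset to i times the element size in bytes.  */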
5671 extract_array_ref (tree expr, tree *base, tree *offset)
5673 /* One canonical form is a PLUS_EXPR with the first
5674 argument being an ADDR_EXPR with a possible NOP_EXPR attached.  */
5676 if (TREE_CODE (expr) == PLUS_EXPR)
5678 tree op0 = TREE_OPERAND (expr, 0);
5679 tree inner_base, dummy1;
5680 /* Strip NOP_EXPRs here because the C frontends and/or
5681 folders may present us with (int *)&x.a + 4B.  */
5683 if (extract_array_ref (op0, &inner_base, &dummy1))
5686 if (dummy1 == NULL_TREE)
5687 *offset = TREE_OPERAND (expr, 1);
5689 *offset = fold_build2 (PLUS_EXPR, TREE_TYPE (expr),
5690 dummy1, TREE_OPERAND (expr, 1));
5694 /* Another canonical form is an ADDR_EXPR of an ARRAY_REF,
5695 which we transform into an ADDR_EXPR with appropriate
5696 offset. For other arguments to the ADDR_EXPR we assume
5697 zero offset and as such do not care about the ADDR_EXPR
5698 type and strip possible nops from it. */
5699 else if (TREE_CODE (expr) == ADDR_EXPR)
5701 tree op0 = TREE_OPERAND (expr, 0);
5702 if (TREE_CODE (op0) == ARRAY_REF)
5704 tree idx = TREE_OPERAND (op0, 1);
5705 *base = TREE_OPERAND (op0, 0);
5706 *offset = fold_build2 (MULT_EXPR, TREE_TYPE (idx), idx,
5707 array_ref_element_size (op0));
5711 /* Handle array-to-pointer decay as &a. */
5712 if (TREE_CODE (TREE_TYPE (op0)) == ARRAY_TYPE)
5713 *base = TREE_OPERAND (expr, 0);
5716 *offset = NULL_TREE;
5720 /* The next canonical form is a VAR_DECL with POINTER_TYPE. */
5721 else if (SSA_VAR_P (expr)
5722 && TREE_CODE (TREE_TYPE (expr)) == POINTER_TYPE)
5725 *offset = NULL_TREE;
5733 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
5734 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'.  Here
5735 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
5736 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
5737 COND is the first argument to CODE; otherwise (as in the example
5738 given here), it is the second argument. TYPE is the type of the
5739 original expression.  Return NULL_TREE if no simplification is possible.  */
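/* For example, 2 + (b ? 3 : 5) becomes b ? 5 : 7: the constant 2
   is pushed into both arms, and each arm then folds.  */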
5743 fold_binary_op_with_conditional_arg (enum tree_code code,
5744 tree type, tree op0, tree op1,
5745 tree cond, tree arg, int cond_first_p)
5747 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
5748 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
5749 tree test, true_value, false_value;
5750 tree lhs = NULL_TREE;
5751 tree rhs = NULL_TREE;
5753 /* This transformation is only worthwhile if we don't have to wrap
5754 arg in a SAVE_EXPR, and the operation can be simplified on at least
5755 one of the branches once it is pushed inside the COND_EXPR.  */
5756 if (!TREE_CONSTANT (arg))
5759 if (TREE_CODE (cond) == COND_EXPR)
5761 test = TREE_OPERAND (cond, 0);
5762 true_value = TREE_OPERAND (cond, 1);
5763 false_value = TREE_OPERAND (cond, 2);
5764 /* If this operand throws an expression, then it does not make
5765 sense to try to perform a logical or arithmetic operation
5767 if (VOID_TYPE_P (TREE_TYPE (true_value)))
5769 if (VOID_TYPE_P (TREE_TYPE (false_value)))
5774 tree testtype = TREE_TYPE (cond);
5776 true_value = constant_boolean_node (true, testtype);
5777 false_value = constant_boolean_node (false, testtype);
5780 arg = fold_convert (arg_type, arg);
5783 true_value = fold_convert (cond_type, true_value);
5785 lhs = fold_build2 (code, type, true_value, arg);
5787 lhs = fold_build2 (code, type, arg, true_value);
5791 false_value = fold_convert (cond_type, false_value);
5793 rhs = fold_build2 (code, type, false_value, arg);
5795 rhs = fold_build2 (code, type, arg, false_value);
5798 test = fold_build3 (COND_EXPR, type, test, lhs, rhs);
5799 return fold_convert (type, test);
5803 /* Subroutine of fold() that checks for the addition of +/- 0.0.
5805 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
5806 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
5807 ADDEND is the same as X.
5809 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
5810 and finite. The problematic cases are when X is zero, and its mode
5811 has signed zeros. In the case of rounding towards -infinity,
5812 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
5813 modes, X + 0 is not the same as X because -0 + 0 is 0. */
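/* Concretely, x - 0.0 folds to x even when signed zeros are
   honored, provided sign-dependent rounding is not; x + 0.0 folds
   to x only when signed zeros are ignored, e.g. under
   -fno-signed-zeros.  */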
5816 fold_real_zero_addition_p (tree type, tree addend, int negate)
5818 if (!real_zerop (addend))
5821 /* Don't allow the fold with -fsignaling-nans. */
5822 if (HONOR_SNANS (TYPE_MODE (type)))
5825 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
5826 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
5829 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
5830 if (TREE_CODE (addend) == REAL_CST
5831 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
5834 /* The mode has signed zeros, and we have to honor their sign.
5835 In this situation, there is only one case we can return true for.
5836 X - 0 is the same as X unless rounding towards -infinity is in effect.  */
5838 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
5841 /* Subroutine of fold() that checks comparisons of built-in math
5842 functions against real constants.
5844 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
5845 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
5846 is the type of the result and ARG0 and ARG1 are the operands of the
5847 comparison. ARG1 must be a TREE_REAL_CST.
5849 The function returns the constant folded tree if a simplification
5850 can be made, and NULL_TREE otherwise. */
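/* For example, sqrt(x) > 2.0 folds to x > 4.0, and, when NaNs are
   honored, sqrt(x) < 2.0 folds to x >= 0.0 && x < 4.0.  */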
5853 fold_mathfn_compare (enum built_in_function fcode, enum tree_code code,
5854 tree type, tree arg0, tree arg1)
5858 if (BUILTIN_SQRT_P (fcode))
5860 tree arg = TREE_VALUE (TREE_OPERAND (arg0, 1));
5861 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
5863 c = TREE_REAL_CST (arg1);
5864 if (REAL_VALUE_NEGATIVE (c))
5866 /* sqrt(x) < y is always false, if y is negative. */
5867 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
5868 return omit_one_operand (type, integer_zero_node, arg);
5870 /* sqrt(x) > y is always true, if y is negative and we
5871 don't care about NaNs, i.e. negative values of x. */
5872 if (code == NE_EXPR || !HONOR_NANS (mode))
5873 return omit_one_operand (type, integer_one_node, arg);
5875 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
5876 return fold_build2 (GE_EXPR, type, arg,
5877 build_real (TREE_TYPE (arg), dconst0));
5879 else if (code == GT_EXPR || code == GE_EXPR)
5883 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
5884 real_convert (&c2, mode, &c2);
5886 if (REAL_VALUE_ISINF (c2))
5888 /* sqrt(x) > y is x == +Inf, when y is very large. */
5889 if (HONOR_INFINITIES (mode))
5890 return fold_build2 (EQ_EXPR, type, arg,
5891 build_real (TREE_TYPE (arg), c2));
5893 /* sqrt(x) > y is always false, when y is very large
5894 and we don't care about infinities. */
5895 return omit_one_operand (type, integer_zero_node, arg);
5898 /* sqrt(x) > c is the same as x > c*c. */
5899 return fold_build2 (code, type, arg,
5900 build_real (TREE_TYPE (arg), c2));
5902 else if (code == LT_EXPR || code == LE_EXPR)
5906 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
5907 real_convert (&c2, mode, &c2);
5909 if (REAL_VALUE_ISINF (c2))
5911 /* sqrt(x) < y is always true, when y is a very large
5912 value and we don't care about NaNs or Infinities. */
5913 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
5914 return omit_one_operand (type, integer_one_node, arg);
5916 /* sqrt(x) < y is x != +Inf when y is very large and we
5917 don't care about NaNs. */
5918 if (! HONOR_NANS (mode))
5919 return fold_build2 (NE_EXPR, type, arg,
5920 build_real (TREE_TYPE (arg), c2));
5922 /* sqrt(x) < y is x >= 0 when y is very large and we
5923 don't care about Infinities. */
5924 if (! HONOR_INFINITIES (mode))
5925 return fold_build2 (GE_EXPR, type, arg,
5926 build_real (TREE_TYPE (arg), dconst0));
5928 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
5929 if (lang_hooks.decls.global_bindings_p () != 0
5930 || CONTAINS_PLACEHOLDER_P (arg))
return NULL_TREE;
5933 arg = save_expr (arg);
5934 return fold_build2 (TRUTH_ANDIF_EXPR, type,
5935 fold_build2 (GE_EXPR, type, arg,
5936 build_real (TREE_TYPE (arg),
5938 fold_build2 (NE_EXPR, type, arg,
5939 build_real (TREE_TYPE (arg),
5943 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
5944 if (! HONOR_NANS (mode))
5945 return fold_build2 (code, type, arg,
5946 build_real (TREE_TYPE (arg), c2));
5948 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
5949 if (lang_hooks.decls.global_bindings_p () == 0
5950 && ! CONTAINS_PLACEHOLDER_P (arg))
5952 arg = save_expr (arg);
5953 return fold_build2 (TRUTH_ANDIF_EXPR, type,
5954 fold_build2 (GE_EXPR, type, arg,
5955 build_real (TREE_TYPE (arg),
5957 fold_build2 (code, type, arg,
5958 build_real (TREE_TYPE (arg),
5967 /* Subroutine of fold() that optimizes comparisons against Infinities,
5968 either +Inf or -Inf.
5970 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
5971 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
5972 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
5974 The function returns the constant folded tree if a simplification
5975 can be made, and NULL_TREE otherwise. */
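/* For example, for double x, x >= __builtin_inf () folds to
   x > DBL_MAX and x < __builtin_inf () folds to x <= DBL_MAX.  */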
5978 fold_inf_compare (enum tree_code code, tree type, tree arg0, tree arg1)
5980 enum machine_mode mode;
5981 REAL_VALUE_TYPE max;
5985 mode = TYPE_MODE (TREE_TYPE (arg0));
5987 /* For negative infinity swap the sense of the comparison. */
5988 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
5990 code = swap_tree_comparison (code);
5995 /* x > +Inf is always false, if we ignore sNaNs.  */
5996 if (HONOR_SNANS (mode))
return NULL_TREE;
5998 return omit_one_operand (type, integer_zero_node, arg0);
6001 /* x <= +Inf is always true, if we don't care about NaNs.  */
6002 if (! HONOR_NANS (mode))
6003 return omit_one_operand (type, integer_one_node, arg0);
6005 /* x <= +Inf is the same as x == x, i.e. !isnan(x).  */
6006 if (lang_hooks.decls.global_bindings_p () == 0
6007 && ! CONTAINS_PLACEHOLDER_P (arg0))
6009 arg0 = save_expr (arg0);
6010 return fold_build2 (EQ_EXPR, type, arg0, arg0);
6016 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6017 real_maxval (&max, neg, mode);
6018 return fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
6019 arg0, build_real (TREE_TYPE (arg0), max));
6022 /* x < +Inf is always equal to x <= DBL_MAX. */
6023 real_maxval (&max, neg, mode);
6024 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
6025 arg0, build_real (TREE_TYPE (arg0), max));
6028 /* x != +Inf is always equal to !(x > DBL_MAX). */
6029 real_maxval (&max, neg, mode);
6030 if (! HONOR_NANS (mode))
6031 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
6032 arg0, build_real (TREE_TYPE (arg0), max));
6034 /* The transformation below creates non-gimple code and thus is
6035 not appropriate if we are in gimple form. */
6039 temp = fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
6040 arg0, build_real (TREE_TYPE (arg0), max));
6041 return fold_build1 (TRUTH_NOT_EXPR, type, temp);
6050 /* Subroutine of fold() that optimizes comparisons of a division by
6051 a nonzero integer constant against an integer constant, i.e.
X/C1 op C2.
6054 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6055 GE_EXPR or LE_EXPR.  TYPE is the type of the result and ARG0 and ARG1
6056 are the operands of the comparison.  ARG1 must be an INTEGER_CST.
6058 The function returns the constant folded tree if a simplification
6059 can be made, and NULL_TREE otherwise. */
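/* For example, for signed int x, x / 4 == 3 holds exactly when
   12 <= x && x <= 15, so the comparison folds to a range check of
   x against [12, 15].  */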
6062 fold_div_compare (enum tree_code code, tree type, tree arg0, tree arg1)
6064 tree prod, tmp, hi, lo;
6065 tree arg00 = TREE_OPERAND (arg0, 0);
6066 tree arg01 = TREE_OPERAND (arg0, 1);
6067 unsigned HOST_WIDE_INT lpart;
6068 HOST_WIDE_INT hpart;
6072 /* We have to do this the hard way to detect unsigned overflow.
6073 prod = int_const_binop (MULT_EXPR, arg01, arg1, 0); */
6074 overflow = mul_double (TREE_INT_CST_LOW (arg01),
6075 TREE_INT_CST_HIGH (arg01),
6076 TREE_INT_CST_LOW (arg1),
6077 TREE_INT_CST_HIGH (arg1), &lpart, &hpart);
6078 prod = build_int_cst_wide (TREE_TYPE (arg00), lpart, hpart);
6079 prod = force_fit_type (prod, -1, overflow, false);
6080 neg_overflow = false;
6082 if (TYPE_UNSIGNED (TREE_TYPE (arg0)))
6084 tmp = int_const_binop (MINUS_EXPR, arg01, integer_one_node, 0);
6087 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0). */
6088 overflow = add_double (TREE_INT_CST_LOW (prod),
6089 TREE_INT_CST_HIGH (prod),
6090 TREE_INT_CST_LOW (tmp),
6091 TREE_INT_CST_HIGH (tmp),
6093 hi = build_int_cst_wide (TREE_TYPE (arg00), lpart, hpart);
6094 hi = force_fit_type (hi, -1, overflow | TREE_OVERFLOW (prod),
6095 TREE_CONSTANT_OVERFLOW (prod));
6097 else if (tree_int_cst_sgn (arg01) >= 0)
6099 tmp = int_const_binop (MINUS_EXPR, arg01, integer_one_node, 0);
6100 switch (tree_int_cst_sgn (arg1))
6103 neg_overflow = true;
6104 lo = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6109 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6114 hi = int_const_binop (PLUS_EXPR, prod, tmp, 0);
6124 /* A negative divisor reverses the relational operators. */
6125 code = swap_tree_comparison (code);
6127 tmp = int_const_binop (PLUS_EXPR, arg01, integer_one_node, 0);
6128 switch (tree_int_cst_sgn (arg1))
6131 hi = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6136 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6141 neg_overflow = true;
6142 lo = int_const_binop (PLUS_EXPR, prod, tmp, 0);
6154 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6155 return omit_one_operand (type, integer_zero_node, arg00);
6156 if (TREE_OVERFLOW (hi))
6157 return fold_build2 (GE_EXPR, type, arg00, lo);
6158 if (TREE_OVERFLOW (lo))
6159 return fold_build2 (LE_EXPR, type, arg00, hi);
6160 return build_range_check (type, arg00, 1, lo, hi);
6163 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6164 return omit_one_operand (type, integer_one_node, arg00);
6165 if (TREE_OVERFLOW (hi))
6166 return fold_build2 (LT_EXPR, type, arg00, lo);
6167 if (TREE_OVERFLOW (lo))
6168 return fold_build2 (GT_EXPR, type, arg00, hi);
6169 return build_range_check (type, arg00, 0, lo, hi);
6172 if (TREE_OVERFLOW (lo))
6174 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6175 return omit_one_operand (type, tmp, arg00);
6177 return fold_build2 (LT_EXPR, type, arg00, lo);
6180 if (TREE_OVERFLOW (hi))
6182 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6183 return omit_one_operand (type, tmp, arg00);
6185 return fold_build2 (LE_EXPR, type, arg00, hi);
6188 if (TREE_OVERFLOW (hi))
6190 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6191 return omit_one_operand (type, tmp, arg00);
6193 return fold_build2 (GT_EXPR, type, arg00, hi);
6196 if (TREE_OVERFLOW (lo))
6198 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6199 return omit_one_operand (type, tmp, arg00);
6201 return fold_build2 (GE_EXPR, type, arg00, lo);
6211 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6212 equality/inequality test, then return a simplified form of the test
6213 using a sign test.  Otherwise return NULL.  TYPE is the desired result type.  */
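/* For example, assuming a 32-bit int, (x & 0x80000000) != 0 tests
   the sign bit and folds to x < 0, and (x & 0x80000000) == 0 folds
   to x >= 0.  */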
6217 fold_single_bit_test_into_sign_test (enum tree_code code, tree arg0, tree arg1,
6220 /* If this is testing a single bit, we can optimize the test. */
6221 if ((code == NE_EXPR || code == EQ_EXPR)
6222 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6223 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6225 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6226 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6227 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6229 if (arg00 != NULL_TREE
6230 /* This is only a win if casting to a signed type is cheap,
6231 i.e. when arg00's type is not a partial mode. */
6232 && TYPE_PRECISION (TREE_TYPE (arg00))
6233 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
6235 tree stype = lang_hooks.types.signed_type (TREE_TYPE (arg00));
6236 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6237 result_type, fold_convert (stype, arg00),
6238 build_int_cst (stype, 0));
6245 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6246 equality/inequality test, then return a simplified form of
6247 the test using shifts and logical operations. Otherwise return
6248 NULL. TYPE is the desired result type. */
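/* For example, (x & 8) != 0 becomes (x >> 3) & 1, and (x & 8) == 0
   becomes ((x >> 3) ^ 1) & 1.  */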
6251 fold_single_bit_test (enum tree_code code, tree arg0, tree arg1,
6254 /* If this is testing a single bit, we can optimize the test. */
6255 if ((code == NE_EXPR || code == EQ_EXPR)
6256 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6257 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6259 tree inner = TREE_OPERAND (arg0, 0);
6260 tree type = TREE_TYPE (arg0);
6261 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6262 enum machine_mode operand_mode = TYPE_MODE (type);
6264 tree signed_type, unsigned_type, intermediate_type;
6267 /* First, see if we can fold the single bit test into a sign-bit
6269 tem = fold_single_bit_test_into_sign_test (code, arg0, arg1,
6274 /* Otherwise we have (A & C) != 0 where C is a single bit,
6275 convert that into ((A >> C2) & 1), where C2 = log2(C).
6276 Similarly for (A & C) == 0. */
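/* Added illustration (not in the original source): with C = 8 we have
   C2 = 3, so (A & 8) != 0 becomes (A >> 3) & 1, while (A & 8) == 0
   becomes ((A >> 3) ^ 1) & 1, matching the XOR inserted for EQ_EXPR
   below.  */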
6278 /* If INNER is a right shift of a constant and it plus BITNUM does
6279 not overflow, adjust BITNUM and INNER. */
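/* Added illustration (not in the original source): testing bit 2 of
   (A >> 3) is the same as testing bit 5 of A, so BITNUM becomes 5 and
   INNER becomes A, provided the adjusted bit still lies within A's
   precision.  */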
6280 if (TREE_CODE (inner) == RSHIFT_EXPR
6281 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6282 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
6283 && bitnum < TYPE_PRECISION (type)
6284 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
6285 bitnum - TYPE_PRECISION (type)))
6287 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
6288 inner = TREE_OPERAND (inner, 0);
6291 /* If we are going to be able to omit the AND below, we must do our
6292 operations as unsigned. If we must use the AND, we have a choice.
6293 Normally unsigned is faster, but for some machines signed is. */
6294 #ifdef LOAD_EXTEND_OP
6295 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6296 && !flag_syntax_only) ? 0 : 1;
6301 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6302 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6303 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6304 inner = fold_convert (intermediate_type, inner);
6307 inner = build2 (RSHIFT_EXPR, intermediate_type,
6308 inner, size_int (bitnum));
6310 if (code == EQ_EXPR)
6311 inner = fold_build2 (BIT_XOR_EXPR, intermediate_type,
6312 inner, integer_one_node);
6314 /* Put the AND last so it can combine with more things. */
6315 inner = build2 (BIT_AND_EXPR, intermediate_type,
6316 inner, integer_one_node);
6318 /* Make sure to return the proper type. */
6319 inner = fold_convert (result_type, inner);
6326 /* Check whether we are allowed to reorder operands arg0 and arg1,
6327 such that the evaluation of arg1 occurs before arg0. */
6330 reorder_operands_p (tree arg0, tree arg1)
6332 if (! flag_evaluation_order)
6334 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6336 return ! TREE_SIDE_EFFECTS (arg0)
6337 && ! TREE_SIDE_EFFECTS (arg1);
6340 /* Test whether it is preferable to swap two operands, ARG0 and
6341 ARG1, for example because ARG0 is an integer constant and ARG1
6342 isn't. If REORDER is true, only recommend swapping if we can
6343 evaluate the operands in reverse order. */
6346 tree_swap_operands_p (tree arg0, tree arg1, bool reorder)
6348 STRIP_SIGN_NOPS (arg0);
6349 STRIP_SIGN_NOPS (arg1);
6351 if (TREE_CODE (arg1) == INTEGER_CST)
6353 if (TREE_CODE (arg0) == INTEGER_CST)
6356 if (TREE_CODE (arg1) == REAL_CST)
6358 if (TREE_CODE (arg0) == REAL_CST)
6361 if (TREE_CODE (arg1) == COMPLEX_CST)
6363 if (TREE_CODE (arg0) == COMPLEX_CST)
6366 if (TREE_CONSTANT (arg1))
6368 if (TREE_CONSTANT (arg0))
6374 if (reorder && flag_evaluation_order
6375 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6383 /* It is preferable to swap two SSA_NAMEs to ensure a canonical form
6384 for commutative and comparison operators. Ensuring a canonical
6385 form allows the optimizers to find additional redundancies without
6386 having to explicitly check for both orderings. */
6387 if (TREE_CODE (arg0) == SSA_NAME
6388 && TREE_CODE (arg1) == SSA_NAME
6389 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6395 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6396 ARG0 is extended to a wider type. */
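/* Added illustration (not in the original source): if s has type short,
   (int) s == 7 can be evaluated as s == (short) 7, since 7 is
   representable in the shorter type and the widening NOP can then be
   dropped.  */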
6399 fold_widened_comparison (enum tree_code code, tree type, tree arg0, tree arg1)
6401 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6403 tree shorter_type, outer_type;
6407 if (arg0_unw == arg0)
6409 shorter_type = TREE_TYPE (arg0_unw);
6411 #ifdef HAVE_canonicalize_funcptr_for_compare
6412 /* Disable this optimization if we're casting a function pointer
6413 type on targets that require function pointer canonicalization. */
6414 if (HAVE_canonicalize_funcptr_for_compare
6415 && TREE_CODE (shorter_type) == POINTER_TYPE
6416 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6420 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6423 arg1_unw = get_unwidened (arg1, shorter_type);
6425 /* If possible, express the comparison in the shorter mode. */
6426 if ((code == EQ_EXPR || code == NE_EXPR
6427 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6428 && (TREE_TYPE (arg1_unw) == shorter_type
6429 || (TREE_CODE (arg1_unw) == INTEGER_CST
6430 && (TREE_CODE (shorter_type) == INTEGER_TYPE
6431 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
6432 && int_fits_type_p (arg1_unw, shorter_type))))
6433 return fold_build2 (code, type, arg0_unw,
6434 fold_convert (shorter_type, arg1_unw));
6436 if (TREE_CODE (arg1_unw) != INTEGER_CST
6437 || TREE_CODE (shorter_type) != INTEGER_TYPE
6438 || !int_fits_type_p (arg1_unw, shorter_type))
6441 /* If we are comparing with an integer that does not fit into the range
6442 of the shorter type, the result is known. */
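/* Added illustration (not in the original source): for unsigned char c,
   (int) c < 300 is always true because every value of c is at most 255;
   the cases below reduce such comparisons to a constant result.  */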
6443 outer_type = TREE_TYPE (arg1_unw);
6444 min = lower_bound_in_type (outer_type, shorter_type);
6445 max = upper_bound_in_type (outer_type, shorter_type);
6447 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6449 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6456 return omit_one_operand (type, integer_zero_node, arg0);
6461 return omit_one_operand (type, integer_one_node, arg0);
6467 return omit_one_operand (type, integer_one_node, arg0);
6469 return omit_one_operand (type, integer_zero_node, arg0);
6474 return omit_one_operand (type, integer_zero_node, arg0);
6476 return omit_one_operand (type, integer_one_node, arg0);
6485 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
6486 ARG0 just the signedness is changed. */
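/* Added illustration (not in the original source): with 32-bit int, the
   equality test (unsigned int) i == 5u can be performed as i == 5; the
   cast changes only the signedness, not the precision, of the operand.  */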
6489 fold_sign_changed_comparison (enum tree_code code, tree type,
6490 tree arg0, tree arg1)
6492 tree arg0_inner, tmp;
6493 tree inner_type, outer_type;
6495 if (TREE_CODE (arg0) != NOP_EXPR
6496 && TREE_CODE (arg0) != CONVERT_EXPR)
6499 outer_type = TREE_TYPE (arg0);
6500 arg0_inner = TREE_OPERAND (arg0, 0);
6501 inner_type = TREE_TYPE (arg0_inner);
6503 #ifdef HAVE_canonicalize_funcptr_for_compare
6504 /* Disable this optimization if we're casting a function pointer
6505 type on targets that require function pointer canonicalization. */
6506 if (HAVE_canonicalize_funcptr_for_compare
6507 && TREE_CODE (inner_type) == POINTER_TYPE
6508 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
6512 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
6515 if (TREE_CODE (arg1) != INTEGER_CST
6516 && !((TREE_CODE (arg1) == NOP_EXPR
6517 || TREE_CODE (arg1) == CONVERT_EXPR)
6518 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
6521 if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
6526 if (TREE_CODE (arg1) == INTEGER_CST)
6528 tmp = build_int_cst_wide (inner_type,
6529 TREE_INT_CST_LOW (arg1),
6530 TREE_INT_CST_HIGH (arg1));
6531 arg1 = force_fit_type (tmp, 0,
6532 TREE_OVERFLOW (arg1),
6533 TREE_CONSTANT_OVERFLOW (arg1));
6536 arg1 = fold_convert (inner_type, arg1);
6538 return fold_build2 (code, type, arg0_inner, arg1);
6541 /* Tries to replace &a[idx] CODE s * delta with &a[idx CODE delta], if s is
6542 the step of the array. Reconstructs s and delta in the case of s * delta
6543 being an integer constant (and thus already folded).
6544 ADDR is the address. MULT is the multiplicative expression.
6545 If the function succeeds, the new address expression is returned. Otherwise
6546 NULL_TREE is returned. */
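/* Added illustration (not in the original source, assuming 4-byte
   ints): for int a[N], &a[i] + j * 4 matches the array step s == 4 and
   becomes &a[i + j]; a plain constant such as 8 is handled as
   delta == 8 with s == 1 and is divided by the step, giving &a[i + 2].  */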
6549 try_move_mult_to_index (enum tree_code code, tree addr, tree op1)
6551 tree s, delta, step;
6552 tree ref = TREE_OPERAND (addr, 0), pref;
6556 /* Canonicalize op1 into a possibly non-constant delta
6557 and an INTEGER_CST s. */
6558 if (TREE_CODE (op1) == MULT_EXPR)
6560 tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);
6565 if (TREE_CODE (arg0) == INTEGER_CST)
6570 else if (TREE_CODE (arg1) == INTEGER_CST)
6578 else if (TREE_CODE (op1) == INTEGER_CST)
6585 /* Treat op1 as delta * 1. */
6587 s = integer_one_node;
6590 for (;; ref = TREE_OPERAND (ref, 0))
6592 if (TREE_CODE (ref) == ARRAY_REF)
6594 itype = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
6598 step = array_ref_element_size (ref);
6599 if (TREE_CODE (step) != INTEGER_CST)
6604 if (! tree_int_cst_equal (step, s))
6609 /* Check whether delta is a multiple of step. */
6610 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, delta, step);
6619 if (!handled_component_p (ref))
6623 /* We found a suitable array reference. So copy everything up to it,
6624 and replace the index. */
6626 pref = TREE_OPERAND (addr, 0);
6627 ret = copy_node (pref);
6632 pref = TREE_OPERAND (pref, 0);
6633 TREE_OPERAND (pos, 0) = copy_node (pref);
6634 pos = TREE_OPERAND (pos, 0);
6637 TREE_OPERAND (pos, 1) = fold_build2 (code, itype,
6638 fold_convert (itype,
6639 TREE_OPERAND (pos, 1)),
6640 fold_convert (itype, delta));
6642 return fold_build1 (ADDR_EXPR, TREE_TYPE (addr), ret);
6646 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6647 means A >= Y && A != MAX, but in this case we know that
6648 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
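/* Added note (not in the original source): the rewrite would be wrong
   if A + 1 could wrap; for unsigned A == UINT_MAX, A + 1 > Y is false
   while A >= Y is true.  The bound A < X rules out A == MAX, and the
   code below additionally checks that A1 - A folds to exactly 1.  */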
6651 fold_to_nonsharp_ineq_using_bound (tree ineq, tree bound)
6653 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
6655 if (TREE_CODE (bound) == LT_EXPR)
6656 a = TREE_OPERAND (bound, 0);
6657 else if (TREE_CODE (bound) == GT_EXPR)
6658 a = TREE_OPERAND (bound, 1);
6662 typea = TREE_TYPE (a);
6663 if (!INTEGRAL_TYPE_P (typea)
6664 && !POINTER_TYPE_P (typea))
6667 if (TREE_CODE (ineq) == LT_EXPR)
6669 a1 = TREE_OPERAND (ineq, 1);
6670 y = TREE_OPERAND (ineq, 0);
6672 else if (TREE_CODE (ineq) == GT_EXPR)
6674 a1 = TREE_OPERAND (ineq, 0);
6675 y = TREE_OPERAND (ineq, 1);
6680 if (TREE_TYPE (a1) != typea)
6683 diff = fold_build2 (MINUS_EXPR, typea, a1, a);
6684 if (!integer_onep (diff))
6687 return fold_build2 (GE_EXPR, type, a, y);
6690 /* Fold a sum or difference in which at least one operand is a multiplication.
6691 Returns the folded tree or NULL if no simplification could be made. */
6694 fold_plusminus_mult_expr (enum tree_code code, tree type, tree arg0, tree arg1)
6696 tree arg00, arg01, arg10, arg11;
6697 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
6699 /* (A * C) +- (B * C) -> (A+-B) * C.
6700 (A * C) +- A -> A * (C+-1).
6701 We are most concerned about the case where C is a constant,
6702 but other combinations show up during loop reduction. Since
6703 it is not difficult, try all four possibilities. */
6705 if (TREE_CODE (arg0) == MULT_EXPR)
6707 arg00 = TREE_OPERAND (arg0, 0);
6708 arg01 = TREE_OPERAND (arg0, 1);
6713 arg01 = fold_convert (type, integer_one_node);
6715 if (TREE_CODE (arg1) == MULT_EXPR)
6717 arg10 = TREE_OPERAND (arg1, 0);
6718 arg11 = TREE_OPERAND (arg1, 1);
6723 arg11 = fold_convert (type, integer_one_node);
6727 if (operand_equal_p (arg01, arg11, 0))
6728 same = arg01, alt0 = arg00, alt1 = arg10;
6729 else if (operand_equal_p (arg00, arg10, 0))
6730 same = arg00, alt0 = arg01, alt1 = arg11;
6731 else if (operand_equal_p (arg00, arg11, 0))
6732 same = arg00, alt0 = arg01, alt1 = arg10;
6733 else if (operand_equal_p (arg01, arg10, 0))
6734 same = arg01, alt0 = arg00, alt1 = arg11;
6736 /* No identical multiplicands; see if we can find a common
6737 power-of-two factor in non-power-of-two multiplies. This
6738 can help in multi-dimensional array access. */
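/* Added illustration (not in the original source): 20*i + 4*j shares no
   operand, but 4 is a power of two dividing 20, so the sum can be
   rewritten as (5*i + j) * 4.  */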
6739 else if (host_integerp (arg01, 0)
6740 && host_integerp (arg11, 0))
6742 HOST_WIDE_INT int01, int11, tmp;
6745 int01 = TREE_INT_CST_LOW (arg01);
6746 int11 = TREE_INT_CST_LOW (arg11);
6748 /* Move min of absolute values to int11. */
6749 if ((int01 >= 0 ? int01 : -int01)
6750 < (int11 >= 0 ? int11 : -int11))
6752 tmp = int01, int01 = int11, int11 = tmp;
6753 alt0 = arg00, arg00 = arg10, arg10 = alt0;
6760 if (exact_log2 (int11) > 0 && int01 % int11 == 0)
6762 alt0 = fold_build2 (MULT_EXPR, TREE_TYPE (arg00), arg00,
6763 build_int_cst (TREE_TYPE (arg00),
6768 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
6773 return fold_build2 (MULT_EXPR, type,
6774 fold_build2 (code, type,
6775 fold_convert (type, alt0),
6776 fold_convert (type, alt1)),
6777 fold_convert (type, same));
6782 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
6783 specified by EXPR into the buffer PTR of length LEN bytes.
6784 Return the number of bytes placed in the buffer, or zero
6788 native_encode_int (tree expr, unsigned char *ptr, int len)
6790 tree type = TREE_TYPE (expr);
6791 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
6792 int byte, offset, word, words;
6793 unsigned char value;
6795 if (total_bytes > len)
6797 words = total_bytes / UNITS_PER_WORD;
6799 for (byte = 0; byte < total_bytes; byte++)
6801 int bitpos = byte * BITS_PER_UNIT;
6802 if (bitpos < HOST_BITS_PER_WIDE_INT)
6803 value = (unsigned char) (TREE_INT_CST_LOW (expr) >> bitpos);
6805 value = (unsigned char) (TREE_INT_CST_HIGH (expr)
6806 >> (bitpos - HOST_BITS_PER_WIDE_INT));
6808 if (total_bytes > UNITS_PER_WORD)
6810 word = byte / UNITS_PER_WORD;
6811 if (WORDS_BIG_ENDIAN)
6812 word = (words - 1) - word;
6813 offset = word * UNITS_PER_WORD;
6814 if (BYTES_BIG_ENDIAN)
6815 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
6817 offset += byte % UNITS_PER_WORD;
6820 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
6821 ptr[offset] = value;
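/* Added illustration (not in the original source, assuming a 32-bit
   little-endian target): the INTEGER_CST 0x01020304 is written to PTR
   as the bytes {0x04, 0x03, 0x02, 0x01}; big-endian targets get the
   reversed order via the BYTES_BIG_ENDIAN adjustment above.  */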
6827 /* Subroutine of native_encode_expr. Encode the REAL_CST
6828 specified by EXPR into the buffer PTR of length LEN bytes.
6829 Return the number of bytes placed in the buffer, or zero
6833 native_encode_real (tree expr, unsigned char *ptr, int len)
6835 tree type = TREE_TYPE (expr);
6836 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
6837 int byte, offset, word, words;
6838 unsigned char value;
6840 /* There are always 32 bits in each long, no matter the size of
6841 the host's long. We handle floating point representations with
6845 if (total_bytes > len)
6847 words = total_bytes / UNITS_PER_WORD;
6849 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
6851 for (byte = 0; byte < total_bytes; byte++)
6853 int bitpos = byte * BITS_PER_UNIT;
6854 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
6856 if (total_bytes > UNITS_PER_WORD)
6858 word = byte / UNITS_PER_WORD;
6859 if (FLOAT_WORDS_BIG_ENDIAN)
6860 word = (words - 1) - word;
6861 offset = word * UNITS_PER_WORD;
6862 if (BYTES_BIG_ENDIAN)
6863 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
6865 offset += byte % UNITS_PER_WORD;
6868 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
6869 ptr[offset] = value;
6874 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
6875 specified by EXPR into the buffer PTR of length LEN bytes.
6876 Return the number of bytes placed in the buffer, or zero
6880 native_encode_complex (tree expr, unsigned char *ptr, int len)
6885 part = TREE_REALPART (expr);
6886 rsize = native_encode_expr (part, ptr, len);
6889 part = TREE_IMAGPART (expr);
6890 isize = native_encode_expr (part, ptr+rsize, len-rsize);
6893 return rsize + isize;
6897 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
6898 specified by EXPR into the buffer PTR of length LEN bytes.
6899 Return the number of bytes placed in the buffer, or zero
6903 native_encode_vector (tree expr, unsigned char *ptr, int len)
6905 int i, size, offset, count;
6906 tree elem, elements;
6910 elements = TREE_VECTOR_CST_ELTS (expr);
6911 count = TYPE_VECTOR_SUBPARTS (TREE_TYPE (expr));
6912 for (i = 0; i < count; i++)
6916 elem = TREE_VALUE (elements);
6917 elements = TREE_CHAIN (elements);
6924 size = native_encode_expr (elem, ptr+offset, len-offset);
6930 if (offset + size > len)
6932 memset (ptr+offset, 0, size);
6942 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
6943 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
6944 buffer PTR of length LEN bytes. Return the number of bytes
6945 placed in the buffer, or zero upon failure. */
6948 native_encode_expr (tree expr, unsigned char *ptr, int len)
6950 switch (TREE_CODE (expr))
6953 return native_encode_int (expr, ptr, len);
6956 return native_encode_real (expr, ptr, len);
6959 return native_encode_complex (expr, ptr, len);
6962 return native_encode_vector (expr, ptr, len);
6970 /* Subroutine of native_interpret_expr. Interpret the contents of
6971 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
6972 If the buffer cannot be interpreted, return NULL_TREE. */
6975 native_interpret_int (tree type, unsigned char *ptr, int len)
6977 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
6978 int byte, offset, word, words;
6979 unsigned char value;
6980 unsigned int HOST_WIDE_INT lo = 0;
6981 HOST_WIDE_INT hi = 0;
6983 if (total_bytes > len)
6985 if (total_bytes * BITS_PER_UNIT > 2 * HOST_BITS_PER_WIDE_INT)
6987 words = total_bytes / UNITS_PER_WORD;
6989 for (byte = 0; byte < total_bytes; byte++)
6991 int bitpos = byte * BITS_PER_UNIT;
6992 if (total_bytes > UNITS_PER_WORD)
6994 word = byte / UNITS_PER_WORD;
6995 if (WORDS_BIG_ENDIAN)
6996 word = (words - 1) - word;
6997 offset = word * UNITS_PER_WORD;
6998 if (BYTES_BIG_ENDIAN)
6999 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7001 offset += byte % UNITS_PER_WORD;
7004 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7005 value = ptr[offset];
7007 if (bitpos < HOST_BITS_PER_WIDE_INT)
7008 lo |= (unsigned HOST_WIDE_INT) value << bitpos;
7010 hi |= (unsigned HOST_WIDE_INT) value
7011 << (bitpos - HOST_BITS_PER_WIDE_INT);
7014 return force_fit_type (build_int_cst_wide (type, lo, hi),
7019 /* Subroutine of native_interpret_expr. Interpret the contents of
7020 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7021 If the buffer cannot be interpreted, return NULL_TREE. */
7024 native_interpret_real (tree type, unsigned char *ptr, int len)
7026 enum machine_mode mode = TYPE_MODE (type);
7027 int total_bytes = GET_MODE_SIZE (mode);
7028 int byte, offset, word, words;
7029 unsigned char value;
7030 /* There are always 32 bits in each long, no matter the size of
7031 the host's long. We handle floating point representations with
7036 total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7037 if (total_bytes > len || total_bytes > 24)
7039 words = total_bytes / UNITS_PER_WORD;
7041 memset (tmp, 0, sizeof (tmp));
7042 for (byte = 0; byte < total_bytes; byte++)
7044 int bitpos = byte * BITS_PER_UNIT;
7045 if (total_bytes > UNITS_PER_WORD)
7047 word = byte / UNITS_PER_WORD;
7048 if (FLOAT_WORDS_BIG_ENDIAN)
7049 word = (words - 1) - word;
7050 offset = word * UNITS_PER_WORD;
7051 if (BYTES_BIG_ENDIAN)
7052 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7054 offset += byte % UNITS_PER_WORD;
7057 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7058 value = ptr[offset];
7060 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7063 real_from_target (&r, tmp, mode);
7064 return build_real (type, r);
7068 /* Subroutine of native_interpret_expr. Interpret the contents of
7069 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7070 If the buffer cannot be interpreted, return NULL_TREE. */
7073 native_interpret_complex (tree type, unsigned char *ptr, int len)
7075 tree etype, rpart, ipart;
7078 etype = TREE_TYPE (type);
7079 size = GET_MODE_SIZE (TYPE_MODE (etype));
7082 rpart = native_interpret_expr (etype, ptr, size);
7085 ipart = native_interpret_expr (etype, ptr+size, size);
7088 return build_complex (type, rpart, ipart);
7092 /* Subroutine of native_interpret_expr. Interpret the contents of
7093 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7094 If the buffer cannot be interpreted, return NULL_TREE. */
7097 native_interpret_vector (tree type, unsigned char *ptr, int len)
7099 tree etype, elem, elements;
7102 etype = TREE_TYPE (type);
7103 size = GET_MODE_SIZE (TYPE_MODE (etype));
7104 count = TYPE_VECTOR_SUBPARTS (type);
7105 if (size * count > len)
7108 elements = NULL_TREE;
7109 for (i = count - 1; i >= 0; i--)
7111 elem = native_interpret_expr (etype, ptr+(i*size), size);
7114 elements = tree_cons (NULL_TREE, elem, elements);
7116 return build_vector (type, elements);
7120 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7121 the buffer PTR of length LEN as a constant of type TYPE. For
7122 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7123 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7124 return NULL_TREE. */
7127 native_interpret_expr (tree type, unsigned char *ptr, int len)
7129 switch (TREE_CODE (type))
7134 return native_interpret_int (type, ptr, len);
7137 return native_interpret_real (type, ptr, len);
7140 return native_interpret_complex (type, ptr, len);
7143 return native_interpret_vector (type, ptr, len);
7151 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7152 TYPE at compile-time. If we're unable to perform the conversion
7153 return NULL_TREE. */
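/* Added illustration (not in the original source, assuming a
   little-endian target with IEEE floats): VIEW_CONVERT_EXPR<int>(1.0f)
   is folded by encoding the REAL_CST as the bytes {0x00, 0x00, 0x80,
   0x3f} and re-interpreting them as an INTEGER_CST, giving 0x3f800000.  */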
7156 fold_view_convert_expr (tree type, tree expr)
7158 /* We support up to 512-bit values (for V8DFmode). */
7159 unsigned char buffer[64];
7162 /* Check that the host and target are sane. */
7163 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7166 len = native_encode_expr (expr, buffer, sizeof (buffer));
7170 return native_interpret_expr (type, buffer, len);
7174 /* Fold a unary expression of code CODE and type TYPE with operand
7175 OP0. Return the folded expression if folding is successful.
7176 Otherwise, return NULL_TREE. */
7179 fold_unary (enum tree_code code, tree type, tree op0)
7183 enum tree_code_class kind = TREE_CODE_CLASS (code);
7185 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7186 && TREE_CODE_LENGTH (code) == 1);
7191 if (code == NOP_EXPR || code == CONVERT_EXPR
7192 || code == FLOAT_EXPR || code == ABS_EXPR)
7194 /* Don't use STRIP_NOPS, because signedness of argument type
7196 STRIP_SIGN_NOPS (arg0);
7200 /* Strip any conversions that don't change the mode. This
7201 is safe for every expression, except for a comparison
7202 expression because its signedness is derived from its
7205 Note that this is done as an internal manipulation within
7206 the constant folder, in order to find the simplest
7207 representation of the arguments so that their form can be
7208 studied. In any case, the appropriate type conversions
7209 should be put back in the tree that will get out of the
7215 if (TREE_CODE_CLASS (code) == tcc_unary)
7217 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7218 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7219 fold_build1 (code, type, TREE_OPERAND (arg0, 1)));
7220 else if (TREE_CODE (arg0) == COND_EXPR)
7222 tree arg01 = TREE_OPERAND (arg0, 1);
7223 tree arg02 = TREE_OPERAND (arg0, 2);
7224 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7225 arg01 = fold_build1 (code, type, arg01);
7226 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7227 arg02 = fold_build1 (code, type, arg02);
7228 tem = fold_build3 (COND_EXPR, type, TREE_OPERAND (arg0, 0),
7231 /* If this was a conversion, and all we did was to move it
7232 inside the COND_EXPR, bring it back out. But leave it if
7233 it is a conversion from integer to integer and the
7234 result precision is no wider than a word since such a
7235 conversion is cheap and may be optimized away by combine,
7236 while it couldn't if it were outside the COND_EXPR. Then return
7237 so we don't get into an infinite recursion loop taking the
7238 conversion out and then back in. */
7240 if ((code == NOP_EXPR || code == CONVERT_EXPR
7241 || code == NON_LVALUE_EXPR)
7242 && TREE_CODE (tem) == COND_EXPR
7243 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7244 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7245 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7246 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7247 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7248 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7249 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7251 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7252 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7253 || flag_syntax_only))
7254 tem = build1 (code, type,
7256 TREE_TYPE (TREE_OPERAND
7257 (TREE_OPERAND (tem, 1), 0)),
7258 TREE_OPERAND (tem, 0),
7259 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7260 TREE_OPERAND (TREE_OPERAND (tem, 2), 0)));
7263 else if (COMPARISON_CLASS_P (arg0))
7265 if (TREE_CODE (type) == BOOLEAN_TYPE)
7267 arg0 = copy_node (arg0);
7268 TREE_TYPE (arg0) = type;
7271 else if (TREE_CODE (type) != INTEGER_TYPE)
7272 return fold_build3 (COND_EXPR, type, arg0,
7273 fold_build1 (code, type,
7275 fold_build1 (code, type,
7276 integer_zero_node));
7285 case FIX_TRUNC_EXPR:
7287 case FIX_FLOOR_EXPR:
7288 case FIX_ROUND_EXPR:
7289 if (TREE_TYPE (op0) == type)
7292 /* If we have (type) (a CMP b) and type is an integral type, return
7293 new expression involving the new type. */
7294 if (COMPARISON_CLASS_P (op0) && INTEGRAL_TYPE_P (type))
7295 return fold_build2 (TREE_CODE (op0), type, TREE_OPERAND (op0, 0),
7296 TREE_OPERAND (op0, 1));
7298 /* Handle cases of two conversions in a row. */
7299 if (TREE_CODE (op0) == NOP_EXPR
7300 || TREE_CODE (op0) == CONVERT_EXPR)
7302 tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
7303 tree inter_type = TREE_TYPE (op0);
7304 int inside_int = INTEGRAL_TYPE_P (inside_type);
7305 int inside_ptr = POINTER_TYPE_P (inside_type);
7306 int inside_float = FLOAT_TYPE_P (inside_type);
7307 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
7308 unsigned int inside_prec = TYPE_PRECISION (inside_type);
7309 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
7310 int inter_int = INTEGRAL_TYPE_P (inter_type);
7311 int inter_ptr = POINTER_TYPE_P (inter_type);
7312 int inter_float = FLOAT_TYPE_P (inter_type);
7313 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
7314 unsigned int inter_prec = TYPE_PRECISION (inter_type);
7315 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
7316 int final_int = INTEGRAL_TYPE_P (type);
7317 int final_ptr = POINTER_TYPE_P (type);
7318 int final_float = FLOAT_TYPE_P (type);
7319 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
7320 unsigned int final_prec = TYPE_PRECISION (type);
7321 int final_unsignedp = TYPE_UNSIGNED (type);
7323 /* In addition to the cases of two conversions in a row
7324 handled below, if we are converting something to its own
7325 type via an object of identical or wider precision, neither
7326 conversion is needed. */
7327 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
7328 && (((inter_int || inter_ptr) && final_int)
7329 || (inter_float && final_float))
7330 && inter_prec >= final_prec)
7331 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7333 /* Likewise, if the intermediate and final types are either both
7334 float or both integer, we don't need the middle conversion if
7335 it is wider than the final type and doesn't change the signedness
7336 (for integers). Avoid this if the final type is a pointer
7337 since then we sometimes need the inner conversion. Likewise if
7338 the outer has a precision not equal to the size of its mode. */
7339 if ((((inter_int || inter_ptr) && (inside_int || inside_ptr))
7340 || (inter_float && inside_float)
7341 || (inter_vec && inside_vec))
7342 && inter_prec >= inside_prec
7343 && (inter_float || inter_vec
7344 || inter_unsignedp == inside_unsignedp)
7345 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
7346 && TYPE_MODE (type) == TYPE_MODE (inter_type))
7348 && (! final_vec || inter_prec == inside_prec))
7349 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7351 /* If we have a sign-extension of a zero-extended value, we can
7352 replace that by a single zero-extension. */
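/* Added illustration (not in the original source): for unsigned char c,
   (int) (short) c first zero-extends c to 16 bits and then sign-extends
   to 32; the short's sign bit is necessarily zero, so this equals the
   single conversion (int) c.  */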
7353 if (inside_int && inter_int && final_int
7354 && inside_prec < inter_prec && inter_prec < final_prec
7355 && inside_unsignedp && !inter_unsignedp)
7356 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7358 /* Two conversions in a row are not needed unless:
7359 - some conversion is floating-point (overstrict for now), or
7360 - some conversion is a vector (overstrict for now), or
7361 - the intermediate type is narrower than both initial and
7363 - the intermediate type and innermost type differ in signedness,
7364 and the outermost type is wider than the intermediate, or
7365 - the initial type is a pointer type and the precisions of the
7366 intermediate and final types differ, or
7367 - the final type is a pointer type and the precisions of the
7368 initial and intermediate types differ.
7369 - the final type is a pointer type and the initial type not
7370 - the initial type is a pointer to an array and the final type
7372 if (! inside_float && ! inter_float && ! final_float
7373 && ! inside_vec && ! inter_vec && ! final_vec
7374 && (inter_prec >= inside_prec || inter_prec >= final_prec)
7375 && ! (inside_int && inter_int
7376 && inter_unsignedp != inside_unsignedp
7377 && inter_prec < final_prec)
7378 && ((inter_unsignedp && inter_prec > inside_prec)
7379 == (final_unsignedp && final_prec > inter_prec))
7380 && ! (inside_ptr && inter_prec != final_prec)
7381 && ! (final_ptr && inside_prec != inter_prec)
7382 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
7383 && TYPE_MODE (type) == TYPE_MODE (inter_type))
7384 && final_ptr == inside_ptr
7386 && TREE_CODE (TREE_TYPE (inside_type)) == ARRAY_TYPE
7387 && TREE_CODE (TREE_TYPE (type)) != ARRAY_TYPE))
7388 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7391 /* Handle (T *)&A.B.C for A being of type T and B and C
7392 living at offset zero. This occurs frequently in
7393 C++ upcasting and then accessing the base. */
7394 if (TREE_CODE (op0) == ADDR_EXPR
7395 && POINTER_TYPE_P (type)
7396 && handled_component_p (TREE_OPERAND (op0, 0)))
7398 HOST_WIDE_INT bitsize, bitpos;
7400 enum machine_mode mode;
7401 int unsignedp, volatilep;
7402 tree base = TREE_OPERAND (op0, 0);
7403 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
7404 &mode, &unsignedp, &volatilep, false);
7405 /* If the reference was to a (constant) zero offset, we can use
7406 the address of the base if it has the same base type
7407 as the result type. */
7408 if (! offset && bitpos == 0
7409 && TYPE_MAIN_VARIANT (TREE_TYPE (type))
7410 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
7411 return fold_convert (type, build_fold_addr_expr (base));
7414 if (TREE_CODE (op0) == MODIFY_EXPR
7415 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
7416 /* Detect assigning a bitfield. */
7417 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
7418 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
7420 /* Don't leave an assignment inside a conversion
7421 unless assigning a bitfield. */
7422 tem = fold_build1 (code, type, TREE_OPERAND (op0, 1));
7423 /* First do the assignment, then return converted constant. */
7424 tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
7425 TREE_NO_WARNING (tem) = 1;
7426 TREE_USED (tem) = 1;
7430 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7431 constant (if x has signed type, the sign bit cannot be set
7432 in c). This folds extension into the BIT_AND_EXPR. */
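/* Added illustration (not in the original source): for unsigned char x,
   (unsigned int) (x & 0x7f) can become (unsigned int) x & 0x7fU, moving
   the extension past the mask.  */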
7433 if (INTEGRAL_TYPE_P (type)
7434 && TREE_CODE (type) != BOOLEAN_TYPE
7435 && TREE_CODE (op0) == BIT_AND_EXPR
7436 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
7439 tree and0 = TREE_OPERAND (and, 0), and1 = TREE_OPERAND (and, 1);
7442 if (TYPE_UNSIGNED (TREE_TYPE (and))
7443 || (TYPE_PRECISION (type)
7444 <= TYPE_PRECISION (TREE_TYPE (and))))
7446 else if (TYPE_PRECISION (TREE_TYPE (and1))
7447 <= HOST_BITS_PER_WIDE_INT
7448 && host_integerp (and1, 1))
7450 unsigned HOST_WIDE_INT cst;
7452 cst = tree_low_cst (and1, 1);
7453 cst &= (HOST_WIDE_INT) -1
7454 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
7455 change = (cst == 0);
7456 #ifdef LOAD_EXTEND_OP
7458 && !flag_syntax_only
7459 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
7462 tree uns = lang_hooks.types.unsigned_type (TREE_TYPE (and0));
7463 and0 = fold_convert (uns, and0);
7464 and1 = fold_convert (uns, and1);
7470 tem = build_int_cst_wide (type, TREE_INT_CST_LOW (and1),
7471 TREE_INT_CST_HIGH (and1));
7472 tem = force_fit_type (tem, 0, TREE_OVERFLOW (and1),
7473 TREE_CONSTANT_OVERFLOW (and1));
7474 return fold_build2 (BIT_AND_EXPR, type,
7475 fold_convert (type, and0), tem);
7479 /* Convert (T1)((T2)X op Y) into (T1)X op Y, for pointer types T1 and
7480 T2 being pointers to types of the same size. */
7481 if (POINTER_TYPE_P (type)
7482 && BINARY_CLASS_P (arg0)
7483 && TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
7484 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 0))))
7486 tree arg00 = TREE_OPERAND (arg0, 0);
7488 tree t1 = TREE_TYPE (arg00);
7489 tree tt0 = TREE_TYPE (t0);
7490 tree tt1 = TREE_TYPE (t1);
7491 tree s0 = TYPE_SIZE (tt0);
7492 tree s1 = TYPE_SIZE (tt1);
7494 if (s0 && s1 && operand_equal_p (s0, s1, OEP_ONLY_CONST))
7495 return build2 (TREE_CODE (arg0), t0, fold_convert (t0, arg00),
7496 TREE_OPERAND (arg0, 1));
7499 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
7500 of the same precision, and X is an integer type not narrower than
7501 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
7502 if (INTEGRAL_TYPE_P (type)
7503 && TREE_CODE (op0) == BIT_NOT_EXPR
7504 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7505 && (TREE_CODE (TREE_OPERAND (op0, 0)) == NOP_EXPR
7506 || TREE_CODE (TREE_OPERAND (op0, 0)) == CONVERT_EXPR)
7507 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
7509 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
7510 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7511 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
7512 return fold_build1 (BIT_NOT_EXPR, type, fold_convert (type, tem));
7515 tem = fold_convert_const (code, type, arg0);
7516 return tem ? tem : NULL_TREE;
7518 case VIEW_CONVERT_EXPR:
7519 if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
7520 return fold_build1 (VIEW_CONVERT_EXPR, type, TREE_OPERAND (op0, 0));
7521 return fold_view_convert_expr (type, op0);
7524 if (negate_expr_p (arg0))
7525 return fold_convert (type, negate_expr (arg0));
7529 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
7530 return fold_abs_const (arg0, type);
7531 else if (TREE_CODE (arg0) == NEGATE_EXPR)
7532 return fold_build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0));
7533 /* Convert fabs((double)float) into (double)fabsf(float). */
7534 else if (TREE_CODE (arg0) == NOP_EXPR
7535 && TREE_CODE (type) == REAL_TYPE)
7537 tree targ0 = strip_float_extensions (arg0);
7539 return fold_convert (type, fold_build1 (ABS_EXPR,
7543 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
7544 else if (tree_expr_nonnegative_p (arg0) || TREE_CODE (arg0) == ABS_EXPR)
7547 /* Strip sign ops from argument. */
7548 if (TREE_CODE (type) == REAL_TYPE)
7550 tem = fold_strip_sign_ops (arg0);
7552 return fold_build1 (ABS_EXPR, type, fold_convert (type, tem));
7557 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7558 return fold_convert (type, arg0);
7559 if (TREE_CODE (arg0) == COMPLEX_EXPR)
7561 tree itype = TREE_TYPE (type);
7562 tree rpart = fold_convert (itype, TREE_OPERAND (arg0, 0));
7563 tree ipart = fold_convert (itype, TREE_OPERAND (arg0, 1));
7564 return fold_build2 (COMPLEX_EXPR, type, rpart, negate_expr (ipart));
7566 if (TREE_CODE (arg0) == COMPLEX_CST)
7568 tree itype = TREE_TYPE (type);
7569 tree rpart = fold_convert (itype, TREE_REALPART (arg0));
7570 tree ipart = fold_convert (itype, TREE_IMAGPART (arg0));
7571 return build_complex (type, rpart, negate_expr (ipart));
7573 if (TREE_CODE (arg0) == CONJ_EXPR)
7574 return fold_convert (type, TREE_OPERAND (arg0, 0));
7578 if (TREE_CODE (arg0) == INTEGER_CST)
7579 return fold_not_const (arg0, type);
7580 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
7581 return TREE_OPERAND (arg0, 0);
7582 /* Convert ~ (-A) to A - 1. */
7583 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
7584 return fold_build2 (MINUS_EXPR, type, TREE_OPERAND (arg0, 0),
7585 build_int_cst (type, 1));
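/* Added note (not in the original source): in two's complement
   ~X == -X - 1, hence ~(-A) == A - 1; e.g. ~(-5) == 4.  */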
7586 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
7587 else if (INTEGRAL_TYPE_P (type)
7588 && ((TREE_CODE (arg0) == MINUS_EXPR
7589 && integer_onep (TREE_OPERAND (arg0, 1)))
7590 || (TREE_CODE (arg0) == PLUS_EXPR
7591 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
7592 return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));
7593 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
7594 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
7595 && (tem = fold_unary (BIT_NOT_EXPR, type,
7597 TREE_OPERAND (arg0, 0)))))
7598 return fold_build2 (BIT_XOR_EXPR, type, tem,
7599 fold_convert (type, TREE_OPERAND (arg0, 1)));
7600 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
7601 && (tem = fold_unary (BIT_NOT_EXPR, type,
7603 TREE_OPERAND (arg0, 1)))))
7604 return fold_build2 (BIT_XOR_EXPR, type,
7605 fold_convert (type, TREE_OPERAND (arg0, 0)), tem);
7609 case TRUTH_NOT_EXPR:
7610 /* The argument to invert_truthvalue must have Boolean type. */
7611 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
7612 arg0 = fold_convert (boolean_type_node, arg0);
7614 /* Note that the operand of this must be an int
7615 and its values must be 0 or 1.
7616 ("true" is a fixed value perhaps depending on the language,
7617 but we don't handle values other than 1 correctly yet.) */
7618 tem = invert_truthvalue (arg0);
7619 /* Avoid infinite recursion. */
7620 if (TREE_CODE (tem) == TRUTH_NOT_EXPR)
7622 return fold_convert (type, tem);
7625 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7626 return fold_convert (type, arg0);
7627 if (TREE_CODE (arg0) == COMPLEX_EXPR)
7628 return omit_one_operand (type, TREE_OPERAND (arg0, 0),
7629 TREE_OPERAND (arg0, 1));
7630 if (TREE_CODE (arg0) == COMPLEX_CST)
7631 return fold_convert (type, TREE_REALPART (arg0));
7632 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7634 tree itype = TREE_TYPE (TREE_TYPE (arg0));
7635 tem = fold_build2 (TREE_CODE (arg0), itype,
7636 fold_build1 (REALPART_EXPR, itype,
7637 TREE_OPERAND (arg0, 0)),
7638 fold_build1 (REALPART_EXPR, itype,
7639 TREE_OPERAND (arg0, 1)));
7640 return fold_convert (type, tem);
7642 if (TREE_CODE (arg0) == CONJ_EXPR)
7644 tree itype = TREE_TYPE (TREE_TYPE (arg0));
7645 tem = fold_build1 (REALPART_EXPR, itype, TREE_OPERAND (arg0, 0));
7646 return fold_convert (type, tem);
7651 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7652 return fold_convert (type, integer_zero_node);
7653 if (TREE_CODE (arg0) == COMPLEX_EXPR)
7654 return omit_one_operand (type, TREE_OPERAND (arg0, 1),
7655 TREE_OPERAND (arg0, 0));
7656 if (TREE_CODE (arg0) == COMPLEX_CST)
7657 return fold_convert (type, TREE_IMAGPART (arg0));
7658 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7660 tree itype = TREE_TYPE (TREE_TYPE (arg0));
7661 tem = fold_build2 (TREE_CODE (arg0), itype,
7662 fold_build1 (IMAGPART_EXPR, itype,
7663 TREE_OPERAND (arg0, 0)),
7664 fold_build1 (IMAGPART_EXPR, itype,
7665 TREE_OPERAND (arg0, 1)));
7666 return fold_convert (type, tem);
7668 if (TREE_CODE (arg0) == CONJ_EXPR)
7670 tree itype = TREE_TYPE (TREE_TYPE (arg0));
7671 tem = fold_build1 (IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
7672 return fold_convert (type, negate_expr (tem));
7678 } /* switch (code) */
7681 /* Fold a binary expression of code CODE and type TYPE with operands
7682 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
7683 Return the folded expression if folding is successful. Otherwise,
7684 return NULL_TREE. */
7687 fold_minmax (enum tree_code code, tree type, tree op0, tree op1)
7689 enum tree_code compl_code;
7691 if (code == MIN_EXPR)
7692 compl_code = MAX_EXPR;
7693 else if (code == MAX_EXPR)
7694 compl_code = MIN_EXPR;
7698 /* MIN (MAX (a, b), b) == b. */
7699 if (TREE_CODE (op0) == compl_code
7700 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
7701 return omit_one_operand (type, op1, TREE_OPERAND (op0, 0));
7703 /* MIN (MAX (b, a), b) == b. */
7704 if (TREE_CODE (op0) == compl_code
7705 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
7706 && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
7707 return omit_one_operand (type, op1, TREE_OPERAND (op0, 1));
7709 /* MIN (a, MAX (a, b)) == a. */
7710 if (TREE_CODE (op1) == compl_code
7711 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
7712 && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
7713 return omit_one_operand (type, op0, TREE_OPERAND (op1, 1));
7715 /* MIN (a, MAX (b, a)) == a. */
7716 if (TREE_CODE (op1) == compl_code
7717 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
7718 && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
7719 return omit_one_operand (type, op0, TREE_OPERAND (op1, 0));
7724 /* Subroutine of fold_binary. This routine performs all of the
7725 transformations that are common to the equality/inequality
7726 operators (EQ_EXPR and NE_EXPR) and the ordering operators
7727 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
7728 fold_binary should call fold_binary instead. Fold a comparison with
7729 tree code CODE and type TYPE with operands OP0 and OP1. Return
7730 the folded comparison or NULL_TREE. */
7733 fold_comparison (enum tree_code code, tree type, tree op0, tree op1)
7735 tree arg0, arg1, tem;
7740 STRIP_SIGN_NOPS (arg0);
7741 STRIP_SIGN_NOPS (arg1);
7743 tem = fold_relational_const (code, type, arg0, arg1);
7744 if (tem != NULL_TREE)
7747 /* If one arg is a real or integer constant, put it last. */
7748 if (tree_swap_operands_p (arg0, arg1, true))
7749 return fold_build2 (swap_tree_comparison (code), type, op1, op0);
7751 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1. */
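/* Added illustration (not in the original source): X + 3 < 10 becomes
   X < 7 and X - 3 == 7 becomes X == 10; the condition below permits
   this only when the recomputed constant does not overflow and neither
   -fwrapv nor -ftrapv is in effect.  */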
7752 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7753 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7754 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
7755 && !TYPE_UNSIGNED (TREE_TYPE (arg1))
7756 && !(flag_wrapv || flag_trapv))
7757 && (TREE_CODE (arg1) == INTEGER_CST
7758 && !TREE_OVERFLOW (arg1)))
7760 tree const1 = TREE_OPERAND (arg0, 1);
7762 tree variable = TREE_OPERAND (arg0, 0);
7765 lhs_add = TREE_CODE (arg0) != PLUS_EXPR;
7767 lhs = fold_build2 (lhs_add ? PLUS_EXPR : MINUS_EXPR,
7768 TREE_TYPE (arg1), const2, const1);
7769 if (TREE_CODE (lhs) == TREE_CODE (arg1)
7770 && (TREE_CODE (lhs) != INTEGER_CST
7771 || !TREE_OVERFLOW (lhs)))
7772 return fold_build2 (code, type, variable, lhs);
7775 /* If this is a comparison of two exprs that look like an ARRAY_REF of the
7776 same object, then we can fold this to a comparison of the two offsets in
7777 signed size type. This is possible because pointer arithmetic is
7778 restricted to remain within an object and overflow on pointer differences
7779 is undefined as of 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
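/* Added illustration (not in the original source): &a[i] < &a[j]
   compares two addresses within the same object a, so it can be folded
   to a comparison of the two array offsets in the signed variant of the
   size type.  */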
7780 if (POINTER_TYPE_P (TREE_TYPE (arg0))
7781 && !flag_wrapv && !flag_trapv)
7783 tree base0, offset0, base1, offset1;
7785 if (extract_array_ref (arg0, &base0, &offset0)
7786 && extract_array_ref (arg1, &base1, &offset1)
7787 && operand_equal_p (base0, base1, 0))
7789 tree signed_size_type_node;
7790 signed_size_type_node = signed_type_for (size_type_node);
7792 /* By converting to signed size type we cover middle-end pointer
7793 arithmetic which operates on unsigned pointer types of size
7794 type size and ARRAY_REF offsets which are properly sign or
7795 zero extended from their type in case it is narrower than
7797 if (offset0 == NULL_TREE)
7798 offset0 = build_int_cst (signed_size_type_node, 0);
7800 offset0 = fold_convert (signed_size_type_node, offset0);
7801 if (offset1 == NULL_TREE)
7802 offset1 = build_int_cst (signed_size_type_node, 0);
7804 offset1 = fold_convert (signed_size_type_node, offset1);
7806 return fold_build2 (code, type, offset0, offset1);
7810 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
7812 tree targ0 = strip_float_extensions (arg0);
7813 tree targ1 = strip_float_extensions (arg1);
7814 tree newtype = TREE_TYPE (targ0);
7816 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
7817 newtype = TREE_TYPE (targ1);
7819 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
7820 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
7821 return fold_build2 (code, type, fold_convert (newtype, targ0),
7822 fold_convert (newtype, targ1));
7824 /* (-a) CMP (-b) -> b CMP a */
7825 if (TREE_CODE (arg0) == NEGATE_EXPR
7826 && TREE_CODE (arg1) == NEGATE_EXPR)
7827 return fold_build2 (code, type, TREE_OPERAND (arg1, 0),
7828 TREE_OPERAND (arg0, 0));
7830 if (TREE_CODE (arg1) == REAL_CST)
7832 REAL_VALUE_TYPE cst;
7833 cst = TREE_REAL_CST (arg1);
7835 /* (-a) CMP CST -> a swap(CMP) (-CST) */
7836 if (TREE_CODE (arg0) == NEGATE_EXPR)
7837 return fold_build2 (swap_tree_comparison (code), type,
7838 TREE_OPERAND (arg0, 0),
7839 build_real (TREE_TYPE (arg1),
7840 REAL_VALUE_NEGATE (cst)));
7842 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
7843 /* a CMP (-0) -> a CMP 0 */
7844 if (REAL_VALUE_MINUS_ZERO (cst))
7845 return fold_build2 (code, type, arg0,
7846 build_real (TREE_TYPE (arg1), dconst0));
7848 /* x != NaN is always true, other ops are always false. */
7849 if (REAL_VALUE_ISNAN (cst)
7850 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
7852 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
7853 return omit_one_operand (type, tem, arg0);
7856 /* Fold comparisons against infinity. */
7857 if (REAL_VALUE_ISINF (cst))
7859 tem = fold_inf_compare (code, type, arg0, arg1);
7860 if (tem != NULL_TREE)
7865 /* If this is a comparison of a real constant with a PLUS_EXPR
7866 or a MINUS_EXPR of a real constant, we can convert it into a
7867 comparison with a revised real constant as long as no overflow
7868 occurs when unsafe_math_optimizations are enabled. */
7869 if (flag_unsafe_math_optimizations
7870 && TREE_CODE (arg1) == REAL_CST
7871 && (TREE_CODE (arg0) == PLUS_EXPR
7872 || TREE_CODE (arg0) == MINUS_EXPR)
7873 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
7874 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
7875 ? MINUS_EXPR : PLUS_EXPR,
7876 arg1, TREE_OPERAND (arg0, 1), 0))
7877 && ! TREE_CONSTANT_OVERFLOW (tem))
7878 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
7880 /* Likewise, we can simplify a comparison of a real constant with
7881 a MINUS_EXPR whose first operand is also a real constant, i.e.
7882 (c1 - x) < c2 becomes x > c1-c2. */
7883 if (flag_unsafe_math_optimizations
7884 && TREE_CODE (arg1) == REAL_CST
7885 && TREE_CODE (arg0) == MINUS_EXPR
7886 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
7887 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
7889 && ! TREE_CONSTANT_OVERFLOW (tem))
7890 return fold_build2 (swap_tree_comparison (code), type,
7891 TREE_OPERAND (arg0, 1), tem);
7893 /* Fold comparisons against built-in math functions. */
7894 if (TREE_CODE (arg1) == REAL_CST
7895 && flag_unsafe_math_optimizations
7896 && ! flag_errno_math)
7898 enum built_in_function fcode = builtin_mathfn_code (arg0);
7900 if (fcode != END_BUILTINS)
7902 tem = fold_mathfn_compare (fcode, code, type, arg0, arg1);
7903 if (tem != NULL_TREE)
7909 /* Convert foo++ == CONST into ++foo == CONST + INCR. */
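/* Added illustration (not in the original source): i++ == 5 becomes
   ++i == 6; the constant is bumped by the increment so the comparison
   reads the updated value and may later combine with the increment.  */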
7910 if (TREE_CONSTANT (arg1)
7911 && (TREE_CODE (arg0) == POSTINCREMENT_EXPR
7912 || TREE_CODE (arg0) == POSTDECREMENT_EXPR)
7913 /* This optimization is invalid for ordered comparisons
7914 if CONST+INCR overflows or if foo+incr might overflow.
7915 This optimization is invalid for floating point due to rounding.
7916 For pointer types we assume overflow doesn't happen. */
7917 && (POINTER_TYPE_P (TREE_TYPE (arg0))
7918 || (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
7919 && (code == EQ_EXPR || code == NE_EXPR))))
7921 tree varop, newconst;
7923 if (TREE_CODE (arg0) == POSTINCREMENT_EXPR)
7925 newconst = fold_build2 (PLUS_EXPR, TREE_TYPE (arg0),
7926 arg1, TREE_OPERAND (arg0, 1));
7927 varop = build2 (PREINCREMENT_EXPR, TREE_TYPE (arg0),
7928 TREE_OPERAND (arg0, 0),
7929 TREE_OPERAND (arg0, 1));
7933 newconst = fold_build2 (MINUS_EXPR, TREE_TYPE (arg0),
7934 arg1, TREE_OPERAND (arg0, 1));
7935 varop = build2 (PREDECREMENT_EXPR, TREE_TYPE (arg0),
7936 TREE_OPERAND (arg0, 0),
7937 TREE_OPERAND (arg0, 1));
7941 /* If VAROP is a reference to a bitfield, we must mask
7942 the constant by the width of the field. */
7943 if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
7944 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (varop, 0), 1))
7945 && host_integerp (DECL_SIZE (TREE_OPERAND
7946 (TREE_OPERAND (varop, 0), 1)), 1))
7948 tree fielddecl = TREE_OPERAND (TREE_OPERAND (varop, 0), 1);
7949 HOST_WIDE_INT size = tree_low_cst (DECL_SIZE (fielddecl), 1);
7950 tree folded_compare, shift;
7952 /* First check whether the comparison would come out
7953 always the same. If we don't do that we would
7954 change the meaning with the masking. */
7955 folded_compare = fold_build2 (code, type,
7956 TREE_OPERAND (varop, 0), arg1);
7957 if (TREE_CODE (folded_compare) == INTEGER_CST)
7958 return omit_one_operand (type, folded_compare, varop);
7960 shift = build_int_cst (NULL_TREE,
7961 TYPE_PRECISION (TREE_TYPE (varop)) - size);
7962 shift = fold_convert (TREE_TYPE (varop), shift);
7963 newconst = fold_build2 (LSHIFT_EXPR, TREE_TYPE (varop),
7965 newconst = fold_build2 (RSHIFT_EXPR, TREE_TYPE (varop),
7969 return fold_build2 (code, type, varop, newconst);
7972 if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
7973 && (TREE_CODE (arg0) == NOP_EXPR
7974 || TREE_CODE (arg0) == CONVERT_EXPR))
7976 /* If we are widening one operand of an integer comparison,
7977 see if the other operand is similarly being widened. Perhaps we
7978 can do the comparison in the narrower type. */
7979 tem = fold_widened_comparison (code, type, arg0, arg1);
7983 /* Or if we are changing signedness. */
7984 tem = fold_sign_changed_comparison (code, type, arg0, arg1);
7989 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
7990 constant, we can simplify it. */
7991 if (TREE_CODE (arg1) == INTEGER_CST
7992 && (TREE_CODE (arg0) == MIN_EXPR
7993 || TREE_CODE (arg0) == MAX_EXPR)
7994 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
7996 tem = optimize_minmax_comparison (code, type, op0, op1);
8001 /* Simplify comparison of something with itself. (For IEEE
8002 floating-point, we can only do some of these simplifications.) */
8003 if (operand_equal_p (arg0, arg1, 0))
8008 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
8009 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8010 return constant_boolean_node (1, type);
8015 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
8016 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8017 return constant_boolean_node (1, type);
8018 return fold_build2 (EQ_EXPR, type, arg0, arg1);
8021 /* For NE, we can only do this simplification if the operands are integer
8022 or we don't honor IEEE floating point NaNs. */
8023 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
8024 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8026 /* ... fall through ... */
8029 return constant_boolean_node (0, type);
8035 /* If we are comparing an expression that just has comparisons
8036 of two integer values, arithmetic expressions of those comparisons,
8037 and constants, we can simplify it. There are only three cases
8038 to check: the two values can either be equal, the first can be
8039 greater, or the second can be greater. Fold the expression for
8040 those three values. Since each value must be 0 or 1, we have
8041 eight possibilities, each of which corresponds to the constant 0
8042 or 1 or one of the six possible comparisons.
8044 This handles common cases like (a > b) == 0 but also handles
8045 expressions like ((x > y) - (y > x)) > 0, which supposedly
8046 occur in macroized code. */
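/* Added illustration (not in the original source): in (a > b) == 0 the
   whole expression depends only on the ordering of a and b; evaluating
   it for a < b, a == b and a > b yields 1, 1 and 0, i.e. the 3-bit mask
   for a <= b computed below.  */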
8048 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
8050 tree cval1 = 0, cval2 = 0;
8053 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
8054 /* Don't handle degenerate cases here; they should already
8055 have been handled anyway. */
8056 && cval1 != 0 && cval2 != 0
8057 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
8058 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
8059 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
8060 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
8061 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
8062 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
8063 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
8065 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
8066 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
8068 /* We can't just pass T to eval_subst in case cval1 or cval2
8069 was the same as ARG1. */
8072 = fold_build2 (code, type,
8073 eval_subst (arg0, cval1, maxval,
8077 = fold_build2 (code, type,
8078 eval_subst (arg0, cval1, maxval,
8082 = fold_build2 (code, type,
8083 eval_subst (arg0, cval1, minval,
8087 /* All three of these results should be 0 or 1. Confirm they are.
8088 Then use those values to select the proper code to use. */
8090 if (TREE_CODE (high_result) == INTEGER_CST
8091 && TREE_CODE (equal_result) == INTEGER_CST
8092 && TREE_CODE (low_result) == INTEGER_CST)
8094 /* Make a 3-bit mask with the high-order bit being the
8095 value for `>', the next for `=', and the low for `<'. */
8096 switch ((integer_onep (high_result) * 4)
8097 + (integer_onep (equal_result) * 2)
8098 + integer_onep (low_result))
8102 return omit_one_operand (type, integer_zero_node, arg0);
8123 return omit_one_operand (type, integer_one_node, arg0);
8127 return save_expr (build2 (code, type, cval1, cval2));
8128 return fold_build2 (code, type, cval1, cval2);
8133 /* Fold a comparison of the address of COMPONENT_REFs with the same
8134 type and component to a comparison of the address of the base
8135 object. In short, &x->a OP &y->a to x OP y and
8136 &x->a OP &y.a to x OP &y */
8137 if (TREE_CODE (arg0) == ADDR_EXPR
8138 && TREE_CODE (TREE_OPERAND (arg0, 0)) == COMPONENT_REF
8139 && TREE_CODE (arg1) == ADDR_EXPR
8140 && TREE_CODE (TREE_OPERAND (arg1, 0)) == COMPONENT_REF)
8142 tree cref0 = TREE_OPERAND (arg0, 0);
8143 tree cref1 = TREE_OPERAND (arg1, 0);
8144 if (TREE_OPERAND (cref0, 1) == TREE_OPERAND (cref1, 1))
8146 tree op0 = TREE_OPERAND (cref0, 0);
8147 tree op1 = TREE_OPERAND (cref1, 0);
8148 return fold_build2 (code, type,
8149 build_fold_addr_expr (op0),
8150 build_fold_addr_expr (op1));
8154 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
8155 into a single range test. */
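/* Added illustration (not in the original source): for nonnegative x,
   x/3 == 2 holds exactly when 6 <= x <= 8, so fold_div_compare can
   replace the division with a range check on x.  */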
8156 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
8157 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
8158 && TREE_CODE (arg1) == INTEGER_CST
8159 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8160 && !integer_zerop (TREE_OPERAND (arg0, 1))
8161 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8162 && !TREE_OVERFLOW (arg1))
8164 tem = fold_div_compare (code, type, arg0, arg1);
8165 if (tem != NULL_TREE)
8173 /* Subroutine of fold_binary. Optimize complex multiplications of the
8174 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
8175 argument EXPR represents the expression "z" of type TYPE. */
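/* Added derivation (not in the original source): writing z = a + b*i,
   z * conj(z) = (a + b*i) * (a - b*i) = a*a + b*b + 0*i, so the result
   has real part a*a + b*b and a zero imaginary part, which is the
   COMPLEX_EXPR constructed below.  */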
8178 fold_mult_zconjz (tree type, tree expr)
8180 tree itype = TREE_TYPE (type);
8181 tree rpart, ipart, tem;
8183 if (TREE_CODE (expr) == COMPLEX_EXPR)
8185 rpart = TREE_OPERAND (expr, 0);
8186 ipart = TREE_OPERAND (expr, 1);
8188 else if (TREE_CODE (expr) == COMPLEX_CST)
8190 rpart = TREE_REALPART (expr);
8191 ipart = TREE_IMAGPART (expr);
8195 expr = save_expr (expr);
8196 rpart = fold_build1 (REALPART_EXPR, itype, expr);
8197 ipart = fold_build1 (IMAGPART_EXPR, itype, expr);
8200 rpart = save_expr (rpart);
8201 ipart = save_expr (ipart);
8202 tem = fold_build2 (PLUS_EXPR, itype,
8203 fold_build2 (MULT_EXPR, itype, rpart, rpart),
8204 fold_build2 (MULT_EXPR, itype, ipart, ipart));
8205 return fold_build2 (COMPLEX_EXPR, type, tem,
8206 fold_convert (itype, integer_zero_node));
8210 /* Fold a binary expression of code CODE and type TYPE with operands
8211 OP0 and OP1. Return the folded expression if folding is
8212 successful. Otherwise, return NULL_TREE. */
8215 fold_binary (enum tree_code code, tree type, tree op0, tree op1)
8217 enum tree_code_class kind = TREE_CODE_CLASS (code);
8218 tree arg0, arg1, tem;
8219 tree t1 = NULL_TREE;
8221 gcc_assert (IS_EXPR_CODE_CLASS (kind)
8222 && TREE_CODE_LENGTH (code) == 2
8224 && op1 != NULL_TREE);
8229 /* Strip any conversions that don't change the mode. This is
8230 safe for every expression, except for a comparison expression
8231 because its signedness is derived from its operands. So, in
8232 the latter case, only strip conversions that don't change the
8235 Note that this is done as an internal manipulation within the
8236 constant folder, in order to find the simplest representation
8237 of the arguments so that their form can be studied. In any
8238 case, the appropriate type conversions should be put back in
8239 the tree that will get out of the constant folder. */
8241 if (kind == tcc_comparison)
8243 STRIP_SIGN_NOPS (arg0);
8244 STRIP_SIGN_NOPS (arg1);
8252 /* Note that TREE_CONSTANT isn't enough: static var addresses are
8253 constant but we can't do arithmetic on them. */
8254 if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
8255 || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
8256 || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
8257 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST))
8259 if (kind == tcc_binary)
8260 tem = const_binop (code, arg0, arg1, 0);
8261 else if (kind == tcc_comparison)
8262 tem = fold_relational_const (code, type, arg0, arg1);
8263 else
8264 tem = NULL_TREE;
8266 if (tem != NULL_TREE)
8267 {
8268 if (TREE_TYPE (tem) != type)
8269 tem = fold_convert (type, tem);
8270 return tem;
8271 }
8274 /* If this is a commutative operation, and ARG0 is a constant, move it
8275 to ARG1 to reduce the number of tests below. */
8276 if (commutative_tree_code (code)
8277 && tree_swap_operands_p (arg0, arg1, true))
8278 return fold_build2 (code, type, op1, op0);
8280 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
8282 First check for cases where an arithmetic operation is applied to a
8283 compound, conditional, or comparison operation. Push the arithmetic
8284 operation inside the compound or conditional to see if any folding
8285 can then be done. Convert comparison to conditional for this purpose.
8286 This also optimizes non-constant cases that used to be done in
8287 expr.c.
8289 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
8290 one of the operands is a comparison and the other is a comparison, a
8291 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
8292 code below would make the expression more complex. Change it to a
8293 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
8294 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
8296 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
8297 || code == EQ_EXPR || code == NE_EXPR)
8298 && ((truth_value_p (TREE_CODE (arg0))
8299 && (truth_value_p (TREE_CODE (arg1))
8300 || (TREE_CODE (arg1) == BIT_AND_EXPR
8301 && integer_onep (TREE_OPERAND (arg1, 1)))))
8302 || (truth_value_p (TREE_CODE (arg1))
8303 && (truth_value_p (TREE_CODE (arg0))
8304 || (TREE_CODE (arg0) == BIT_AND_EXPR
8305 && integer_onep (TREE_OPERAND (arg0, 1)))))))
8307 tem = fold_build2 (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
8308 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
8309 : TRUTH_XOR_EXPR,
8310 boolean_type_node,
8311 fold_convert (boolean_type_node, arg0),
8312 fold_convert (boolean_type_node, arg1));
8314 if (code == EQ_EXPR)
8315 tem = invert_truthvalue (tem);
8317 return fold_convert (type, tem);
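/* For instance, with int operands, (a < b) & (c < d) becomes
   TRUTH_AND_EXPR (a < b, c < d), and (a < b) == (c < d) becomes
   the inversion of TRUTH_XOR_EXPR (a < b, c < d): both comparisons
   are known to be 0 or 1, so the bitwise and truth forms agree.  */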
8320 if (TREE_CODE_CLASS (code) == tcc_binary
8321 || TREE_CODE_CLASS (code) == tcc_comparison)
8323 if (TREE_CODE (arg0) == COMPOUND_EXPR)
8324 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
8325 fold_build2 (code, type,
8326 TREE_OPERAND (arg0, 1), op1));
8327 if (TREE_CODE (arg1) == COMPOUND_EXPR
8328 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
8329 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
8330 fold_build2 (code, type,
8331 op0, TREE_OPERAND (arg1, 1)));
8333 if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
8335 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
8336 arg0, arg1,
8337 /*cond_first_p=*/1);
8338 if (tem != NULL_TREE)
8339 return tem;
8342 if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1))
8344 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
8345 arg1, arg0,
8346 /*cond_first_p=*/0);
8347 if (tem != NULL_TREE)
8348 return tem;
8349 }
8351 switch (code)
8352 {
8353 case PLUS_EXPR:
8355 /* A + (-B) -> A - B */
8356 if (TREE_CODE (arg1) == NEGATE_EXPR)
8357 return fold_build2 (MINUS_EXPR, type,
8358 fold_convert (type, arg0),
8359 fold_convert (type, TREE_OPERAND (arg1, 0)));
8360 /* (-A) + B -> B - A */
8361 if (TREE_CODE (arg0) == NEGATE_EXPR
8362 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
8363 return fold_build2 (MINUS_EXPR, type,
8364 fold_convert (type, arg1),
8365 fold_convert (type, TREE_OPERAND (arg0, 0)));
8366 /* Convert ~A + 1 to -A. */
8367 if (INTEGRAL_TYPE_P (type)
8368 && TREE_CODE (arg0) == BIT_NOT_EXPR
8369 && integer_onep (arg1))
8370 return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));
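/* Two's complement identity: with A == 5, ~5 is -6, so ~5 + 1 == -5.  */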
8372 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the
8373 same or one.  */
8374 if ((TREE_CODE (arg0) == MULT_EXPR
8375 || TREE_CODE (arg1) == MULT_EXPR)
8376 && (!FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
8378 tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1);
8379 if (tem)
8380 return tem;
8381 }
8383 if (! FLOAT_TYPE_P (type))
8385 if (integer_zerop (arg1))
8386 return non_lvalue (fold_convert (type, arg0));
8388 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
8389 with a constant, and the two constants have no bits in common,
8390 we should treat this as a BIT_IOR_EXPR since this may produce more
8391 simplifications.  */
8392 if (TREE_CODE (arg0) == BIT_AND_EXPR
8393 && TREE_CODE (arg1) == BIT_AND_EXPR
8394 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8395 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
8396 && integer_zerop (const_binop (BIT_AND_EXPR,
8397 TREE_OPERAND (arg0, 1),
8398 TREE_OPERAND (arg1, 1), 0)))
8400 code = BIT_IOR_EXPR;
8401 goto bit_ior;
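/* Worked example: in (x & 0xF0) + (y & 0x0F) no carry can propagate
   between the two masked operands, so the sum equals
   (x & 0xF0) | (y & 0x0F), a form more folds below understand.  */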
8404 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
8405 (plus (plus (mult) (mult)) (foo)) so that we can
8406 take advantage of the factoring cases below. */
8407 if (((TREE_CODE (arg0) == PLUS_EXPR
8408 || TREE_CODE (arg0) == MINUS_EXPR)
8409 && TREE_CODE (arg1) == MULT_EXPR)
8410 || ((TREE_CODE (arg1) == PLUS_EXPR
8411 || TREE_CODE (arg1) == MINUS_EXPR)
8412 && TREE_CODE (arg0) == MULT_EXPR))
8414 tree parg0, parg1, parg, marg;
8415 enum tree_code pcode;
8417 if (TREE_CODE (arg1) == MULT_EXPR)
8418 parg = arg0, marg = arg1;
8419 else
8420 parg = arg1, marg = arg0;
8421 pcode = TREE_CODE (parg);
8422 parg0 = TREE_OPERAND (parg, 0);
8423 parg1 = TREE_OPERAND (parg, 1);
8424 STRIP_NOPS (parg0);
8425 STRIP_NOPS (parg1);
8427 if (TREE_CODE (parg0) == MULT_EXPR
8428 && TREE_CODE (parg1) != MULT_EXPR)
8429 return fold_build2 (pcode, type,
8430 fold_build2 (PLUS_EXPR, type,
8431 fold_convert (type, parg0),
8432 fold_convert (type, marg)),
8433 fold_convert (type, parg1));
8434 if (TREE_CODE (parg0) != MULT_EXPR
8435 && TREE_CODE (parg1) == MULT_EXPR)
8436 return fold_build2 (PLUS_EXPR, type,
8437 fold_convert (type, parg0),
8438 fold_build2 (pcode, type,
8439 fold_convert (type, marg),
8440 fold_convert (type, parg1)));
8441 }
8444 /* Try replacing &a[i1] + c * i2 with &a[i1 + i2], if c is the step
8445 of the array.  Loop optimizers sometimes produce this type of
8446 expression.  */
8447 if (TREE_CODE (arg0) == ADDR_EXPR)
8449 tem = try_move_mult_to_index (PLUS_EXPR, arg0, arg1);
8450 if (tem)
8451 return fold_convert (type, tem);
8453 else if (TREE_CODE (arg1) == ADDR_EXPR)
8455 tem = try_move_mult_to_index (PLUS_EXPR, arg1, arg0);
8456 if (tem)
8457 return fold_convert (type, tem);
8458 }
8459 }
8460 else
8461 {
8462 /* See if ARG1 is zero and X + ARG1 reduces to X. */
8463 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
8464 return non_lvalue (fold_convert (type, arg0));
8466 /* Likewise if the operands are reversed. */
8467 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
8468 return non_lvalue (fold_convert (type, arg1));
8470 /* Convert X + -C into X - C. */
8471 if (TREE_CODE (arg1) == REAL_CST
8472 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
8474 tem = fold_negate_const (arg1, type);
8475 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
8476 return fold_build2 (MINUS_EXPR, type,
8477 fold_convert (type, arg0),
8478 fold_convert (type, tem));
8481 if (flag_unsafe_math_optimizations
8482 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
8483 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
8484 && (tem = distribute_real_division (code, type, arg0, arg1)))
8485 return tem;
8487 /* Convert x+x into x*2.0. */
8488 if (operand_equal_p (arg0, arg1, 0)
8489 && SCALAR_FLOAT_TYPE_P (type))
8490 return fold_build2 (MULT_EXPR, type, arg0,
8491 build_real (type, dconst2));
8493 /* Convert a + (b*c + d*e) into (a + b*c) + d*e. */
8494 if (flag_unsafe_math_optimizations
8495 && TREE_CODE (arg1) == PLUS_EXPR
8496 && TREE_CODE (arg0) != MULT_EXPR)
8498 tree tree10 = TREE_OPERAND (arg1, 0);
8499 tree tree11 = TREE_OPERAND (arg1, 1);
8500 if (TREE_CODE (tree11) == MULT_EXPR
8501 && TREE_CODE (tree10) == MULT_EXPR)
8502 {
8503 tree tree0;
8504 tree0 = fold_build2 (PLUS_EXPR, type, arg0, tree10);
8505 return fold_build2 (PLUS_EXPR, type, tree0, tree11);
8508 /* Convert (b*c + d*e) + a into b*c + (d*e +a). */
8509 if (flag_unsafe_math_optimizations
8510 && TREE_CODE (arg0) == PLUS_EXPR
8511 && TREE_CODE (arg1) != MULT_EXPR)
8513 tree tree00 = TREE_OPERAND (arg0, 0);
8514 tree tree01 = TREE_OPERAND (arg0, 1);
8515 if (TREE_CODE (tree01) == MULT_EXPR
8516 && TREE_CODE (tree00) == MULT_EXPR)
8517 {
8518 tree tree0;
8519 tree0 = fold_build2 (PLUS_EXPR, type, tree01, arg1);
8520 return fold_build2 (PLUS_EXPR, type, tree00, tree0);
8525 bit_rotate:
8526 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
8527 is a rotate of A by C1 bits. */
8528 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
8529 is a rotate of A by B bits. */
8531 enum tree_code code0, code1;
8532 code0 = TREE_CODE (arg0);
8533 code1 = TREE_CODE (arg1);
8534 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
8535 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
8536 && operand_equal_p (TREE_OPERAND (arg0, 0),
8537 TREE_OPERAND (arg1, 0), 0)
8538 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
8540 tree tree01, tree11;
8541 enum tree_code code01, code11;
8543 tree01 = TREE_OPERAND (arg0, 1);
8544 tree11 = TREE_OPERAND (arg1, 1);
8545 STRIP_NOPS (tree01);
8546 STRIP_NOPS (tree11);
8547 code01 = TREE_CODE (tree01);
8548 code11 = TREE_CODE (tree11);
8549 if (code01 == INTEGER_CST
8550 && code11 == INTEGER_CST
8551 && TREE_INT_CST_HIGH (tree01) == 0
8552 && TREE_INT_CST_HIGH (tree11) == 0
8553 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
8554 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
8555 return build2 (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
8556 code0 == LSHIFT_EXPR ? tree01 : tree11);
8557 else if (code11 == MINUS_EXPR)
8559 tree tree110, tree111;
8560 tree110 = TREE_OPERAND (tree11, 0);
8561 tree111 = TREE_OPERAND (tree11, 1);
8562 STRIP_NOPS (tree110);
8563 STRIP_NOPS (tree111);
8564 if (TREE_CODE (tree110) == INTEGER_CST
8565 && 0 == compare_tree_int (tree110,
8566 TYPE_PRECISION
8567 (TREE_TYPE (TREE_OPERAND
8568 (arg0, 0))))
8569 && operand_equal_p (tree01, tree111, 0))
8570 return build2 ((code0 == LSHIFT_EXPR
8571 ? LROTATE_EXPR
8572 : RROTATE_EXPR),
8573 type, TREE_OPERAND (arg0, 0), tree01);
8575 else if (code01 == MINUS_EXPR)
8577 tree tree010, tree011;
8578 tree010 = TREE_OPERAND (tree01, 0);
8579 tree011 = TREE_OPERAND (tree01, 1);
8580 STRIP_NOPS (tree010);
8581 STRIP_NOPS (tree011);
8582 if (TREE_CODE (tree010) == INTEGER_CST
8583 && 0 == compare_tree_int (tree010,
8584 TYPE_PRECISION
8585 (TREE_TYPE (TREE_OPERAND
8586 (arg0, 0))))
8587 && operand_equal_p (tree11, tree011, 0))
8588 return build2 ((code0 != LSHIFT_EXPR
8589 ? LROTATE_EXPR
8590 : RROTATE_EXPR),
8591 type, TREE_OPERAND (arg0, 0), tree11);
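/* Worked example, A a 32-bit unsigned value:
     (A << 3) + (A >> 29)       ==> A rotated left by 3  (3 + 29 == 32)
     (A << n) + (A >> (32 - n)) ==> A rotated left by n
   The + behaves like | here because the shifted halves share no bits.  */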
8596 associate:
8597 /* In most languages, we can't associate operations on floats through
8598 parentheses.  Rather than remember where the parentheses were, we
8599 don't associate floats at all, unless the user has specified
8600 -funsafe-math-optimizations. */
8602 if (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
8604 tree var0, con0, lit0, minus_lit0;
8605 tree var1, con1, lit1, minus_lit1;
8607 /* Split both trees into variables, constants, and literals. Then
8608 associate each group together, the constants with literals,
8609 then the result with variables. This increases the chances of
8610 literals being recombined later and of generating relocatable
8611 expressions for the sum of a constant and literal. */
8612 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
8613 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
8614 code == MINUS_EXPR);
8616 /* Only do something if we found more than two objects. Otherwise,
8617 nothing has changed and we risk infinite recursion. */
8618 if (2 < ((var0 != 0) + (var1 != 0)
8619 + (con0 != 0) + (con1 != 0)
8620 + (lit0 != 0) + (lit1 != 0)
8621 + (minus_lit0 != 0) + (minus_lit1 != 0)))
8623 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
8624 if (code == MINUS_EXPR)
8627 var0 = associate_trees (var0, var1, code, type);
8628 con0 = associate_trees (con0, con1, code, type);
8629 lit0 = associate_trees (lit0, lit1, code, type);
8630 minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type);
8632 /* Preserve the MINUS_EXPR if the negative part of the literal is
8633 greater than the positive part. Otherwise, the multiplicative
8634 folding code (i.e extract_muldiv) may be fooled in case
8635 unsigned constants are subtracted, like in the following
8636 example: ((X*2 + 4) - 8U)/2. */
8637 if (minus_lit0 && lit0)
8639 if (TREE_CODE (lit0) == INTEGER_CST
8640 && TREE_CODE (minus_lit0) == INTEGER_CST
8641 && tree_int_cst_lt (lit0, minus_lit0))
8643 minus_lit0 = associate_trees (minus_lit0, lit0,
8644 MINUS_EXPR, type);
8645 lit0 = 0;
8646 }
8647 else
8648 {
8649 lit0 = associate_trees (lit0, minus_lit0,
8650 MINUS_EXPR, type);
8651 minus_lit0 = 0;
8652 }
8653 }
8654 if (minus_lit0)
8655 {
8656 if (con0 == 0)
8657 return fold_convert (type,
8658 associate_trees (var0, minus_lit0,
8659 MINUS_EXPR, type));
8660 else
8661 {
8662 con0 = associate_trees (con0, minus_lit0,
8663 MINUS_EXPR, type);
8664 return fold_convert (type,
8665 associate_trees (var0, con0,
8666 PLUS_EXPR, type));
8667 }
8668 }
8670 con0 = associate_trees (con0, lit0, code, type);
8671 return fold_convert (type, associate_trees (var0, con0,
8672 code, type));
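/* Worked example of the split/associate scheme: (x + 5) + 3 splits
   into var0 = x, lit0 = 5, lit1 = 3; the literals are associated
   first, so the whole expression refolds to x + 8.  */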
8673 }
8674 }
8675 return NULL_TREE;
8677 case MINUS_EXPR:
8679 /* A - (-B) -> A + B */
8680 if (TREE_CODE (arg1) == NEGATE_EXPR)
8681 return fold_build2 (PLUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0));
8682 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
8683 if (TREE_CODE (arg0) == NEGATE_EXPR
8684 && (FLOAT_TYPE_P (type)
8685 || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv))
8686 && negate_expr_p (arg1)
8687 && reorder_operands_p (arg0, arg1))
8688 return fold_build2 (MINUS_EXPR, type, negate_expr (arg1),
8689 TREE_OPERAND (arg0, 0));
8690 /* Convert -A - 1 to ~A. */
8691 if (INTEGRAL_TYPE_P (type)
8692 && TREE_CODE (arg0) == NEGATE_EXPR
8693 && integer_onep (arg1))
8694 return fold_build1 (BIT_NOT_EXPR, type,
8695 fold_convert (type, TREE_OPERAND (arg0, 0)));
8697 /* Convert -1 - A to ~A. */
8698 if (INTEGRAL_TYPE_P (type)
8699 && integer_all_onesp (arg0))
8700 return fold_build1 (BIT_NOT_EXPR, type, arg1);
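/* The same two's complement identity as above, read the other way:
   ~A == -A - 1, so with A == 5, -1 - 5 == -6 == ~5.  */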
8702 if (! FLOAT_TYPE_P (type))
8704 if (integer_zerop (arg0))
8705 return negate_expr (fold_convert (type, arg1));
8706 if (integer_zerop (arg1))
8707 return non_lvalue (fold_convert (type, arg0));
8709 /* Fold A - (A & B) into ~B & A. */
8710 if (!TREE_SIDE_EFFECTS (arg0)
8711 && TREE_CODE (arg1) == BIT_AND_EXPR)
8713 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
8714 return fold_build2 (BIT_AND_EXPR, type,
8715 fold_build1 (BIT_NOT_EXPR, type,
8716 TREE_OPERAND (arg1, 0)),
8717 arg0);
8718 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8719 return fold_build2 (BIT_AND_EXPR, type,
8720 fold_build1 (BIT_NOT_EXPR, type,
8721 TREE_OPERAND (arg1, 1)),
8722 arg0);
8725 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
8726 any power of 2 minus 1. */
8727 if (TREE_CODE (arg0) == BIT_AND_EXPR
8728 && TREE_CODE (arg1) == BIT_AND_EXPR
8729 && operand_equal_p (TREE_OPERAND (arg0, 0),
8730 TREE_OPERAND (arg1, 0), 0))
8732 tree mask0 = TREE_OPERAND (arg0, 1);
8733 tree mask1 = TREE_OPERAND (arg1, 1);
8734 tree tem = fold_build1 (BIT_NOT_EXPR, type, mask0);
8736 if (operand_equal_p (tem, mask1, 0))
8738 tem = fold_build2 (BIT_XOR_EXPR, type,
8739 TREE_OPERAND (arg0, 0), mask1);
8740 return fold_build2 (MINUS_EXPR, type, tem, mask1);
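/* Worked example with B == 7 (a power of 2 minus 1) and A == 13:
   (13 & ~7) - (13 & 7) == 8 - 5 == 3, and
   (13 ^ 7) - 7 == 10 - 7 == 3.  */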
8745 /* See if ARG1 is zero and X - ARG1 reduces to X. */
8746 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
8747 return non_lvalue (fold_convert (type, arg0));
8749 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
8750 ARG0 is zero and X + ARG0 reduces to X, since that would mean
8751 (-ARG1 + ARG0) reduces to -ARG1. */
8752 else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
8753 return negate_expr (fold_convert (type, arg1));
8755 /* Fold &x - &x. This can happen from &x.foo - &x.
8756 This is unsafe for certain floats even in non-IEEE formats.
8757 In IEEE, it is unsafe because it gives the wrong answer for NaNs.
8758 Also note that operand_equal_p is always false if an operand
8759 is volatile.  */
8761 if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
8762 && operand_equal_p (arg0, arg1, 0))
8763 return fold_convert (type, integer_zero_node);
8765 /* A - B -> A + (-B) if B is easily negatable. */
8766 if (negate_expr_p (arg1)
8767 && ((FLOAT_TYPE_P (type)
8768 /* Avoid this transformation if B is a positive REAL_CST. */
8769 && (TREE_CODE (arg1) != REAL_CST
8770 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
8771 || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv)))
8772 return fold_build2 (PLUS_EXPR, type,
8773 fold_convert (type, arg0),
8774 fold_convert (type, negate_expr (arg1)));
8776 /* Try folding difference of addresses.  */
8777 {
8778 HOST_WIDE_INT diff;
8780 if ((TREE_CODE (arg0) == ADDR_EXPR
8781 || TREE_CODE (arg1) == ADDR_EXPR)
8782 && ptr_difference_const (arg0, arg1, &diff))
8783 return build_int_cst_type (type, diff);
8786 /* Fold &a[i] - &a[j] to i-j. */
8787 if (TREE_CODE (arg0) == ADDR_EXPR
8788 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
8789 && TREE_CODE (arg1) == ADDR_EXPR
8790 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
8792 tree aref0 = TREE_OPERAND (arg0, 0);
8793 tree aref1 = TREE_OPERAND (arg1, 0);
8794 if (operand_equal_p (TREE_OPERAND (aref0, 0),
8795 TREE_OPERAND (aref1, 0), 0))
8797 tree op0 = fold_convert (type, TREE_OPERAND (aref0, 1));
8798 tree op1 = fold_convert (type, TREE_OPERAND (aref1, 1));
8799 tree esz = array_ref_element_size (aref0);
8800 tree diff = build2 (MINUS_EXPR, type, op0, op1);
8801 return fold_build2 (MULT_EXPR, type, diff,
8802 fold_convert (type, esz));
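/* Worked example: for int a[10], &a[7] - &a[2] folds to
   (7 - 2) * sizeof (int), i.e. the index difference scaled by the
   element size taken from array_ref_element_size.  */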
8807 /* Try replacing &a[i1] - c * i2 with &a[i1 - i2], if c is the step
8808 of the array.  Loop optimizers sometimes produce this type of
8809 expression.  */
8810 if (TREE_CODE (arg0) == ADDR_EXPR)
8812 tem = try_move_mult_to_index (MINUS_EXPR, arg0, arg1);
8813 if (tem)
8814 return fold_convert (type, tem);
8817 if (flag_unsafe_math_optimizations
8818 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
8819 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
8820 && (tem = distribute_real_division (code, type, arg0, arg1)))
8821 return tem;
8823 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the
8824 same or one.  */
8825 if ((TREE_CODE (arg0) == MULT_EXPR
8826 || TREE_CODE (arg1) == MULT_EXPR)
8827 && (!FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
8829 tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1);
8830 if (tem)
8831 return tem;
8832 }
8834 goto associate;
8836 case MULT_EXPR:
8837 /* (-A) * (-B) -> A * B */
8838 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
8839 return fold_build2 (MULT_EXPR, type,
8840 TREE_OPERAND (arg0, 0),
8841 negate_expr (arg1));
8842 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
8843 return fold_build2 (MULT_EXPR, type,
8844 negate_expr (arg0),
8845 TREE_OPERAND (arg1, 0));
8847 if (! FLOAT_TYPE_P (type))
8849 if (integer_zerop (arg1))
8850 return omit_one_operand (type, arg1, arg0);
8851 if (integer_onep (arg1))
8852 return non_lvalue (fold_convert (type, arg0));
8853 /* Transform x * -1 into -x. */
8854 if (integer_all_onesp (arg1))
8855 return fold_convert (type, negate_expr (arg0));
8857 /* (a * (1 << b)) is (a << b) */
8858 if (TREE_CODE (arg1) == LSHIFT_EXPR
8859 && integer_onep (TREE_OPERAND (arg1, 0)))
8860 return fold_build2 (LSHIFT_EXPR, type, arg0,
8861 TREE_OPERAND (arg1, 1));
8862 if (TREE_CODE (arg0) == LSHIFT_EXPR
8863 && integer_onep (TREE_OPERAND (arg0, 0)))
8864 return fold_build2 (LSHIFT_EXPR, type, arg1,
8865 TREE_OPERAND (arg0, 1));
8867 if (TREE_CODE (arg1) == INTEGER_CST
8868 && 0 != (tem = extract_muldiv (op0,
8869 fold_convert (type, arg1),
8870 code, NULL_TREE)))
8871 return fold_convert (type, tem);
8873 /* Optimize z * conj(z) for integer complex numbers. */
8874 if (TREE_CODE (arg0) == CONJ_EXPR
8875 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8876 return fold_mult_zconjz (type, arg1);
8877 if (TREE_CODE (arg1) == CONJ_EXPR
8878 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8879 return fold_mult_zconjz (type, arg0);
8880 }
8881 else
8882 {
8883 /* Maybe fold x * 0 to 0.  The expressions aren't the same
8884 when x is NaN, since x * 0 is also NaN. Nor are they the
8885 same in modes with signed zeros, since multiplying a
8886 negative value by 0 gives -0, not +0. */
8887 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
8888 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
8889 && real_zerop (arg1))
8890 return omit_one_operand (type, arg1, arg0);
8891 /* In IEEE floating point, x*1 is not equivalent to x for snans. */
8892 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
8893 && real_onep (arg1))
8894 return non_lvalue (fold_convert (type, arg0));
8896 /* Transform x * -1.0 into -x. */
8897 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
8898 && real_minus_onep (arg1))
8899 return fold_convert (type, negate_expr (arg0));
8901 /* Convert (C1/X)*C2 into (C1*C2)/X. */
8902 if (flag_unsafe_math_optimizations
8903 && TREE_CODE (arg0) == RDIV_EXPR
8904 && TREE_CODE (arg1) == REAL_CST
8905 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
8907 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
8910 return fold_build2 (RDIV_EXPR, type, tem,
8911 TREE_OPERAND (arg0, 1));
8914 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
8915 if (operand_equal_p (arg0, arg1, 0))
8917 tree tem = fold_strip_sign_ops (arg0);
8918 if (tem != NULL_TREE)
8920 tem = fold_convert (type, tem);
8921 return fold_build2 (MULT_EXPR, type, tem, tem);
8925 /* Optimize z * conj(z) for floating point complex numbers.
8926 Guarded by flag_unsafe_math_optimizations as non-finite
8927 imaginary components don't produce scalar results. */
8928 if (flag_unsafe_math_optimizations
8929 && TREE_CODE (arg0) == CONJ_EXPR
8930 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8931 return fold_mult_zconjz (type, arg1);
8932 if (flag_unsafe_math_optimizations
8933 && TREE_CODE (arg1) == CONJ_EXPR
8934 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8935 return fold_mult_zconjz (type, arg0);
8937 if (flag_unsafe_math_optimizations)
8939 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
8940 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
8942 /* Optimizations of root(...)*root(...). */
8943 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
8945 tree rootfn, arg, arglist;
8946 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
8947 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
8949 /* Optimize sqrt(x)*sqrt(x) as x. */
8950 if (BUILTIN_SQRT_P (fcode0)
8951 && operand_equal_p (arg00, arg10, 0)
8952 && ! HONOR_SNANS (TYPE_MODE (type)))
8955 /* Optimize root(x)*root(y) as root(x*y). */
8956 rootfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
8957 arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
8958 arglist = build_tree_list (NULL_TREE, arg);
8959 return build_function_call_expr (rootfn, arglist);
8962 /* Optimize expN(x)*expN(y) as expN(x+y). */
8963 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
8965 tree expfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
8966 tree arg = fold_build2 (PLUS_EXPR, type,
8967 TREE_VALUE (TREE_OPERAND (arg0, 1)),
8968 TREE_VALUE (TREE_OPERAND (arg1, 1)));
8969 tree arglist = build_tree_list (NULL_TREE, arg);
8970 return build_function_call_expr (expfn, arglist);
8973 /* Optimizations of pow(...)*pow(...). */
8974 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
8975 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
8976 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
8978 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
8979 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
8980 1)));
8981 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
8982 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
8983 1)));
8985 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
8986 if (operand_equal_p (arg01, arg11, 0))
8988 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
8989 tree arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
8990 tree arglist = tree_cons (NULL_TREE, arg,
8991 build_tree_list (NULL_TREE,
8992 arg01));
8993 return build_function_call_expr (powfn, arglist);
8996 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
8997 if (operand_equal_p (arg00, arg10, 0))
8999 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
9000 tree arg = fold_build2 (PLUS_EXPR, type, arg01, arg11);
9001 tree arglist = tree_cons (NULL_TREE, arg00,
9002 build_tree_list (NULL_TREE,
9003 arg));
9004 return build_function_call_expr (powfn, arglist);
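/* These are the usual exponent laws, e.g.
   pow (2.0, 3.0) * pow (5.0, 3.0) == 8 * 125 == 1000 == pow (10.0, 3.0)
   pow (2.0, 3.0) * pow (2.0, 4.0) == 8 * 16  == 128  == pow (2.0, 7.0).  */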
9008 /* Optimize tan(x)*cos(x) as sin(x). */
9009 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
9010 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
9011 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
9012 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
9013 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
9014 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
9015 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
9016 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
9018 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
9020 if (sinfn != NULL_TREE)
9021 return build_function_call_expr (sinfn,
9022 TREE_OPERAND (arg0, 1));
9025 /* Optimize x*pow(x,c) as pow(x,c+1). */
9026 if (fcode1 == BUILT_IN_POW
9027 || fcode1 == BUILT_IN_POWF
9028 || fcode1 == BUILT_IN_POWL)
9030 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
9031 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
9032 1)));
9033 if (TREE_CODE (arg11) == REAL_CST
9034 && ! TREE_CONSTANT_OVERFLOW (arg11)
9035 && operand_equal_p (arg0, arg10, 0))
9037 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
9038 tree arg, arglist;
9039 REAL_VALUE_TYPE c;
9041 c = TREE_REAL_CST (arg11);
9042 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
9043 arg = build_real (type, c);
9044 arglist = build_tree_list (NULL_TREE, arg);
9045 arglist = tree_cons (NULL_TREE, arg0, arglist);
9046 return build_function_call_expr (powfn, arglist);
9050 /* Optimize pow(x,c)*x as pow(x,c+1). */
9051 if (fcode0 == BUILT_IN_POW
9052 || fcode0 == BUILT_IN_POWF
9053 || fcode0 == BUILT_IN_POWL)
9055 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
9056 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
9057 1)));
9058 if (TREE_CODE (arg01) == REAL_CST
9059 && ! TREE_CONSTANT_OVERFLOW (arg01)
9060 && operand_equal_p (arg1, arg00, 0))
9062 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
9063 tree arg, arglist;
9064 REAL_VALUE_TYPE c;
9066 c = TREE_REAL_CST (arg01);
9067 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
9068 arg = build_real (type, c);
9069 arglist = build_tree_list (NULL_TREE, arg);
9070 arglist = tree_cons (NULL_TREE, arg1, arglist);
9071 return build_function_call_expr (powfn, arglist);
9075 /* Optimize x*x as pow(x,2.0), which is expanded as x*x.  */
9076 if (! optimize_size
9077 && operand_equal_p (arg0, arg1, 0))
9078 {
9079 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
9081 if (powfn)
9082 {
9083 tree arg = build_real (type, dconst2);
9084 tree arglist = build_tree_list (NULL_TREE, arg);
9085 arglist = tree_cons (NULL_TREE, arg0, arglist);
9086 return build_function_call_expr (powfn, arglist);
9090 goto associate;
9092 case BIT_IOR_EXPR:
9093 bit_ior:
9095 if (integer_all_onesp (arg1))
9096 return omit_one_operand (type, arg1, arg0);
9097 if (integer_zerop (arg1))
9098 return non_lvalue (fold_convert (type, arg0));
9099 if (operand_equal_p (arg0, arg1, 0))
9100 return non_lvalue (fold_convert (type, arg0));
9102 /* ~X | X is -1.  */
9103 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9104 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9106 t1 = build_int_cst (type, -1);
9107 t1 = force_fit_type (t1, 0, false, false);
9108 return omit_one_operand (type, t1, arg1);
9111 /* X | ~X is -1.  */
9112 if (TREE_CODE (arg1) == BIT_NOT_EXPR
9113 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9115 t1 = build_int_cst (type, -1);
9116 t1 = force_fit_type (t1, 0, false, false);
9117 return omit_one_operand (type, t1, arg0);
9120 /* Canonicalize (X & C1) | C2. */
9121 if (TREE_CODE (arg0) == BIT_AND_EXPR
9122 && TREE_CODE (arg1) == INTEGER_CST
9123 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9125 unsigned HOST_WIDE_INT hi1, lo1, hi2, lo2, mlo, mhi;
9126 int width = TYPE_PRECISION (type);
9127 hi1 = TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1));
9128 lo1 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
9129 hi2 = TREE_INT_CST_HIGH (arg1);
9130 lo2 = TREE_INT_CST_LOW (arg1);
9132 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
9133 if ((hi1 & hi2) == hi1 && (lo1 & lo2) == lo1)
9134 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
9136 if (width > HOST_BITS_PER_WIDE_INT)
9137 {
9138 mhi = (unsigned HOST_WIDE_INT) -1
9139 >> (2 * HOST_BITS_PER_WIDE_INT - width);
9140 mlo = -1;
9141 }
9142 else
9143 {
9144 mhi = 0;
9145 mlo = (unsigned HOST_WIDE_INT) -1
9146 >> (HOST_BITS_PER_WIDE_INT - width);
9147 }
9149 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
9150 if ((~(hi1 | hi2) & mhi) == 0 && (~(lo1 | lo2) & mlo) == 0)
9151 return fold_build2 (BIT_IOR_EXPR, type,
9152 TREE_OPERAND (arg0, 0), arg1);
9154 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2.  */
9155 hi1 &= mhi;
9156 lo1 &= mlo;
9157 if ((hi1 & ~hi2) != hi1 || (lo1 & ~lo2) != lo1)
9158 return fold_build2 (BIT_IOR_EXPR, type,
9159 fold_build2 (BIT_AND_EXPR, type,
9160 TREE_OPERAND (arg0, 0),
9161 build_int_cst_wide (type,
9162 lo1 & ~lo2,
9163 hi1 & ~hi2)),
9164 arg1);
9165 }
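/* Worked examples: (x & 0x0F) | 0x1F folds to plain 0x1F, because
   (C1 & C2) == C1 means the OR already covers every bit the AND can
   contribute; and (x & 0x0F) | 0x06 becomes (x & 0x09) | 0x06 after
   the C1 := C1 & ~C2 minimization above.  */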
9167 /* (X & Y) | Y is (X, Y). */
9168 if (TREE_CODE (arg0) == BIT_AND_EXPR
9169 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9170 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
9171 /* (X & Y) | X is (Y, X). */
9172 if (TREE_CODE (arg0) == BIT_AND_EXPR
9173 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
9174 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
9175 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 1));
9176 /* X | (X & Y) is (Y, X). */
9177 if (TREE_CODE (arg1) == BIT_AND_EXPR
9178 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
9179 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
9180 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 1));
9181 /* X | (Y & X) is (Y, X). */
9182 if (TREE_CODE (arg1) == BIT_AND_EXPR
9183 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
9184 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9185 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 0));
9187 t1 = distribute_bit_expr (code, type, arg0, arg1);
9188 if (t1 != NULL_TREE)
9189 return t1;
9191 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
9193 This results in more efficient code for machines without a NAND
9194 instruction. Combine will canonicalize to the first form
9195 which will allow use of NAND instructions provided by the
9196 backend if they exist. */
9197 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9198 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9200 return fold_build1 (BIT_NOT_EXPR, type,
9201 build2 (BIT_AND_EXPR, type,
9202 TREE_OPERAND (arg0, 0),
9203 TREE_OPERAND (arg1, 0)));
9206 /* See if this can be simplified into a rotate first. If that
9207 is unsuccessful continue in the association code.  */
9208 goto bit_rotate;
9210 case BIT_XOR_EXPR:
9211 if (integer_zerop (arg1))
9212 return non_lvalue (fold_convert (type, arg0));
9213 if (integer_all_onesp (arg1))
9214 return fold_build1 (BIT_NOT_EXPR, type, arg0);
9215 if (operand_equal_p (arg0, arg1, 0))
9216 return omit_one_operand (type, integer_zero_node, arg0);
9218 /* ~X ^ X is -1.  */
9219 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9220 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9222 t1 = build_int_cst (type, -1);
9223 t1 = force_fit_type (t1, 0, false, false);
9224 return omit_one_operand (type, t1, arg1);
9227 /* X ^ ~X is -1.  */
9228 if (TREE_CODE (arg1) == BIT_NOT_EXPR
9229 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9231 t1 = build_int_cst (type, -1);
9232 t1 = force_fit_type (t1, 0, false, false);
9233 return omit_one_operand (type, t1, arg0);
9236 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
9237 with a constant, and the two constants have no bits in common,
9238 we should treat this as a BIT_IOR_EXPR since this may produce more
9240 if (TREE_CODE (arg0) == BIT_AND_EXPR
9241 && TREE_CODE (arg1) == BIT_AND_EXPR
9242 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9243 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9244 && integer_zerop (const_binop (BIT_AND_EXPR,
9245 TREE_OPERAND (arg0, 1),
9246 TREE_OPERAND (arg1, 1), 0)))
9248 code = BIT_IOR_EXPR;
9249 goto bit_ior;
9252 /* (X | Y) ^ X -> Y & ~X.  */
9253 if (TREE_CODE (arg0) == BIT_IOR_EXPR
9254 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9256 tree t2 = TREE_OPERAND (arg0, 1);
9257 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
9258 arg1);
9259 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
9260 fold_convert (type, t1));
9261 return t1;
9264 /* (Y | X) ^ X -> Y & ~X.  */
9265 if (TREE_CODE (arg0) == BIT_IOR_EXPR
9266 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9268 tree t2 = TREE_OPERAND (arg0, 0);
9269 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
9270 arg1);
9271 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
9272 fold_convert (type, t1));
9273 return t1;
9276 /* X ^ (X | Y) -> Y & ~X.  */
9277 if (TREE_CODE (arg1) == BIT_IOR_EXPR
9278 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
9280 tree t2 = TREE_OPERAND (arg1, 1);
9281 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
9282 arg0);
9283 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
9284 fold_convert (type, t1));
9285 return t1;
9288 /* X ^ (Y | X) -> Y & ~X.  */
9289 if (TREE_CODE (arg1) == BIT_IOR_EXPR
9290 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
9292 tree t2 = TREE_OPERAND (arg1, 0);
9293 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
9294 arg0);
9295 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
9296 fold_convert (type, t1));
9297 return t1;
9300 /* Convert ~X ^ ~Y to X ^ Y. */
9301 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9302 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9303 return fold_build2 (code, type,
9304 fold_convert (type, TREE_OPERAND (arg0, 0)),
9305 fold_convert (type, TREE_OPERAND (arg1, 0)));
9307 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
9308 if (TREE_CODE (arg0) == BIT_AND_EXPR
9309 && integer_onep (TREE_OPERAND (arg0, 1))
9310 && integer_onep (arg1))
9311 return fold_build2 (EQ_EXPR, type, arg0,
9312 build_int_cst (TREE_TYPE (arg0), 0));
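/* Both forms test that the low bit of X is clear: with X == 6,
   (6 & 1) ^ 1 == 1, and (6 & 1) == 0 is likewise true.  */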
9314 /* Fold (X & Y) ^ Y as ~X & Y. */
9315 if (TREE_CODE (arg0) == BIT_AND_EXPR
9316 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9318 tem = fold_convert (type, TREE_OPERAND (arg0, 0));
9319 return fold_build2 (BIT_AND_EXPR, type,
9320 fold_build1 (BIT_NOT_EXPR, type, tem),
9321 fold_convert (type, arg1));
9323 /* Fold (X & Y) ^ X as ~Y & X. */
9324 if (TREE_CODE (arg0) == BIT_AND_EXPR
9325 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
9326 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
9328 tem = fold_convert (type, TREE_OPERAND (arg0, 1));
9329 return fold_build2 (BIT_AND_EXPR, type,
9330 fold_build1 (BIT_NOT_EXPR, type, tem),
9331 fold_convert (type, arg1));
9333 /* Fold X ^ (X & Y) as X & ~Y. */
9334 if (TREE_CODE (arg1) == BIT_AND_EXPR
9335 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9337 tem = fold_convert (type, TREE_OPERAND (arg1, 1));
9338 return fold_build2 (BIT_AND_EXPR, type,
9339 fold_convert (type, arg0),
9340 fold_build1 (BIT_NOT_EXPR, type, tem));
9342 /* Fold X ^ (Y & X) as ~Y & X. */
9343 if (TREE_CODE (arg1) == BIT_AND_EXPR
9344 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
9345 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9347 tem = fold_convert (type, TREE_OPERAND (arg1, 0));
9348 return fold_build2 (BIT_AND_EXPR, type,
9349 fold_build1 (BIT_NOT_EXPR, type, tem),
9350 fold_convert (type, arg0));
9353 /* See if this can be simplified into a rotate first. If that
9354 is unsuccessful continue in the association code.  */
9355 goto bit_rotate;
9357 case BIT_AND_EXPR:
9358 if (integer_all_onesp (arg1))
9359 return non_lvalue (fold_convert (type, arg0));
9360 if (integer_zerop (arg1))
9361 return omit_one_operand (type, arg1, arg0);
9362 if (operand_equal_p (arg0, arg1, 0))
9363 return non_lvalue (fold_convert (type, arg0));
9365 /* ~X & X is always zero. */
9366 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9367 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9368 return omit_one_operand (type, integer_zero_node, arg1);
9370 /* X & ~X is always zero. */
9371 if (TREE_CODE (arg1) == BIT_NOT_EXPR
9372 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9373 return omit_one_operand (type, integer_zero_node, arg0);
9375 /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2). */
9376 if (TREE_CODE (arg0) == BIT_IOR_EXPR
9377 && TREE_CODE (arg1) == INTEGER_CST
9378 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9379 return fold_build2 (BIT_IOR_EXPR, type,
9380 fold_build2 (BIT_AND_EXPR, type,
9381 TREE_OPERAND (arg0, 0), arg1),
9382 fold_build2 (BIT_AND_EXPR, type,
9383 TREE_OPERAND (arg0, 1), arg1));
9385 /* (X | Y) & Y is (X, Y). */
9386 if (TREE_CODE (arg0) == BIT_IOR_EXPR
9387 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9388 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
9389 /* (X | Y) & X is (Y, X). */
9390 if (TREE_CODE (arg0) == BIT_IOR_EXPR
9391 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
9392 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
9393 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 1));
9394 /* X & (X | Y) is (Y, X). */
9395 if (TREE_CODE (arg1) == BIT_IOR_EXPR
9396 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
9397 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
9398 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 1));
9399 /* X & (Y | X) is (Y, X). */
9400 if (TREE_CODE (arg1) == BIT_IOR_EXPR
9401 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
9402 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9403 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 0));
9405 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
9406 if (TREE_CODE (arg0) == BIT_XOR_EXPR
9407 && integer_onep (TREE_OPERAND (arg0, 1))
9408 && integer_onep (arg1))
9410 tem = TREE_OPERAND (arg0, 0);
9411 return fold_build2 (EQ_EXPR, type,
9412 fold_build2 (BIT_AND_EXPR, TREE_TYPE (tem), tem,
9413 build_int_cst (TREE_TYPE (tem), 1)),
9414 build_int_cst (TREE_TYPE (tem), 0));
9416 /* Fold ~X & 1 as (X & 1) == 0. */
9417 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9418 && integer_onep (arg1))
9420 tem = TREE_OPERAND (arg0, 0);
9421 return fold_build2 (EQ_EXPR, type,
9422 fold_build2 (BIT_AND_EXPR, TREE_TYPE (tem), tem,
9423 build_int_cst (TREE_TYPE (tem), 1)),
9424 build_int_cst (TREE_TYPE (tem), 0));
9427 /* Fold (X ^ Y) & Y as ~X & Y. */
9428 if (TREE_CODE (arg0) == BIT_XOR_EXPR
9429 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9431 tem = fold_convert (type, TREE_OPERAND (arg0, 0));
9432 return fold_build2 (BIT_AND_EXPR, type,
9433 fold_build1 (BIT_NOT_EXPR, type, tem),
9434 fold_convert (type, arg1));
9436 /* Fold (X ^ Y) & X as ~Y & X. */
9437 if (TREE_CODE (arg0) == BIT_XOR_EXPR
9438 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
9439 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
9441 tem = fold_convert (type, TREE_OPERAND (arg0, 1));
9442 return fold_build2 (BIT_AND_EXPR, type,
9443 fold_build1 (BIT_NOT_EXPR, type, tem),
9444 fold_convert (type, arg1));
9446 /* Fold X & (X ^ Y) as X & ~Y. */
9447 if (TREE_CODE (arg1) == BIT_XOR_EXPR
9448 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9450 tem = fold_convert (type, TREE_OPERAND (arg1, 1));
9451 return fold_build2 (BIT_AND_EXPR, type,
9452 fold_convert (type, arg0),
9453 fold_build1 (BIT_NOT_EXPR, type, tem));
9455 /* Fold X & (Y ^ X) as ~Y & X. */
9456 if (TREE_CODE (arg1) == BIT_XOR_EXPR
9457 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
9458 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9460 tem = fold_convert (type, TREE_OPERAND (arg1, 0));
9461 return fold_build2 (BIT_AND_EXPR, type,
9462 fold_build1 (BIT_NOT_EXPR, type, tem),
9463 fold_convert (type, arg0));
9466 t1 = distribute_bit_expr (code, type, arg0, arg1);
9467 if (t1 != NULL_TREE)
9468 return t1;
9469 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char.  */
9470 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
9471 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
9472 {
9473 unsigned int prec
9474 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
9476 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
9477 && (~TREE_INT_CST_LOW (arg1)
9478 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
9479 return fold_convert (type, TREE_OPERAND (arg0, 0));
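/* Worked example: if c has type unsigned char, (int) c is already in
   [0, 255], so ((int) c & 0377) == (int) c and the mask is dropped.  */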
9482 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
9484 This results in more efficient code for machines without a NOR
9485 instruction. Combine will canonicalize to the first form
9486 which will allow use of NOR instructions provided by the
9487 backend if they exist. */
9488 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9489 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9491 return fold_build1 (BIT_NOT_EXPR, type,
9492 build2 (BIT_IOR_EXPR, type,
9493 TREE_OPERAND (arg0, 0),
9494 TREE_OPERAND (arg1, 0)));
9496 goto associate;
9498 case RDIV_EXPR:
9500 /* Don't touch a floating-point divide by zero unless the mode
9501 of the constant can represent infinity. */
9502 if (TREE_CODE (arg1) == REAL_CST
9503 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
9504 && real_zerop (arg1))
9505 return NULL_TREE;
9507 /* Optimize A / A to 1.0 if we don't care about
9508 NaNs or Infinities. Skip the transformation
9509 for non-real operands. */
9510 if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
9511 && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
9512 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
9513 && operand_equal_p (arg0, arg1, 0))
9515 tree r = build_real (TREE_TYPE (arg0), dconst1);
9517 return omit_two_operands (type, r, arg0, arg1);
9520 /* The complex version of the above A / A optimization. */
9521 if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
9522 && operand_equal_p (arg0, arg1, 0))
9524 tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
9525 if (! HONOR_NANS (TYPE_MODE (elem_type))
9526 && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
9528 tree r = build_real (elem_type, dconst1);
9529 /* omit_two_operands will call fold_convert for us. */
9530 return omit_two_operands (type, r, arg0, arg1);
9534 /* (-A) / (-B) -> A / B */
9535 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
9536 return fold_build2 (RDIV_EXPR, type,
9537 TREE_OPERAND (arg0, 0),
9538 negate_expr (arg1));
9539 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
9540 return fold_build2 (RDIV_EXPR, type,
9541 negate_expr (arg0),
9542 TREE_OPERAND (arg1, 0));
9544 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
9545 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9546 && real_onep (arg1))
9547 return non_lvalue (fold_convert (type, arg0));
9549 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
9550 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9551 && real_minus_onep (arg1))
9552 return non_lvalue (fold_convert (type, negate_expr (arg0)));
9554 /* If ARG1 is a constant, we can convert this to a multiply by the
9555 reciprocal. This does not have the same rounding properties,
9556 so only do this if -funsafe-math-optimizations. We can actually
9557 always safely do it if ARG1 is a power of two, but it's hard to
9558 tell if it is or not in a portable manner. */
9559 if (TREE_CODE (arg1) == REAL_CST)
9560 {
9561 if (flag_unsafe_math_optimizations
9562 && 0 != (tem = const_binop (code, build_real (type, dconst1),
9563 arg1, 0)))
9564 return fold_build2 (MULT_EXPR, type, arg0, tem);
9565 /* Find the reciprocal if optimizing and the result is exact.  */
9566 if (optimize)
9567 {
9568 REAL_VALUE_TYPE r;
9569 r = TREE_REAL_CST (arg1);
9570 if (exact_real_inverse (TYPE_MODE(TREE_TYPE(arg0)), &r))
9572 tem = build_real (type, r);
9573 return fold_build2 (MULT_EXPR, type,
9574 fold_convert (type, arg0), tem);
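/* Worked example: when optimizing, x / 8.0 becomes x * 0.125 because
   1/8 is exact in binary floating point; x / 3.0 becomes
   x * (1.0/3.0) only under -funsafe-math-optimizations, since 1/3
   must round.  */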
9578 /* Convert A/B/C to A/(B*C). */
9579 if (flag_unsafe_math_optimizations
9580 && TREE_CODE (arg0) == RDIV_EXPR)
9581 return fold_build2 (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
9582 fold_build2 (MULT_EXPR, type,
9583 TREE_OPERAND (arg0, 1), arg1));
9585 /* Convert A/(B/C) to (A/B)*C. */
9586 if (flag_unsafe_math_optimizations
9587 && TREE_CODE (arg1) == RDIV_EXPR)
9588 return fold_build2 (MULT_EXPR, type,
9589 fold_build2 (RDIV_EXPR, type, arg0,
9590 TREE_OPERAND (arg1, 0)),
9591 TREE_OPERAND (arg1, 1));
9593 /* Convert C1/(X*C2) into (C1/C2)/X. */
9594 if (flag_unsafe_math_optimizations
9595 && TREE_CODE (arg1) == MULT_EXPR
9596 && TREE_CODE (arg0) == REAL_CST
9597 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
9599 tree tem = const_binop (RDIV_EXPR, arg0,
9600 TREE_OPERAND (arg1, 1), 0);
9601 if (tem)
9602 return fold_build2 (RDIV_EXPR, type, tem,
9603 TREE_OPERAND (arg1, 0));
9606 if (flag_unsafe_math_optimizations)
9608 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
9609 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
9611 /* Optimize sin(x)/cos(x) as tan(x). */
9612 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
9613 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
9614 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
9615 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
9616 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
9618 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
9620 if (tanfn != NULL_TREE)
9621 return build_function_call_expr (tanfn,
9622 TREE_OPERAND (arg0, 1));
9625 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
9626 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
9627 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
9628 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
9629 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
9630 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
9632 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
9634 if (tanfn != NULL_TREE)
9636 tree tmp = TREE_OPERAND (arg0, 1);
9637 tmp = build_function_call_expr (tanfn, tmp);
9638 return fold_build2 (RDIV_EXPR, type,
9639 build_real (type, dconst1), tmp);
9643 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
9644 NaNs or Infinities. */
9645 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
9646 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
9647 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
9649 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
9650 tree arg01 = TREE_VALUE (TREE_OPERAND (arg1, 1));
9652 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
9653 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
9654 && operand_equal_p (arg00, arg01, 0))
9656 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
9658 if (cosfn != NULL_TREE)
9659 return build_function_call_expr (cosfn,
9660 TREE_OPERAND (arg0, 1));
9664 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
9665 NaNs or Infinities. */
9666 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
9667 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
9668 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
9670 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
9671 tree arg01 = TREE_VALUE (TREE_OPERAND (arg1, 1));
9673 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
9674 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
9675 && operand_equal_p (arg00, arg01, 0))
9677 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
9679 if (cosfn != NULL_TREE)
9681 tree tmp = TREE_OPERAND (arg0, 1);
9682 tmp = build_function_call_expr (cosfn, tmp);
9683 return fold_build2 (RDIV_EXPR, type,
9684 build_real (type, dconst1),
9685 tmp);
9690 /* Optimize pow(x,c)/x as pow(x,c-1).  */
9691 if (fcode0 == BUILT_IN_POW
9692 || fcode0 == BUILT_IN_POWF
9693 || fcode0 == BUILT_IN_POWL)
9695 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
9696 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0, 1)));
9697 if (TREE_CODE (arg01) == REAL_CST
9698 && ! TREE_CONSTANT_OVERFLOW (arg01)
9699 && operand_equal_p (arg1, arg00, 0))
9701 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
9702 tree arg, arglist;
9703 REAL_VALUE_TYPE c;
9705 c = TREE_REAL_CST (arg01);
9706 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
9707 arg = build_real (type, c);
9708 arglist = build_tree_list (NULL_TREE, arg);
9709 arglist = tree_cons (NULL_TREE, arg1, arglist);
9710 return build_function_call_expr (powfn, arglist);
9714 /* Optimize x/expN(y) into x*expN(-y). */
9715 if (BUILTIN_EXPONENT_P (fcode1))
9717 tree expfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
9718 tree arg = negate_expr (TREE_VALUE (TREE_OPERAND (arg1, 1)));
9719 tree arglist = build_tree_list (NULL_TREE,
9720 fold_convert (type, arg));
9721 arg1 = build_function_call_expr (expfn, arglist);
9722 return fold_build2 (MULT_EXPR, type, arg0, arg1);
9725 /* Optimize x/pow(y,z) into x*pow(y,-z). */
9726 if (fcode1 == BUILT_IN_POW
9727 || fcode1 == BUILT_IN_POWF
9728 || fcode1 == BUILT_IN_POWL)
9730 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
9731 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
9732 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1, 1)));
9733 tree neg11 = fold_convert (type, negate_expr (arg11));
9734 tree arglist = tree_cons(NULL_TREE, arg10,
9735 build_tree_list (NULL_TREE, neg11));
9736 arg1 = build_function_call_expr (powfn, arglist);
9737 return fold_build2 (MULT_EXPR, type, arg0, arg1);
9740 return NULL_TREE;
9742 case TRUNC_DIV_EXPR:
9743 case FLOOR_DIV_EXPR:
9744 /* Simplify A / (B << N) where A and B are positive and B is
9745 a power of 2, to A >> (N + log2(B)). */
9746 if (TREE_CODE (arg1) == LSHIFT_EXPR
9747 && (TYPE_UNSIGNED (type) || tree_expr_nonnegative_p (arg0)))
9749 tree sval = TREE_OPERAND (arg1, 0);
9750 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
9752 tree sh_cnt = TREE_OPERAND (arg1, 1);
9753 unsigned long pow2 = exact_log2 (TREE_INT_CST_LOW (sval));
9755 sh_cnt = fold_build2 (PLUS_EXPR, TREE_TYPE (sh_cnt),
9756 sh_cnt, build_int_cst (NULL_TREE, pow2));
9757 return fold_build2 (RSHIFT_EXPR, type,
9758 fold_convert (type, arg0), sh_cnt);
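/* Worked example for unsigned A: A / (4 << n) == A >> (n + 2),
   since 4 << n == 1 << (n + 2) and exact_log2 (4) == 2.  */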
9761 /* Fall thru */
9763 case ROUND_DIV_EXPR:
9764 case CEIL_DIV_EXPR:
9765 case EXACT_DIV_EXPR:
9766 if (integer_onep (arg1))
9767 return non_lvalue (fold_convert (type, arg0));
9768 if (integer_zerop (arg1))
9769 return NULL_TREE;
9770 /* X / -1 is -X.  */
9771 if (!TYPE_UNSIGNED (type)
9772 && TREE_CODE (arg1) == INTEGER_CST
9773 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
9774 && TREE_INT_CST_HIGH (arg1) == -1)
9775 return fold_convert (type, negate_expr (arg0));
9777 /* Convert -A / -B to A / B when the type is signed and overflow is
9778 undefined.  */
9779 if (!TYPE_UNSIGNED (type) && !flag_wrapv
9780 && TREE_CODE (arg0) == NEGATE_EXPR
9781 && negate_expr_p (arg1))
9782 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
9783 negate_expr (arg1));
9784 if (!TYPE_UNSIGNED (type) && !flag_wrapv
9785 && TREE_CODE (arg1) == NEGATE_EXPR
9786 && negate_expr_p (arg0))
9787 return fold_build2 (code, type, negate_expr (arg0),
9788 TREE_OPERAND (arg1, 0));
9790 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
9791 operation, EXACT_DIV_EXPR.
9793 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
9794 At one time others generated faster code, but it's not clear if they do
9795 after the last round of changes to the DIV code in expmed.c.  */
9796 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
9797 && multiple_of_p (type, arg0, arg1))
9798 return fold_build2 (EXACT_DIV_EXPR, type, arg0, arg1);
9800 if (TREE_CODE (arg1) == INTEGER_CST
9801 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE)))
9802 return fold_convert (type, tem);
9804 return NULL_TREE;
9806 case CEIL_MOD_EXPR:
9807 case FLOOR_MOD_EXPR:
9808 case ROUND_MOD_EXPR:
9809 case TRUNC_MOD_EXPR:
9810 /* X % 1 is always zero, but be sure to preserve any side
9811 effects in X.  */
9812 if (integer_onep (arg1))
9813 return omit_one_operand (type, integer_zero_node, arg0);
9815 /* X % 0, return X % 0 unchanged so that we can get the
9816 proper warnings and errors. */
9817 if (integer_zerop (arg1))
9818 return NULL_TREE;
9820 /* 0 % X is always zero, but be sure to preserve any side
9821 effects in X. Place this after checking for X == 0. */
9822 if (integer_zerop (arg0))
9823 return omit_one_operand (type, integer_zero_node, arg1);
9825 /* X % -1 is zero. */
9826 if (!TYPE_UNSIGNED (type)
9827 && TREE_CODE (arg1) == INTEGER_CST
9828 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
9829 && TREE_INT_CST_HIGH (arg1) == -1)
9830 return omit_one_operand (type, integer_zero_node, arg0);
9832 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
9833 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
9834 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
9835 && (TYPE_UNSIGNED (type) || tree_expr_nonnegative_p (arg0)))
9836 {
9837 tree c = arg1;
9838 /* Also optimize A % (C << N) where C is a power of 2,
9839 to A & ((C << N) - 1).  */
9840 if (TREE_CODE (arg1) == LSHIFT_EXPR)
9841 c = TREE_OPERAND (arg1, 0);
9843 if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
9845 tree mask = fold_build2 (MINUS_EXPR, TREE_TYPE (arg1),
9846 arg1, integer_one_node);
9847 return fold_build2 (BIT_AND_EXPR, type,
9848 fold_convert (type, arg0),
9849 fold_convert (type, mask));
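/* Worked example for unsigned X: X % 8 == X & 7, and
   X % (2 << n) == X & ((2 << n) - 1); for the shifted form only the
   constant C (here 2) needs to be a power of two.  */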
9853 /* X % -C is the same as X % C. */
9854 if (code == TRUNC_MOD_EXPR
9855 && !TYPE_UNSIGNED (type)
9856 && TREE_CODE (arg1) == INTEGER_CST
9857 && !TREE_CONSTANT_OVERFLOW (arg1)
9858 && TREE_INT_CST_HIGH (arg1) < 0
9860 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
9861 && !sign_bit_p (arg1, arg1))
9862 return fold_build2 (code, type, fold_convert (type, arg0),
9863 fold_convert (type, negate_expr (arg1)));
9865 /* X % -Y is the same as X % Y. */
9866 if (code == TRUNC_MOD_EXPR
9867 && !TYPE_UNSIGNED (type)
9868 && TREE_CODE (arg1) == NEGATE_EXPR
9869 && !flag_trapv)
9870 return fold_build2 (code, type, fold_convert (type, arg0),
9871 fold_convert (type, TREE_OPERAND (arg1, 0)));
9873 if (TREE_CODE (arg1) == INTEGER_CST
9874 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE)))
9875 return fold_convert (type, tem);
9877 return NULL_TREE;
9879 case LROTATE_EXPR:
9880 case RROTATE_EXPR:
9881 if (integer_all_onesp (arg0))
9882 return omit_one_operand (type, arg0, arg1);
9883 goto shift;
9885 case RSHIFT_EXPR:
9886 /* Optimize -1 >> x for arithmetic right shifts.  */
9887 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type))
9888 return omit_one_operand (type, arg0, arg1);
9889 /* ... fall through ...  */
9891 case LSHIFT_EXPR:
9892 shift:
9893 if (integer_zerop (arg1))
9894 return non_lvalue (fold_convert (type, arg0));
9895 if (integer_zerop (arg0))
9896 return omit_one_operand (type, arg0, arg1);
9898 /* Since negative shift count is not well-defined,
9899 don't try to compute it in the compiler. */
9900 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
9901 return NULL_TREE;
9903 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
9904 if (TREE_CODE (op0) == code && host_integerp (arg1, false)
9905 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
9906 && host_integerp (TREE_OPERAND (arg0, 1), false)
9907 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
9909 HOST_WIDE_INT low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
9910 + TREE_INT_CST_LOW (arg1));
9912 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
9913 being well defined. */
9914 if (low >= TYPE_PRECISION (type))
9915 {
9916 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
9917 low = low % TYPE_PRECISION (type);
9918 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
9919 return build_int_cst (type, 0);
9920 else
9921 low = TYPE_PRECISION (type) - 1;
9922 }
9924 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
9925 build_int_cst (type, low));
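/* Worked examples: (x << 3) << 5 folds to x << 8; for 32-bit x,
   (x << 20) << 20 folds to 0 since the combined count reaches the
   precision, while rotates just reduce the count modulo 32.  */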
9928 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
9929 into x & ((unsigned)-1 >> c) for unsigned types. */
9930 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
9931 || (TYPE_UNSIGNED (type)
9932 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
9933 && host_integerp (arg1, false)
9934 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
9935 && host_integerp (TREE_OPERAND (arg0, 1), false)
9936 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
9938 HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
9939 HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
9940 tree lshift;
9941 tree arg00;
9943 if (low0 == low1)
9944 {
9945 arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));
9947 lshift = build_int_cst (type, -1);
9948 lshift = int_const_binop (code, lshift, arg1, 0);
9950 return fold_build2 (BIT_AND_EXPR, type, arg00, lshift);
9954 /* Rewrite an LROTATE_EXPR by a constant into an
9955 RROTATE_EXPR by a new constant. */
9956 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
9958 tree tem = build_int_cst (NULL_TREE,
9959 GET_MODE_BITSIZE (TYPE_MODE (type)));
9960 tem = fold_convert (TREE_TYPE (arg1), tem);
9961 tem = const_binop (MINUS_EXPR, tem, arg1, 0);
9962 return fold_build2 (RROTATE_EXPR, type, arg0, tem);
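/* Worked example, 32-bit type: x lrotate 5 == x rrotate 27, so the
   folder only has to keep one rotate direction canonical for
   constant counts.  */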
9965 /* If we have a rotate of a bit operation with the rotate count and
9966 the second operand of the bit operation both constant,
9967 permute the two operations. */
9968 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
9969 && (TREE_CODE (arg0) == BIT_AND_EXPR
9970 || TREE_CODE (arg0) == BIT_IOR_EXPR
9971 || TREE_CODE (arg0) == BIT_XOR_EXPR)
9972 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9973 return fold_build2 (TREE_CODE (arg0), type,
9974 fold_build2 (code, type,
9975 TREE_OPERAND (arg0, 0), arg1),
9976 fold_build2 (code, type,
9977 TREE_OPERAND (arg0, 1), arg1));
9979 /* Two consecutive rotates adding up to the width of the mode can
9980 be ignored.  */
9981 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
9982 && TREE_CODE (arg0) == RROTATE_EXPR
9983 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9984 && TREE_INT_CST_HIGH (arg1) == 0
9985 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
9986 && ((TREE_INT_CST_LOW (arg1)
9987 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
9988 == (unsigned int) GET_MODE_BITSIZE (TYPE_MODE (type))))
9989 return TREE_OPERAND (arg0, 0);
9991 return NULL_TREE;
9993 case MIN_EXPR:
9994 if (operand_equal_p (arg0, arg1, 0))
9995 return omit_one_operand (type, arg0, arg1);
9996 if (INTEGRAL_TYPE_P (type)
9997 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
9998 return omit_one_operand (type, arg1, arg0);
9999 tem = fold_minmax (MIN_EXPR, type, arg0, arg1);
10000 if (tem)
10001 return tem;
10002 break;
10004 case MAX_EXPR:
10005 if (operand_equal_p (arg0, arg1, 0))
10006 return omit_one_operand (type, arg0, arg1);
10007 if (INTEGRAL_TYPE_P (type)
10008 && TYPE_MAX_VALUE (type)
10009 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
10010 return omit_one_operand (type, arg1, arg0);
10011 tem = fold_minmax (MAX_EXPR, type, arg0, arg1);
10016 case TRUTH_ANDIF_EXPR:
10017 /* Note that the operands of this must be ints
10018 and their values must be 0 or 1.
10019 ("true" is a fixed value perhaps depending on the language.) */
10020 /* If first arg is constant zero, return it. */
10021 if (integer_zerop (arg0))
10022 return fold_convert (type, arg0);
10023 case TRUTH_AND_EXPR:
10024 /* If either arg is constant true, drop it. */
10025 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10026 return non_lvalue (fold_convert (type, arg1));
10027 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
10028 /* Preserve sequence points. */
10029 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
10030 return non_lvalue (fold_convert (type, arg0));
10031 /* If second arg is constant zero, result is zero, but first arg
10032 must be evaluated. */
10033 if (integer_zerop (arg1))
10034 return omit_one_operand (type, arg1, arg0);
10035 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
10036 case will be handled here. */
10037 if (integer_zerop (arg0))
10038 return omit_one_operand (type, arg0, arg1);
10040 /* !X && X is always false. */
10041 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10042 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10043 return omit_one_operand (type, integer_zero_node, arg1);
10044 /* X && !X is always false. */
10045 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10046 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10047 return omit_one_operand (type, integer_zero_node, arg0);
10049 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
10050 means A >= Y && A != MAX, but in this case we know that
10051 A < X <= MAX. */
10053 if (!TREE_SIDE_EFFECTS (arg0)
10054 && !TREE_SIDE_EFFECTS (arg1))
10056 tem = fold_to_nonsharp_ineq_using_bound (arg0, arg1);
10057 if (tem && !operand_equal_p (tem, arg0, 0))
10058 return fold_build2 (code, type, tem, arg1);
10060 tem = fold_to_nonsharp_ineq_using_bound (arg1, arg0);
10061 if (tem && !operand_equal_p (tem, arg1, 0))
10062 return fold_build2 (code, type, arg0, tem);
10066 /* We only do these simplifications if we are optimizing. */
10070 /* Check for things like (A || B) && (A || C). We can convert this
10071 to A || (B && C). Note that either operator can be any of the four
10072 truth and/or operations and the transformation will still be
10073 valid. Also note that we only care about order for the
10074 ANDIF and ORIF operators. If B contains side effects, this
10075 might change the truth-value of A. */
10076 if (TREE_CODE (arg0) == TREE_CODE (arg1)
10077 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
10078 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
10079 || TREE_CODE (arg0) == TRUTH_AND_EXPR
10080 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
10081 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
10083 tree a00 = TREE_OPERAND (arg0, 0);
10084 tree a01 = TREE_OPERAND (arg0, 1);
10085 tree a10 = TREE_OPERAND (arg1, 0);
10086 tree a11 = TREE_OPERAND (arg1, 1);
10087 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
10088 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
10089 && (code == TRUTH_AND_EXPR
10090 || code == TRUTH_OR_EXPR));
10092 if (operand_equal_p (a00, a10, 0))
10093 return fold_build2 (TREE_CODE (arg0), type, a00,
10094 fold_build2 (code, type, a01, a11));
10095 else if (commutative && operand_equal_p (a00, a11, 0))
10096 return fold_build2 (TREE_CODE (arg0), type, a00,
10097 fold_build2 (code, type, a01, a10));
10098 else if (commutative && operand_equal_p (a01, a10, 0))
10099 return fold_build2 (TREE_CODE (arg0), type, a01,
10100 fold_build2 (code, type, a00, a11));
10102 /* This case is tricky because we must either have commutative
10103 operators or else A10 must not have side-effects. */
10105 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
10106 && operand_equal_p (a01, a11, 0))
10107 return fold_build2 (TREE_CODE (arg0), type,
10108 fold_build2 (code, type, a00, a10),
10112 /* See if we can build a range comparison. */
10113 if (0 != (tem = fold_range_test (code, type, op0, op1)))
10116 /* Check for the possibility of merging component references. If our
10117 lhs is another similar operation, try to merge its rhs with our
10118 rhs. Then try to merge our lhs and rhs. */
10119 if (TREE_CODE (arg0) == code
10120 && 0 != (tem = fold_truthop (code, type,
10121 TREE_OPERAND (arg0, 1), arg1)))
10122 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
10124 if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
10129 case TRUTH_ORIF_EXPR:
10130 /* Note that the operands of this must be ints
10131 and their values must be 0 or true.
10132 ("true" is a fixed value perhaps depending on the language.) */
10133 /* If first arg is constant true, return it. */
10134 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10135 return fold_convert (type, arg0);
10136 case TRUTH_OR_EXPR:
10137 /* If either arg is constant zero, drop it. */
10138 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
10139 return non_lvalue (fold_convert (type, arg1));
10140 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
10141 /* Preserve sequence points. */
10142 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
10143 return non_lvalue (fold_convert (type, arg0));
10144 /* If second arg is constant true, result is true, but we must
10145 evaluate first arg. */
10146 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
10147 return omit_one_operand (type, arg1, arg0);
10148 /* Likewise for first arg, but note this only occurs here for
10149 TRUTH_OR_EXPR. */
10150 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10151 return omit_one_operand (type, arg0, arg1);
10153 /* !X || X is always true. */
10154 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10155 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10156 return omit_one_operand (type, integer_one_node, arg1);
10157 /* X || !X is always true. */
10158 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10159 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10160 return omit_one_operand (type, integer_one_node, arg0);
10164 case TRUTH_XOR_EXPR:
10165 /* If the second arg is constant zero, drop it. */
10166 if (integer_zerop (arg1))
10167 return non_lvalue (fold_convert (type, arg0));
10168 /* If the second arg is constant true, this is a logical inversion. */
10169 if (integer_onep (arg1))
10171 /* Only call invert_truthvalue if operand is a truth value. */
10172 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
10173 tem = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg0), arg0);
10174 else
10175 tem = invert_truthvalue (arg0);
10176 return non_lvalue (fold_convert (type, tem));
10178 /* Identical arguments cancel to zero. */
10179 if (operand_equal_p (arg0, arg1, 0))
10180 return omit_one_operand (type, integer_zero_node, arg0);
10182 /* !X ^ X is always true. */
10183 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10184 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10185 return omit_one_operand (type, integer_one_node, arg1);
10187 /* X ^ !X is always true. */
10188 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10189 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10190 return omit_one_operand (type, integer_one_node, arg0);
10196 tem = fold_comparison (code, type, op0, op1);
10197 if (tem != NULL_TREE)
10200 /* bool_var != 0 becomes bool_var. */
10201 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
10202 && code == NE_EXPR)
10203 return non_lvalue (fold_convert (type, arg0));
10205 /* bool_var == 1 becomes bool_var. */
10206 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
10207 && code == EQ_EXPR)
10208 return non_lvalue (fold_convert (type, arg0));
10210 /* bool_var != 1 becomes !bool_var. */
10211 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
10212 && code == NE_EXPR)
10213 return fold_build1 (TRUTH_NOT_EXPR, type, arg0);
10215 /* bool_var == 0 becomes !bool_var. */
10216 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
10217 && code == EQ_EXPR)
10218 return fold_build1 (TRUTH_NOT_EXPR, type, arg0);
10220 /* ~a != C becomes a != ~C where C is a constant. Likewise for ==. */
10221 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10222 && TREE_CODE (arg1) == INTEGER_CST)
10223 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
10224 fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
10227 /* If this is an equality comparison of the address of a non-weak
10228 object against zero, then we know the result. */
10229 if (TREE_CODE (arg0) == ADDR_EXPR
10230 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
10231 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
10232 && integer_zerop (arg1))
10233 return constant_boolean_node (code != EQ_EXPR, type);
10235 /* If this is an equality comparison of the address of two non-weak,
10236 unaliased symbols neither of which are extern (since we do not
10237 have access to attributes for externs), then we know the result. */
10238 if (TREE_CODE (arg0) == ADDR_EXPR
10239 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
10240 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
10241 && ! lookup_attribute ("alias",
10242 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
10243 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
10244 && TREE_CODE (arg1) == ADDR_EXPR
10245 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
10246 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
10247 && ! lookup_attribute ("alias",
10248 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
10249 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
10251 /* We know that we're looking at the address of two
10252 non-weak, unaliased, static _DECL nodes.
10254 It is both wasteful and incorrect to call operand_equal_p
10255 to compare the two ADDR_EXPR nodes. It is wasteful in that
10256 all we need to do is test pointer equality for the arguments
10257 to the two ADDR_EXPR nodes. It is incorrect to use
10258 operand_equal_p as that function is NOT equivalent to a
10259 C equality test. It can in fact return false for two
10260 objects which would test as equal using the C equality
10261 operator. */
10262 bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
10263 return constant_boolean_node (equal
10264 ? code == EQ_EXPR : code != EQ_EXPR,
10268 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
10269 a MINUS_EXPR of a constant, we can convert it into a comparison with
10270 a revised constant as long as no overflow occurs. */
10271 if (TREE_CODE (arg1) == INTEGER_CST
10272 && (TREE_CODE (arg0) == PLUS_EXPR
10273 || TREE_CODE (arg0) == MINUS_EXPR)
10274 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10275 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
10276 ? MINUS_EXPR : PLUS_EXPR,
10277 arg1, TREE_OPERAND (arg0, 1), 0))
10278 && ! TREE_CONSTANT_OVERFLOW (tem))
10279 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
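/* For instance (illustrative): x + 5 == 7 folds to x == 2 and
   x - 3 != 4 folds to x != 7, provided the adjusted constant
   (7 - 5, or 4 + 3) can be computed without overflow. */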
10281 /* Similarly for a NEGATE_EXPR. */
10282 if (TREE_CODE (arg0) == NEGATE_EXPR
10283 && TREE_CODE (arg1) == INTEGER_CST
10284 && 0 != (tem = negate_expr (arg1))
10285 && TREE_CODE (tem) == INTEGER_CST
10286 && ! TREE_CONSTANT_OVERFLOW (tem))
10287 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
10289 /* If we have X - Y == 0, we can convert that to X == Y and similarly
10290 for !=. Don't do this for ordered comparisons due to overflow. */
10291 if (TREE_CODE (arg0) == MINUS_EXPR
10292 && integer_zerop (arg1))
10293 return fold_build2 (code, type,
10294 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
10296 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
10297 if (TREE_CODE (arg0) == ABS_EXPR
10298 && (integer_zerop (arg1) || real_zerop (arg1)))
10299 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), arg1);
10301 /* If this is an EQ or NE comparison with zero and ARG0 is
10302 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
10303 two operations, but the latter can be done in one less insn
10304 on machines that have only two-operand insns or on which a
10305 constant cannot be the first operand. */
10306 if (TREE_CODE (arg0) == BIT_AND_EXPR
10307 && integer_zerop (arg1))
10309 tree arg00 = TREE_OPERAND (arg0, 0);
10310 tree arg01 = TREE_OPERAND (arg0, 1);
10311 if (TREE_CODE (arg00) == LSHIFT_EXPR
10312 && integer_onep (TREE_OPERAND (arg00, 0)))
10314 fold_build2 (code, type,
10315 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
10316 build2 (RSHIFT_EXPR, TREE_TYPE (arg00),
10317 arg01, TREE_OPERAND (arg00, 1)),
10318 fold_convert (TREE_TYPE (arg0),
10319 integer_one_node)),
10321 else if (TREE_CODE (TREE_OPERAND (arg0, 1)) == LSHIFT_EXPR
10322 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 1), 0)))
10324 fold_build2 (code, type,
10325 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
10326 build2 (RSHIFT_EXPR, TREE_TYPE (arg01),
10327 arg00, TREE_OPERAND (arg01, 1)),
10328 fold_convert (TREE_TYPE (arg0),
10329 integer_one_node)),
10333 /* If this is an NE or EQ comparison of zero against the result of a
10334 signed MOD operation whose second operand is a power of 2, make
10335 the MOD operation unsigned since it is simpler and equivalent. */
10336 if (integer_zerop (arg1)
10337 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
10338 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
10339 || TREE_CODE (arg0) == CEIL_MOD_EXPR
10340 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
10341 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
10342 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10344 tree newtype = lang_hooks.types.unsigned_type (TREE_TYPE (arg0));
10345 tree newmod = fold_build2 (TREE_CODE (arg0), newtype,
10346 fold_convert (newtype,
10347 TREE_OPERAND (arg0, 0)),
10348 fold_convert (newtype,
10349 TREE_OPERAND (arg0, 1)));
10351 return fold_build2 (code, type, newmod,
10352 fold_convert (newtype, arg1));
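/* For instance (illustrative, for a signed int x): (x % 4) == 0
   becomes ((unsigned) x % 4U) == 0. Only the zero/nonzero result is
   observed, and for a power-of-two divisor the signed and unsigned
   MOD agree on that, while the unsigned form expands to a simple
   mask. */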
10355 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
10356 C1 is a valid shift constant, and C2 is a power of two, i.e.
10357 a single bit. */
10358 if (TREE_CODE (arg0) == BIT_AND_EXPR
10359 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
10360 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
10361 == INTEGER_CST
10362 && integer_pow2p (TREE_OPERAND (arg0, 1))
10363 && integer_zerop (arg1))
10365 tree itype = TREE_TYPE (arg0);
10366 unsigned HOST_WIDE_INT prec = TYPE_PRECISION (itype);
10367 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
10369 /* Check for a valid shift count. */
10370 if (TREE_INT_CST_HIGH (arg001) == 0
10371 && TREE_INT_CST_LOW (arg001) < prec)
10373 tree arg01 = TREE_OPERAND (arg0, 1);
10374 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
10375 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
10376 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
10377 can be rewritten as (X & (C2 << C1)) != 0. */
10378 if ((log2 + TREE_INT_CST_LOW (arg01)) < prec)
10380 tem = fold_build2 (LSHIFT_EXPR, itype, arg01, arg001);
10381 tem = fold_build2 (BIT_AND_EXPR, itype, arg000, tem);
10382 return fold_build2 (code, type, tem, arg1);
10384 /* Otherwise, for signed (arithmetic) shifts,
10385 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
10386 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
10387 else if (!TYPE_UNSIGNED (itype))
10388 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
10389 arg000, build_int_cst (itype, 0));
10390 /* Otherwise, for unsigned (logical) shifts,
10391 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
10392 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
10393 else
10394 return omit_one_operand (type,
10395 code == EQ_EXPR ? integer_one_node
10396 : integer_zero_node,
10397 arg000);
10401 /* If this is an NE comparison of zero with an AND of one, remove the
10402 comparison since the AND will give the correct value. */
10403 if (code == NE_EXPR
10404 && integer_zerop (arg1)
10405 && TREE_CODE (arg0) == BIT_AND_EXPR
10406 && integer_onep (TREE_OPERAND (arg0, 1)))
10407 return fold_convert (type, arg0);
10409 /* If we have (A & C) == C where C is a power of 2, convert this into
10410 (A & C) != 0. Similarly for NE_EXPR. */
10411 if (TREE_CODE (arg0) == BIT_AND_EXPR
10412 && integer_pow2p (TREE_OPERAND (arg0, 1))
10413 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10414 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
10415 arg0, fold_convert (TREE_TYPE (arg0),
10416 integer_zero_node));
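/* For instance (illustrative): since 8 is a power of two, x & 8 can
   only equal 8 when that single bit is set, so (x & 8) == 8 folds to
   (x & 8) != 0. */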
10418 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
10419 bit, then fold the expression into A < 0 or A >= 0. */
10420 tem = fold_single_bit_test_into_sign_test (code, arg0, arg1, type);
10424 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
10425 Similarly for NE_EXPR. */
10426 if (TREE_CODE (arg0) == BIT_AND_EXPR
10427 && TREE_CODE (arg1) == INTEGER_CST
10428 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10430 tree notc = fold_build1 (BIT_NOT_EXPR,
10431 TREE_TYPE (TREE_OPERAND (arg0, 1)),
10432 TREE_OPERAND (arg0, 1));
10433 tree dandnotc = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
10435 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
10436 if (integer_nonzerop (dandnotc))
10437 return omit_one_operand (type, rslt, arg0);
10440 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
10441 Similarly for NE_EXPR. */
10442 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10443 && TREE_CODE (arg1) == INTEGER_CST
10444 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10446 tree notd = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
10447 tree candnotd = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
10448 TREE_OPERAND (arg0, 1), notd);
10449 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
10450 if (integer_nonzerop (candnotd))
10451 return omit_one_operand (type, rslt, arg0);
10454 /* If this is a comparison of a field, we may be able to simplify it. */
10455 if (((TREE_CODE (arg0) == COMPONENT_REF
10456 && lang_hooks.can_use_bit_fields_p ())
10457 || TREE_CODE (arg0) == BIT_FIELD_REF)
10458 /* Handle the constant case even without -O
10459 to make sure the warnings are given. */
10460 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
10462 t1 = optimize_bit_field_compare (code, type, arg0, arg1);
10467 /* Optimize comparisons of strlen vs zero to a compare of the
10468 first character of the string vs zero. To wit,
10469 strlen(ptr) == 0 => *ptr == 0
10470 strlen(ptr) != 0 => *ptr != 0
10471 Other cases should reduce to one of these two (or a constant)
10472 due to the return value of strlen being unsigned. */
10473 if (TREE_CODE (arg0) == CALL_EXPR
10474 && integer_zerop (arg1))
10476 tree fndecl = get_callee_fndecl (arg0);
10480 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
10481 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
10482 && (arglist = TREE_OPERAND (arg0, 1))
10483 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE
10484 && ! TREE_CHAIN (arglist))
10486 tree iref = build_fold_indirect_ref (TREE_VALUE (arglist));
10487 return fold_build2 (code, type, iref,
10488 build_int_cst (TREE_TYPE (iref), 0));
10492 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
10493 of X. Similarly fold (X >> C) == 0 into X >= 0. */
10494 if (TREE_CODE (arg0) == RSHIFT_EXPR
10495 && integer_zerop (arg1)
10496 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10498 tree arg00 = TREE_OPERAND (arg0, 0);
10499 tree arg01 = TREE_OPERAND (arg0, 1);
10500 tree itype = TREE_TYPE (arg00);
10501 if (TREE_INT_CST_HIGH (arg01) == 0
10502 && TREE_INT_CST_LOW (arg01)
10503 == (unsigned HOST_WIDE_INT) (TYPE_PRECISION (itype) - 1))
10505 if (TYPE_UNSIGNED (itype))
10507 itype = lang_hooks.types.signed_type (itype);
10508 arg00 = fold_convert (itype, arg00);
10510 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
10511 type, arg00, build_int_cst (itype, 0));
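/* For instance (illustrative, for a 32-bit int x): (x >> 31) != 0
   folds to x < 0 and (x >> 31) == 0 folds to x >= 0, because
   shifting right by precision - 1 leaves only the sign bit; an
   unsigned x is first converted to the corresponding signed type. */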
10515 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
10516 if (integer_zerop (arg1)
10517 && TREE_CODE (arg0) == BIT_XOR_EXPR)
10518 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
10519 TREE_OPERAND (arg0, 1));
10521 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
10522 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10523 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10524 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
10525 build_int_cst (TREE_TYPE (arg1), 0));
10526 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
10527 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10528 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10529 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10530 return fold_build2 (code, type, TREE_OPERAND (arg0, 1),
10531 build_int_cst (TREE_TYPE (arg1), 0));
10533 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
10534 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10535 && TREE_CODE (arg1) == INTEGER_CST
10536 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10537 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
10538 fold_build2 (BIT_XOR_EXPR, TREE_TYPE (arg1),
10539 TREE_OPERAND (arg0, 1), arg1));
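/* For instance (illustrative): XOR is its own inverse, so
   (x ^ 5) == 3 folds to x == (5 ^ 3), i.e. x == 6. */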
10541 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
10542 (X & C) == 0 when C is a single bit. */
10543 if (TREE_CODE (arg0) == BIT_AND_EXPR
10544 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
10545 && integer_zerop (arg1)
10546 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10548 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
10549 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
10550 TREE_OPERAND (arg0, 1));
10551 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
10555 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
10556 constant C is a power of two, i.e. a single bit. */
10557 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10558 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
10559 && integer_zerop (arg1)
10560 && integer_pow2p (TREE_OPERAND (arg0, 1))
10561 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
10562 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
10564 tree arg00 = TREE_OPERAND (arg0, 0);
10565 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
10566 arg00, build_int_cst (TREE_TYPE (arg00), 0));
10569 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
10570 when C is a power of two, i.e. a single bit. */
10571 if (TREE_CODE (arg0) == BIT_AND_EXPR
10572 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
10573 && integer_zerop (arg1)
10574 && integer_pow2p (TREE_OPERAND (arg0, 1))
10575 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
10576 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
10578 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
10579 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg000),
10580 arg000, TREE_OPERAND (arg0, 1));
10581 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
10582 tem, build_int_cst (TREE_TYPE (tem), 0));
10585 if (integer_zerop (arg1)
10586 && tree_expr_nonzero_p (arg0))
10588 tree res = constant_boolean_node (code == NE_EXPR, type);
10589 return omit_one_operand (type, res, arg0);
10597 tem = fold_comparison (code, type, op0, op1);
10598 if (tem != NULL_TREE)
10601 /* Transform comparisons of the form X +- C CMP X. */
10602 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
10603 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10604 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
10605 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
10606 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10607 && !TYPE_UNSIGNED (TREE_TYPE (arg1))
10608 && !(flag_wrapv || flag_trapv))))
10610 tree arg01 = TREE_OPERAND (arg0, 1);
10611 enum tree_code code0 = TREE_CODE (arg0);
10614 if (TREE_CODE (arg01) == REAL_CST)
10615 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
10617 is_positive = tree_int_cst_sgn (arg01);
10619 /* (X - c) > X becomes false. */
10620 if (code == GT_EXPR
10621 && ((code0 == MINUS_EXPR && is_positive >= 0)
10622 || (code0 == PLUS_EXPR && is_positive <= 0)))
10623 return constant_boolean_node (0, type);
10625 /* Likewise (X + c) < X becomes false. */
10626 if (code == LT_EXPR
10627 && ((code0 == PLUS_EXPR && is_positive >= 0)
10628 || (code0 == MINUS_EXPR && is_positive <= 0)))
10629 return constant_boolean_node (0, type);
10631 /* Convert (X - c) <= X to true. */
10632 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
10633 && code == LE_EXPR
10634 && ((code0 == MINUS_EXPR && is_positive >= 0)
10635 || (code0 == PLUS_EXPR && is_positive <= 0)))
10636 return constant_boolean_node (1, type);
10638 /* Convert (X + c) >= X to true. */
10639 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
10640 && code == GE_EXPR
10641 && ((code0 == PLUS_EXPR && is_positive >= 0)
10642 || (code0 == MINUS_EXPR && is_positive <= 0)))
10643 return constant_boolean_node (1, type);
10645 if (TREE_CODE (arg01) == INTEGER_CST)
10647 /* Convert X + c > X and X - c < X to true for integers. */
10648 if (code == GT_EXPR
10649 && ((code0 == PLUS_EXPR && is_positive > 0)
10650 || (code0 == MINUS_EXPR && is_positive < 0)))
10651 return constant_boolean_node (1, type);
10653 if (code == LT_EXPR
10654 && ((code0 == MINUS_EXPR && is_positive > 0)
10655 || (code0 == PLUS_EXPR && is_positive < 0)))
10656 return constant_boolean_node (1, type);
10658 /* Convert X + c <= X and X - c >= X to false for integers. */
10659 if (code == LE_EXPR
10660 && ((code0 == PLUS_EXPR && is_positive > 0)
10661 || (code0 == MINUS_EXPR && is_positive < 0)))
10662 return constant_boolean_node (0, type);
10664 if (code == GE_EXPR
10665 && ((code0 == MINUS_EXPR && is_positive > 0)
10666 || (code0 == PLUS_EXPR && is_positive < 0)))
10667 return constant_boolean_node (0, type);
10671 /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
10672 This transformation affects the cases which are handled in later
10673 optimizations involving comparisons with non-negative constants. */
10674 if (TREE_CODE (arg1) == INTEGER_CST
10675 && TREE_CODE (arg0) != INTEGER_CST
10676 && tree_int_cst_sgn (arg1) > 0)
10678 if (code == GE_EXPR)
10680 arg1 = const_binop (MINUS_EXPR, arg1,
10681 build_int_cst (TREE_TYPE (arg1), 1), 0);
10682 return fold_build2 (GT_EXPR, type, arg0,
10683 fold_convert (TREE_TYPE (arg0), arg1));
10685 if (code == LT_EXPR)
10687 arg1 = const_binop (MINUS_EXPR, arg1,
10688 build_int_cst (TREE_TYPE (arg1), 1), 0);
10689 return fold_build2 (LE_EXPR, type, arg0,
10690 fold_convert (TREE_TYPE (arg0), arg1));
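/* For instance (illustrative, for integral x): x >= 5 becomes x > 4
   and x < 5 becomes x <= 4, which is safe because C is known to be
   positive and so C - 1 cannot underflow. */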
10694 /* Comparisons with the highest or lowest possible integer of
10695 the specified size will have known values. */
10697 int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg1)));
10699 if (TREE_CODE (arg1) == INTEGER_CST
10700 && ! TREE_CONSTANT_OVERFLOW (arg1)
10701 && width <= 2 * HOST_BITS_PER_WIDE_INT
10702 && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10703 || POINTER_TYPE_P (TREE_TYPE (arg1))))
10705 HOST_WIDE_INT signed_max_hi;
10706 unsigned HOST_WIDE_INT signed_max_lo;
10707 unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;
10709 if (width <= HOST_BITS_PER_WIDE_INT)
10711 signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
10712 - 1;
10716 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
10718 max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
10724 max_lo = signed_max_lo;
10725 min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
10731 width -= HOST_BITS_PER_WIDE_INT;
10732 signed_max_lo = -1;
10733 signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
10734 - 1;
10738 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
10740 max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
10745 max_hi = signed_max_hi;
10746 min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
10750 if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
10751 && TREE_INT_CST_LOW (arg1) == max_lo)
10755 return omit_one_operand (type, integer_zero_node, arg0);
10758 return fold_build2 (EQ_EXPR, type, arg0, arg1);
10761 return omit_one_operand (type, integer_one_node, arg0);
10764 return fold_build2 (NE_EXPR, type, arg0, arg1);
10766 /* The GE_EXPR and LT_EXPR cases above are not normally
10767 reached because of previous transformations. */
10772 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
10773 == max_hi
10774 && TREE_INT_CST_LOW (arg1) == max_lo - 1)
10778 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
10779 return fold_build2 (EQ_EXPR, type, arg0, arg1);
10781 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
10782 return fold_build2 (NE_EXPR, type, arg0, arg1);
10786 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
10787 == min_hi
10788 && TREE_INT_CST_LOW (arg1) == min_lo)
10792 return omit_one_operand (type, integer_zero_node, arg0);
10795 return fold_build2 (EQ_EXPR, type, arg0, arg1);
10798 return omit_one_operand (type, integer_one_node, arg0);
10801 return fold_build2 (NE_EXPR, type, op0, op1);
10806 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
10807 == min_hi
10808 && TREE_INT_CST_LOW (arg1) == min_lo + 1)
10812 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
10813 return fold_build2 (NE_EXPR, type, arg0, arg1);
10815 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
10816 return fold_build2 (EQ_EXPR, type, arg0, arg1);
10821 else if (!in_gimple_form
10822 && TREE_INT_CST_HIGH (arg1) == signed_max_hi
10823 && TREE_INT_CST_LOW (arg1) == signed_max_lo
10824 && TYPE_UNSIGNED (TREE_TYPE (arg1))
10825 /* signed_type does not work on pointer types. */
10826 && INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
10828 /* The following case also applies to X < signed_max+1
10829 and X >= signed_max+1 because of previous transformations. */
10830 if (code == LE_EXPR || code == GT_EXPR)
10833 st0 = lang_hooks.types.signed_type (TREE_TYPE (arg0));
10834 st1 = lang_hooks.types.signed_type (TREE_TYPE (arg1));
10835 return fold_build2 (code == LE_EXPR ? GE_EXPR: LT_EXPR,
10836 type, fold_convert (st0, arg0),
10837 build_int_cst (st1, 0));
10843 /* If we are comparing an ABS_EXPR with a constant, we can
10844 convert all the cases into explicit comparisons, but they may
10845 well not be faster than doing the ABS and one comparison.
10846 But ABS (X) <= C is a range comparison, which becomes a subtraction
10847 and a comparison, and is probably faster. */
10848 if (code == LE_EXPR
10849 && TREE_CODE (arg1) == INTEGER_CST
10850 && TREE_CODE (arg0) == ABS_EXPR
10851 && ! TREE_SIDE_EFFECTS (arg0)
10852 && (0 != (tem = negate_expr (arg1)))
10853 && TREE_CODE (tem) == INTEGER_CST
10854 && ! TREE_CONSTANT_OVERFLOW (tem))
10855 return fold_build2 (TRUTH_ANDIF_EXPR, type,
10856 build2 (GE_EXPR, type,
10857 TREE_OPERAND (arg0, 0), tem),
10858 build2 (LE_EXPR, type,
10859 TREE_OPERAND (arg0, 0), arg1));
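/* For instance (illustrative): abs (x) <= 5 folds to
   x >= -5 && x <= 5, a range test that later range-test folding can
   collapse into a single unsigned comparison of x + 5 against 10. */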
10861 /* Convert ABS_EXPR<x> >= 0 to true. */
10862 if (code == GE_EXPR
10863 && tree_expr_nonnegative_p (arg0)
10864 && (integer_zerop (arg1)
10865 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10866 && real_zerop (arg1))))
10867 return omit_one_operand (type, integer_one_node, arg0);
10869 /* Convert ABS_EXPR<x> < 0 to false. */
10870 if (code == LT_EXPR
10871 && tree_expr_nonnegative_p (arg0)
10872 && (integer_zerop (arg1) || real_zerop (arg1)))
10873 return omit_one_operand (type, integer_zero_node, arg0);
10875 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
10876 and similarly for >= into !=. */
10877 if ((code == LT_EXPR || code == GE_EXPR)
10878 && TYPE_UNSIGNED (TREE_TYPE (arg0))
10879 && TREE_CODE (arg1) == LSHIFT_EXPR
10880 && integer_onep (TREE_OPERAND (arg1, 0)))
10881 return build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
10882 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
10883 TREE_OPERAND (arg1, 1)),
10884 build_int_cst (TREE_TYPE (arg0), 0));
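/* For instance (illustrative, for unsigned x): x < (1 << y) folds to
   (x >> y) == 0 and x >= (1 << y) folds to (x >> y) != 0, since x is
   below 1 << y exactly when no bit at position y or higher is set. */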
10886 if ((code == LT_EXPR || code == GE_EXPR)
10887 && TYPE_UNSIGNED (TREE_TYPE (arg0))
10888 && (TREE_CODE (arg1) == NOP_EXPR
10889 || TREE_CODE (arg1) == CONVERT_EXPR)
10890 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
10891 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
10893 build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
10894 fold_convert (TREE_TYPE (arg0),
10895 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
10896 TREE_OPERAND (TREE_OPERAND (arg1, 0),
10898 build_int_cst (TREE_TYPE (arg0), 0));
10902 case UNORDERED_EXPR:
10910 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
10912 t1 = fold_relational_const (code, type, arg0, arg1);
10913 if (t1 != NULL_TREE)
10917 /* If the first operand is NaN, the result is constant. */
10918 if (TREE_CODE (arg0) == REAL_CST
10919 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
10920 && (code != LTGT_EXPR || ! flag_trapping_math))
10922 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
10923 ? integer_zero_node
10924 : integer_one_node;
10925 return omit_one_operand (type, t1, arg1);
10928 /* If the second operand is NaN, the result is constant. */
10929 if (TREE_CODE (arg1) == REAL_CST
10930 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
10931 && (code != LTGT_EXPR || ! flag_trapping_math))
10933 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
10934 ? integer_zero_node
10935 : integer_one_node;
10936 return omit_one_operand (type, t1, arg0);
10939 /* Simplify unordered comparison of something with itself. */
10940 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
10941 && operand_equal_p (arg0, arg1, 0))
10942 return constant_boolean_node (1, type);
10944 if (code == LTGT_EXPR
10945 && !flag_trapping_math
10946 && operand_equal_p (arg0, arg1, 0))
10947 return constant_boolean_node (0, type);
10949 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
10951 tree targ0 = strip_float_extensions (arg0);
10952 tree targ1 = strip_float_extensions (arg1);
10953 tree newtype = TREE_TYPE (targ0);
10955 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
10956 newtype = TREE_TYPE (targ1);
10958 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
10959 return fold_build2 (code, type, fold_convert (newtype, targ0),
10960 fold_convert (newtype, targ1));
10965 case COMPOUND_EXPR:
10966 /* When pedantic, a compound expression can be neither an lvalue
10967 nor an integer constant expression. */
10968 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
10970 /* Don't let (0, 0) be null pointer constant. */
10971 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
10972 : fold_convert (type, arg1);
10973 return pedantic_non_lvalue (tem);
10976 if ((TREE_CODE (arg0) == REAL_CST
10977 && TREE_CODE (arg1) == REAL_CST)
10978 || (TREE_CODE (arg0) == INTEGER_CST
10979 && TREE_CODE (arg1) == INTEGER_CST))
10980 return build_complex (type, arg0, arg1);
10984 /* An ASSERT_EXPR should never be passed to fold_binary. */
10985 gcc_unreachable ();
10989 } /* switch (code) */
10992 /* Callback for walk_tree, looking for LABEL_EXPR.
10993 Returns tree TP if it is LABEL_EXPR. Otherwise it returns NULL_TREE.
10994 Do not check the sub-tree of GOTO_EXPR. */
10997 contains_label_1 (tree *tp,
10998 int *walk_subtrees,
10999 void *data ATTRIBUTE_UNUSED)
11001 switch (TREE_CODE (*tp))
11006 *walk_subtrees = 0;
11013 /* Checks whether the sub-tree ST contains a label LABEL_EXPR which is
11014 accessible from outside the sub-tree. Returns NULL_TREE if no
11015 addressable label is found. */
11018 contains_label_p (tree st)
11020 return (walk_tree (&st, contains_label_1 , NULL, NULL) != NULL_TREE);
11023 /* Fold a ternary expression of code CODE and type TYPE with operands
11024 OP0, OP1, and OP2. Return the folded expression if folding is
11025 successful. Otherwise, return NULL_TREE. */
11028 fold_ternary (enum tree_code code, tree type, tree op0, tree op1, tree op2)
11031 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
11032 enum tree_code_class kind = TREE_CODE_CLASS (code);
11034 gcc_assert (IS_EXPR_CODE_CLASS (kind)
11035 && TREE_CODE_LENGTH (code) == 3);
11037 /* Strip any conversions that don't change the mode. This is safe
11038 for every expression, except for a comparison expression because
11039 its signedness is derived from its operands. So, in the latter
11040 case, only strip conversions that don't change the signedness.
11042 Note that this is done as an internal manipulation within the
11043 constant folder, in order to find the simplest representation of
11044 the arguments so that their form can be studied. In any case,
11045 the appropriate type conversions should be put back in the tree
11046 that will get out of the constant folder. */
11061 case COMPONENT_REF:
11062 if (TREE_CODE (arg0) == CONSTRUCTOR
11063 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
11065 unsigned HOST_WIDE_INT idx;
11067 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
11074 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
11075 so all simple results must be passed through pedantic_non_lvalue. */
11076 if (TREE_CODE (arg0) == INTEGER_CST)
11078 tree unused_op = integer_zerop (arg0) ? op1 : op2;
11079 tem = integer_zerop (arg0) ? op2 : op1;
11080 /* Only optimize constant conditions when the selected branch
11081 has the same type as the COND_EXPR. This avoids optimizing
11082 away "c ? x : throw", where the throw has a void type.
11083 Avoid throwing away an operand which contains a label. */
11084 if ((!TREE_SIDE_EFFECTS (unused_op)
11085 || !contains_label_p (unused_op))
11086 && (! VOID_TYPE_P (TREE_TYPE (tem))
11087 || VOID_TYPE_P (type)))
11088 return pedantic_non_lvalue (tem);
11091 if (operand_equal_p (arg1, op2, 0))
11092 return pedantic_omit_one_operand (type, arg1, arg0);
11094 /* If we have A op B ? A : C, we may be able to convert this to a
11095 simpler expression, depending on the operation and the values
11096 of B and C. Signed zeros prevent all of these transformations,
11097 for reasons given above each one.
11099 Also try swapping the arguments and inverting the conditional. */
11100 if (COMPARISON_CLASS_P (arg0)
11101 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
11102 arg1, TREE_OPERAND (arg0, 1))
11103 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
11105 tem = fold_cond_expr_with_comparison (type, arg0, op1, op2);
11110 if (COMPARISON_CLASS_P (arg0)
11111 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
11113 TREE_OPERAND (arg0, 1))
11114 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
11116 tem = invert_truthvalue (arg0);
11117 if (COMPARISON_CLASS_P (tem))
11119 tem = fold_cond_expr_with_comparison (type, tem, op2, op1);
11125 /* If the second operand is simpler than the third, swap them
11126 since that produces better jump optimization results. */
11127 if (truth_value_p (TREE_CODE (arg0))
11128 && tree_swap_operands_p (op1, op2, false))
11130 /* See if this can be inverted. If it can't, possibly because
11131 it was a floating-point inequality comparison, don't do
11132 anything. */
11133 tem = invert_truthvalue (arg0);
11135 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
11136 return fold_build3 (code, type, tem, op2, op1);
11139 /* Convert A ? 1 : 0 to simply A. */
11140 if (integer_onep (op1)
11141 && integer_zerop (op2)
11142 /* If we try to convert OP0 to our type, the
11143 call to fold will try to move the conversion inside
11144 a COND, which will recurse. In that case, the COND_EXPR
11145 is probably the best choice, so leave it alone. */
11146 && type == TREE_TYPE (arg0))
11147 return pedantic_non_lvalue (arg0);
11149 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
11150 over COND_EXPR in cases such as floating point comparisons. */
11151 if (integer_zerop (op1)
11152 && integer_onep (op2)
11153 && truth_value_p (TREE_CODE (arg0)))
11154 return pedantic_non_lvalue (fold_convert (type,
11155 invert_truthvalue (arg0)));
11157 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
11158 if (TREE_CODE (arg0) == LT_EXPR
11159 && integer_zerop (TREE_OPERAND (arg0, 1))
11160 && integer_zerop (op2)
11161 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
11162 return fold_convert (type,
11163 fold_build2 (BIT_AND_EXPR,
11164 TREE_TYPE (tem), tem,
11165 fold_convert (TREE_TYPE (tem), arg1)));
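/* For instance (illustrative, assuming a 32-bit int):
   x < 0 ? INT_MIN : 0 folds to x & INT_MIN, because the bit pattern
   of INT_MIN (0x80000000) is exactly the sign bit of x. */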
11167 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
11168 already handled above. */
11169 if (TREE_CODE (arg0) == BIT_AND_EXPR
11170 && integer_onep (TREE_OPERAND (arg0, 1))
11171 && integer_zerop (op2)
11172 && integer_pow2p (arg1))
11174 tree tem = TREE_OPERAND (arg0, 0);
11176 if (TREE_CODE (tem) == RSHIFT_EXPR
11177 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
11178 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
11179 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
11180 return fold_build2 (BIT_AND_EXPR, type,
11181 TREE_OPERAND (tem, 0), arg1);
11184 /* A & N ? N : 0 is simply A & N if N is a power of two. This
11185 is probably obsolete because the first operand should be a
11186 truth value (that's why we have the two cases above), but let's
11187 leave it in until we can confirm this for all front-ends. */
11188 if (integer_zerop (op2)
11189 && TREE_CODE (arg0) == NE_EXPR
11190 && integer_zerop (TREE_OPERAND (arg0, 1))
11191 && integer_pow2p (arg1)
11192 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
11193 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
11194 arg1, OEP_ONLY_CONST))
11195 return pedantic_non_lvalue (fold_convert (type,
11196 TREE_OPERAND (arg0, 0)));
11198 /* Convert A ? B : 0 into A && B if A and B are truth values. */
11199 if (integer_zerop (op2)
11200 && truth_value_p (TREE_CODE (arg0))
11201 && truth_value_p (TREE_CODE (arg1)))
11202 return fold_build2 (TRUTH_ANDIF_EXPR, type,
11203 fold_convert (type, arg0),
11206 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
11207 if (integer_onep (op2)
11208 && truth_value_p (TREE_CODE (arg0))
11209 && truth_value_p (TREE_CODE (arg1)))
11211 /* Only perform transformation if ARG0 is easily inverted. */
11212 tem = invert_truthvalue (arg0);
11213 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
11214 return fold_build2 (TRUTH_ORIF_EXPR, type,
11215 fold_convert (type, tem),
11219 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
11220 if (integer_zerop (arg1)
11221 && truth_value_p (TREE_CODE (arg0))
11222 && truth_value_p (TREE_CODE (op2)))
11224 /* Only perform transformation if ARG0 is easily inverted. */
11225 tem = invert_truthvalue (arg0);
11226 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
11227 return fold_build2 (TRUTH_ANDIF_EXPR, type,
11228 fold_convert (type, tem),
11232 /* Convert A ? 1 : B into A || B if A and B are truth values. */
11233 if (integer_onep (arg1)
11234 && truth_value_p (TREE_CODE (arg0))
11235 && truth_value_p (TREE_CODE (op2)))
11236 return fold_build2 (TRUTH_ORIF_EXPR, type,
11237 fold_convert (type, arg0),
11243 /* Check for a built-in function. */
11244 if (TREE_CODE (op0) == ADDR_EXPR
11245 && TREE_CODE (TREE_OPERAND (op0, 0)) == FUNCTION_DECL
11246 && DECL_BUILT_IN (TREE_OPERAND (op0, 0)))
11247 return fold_builtin (TREE_OPERAND (op0, 0), op1, false);
11250 case BIT_FIELD_REF:
11251 if (TREE_CODE (arg0) == VECTOR_CST
11252 && type == TREE_TYPE (TREE_TYPE (arg0))
11253 && host_integerp (arg1, 1)
11254 && host_integerp (op2, 1))
11256 unsigned HOST_WIDE_INT width = tree_low_cst (arg1, 1);
11257 unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);
11260 && simple_cst_equal (arg1, TYPE_SIZE (type)) == 1
11261 && (idx % width) == 0
11262 && (idx = idx / width)
11263 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
11265 tree elements = TREE_VECTOR_CST_ELTS (arg0);
11266 while (idx-- > 0 && elements)
11267 elements = TREE_CHAIN (elements);
11269 return TREE_VALUE (elements);
11271 return fold_convert (type, integer_zero_node);
11278 } /* switch (code) */
11281 /* Perform constant folding and related simplification of EXPR.
11282 The related simplifications include x*1 => x, x*0 => 0, etc.,
11283 and application of the associative law.
11284 NOP_EXPR conversions may be removed freely (as long as we
11285 are careful not to change the type of the overall expression).
11286 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
11287 but we can constant-fold them if they have constant operands. */
11289 #ifdef ENABLE_FOLD_CHECKING
11290 # define fold(x) fold_1 (x)
11291 static tree fold_1 (tree);
11297 const tree t = expr;
11298 enum tree_code code = TREE_CODE (t);
11299 enum tree_code_class kind = TREE_CODE_CLASS (code);
11302 /* Return right away if a constant. */
11303 if (kind == tcc_constant)
11306 if (IS_EXPR_CODE_CLASS (kind))
11308 tree type = TREE_TYPE (t);
11309 tree op0, op1, op2;
11311 switch (TREE_CODE_LENGTH (code))
11314 op0 = TREE_OPERAND (t, 0);
11315 tem = fold_unary (code, type, op0);
11316 return tem ? tem : expr;
11318 op0 = TREE_OPERAND (t, 0);
11319 op1 = TREE_OPERAND (t, 1);
11320 tem = fold_binary (code, type, op0, op1);
11321 return tem ? tem : expr;
11323 op0 = TREE_OPERAND (t, 0);
11324 op1 = TREE_OPERAND (t, 1);
11325 op2 = TREE_OPERAND (t, 2);
11326 tem = fold_ternary (code, type, op0, op1, op2);
11327 return tem ? tem : expr;
11336 return fold (DECL_INITIAL (t));
11340 } /* switch (code) */
11343 #ifdef ENABLE_FOLD_CHECKING
11346 static void fold_checksum_tree (tree, struct md5_ctx *, htab_t);
11347 static void fold_check_failed (tree, tree);
11348 void print_fold_checksum (tree);
11350 /* When --enable-checking=fold, compute a digest of expr before
11351 and after the actual fold call to verify that fold did not accidentally
11352 change the original expr. */
11358 struct md5_ctx ctx;
11359 unsigned char checksum_before[16], checksum_after[16];
11362 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
11363 md5_init_ctx (&ctx);
11364 fold_checksum_tree (expr, &ctx, ht);
11365 md5_finish_ctx (&ctx, checksum_before);
11368 ret = fold_1 (expr);
11370 md5_init_ctx (&ctx);
11371 fold_checksum_tree (expr, &ctx, ht);
11372 md5_finish_ctx (&ctx, checksum_after);
11375 if (memcmp (checksum_before, checksum_after, 16))
11376 fold_check_failed (expr, ret);
11382 print_fold_checksum (tree expr)
11384 struct md5_ctx ctx;
11385 unsigned char checksum[16], cnt;
11388 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
11389 md5_init_ctx (&ctx);
11390 fold_checksum_tree (expr, &ctx, ht);
11391 md5_finish_ctx (&ctx, checksum);
11393 for (cnt = 0; cnt < 16; ++cnt)
11394 fprintf (stderr, "%02x", checksum[cnt]);
11395 putc ('\n', stderr);
11399 fold_check_failed (tree expr ATTRIBUTE_UNUSED, tree ret ATTRIBUTE_UNUSED)
11401 internal_error ("fold check: original tree changed by fold");
11405 fold_checksum_tree (tree expr, struct md5_ctx *ctx, htab_t ht)
11408 enum tree_code code;
11409 struct tree_function_decl buf;
11414 gcc_assert ((sizeof (struct tree_exp) + 5 * sizeof (tree)
11415 <= sizeof (struct tree_function_decl))
11416 && sizeof (struct tree_type) <= sizeof (struct tree_function_decl));
11419 slot = htab_find_slot (ht, expr, INSERT);
11423 code = TREE_CODE (expr);
11424 if (TREE_CODE_CLASS (code) == tcc_declaration
11425 && DECL_ASSEMBLER_NAME_SET_P (expr))
11427 /* Allow DECL_ASSEMBLER_NAME to be modified. */
11428 memcpy ((char *) &buf, expr, tree_size (expr));
11429 expr = (tree) &buf;
11430 SET_DECL_ASSEMBLER_NAME (expr, NULL);
11432 else if (TREE_CODE_CLASS (code) == tcc_type
11433 && (TYPE_POINTER_TO (expr) || TYPE_REFERENCE_TO (expr)
11434 || TYPE_CACHED_VALUES_P (expr)
11435 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)))
11437 /* Allow these fields to be modified. */
11438 memcpy ((char *) &buf, expr, tree_size (expr));
11439 expr = (tree) &buf;
11440 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr) = 0;
11441 TYPE_POINTER_TO (expr) = NULL;
11442 TYPE_REFERENCE_TO (expr) = NULL;
11443 if (TYPE_CACHED_VALUES_P (expr))
11445 TYPE_CACHED_VALUES_P (expr) = 0;
11446 TYPE_CACHED_VALUES (expr) = NULL;
11449 md5_process_bytes (expr, tree_size (expr), ctx);
11450 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
11451 if (TREE_CODE_CLASS (code) != tcc_type
11452 && TREE_CODE_CLASS (code) != tcc_declaration
11453 && code != TREE_LIST)
11454 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
11455 switch (TREE_CODE_CLASS (code))
11461 md5_process_bytes (TREE_STRING_POINTER (expr),
11462 TREE_STRING_LENGTH (expr), ctx);
11465 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
11466 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
11469 fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
11475 case tcc_exceptional:
11479 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
11480 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
11481 expr = TREE_CHAIN (expr);
11482 goto recursive_label;
11485 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
11486 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
11492 case tcc_expression:
11493 case tcc_reference:
11494 case tcc_comparison:
11497 case tcc_statement:
11498 len = TREE_CODE_LENGTH (code);
11499 for (i = 0; i < len; ++i)
11500 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
11502 case tcc_declaration:
11503 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
11504 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
11505 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
11507 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
11508 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
11509 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
11510 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
11511 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
11513 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
11514 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
11516 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
11518 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
11519 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
11520 fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
11524 if (TREE_CODE (expr) == ENUMERAL_TYPE)
11525 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
11526 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
11527 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
11528 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
11529 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
11530 if (INTEGRAL_TYPE_P (expr)
11531 || SCALAR_FLOAT_TYPE_P (expr))
11533 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
11534 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
11536 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
11537 if (TREE_CODE (expr) == RECORD_TYPE
11538 || TREE_CODE (expr) == UNION_TYPE
11539 || TREE_CODE (expr) == QUAL_UNION_TYPE)
11540 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
11541 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
11550 /* Fold a unary tree expression with code CODE of type TYPE with an
11551 operand OP0. Return a folded expression if successful. Otherwise,
11552 return a tree expression with code CODE of type TYPE with an
11553 operand OP0. */
11556 fold_build1_stat (enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
11559 #ifdef ENABLE_FOLD_CHECKING
11560 unsigned char checksum_before[16], checksum_after[16];
11561 struct md5_ctx ctx;
11564 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
11565 md5_init_ctx (&ctx);
11566 fold_checksum_tree (op0, &ctx, ht);
11567 md5_finish_ctx (&ctx, checksum_before);
11571 tem = fold_unary (code, type, op0);
11573 tem = build1_stat (code, type, op0 PASS_MEM_STAT);
11575 #ifdef ENABLE_FOLD_CHECKING
11576 md5_init_ctx (&ctx);
11577 fold_checksum_tree (op0, &ctx, ht);
11578 md5_finish_ctx (&ctx, checksum_after);
11581 if (memcmp (checksum_before, checksum_after, 16))
11582 fold_check_failed (op0, tem);
11587 /* Fold a binary tree expression with code CODE of type TYPE with
11588 operands OP0 and OP1. Return a folded expression if successful.
11589 Otherwise, return a tree expression with code CODE of type TYPE
11590 with operands OP0 and OP1. */
11593 fold_build2_stat (enum tree_code code, tree type, tree op0, tree op1
11597 #ifdef ENABLE_FOLD_CHECKING
11598 unsigned char checksum_before_op0[16],
11599 checksum_before_op1[16],
11600 checksum_after_op0[16],
11601 checksum_after_op1[16];
11602 struct md5_ctx ctx;
11605 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
11606 md5_init_ctx (&ctx);
11607 fold_checksum_tree (op0, &ctx, ht);
11608 md5_finish_ctx (&ctx, checksum_before_op0);
11611 md5_init_ctx (&ctx);
11612 fold_checksum_tree (op1, &ctx, ht);
11613 md5_finish_ctx (&ctx, checksum_before_op1);
11617 tem = fold_binary (code, type, op0, op1);
11619 tem = build2_stat (code, type, op0, op1 PASS_MEM_STAT);
11621 #ifdef ENABLE_FOLD_CHECKING
11622 md5_init_ctx (&ctx);
11623 fold_checksum_tree (op0, &ctx, ht);
11624 md5_finish_ctx (&ctx, checksum_after_op0);
11627 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
11628 fold_check_failed (op0, tem);
11630 md5_init_ctx (&ctx);
11631 fold_checksum_tree (op1, &ctx, ht);
11632 md5_finish_ctx (&ctx, checksum_after_op1);
11635 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
11636 fold_check_failed (op1, tem);
11641 /* Fold a ternary tree expression with code CODE of type TYPE with
11642 operands OP0, OP1, and OP2. Return a folded expression if
11643 successful. Otherwise, return a tree expression with code CODE of
11644 type TYPE with operands OP0, OP1, and OP2. */
11647 fold_build3_stat (enum tree_code code, tree type, tree op0, tree op1, tree op2
11651 #ifdef ENABLE_FOLD_CHECKING
11652 unsigned char checksum_before_op0[16],
11653 checksum_before_op1[16],
11654 checksum_before_op2[16],
11655 checksum_after_op0[16],
11656 checksum_after_op1[16],
11657 checksum_after_op2[16];
11658 struct md5_ctx ctx;
11661 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
11662 md5_init_ctx (&ctx);
11663 fold_checksum_tree (op0, &ctx, ht);
11664 md5_finish_ctx (&ctx, checksum_before_op0);
11667 md5_init_ctx (&ctx);
11668 fold_checksum_tree (op1, &ctx, ht);
11669 md5_finish_ctx (&ctx, checksum_before_op1);
11672 md5_init_ctx (&ctx);
11673 fold_checksum_tree (op2, &ctx, ht);
11674 md5_finish_ctx (&ctx, checksum_before_op2);
11678 tem = fold_ternary (code, type, op0, op1, op2);
11680 tem = build3_stat (code, type, op0, op1, op2 PASS_MEM_STAT);
11682 #ifdef ENABLE_FOLD_CHECKING
11683 md5_init_ctx (&ctx);
11684 fold_checksum_tree (op0, &ctx, ht);
11685 md5_finish_ctx (&ctx, checksum_after_op0);
11688 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
11689 fold_check_failed (op0, tem);
11691 md5_init_ctx (&ctx);
11692 fold_checksum_tree (op1, &ctx, ht);
11693 md5_finish_ctx (&ctx, checksum_after_op1);
11696 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
11697 fold_check_failed (op1, tem);
11699 md5_init_ctx (&ctx);
11700 fold_checksum_tree (op2, &ctx, ht);
11701 md5_finish_ctx (&ctx, checksum_after_op2);
11704 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
11705 fold_check_failed (op2, tem);
11710 /* Perform constant folding and related simplification of initializer
11711 expression EXPR. These behave identically to "fold_buildN" but ignore
11712 potential run-time traps and exceptions that fold must preserve. */
11714 #define START_FOLD_INIT \
11715 int saved_signaling_nans = flag_signaling_nans;\
11716 int saved_trapping_math = flag_trapping_math;\
11717 int saved_rounding_math = flag_rounding_math;\
11718 int saved_trapv = flag_trapv;\
11719 int saved_folding_initializer = folding_initializer;\
11720 flag_signaling_nans = 0;\
11721 flag_trapping_math = 0;\
11722 flag_rounding_math = 0;\
11723 flag_trapv = 0;\
11724 folding_initializer = 1;
11726 #define END_FOLD_INIT \
11727 flag_signaling_nans = saved_signaling_nans;\
11728 flag_trapping_math = saved_trapping_math;\
11729 flag_rounding_math = saved_rounding_math;\
11730 flag_trapv = saved_trapv;\
11731 folding_initializer = saved_folding_initializer;
11734 fold_build1_initializer (enum tree_code code, tree type, tree op)
11739 result = fold_build1 (code, type, op);
11746 fold_build2_initializer (enum tree_code code, tree type, tree op0, tree op1)
11751 result = fold_build2 (code, type, op0, op1);
11758 fold_build3_initializer (enum tree_code code, tree type, tree op0, tree op1,
11764 result = fold_build3 (code, type, op0, op1, op2);
11770 #undef START_FOLD_INIT
11771 #undef END_FOLD_INIT
11773 /* Determine if first argument is a multiple of second argument. Return 0 if
11774 it is not, or we cannot easily determine it to be.
11776 An example of the sort of thing we care about (at this point; this routine
11777 could surely be made more general, and expanded to do what the *_DIV_EXPR's
11778 fold cases do now) is discovering that
11780 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
11782 is a multiple of
11784 SAVE_EXPR (J * 8)
11786 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
11788 This code also handles discovering that
11790 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
11792 is a multiple of 8 so we don't have to worry about dealing with a
11793 possible remainder.
11795 Note that we *look* inside a SAVE_EXPR only to determine how it was
11796 calculated; it is not safe for fold to do much of anything else with the
11797 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
11798 at run time. For example, the latter example above *cannot* be implemented
11799 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
11800 evaluation time of the original SAVE_EXPR is not necessarily the same at
11801 the time the new expression is evaluated. The only optimization of this
11802 sort that would be valid is changing
11804 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
11808 SAVE_EXPR (I) * SAVE_EXPR (J)
11810 (where the same SAVE_EXPR (J) is used in the original and the
11811 transformed version). */
int
multiple_of_p (tree type, tree top, tree bottom)
{
  if (operand_equal_p (top, bottom, 0))
    return 1;

  if (TREE_CODE (type) != INTEGER_TYPE)
    return 0;

  switch (TREE_CODE (top))
    {
    case BIT_AND_EXPR:
      /* Bitwise and provides a power of two multiple.  If the mask is
         a multiple of BOTTOM then TOP is a multiple of BOTTOM.  */
      if (!integer_pow2p (bottom))
        return 0;
      /* FALLTHRU */

    case MULT_EXPR:
      return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
              || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));

    case PLUS_EXPR:
    case MINUS_EXPR:
      return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
              && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));

    case LSHIFT_EXPR:
      if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
        {
          tree op1, t1;

          op1 = TREE_OPERAND (top, 1);
          /* const_binop may not detect overflow correctly,
             so check for it explicitly here.  */
          if (TYPE_PRECISION (TREE_TYPE (size_one_node))
              > TREE_INT_CST_LOW (op1)
              && TREE_INT_CST_HIGH (op1) == 0
              && 0 != (t1 = fold_convert (type,
                                          const_binop (LSHIFT_EXPR,
                                                       size_one_node,
                                                       op1, 0)))
              && ! TREE_OVERFLOW (t1))
            return multiple_of_p (type, t1, bottom);
        }
      return 0;

    case NOP_EXPR:
      /* Can't handle conversions from non-integral or wider integral type.  */
      if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
          || (TYPE_PRECISION (type)
              < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
        return 0;

      /* ... fall through ...  */

    case SAVE_EXPR:
      return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);

    case INTEGER_CST:
      if (TREE_CODE (bottom) != INTEGER_CST
          || (TYPE_UNSIGNED (type)
              && (tree_int_cst_sgn (top) < 0
                  || tree_int_cst_sgn (bottom) < 0)))
        return 0;
      return integer_zerop (const_binop (TRUNC_MOD_EXPR,
                                         top, bottom, 0));

    default:
      return 0;
    }
}
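
/* The BIT_AND_EXPR rule above rests on a simple bit fact: if BOTTOM is
   a power of two and the mask is a multiple of BOTTOM, the mask's low
   bits are zero, so X & MASK keeps them zero for every X.  A
   stand-alone check (hypothetical demo, not GCC API), under "#if 0": */
#if 0
#include <assert.h>

static void
demo_masked_multiple (void)
{
  unsigned bottom = 8;     /* power of two */
  unsigned mask = 24;      /* a multiple of 8, so its low 3 bits are 0 */
  unsigned x;

  for (x = 0; x < 4096; x++)
    assert ((x & mask) % bottom == 0);
}
#endif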
/* Return true if `t' is known to be non-negative.  */

int
tree_expr_nonnegative_p (tree t)
{
  if (t == error_mark_node)
    return 0;

  if (TYPE_UNSIGNED (TREE_TYPE (t)))
    return 1;

  switch (TREE_CODE (t))
    {
    case SSA_NAME:
      /* Query VRP to see if it has recorded any information about
         the range of this object.  */
      return ssa_name_nonnegative_p (t);

    case ABS_EXPR:
      /* We can't return 1 if flag_wrapv is set because
         ABS_EXPR<INT_MIN> = INT_MIN.  */
      if (!(flag_wrapv && INTEGRAL_TYPE_P (TREE_TYPE (t))))
        return 1;
      break;

    case INTEGER_CST:
      return tree_int_cst_sgn (t) >= 0;

    case REAL_CST:
      return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case PLUS_EXPR:
      if (FLOAT_TYPE_P (TREE_TYPE (t)))
        return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
               && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));

      /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
         both unsigned and at least 2 bits shorter than the result.  */
      if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
          && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
          && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
        {
          tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
          tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
          if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
              && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
            {
              unsigned int prec = MAX (TYPE_PRECISION (inner1),
                                       TYPE_PRECISION (inner2)) + 1;
              return prec < TYPE_PRECISION (TREE_TYPE (t));
            }
        }
      break;

    case MULT_EXPR:
      if (FLOAT_TYPE_P (TREE_TYPE (t)))
        {
          /* x * x for floating point x is always non-negative.  */
          if (operand_equal_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1), 0))
            return 1;
          return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
                 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
        }

      /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
         both unsigned and their total bits is shorter than the result.  */
      if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
          && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
          && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
        {
          tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
          tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
          if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
              && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
            return TYPE_PRECISION (inner1) + TYPE_PRECISION (inner2)
                   < TYPE_PRECISION (TREE_TYPE (t));
        }
      break;

    case BIT_AND_EXPR:
    case MAX_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
             || tree_expr_nonnegative_p (TREE_OPERAND (t, 1));

    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case MIN_EXPR:
    case RDIV_EXPR:
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
             && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));

    case TRUNC_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case SAVE_EXPR:
    case NON_LVALUE_EXPR:
    case FLOAT_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));

    case COMPOUND_EXPR:
    case MODIFY_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));

    case BIND_EXPR:
      return tree_expr_nonnegative_p (expr_last (TREE_OPERAND (t, 1)));

    case COND_EXPR:
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 1))
             && tree_expr_nonnegative_p (TREE_OPERAND (t, 2));
    case NOP_EXPR:
      {
        tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
        tree outer_type = TREE_TYPE (t);

        if (TREE_CODE (outer_type) == REAL_TYPE)
          {
            if (TREE_CODE (inner_type) == REAL_TYPE)
              return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
            if (TREE_CODE (inner_type) == INTEGER_TYPE)
              {
                if (TYPE_UNSIGNED (inner_type))
                  return 1;
                return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
              }
          }
        else if (TREE_CODE (outer_type) == INTEGER_TYPE)
          {
            if (TREE_CODE (inner_type) == REAL_TYPE)
              return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
            if (TREE_CODE (inner_type) == INTEGER_TYPE)
              return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
                     && TYPE_UNSIGNED (inner_type);
          }
      }
      break;

    case TARGET_EXPR:
      {
        tree temp = TARGET_EXPR_SLOT (t);
        t = TARGET_EXPR_INITIAL (t);

        /* If the initializer is non-void, then it's a normal expression
           that will be assigned to the slot.  */
        if (!VOID_TYPE_P (t))
          return tree_expr_nonnegative_p (t);

        /* Otherwise, the initializer sets the slot in some way.  One common
           way is an assignment statement at the end of the initializer.  */
        while (1)
          {
            if (TREE_CODE (t) == BIND_EXPR)
              t = expr_last (BIND_EXPR_BODY (t));
            else if (TREE_CODE (t) == TRY_FINALLY_EXPR
                     || TREE_CODE (t) == TRY_CATCH_EXPR)
              t = expr_last (TREE_OPERAND (t, 0));
            else if (TREE_CODE (t) == STATEMENT_LIST)
              t = expr_last (t);
            else
              break;
          }
        if (TREE_CODE (t) == MODIFY_EXPR
            && TREE_OPERAND (t, 0) == temp)
          return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));

        return 0;
      }
    case CALL_EXPR:
      {
        tree fndecl = get_callee_fndecl (t);
        tree arglist = TREE_OPERAND (t, 1);
        if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
          switch (DECL_FUNCTION_CODE (fndecl))
            {
            CASE_FLT_FN (BUILT_IN_ACOS):
            CASE_FLT_FN (BUILT_IN_ACOSH):
            CASE_FLT_FN (BUILT_IN_CABS):
            CASE_FLT_FN (BUILT_IN_COSH):
            CASE_FLT_FN (BUILT_IN_ERFC):
            CASE_FLT_FN (BUILT_IN_EXP):
            CASE_FLT_FN (BUILT_IN_EXP10):
            CASE_FLT_FN (BUILT_IN_EXP2):
            CASE_FLT_FN (BUILT_IN_FABS):
            CASE_FLT_FN (BUILT_IN_FDIM):
            CASE_FLT_FN (BUILT_IN_HYPOT):
            CASE_FLT_FN (BUILT_IN_POW10):
            CASE_INT_FN (BUILT_IN_FFS):
            CASE_INT_FN (BUILT_IN_PARITY):
            CASE_INT_FN (BUILT_IN_POPCOUNT):
              /* Always true.  */
              return 1;

            CASE_FLT_FN (BUILT_IN_SQRT):
              /* sqrt(-0.0) is -0.0.  */
              if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (t))))
                return 1;
              return tree_expr_nonnegative_p (TREE_VALUE (arglist));

            CASE_FLT_FN (BUILT_IN_ASINH):
            CASE_FLT_FN (BUILT_IN_ATAN):
            CASE_FLT_FN (BUILT_IN_ATANH):
            CASE_FLT_FN (BUILT_IN_CBRT):
            CASE_FLT_FN (BUILT_IN_CEIL):
            CASE_FLT_FN (BUILT_IN_ERF):
            CASE_FLT_FN (BUILT_IN_EXPM1):
            CASE_FLT_FN (BUILT_IN_FLOOR):
            CASE_FLT_FN (BUILT_IN_FMOD):
            CASE_FLT_FN (BUILT_IN_FREXP):
            CASE_FLT_FN (BUILT_IN_LCEIL):
            CASE_FLT_FN (BUILT_IN_LDEXP):
            CASE_FLT_FN (BUILT_IN_LFLOOR):
            CASE_FLT_FN (BUILT_IN_LLCEIL):
            CASE_FLT_FN (BUILT_IN_LLFLOOR):
            CASE_FLT_FN (BUILT_IN_LLRINT):
            CASE_FLT_FN (BUILT_IN_LLROUND):
            CASE_FLT_FN (BUILT_IN_LRINT):
            CASE_FLT_FN (BUILT_IN_LROUND):
            CASE_FLT_FN (BUILT_IN_MODF):
            CASE_FLT_FN (BUILT_IN_NEARBYINT):
            CASE_FLT_FN (BUILT_IN_POW):
            CASE_FLT_FN (BUILT_IN_RINT):
            CASE_FLT_FN (BUILT_IN_ROUND):
            CASE_FLT_FN (BUILT_IN_SIGNBIT):
            CASE_FLT_FN (BUILT_IN_SINH):
            CASE_FLT_FN (BUILT_IN_TANH):
            CASE_FLT_FN (BUILT_IN_TRUNC):
              /* True if the 1st argument is nonnegative.  */
              return tree_expr_nonnegative_p (TREE_VALUE (arglist));

            CASE_FLT_FN (BUILT_IN_FMAX):
              /* True if the 1st OR 2nd arguments are nonnegative.  */
              return tree_expr_nonnegative_p (TREE_VALUE (arglist))
                     || tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));

            CASE_FLT_FN (BUILT_IN_FMIN):
              /* True if the 1st AND 2nd arguments are nonnegative.  */
              return tree_expr_nonnegative_p (TREE_VALUE (arglist))
                     && tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));

            CASE_FLT_FN (BUILT_IN_COPYSIGN):
              /* True if the 2nd argument is nonnegative.  */
              return tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));

            default:
              break;
            }
      }

      /* ... fall through ...  */

    default:
      if (truth_value_p (TREE_CODE (t)))
        /* Truth values evaluate to 0 or 1, which is nonnegative.  */
        return 1;
    }

  /* We don't know the sign of `t', so be conservative and return false.  */
  return 0;
}
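
/* The PLUS_EXPR zero-extension rule above, checked on concrete types:
   adding two values zero-extended from types at least two bits
   narrower than the signed result type cannot go negative, because the
   sum needs at most MAX (prec1, prec2) + 1 bits.  Illustration only
   (assumes the usual 16-bit short and 32-bit int), under "#if 0":  */
#if 0
#include <assert.h>
#include <limits.h>

static void
demo_zero_extend_plus (void)
{
  unsigned short a = USHRT_MAX, b = USHRT_MAX;  /* 16-bit, unsigned */
  int sum = (int) a + (int) b;                  /* 32-bit, signed */

  /* The sum fits in 17 bits, so the 32-bit sign bit stays clear.  */
  assert (sum >= 0);
}
#endif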
/* Return true when T is an address and is known to be nonzero.
   For floating point we further ensure that T is not denormal.
   Similar logic is present in nonzero_address in rtlanal.h.  */

bool
tree_expr_nonzero_p (tree t)
{
  tree type = TREE_TYPE (t);

  /* Doing something useful for floating point would need more work.  */
  if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
    return false;

  switch (TREE_CODE (t))
    {
    case SSA_NAME:
      /* Query VRP to see if it has recorded any information about
         the range of this object.  */
      return ssa_name_nonzero_p (t);

    case ABS_EXPR:
      return tree_expr_nonzero_p (TREE_OPERAND (t, 0));

    case INTEGER_CST:
      /* We used to test for !integer_zerop here.  This does not work
         correctly if TREE_CONSTANT_OVERFLOW (t).  */
      return (TREE_INT_CST_LOW (t) != 0
              || TREE_INT_CST_HIGH (t) != 0);

    case PLUS_EXPR:
      if (!TYPE_UNSIGNED (type) && !flag_wrapv)
        {
          /* In the presence of negative values it is hard
             to say anything.  */
          if (!tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
              || !tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
            return false;
          /* One of the operands must be positive and the other non-negative.  */
          return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
                  || tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
        }
      break;

    case MULT_EXPR:
      if (!TYPE_UNSIGNED (type) && !flag_wrapv)
        return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
                && tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
      break;

    case NOP_EXPR:
      {
        tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
        tree outer_type = TREE_TYPE (t);

        return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
                && tree_expr_nonzero_p (TREE_OPERAND (t, 0)));
      }

    case ADDR_EXPR:
      {
        tree base = get_base_address (TREE_OPERAND (t, 0));

        if (!base)
          return false;

        /* Weak declarations may link to NULL.  */
        if (VAR_OR_FUNCTION_DECL_P (base))
          return !DECL_WEAK (base);

        /* Constants are never weak.  */
        if (CONSTANT_CLASS_P (base))
          return true;

        return false;
      }

    case COND_EXPR:
      return (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
              && tree_expr_nonzero_p (TREE_OPERAND (t, 2)));

    case MIN_EXPR:
      return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
              && tree_expr_nonzero_p (TREE_OPERAND (t, 1)));

    case MAX_EXPR:
      if (tree_expr_nonzero_p (TREE_OPERAND (t, 0)))
        {
          /* When both operands are nonzero, then MAX must be too.  */
          if (tree_expr_nonzero_p (TREE_OPERAND (t, 1)))
            return true;

          /* MAX where operand 0 is positive is positive.  */
          return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
        }
      /* MAX where operand 1 is positive is positive.  */
      else if (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
               && tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
        return true;
      break;

    case COMPOUND_EXPR:
    case MODIFY_EXPR:
    case BIND_EXPR:
      return tree_expr_nonzero_p (TREE_OPERAND (t, 1));

    case SAVE_EXPR:
    case NON_LVALUE_EXPR:
      return tree_expr_nonzero_p (TREE_OPERAND (t, 0));

    case BIT_IOR_EXPR:
      return tree_expr_nonzero_p (TREE_OPERAND (t, 1))
             || tree_expr_nonzero_p (TREE_OPERAND (t, 0));

    case CALL_EXPR:
      return alloca_call_p (t);

    default:
      break;
    }
  return false;
}
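
/* The PLUS_EXPR case above in miniature: absent wraparound, the sum of
   two nonnegative values is nonzero whenever either addend is nonzero,
   since no cancellation is possible.  Illustration only, under
   "#if 0" (the caveat in the assert mirrors the !flag_wrapv test):  */
#if 0
#include <assert.h>

static void
demo_nonzero_plus (int a, int b)
{
  if (a >= 0 && b >= 0 && (a != 0 || b != 0))
    assert (a + b != 0);   /* holds barring signed overflow */
}
#endif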
/* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
   attempt to fold the expression to a constant without modifying TYPE,
   OP0 or OP1.

   If the expression could be simplified to a constant, then return
   the constant.  If the expression would not be simplified to a
   constant, then return NULL_TREE.  */

tree
fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
{
  tree tem = fold_binary (code, type, op0, op1);
  return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
}

/* Given the components of a unary expression CODE, TYPE and OP0,
   attempt to fold the expression to a constant without modifying
   TYPE or OP0.

   If the expression could be simplified to a constant, then return
   the constant.  If the expression would not be simplified to a
   constant, then return NULL_TREE.  */

tree
fold_unary_to_constant (enum tree_code code, tree type, tree op0)
{
  tree tem = fold_unary (code, type, op0);
  return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
}
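
/* A hypothetical caller of the helper above, under "#if 0" as
   illustration only: folding 2 + 3 in integer_type_node yields the
   INTEGER_CST 5, while a non-constant operand makes the helper return
   NULL_TREE.  */
#if 0
static void
demo_fold_to_constant (void)
{
  tree two = build_int_cst (integer_type_node, 2);
  tree three = build_int_cst (integer_type_node, 3);
  tree five = fold_binary_to_constant (PLUS_EXPR, integer_type_node,
                                       two, three);
  gcc_assert (five && TREE_CODE (five) == INTEGER_CST);
}
#endif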
/* If EXP represents referencing an element in a constant string
   (either via pointer arithmetic or array indexing), return the
   tree representing the value accessed, otherwise return NULL.  */

tree
fold_read_from_constant_string (tree exp)
{
  if (TREE_CODE (exp) == INDIRECT_REF || TREE_CODE (exp) == ARRAY_REF)
    {
      tree exp1 = TREE_OPERAND (exp, 0);
      tree index;
      tree string;

      if (TREE_CODE (exp) == INDIRECT_REF)
        string = string_constant (exp1, &index);
      else
        {
          tree low_bound = array_ref_low_bound (exp);
          index = fold_convert (sizetype, TREE_OPERAND (exp, 1));

          /* Optimize the special-case of a zero lower bound.

             We convert the low_bound to sizetype to avoid some problems
             with constant folding.  (E.g. suppose the lower bound is 1,
             and its mode is QI.  Without the conversion, (ARRAY
             +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
             +INDEX), which becomes (ARRAY+255+INDEX).  Oops!)  */
          if (! integer_zerop (low_bound))
            index = size_diffop (index, fold_convert (sizetype, low_bound));

          string = exp1;
        }

      if (string
          && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
          && TREE_CODE (string) == STRING_CST
          && TREE_CODE (index) == INTEGER_CST
          && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
          && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
              == MODE_INT)
          && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
        return fold_convert (TREE_TYPE (exp),
                             build_int_cst (NULL_TREE,
                                            (TREE_STRING_POINTER (string)
                                             [TREE_INT_CST_LOW (index)])));
    }
  return NULL_TREE;
}
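
/* The narrow-type wrap the comment above warns about, made concrete:
   negating a QImode (unsigned char) lower bound in its own type wraps
   to 255, while widening first (the fold_convert to sizetype) gives
   the intended -1.  Illustration only, under "#if 0":  */
#if 0
#include <assert.h>

static void
demo_low_bound_wrap (void)
{
  unsigned char low_bound = 1;

  /* Negating in the narrow type wraps ...  */
  assert ((unsigned char) -low_bound == 255);
  /* ... while widening before negating does not.  */
  assert (-(int) low_bound == -1);
}
#endif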
/* Return the tree for neg (ARG0) when ARG0 is known to be either
   an integer constant or real constant.

   TYPE is the type of the result.  */

static tree
fold_negate_const (tree arg0, tree type)
{
  tree t = NULL_TREE;

  switch (TREE_CODE (arg0))
    {
    case INTEGER_CST:
      {
        unsigned HOST_WIDE_INT low;
        HOST_WIDE_INT high;
        int overflow = neg_double (TREE_INT_CST_LOW (arg0),
                                   TREE_INT_CST_HIGH (arg0),
                                   &low, &high);
        t = build_int_cst_wide (type, low, high);
        t = force_fit_type (t, 1,
                            (overflow | TREE_OVERFLOW (arg0))
                            && !TYPE_UNSIGNED (type),
                            TREE_CONSTANT_OVERFLOW (arg0));
        break;
      }

    case REAL_CST:
      t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
      break;

    default:
      gcc_unreachable ();
    }

  return t;
}
/* Return the tree for abs (ARG0) when ARG0 is known to be either
   an integer constant or real constant.

   TYPE is the type of the result.  */

static tree
fold_abs_const (tree arg0, tree type)
{
  tree t = NULL_TREE;

  switch (TREE_CODE (arg0))
    {
    case INTEGER_CST:
      /* If the value is unsigned, then the absolute value is
         the same as the ordinary value.  */
      if (TYPE_UNSIGNED (type))
        t = arg0;
      /* Similarly, if the value is non-negative.  */
      else if (INT_CST_LT (integer_minus_one_node, arg0))
        t = arg0;
      /* If the value is negative, then the absolute value is
         its negation.  */
      else
        {
          unsigned HOST_WIDE_INT low;
          HOST_WIDE_INT high;
          int overflow = neg_double (TREE_INT_CST_LOW (arg0),
                                     TREE_INT_CST_HIGH (arg0),
                                     &low, &high);
          t = build_int_cst_wide (type, low, high);
          t = force_fit_type (t, -1, overflow | TREE_OVERFLOW (arg0),
                              TREE_CONSTANT_OVERFLOW (arg0));
        }
      break;

    case REAL_CST:
      if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
        t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
      else
        t = arg0;
      break;

    default:
      gcc_unreachable ();
    }

  return t;
}
/* Return the tree for not (ARG0) when ARG0 is known to be an integer
   constant.  TYPE is the type of the result.  */

static tree
fold_not_const (tree arg0, tree type)
{
  tree t = NULL_TREE;

  gcc_assert (TREE_CODE (arg0) == INTEGER_CST);

  t = build_int_cst_wide (type,
                          ~ TREE_INT_CST_LOW (arg0),
                          ~ TREE_INT_CST_HIGH (arg0));
  t = force_fit_type (t, 0, TREE_OVERFLOW (arg0),
                      TREE_CONSTANT_OVERFLOW (arg0));

  return t;
}
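
/* fold_not_const can complement the low and high words independently
   because ~ operates bitwise, with no carries between words.  Checked
   on a 32-bit value split into two 16-bit halves; illustration only,
   under "#if 0":  */
#if 0
#include <assert.h>
#include <stdint.h>

static void
demo_two_word_not (uint32_t x)
{
  uint16_t lo = x & 0xffff, hi = x >> 16;
  uint32_t recombined = ((uint32_t) (uint16_t) ~hi << 16)
                        | (uint16_t) ~lo;

  /* Complementing each half equals complementing the whole.  */
  assert (recombined == ~x);
}
#endif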
/* Given CODE, a relational operator, the target type, TYPE and two
   constant operands OP0 and OP1, return the result of the
   relational operation.  If the result is not a compile time
   constant, then return NULL_TREE.  */

static tree
fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
{
  int result, invert;

  /* From here on, the only cases we handle are when the result is
     known to be a constant.  */

  if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
    {
      const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
      const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);

      /* Handle the cases where either operand is a NaN.  */
      if (real_isnan (c0) || real_isnan (c1))
        {
          switch (code)
            {
            case EQ_EXPR:
            case ORDERED_EXPR:
              result = 0;
              break;

            case NE_EXPR:
            case UNORDERED_EXPR:
            case UNLT_EXPR:
            case UNLE_EXPR:
            case UNGT_EXPR:
            case UNGE_EXPR:
            case UNEQ_EXPR:
              result = 1;
              break;

            case LT_EXPR:
            case LE_EXPR:
            case GT_EXPR:
            case GE_EXPR:
            case LTGT_EXPR:
              if (flag_trapping_math)
                return NULL_TREE;
              result = 0;
              break;

            default:
              gcc_unreachable ();
            }

          return constant_boolean_node (result, type);
        }

      return constant_boolean_node (real_compare (code, c0, c1), type);
    }

  /* From here on we only handle LT, LE, GT, GE, EQ and NE.

     To compute GT, swap the arguments and do LT.
     To compute GE, do LT and invert the result.
     To compute LE, swap the arguments, do LT and invert the result.
     To compute NE, do EQ and invert the result.

     Therefore, the code below must handle only EQ and LT.  */

  if (code == LE_EXPR || code == GT_EXPR)
    {
      tree tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_tree_comparison (code);
    }

  /* Note that it is safe to invert for real values here because we
     have already handled the one case where it matters.  */

  invert = 0;
  if (code == NE_EXPR || code == GE_EXPR)
    {
      invert = 1;
      code = invert_tree_comparison (code, false);
    }

  /* Compute a result for LT or EQ if args permit;
     otherwise return NULL_TREE.  */
  if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
    {
      if (code == EQ_EXPR)
        result = tree_int_cst_equal (op0, op1);
      else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
        result = INT_CST_LT_UNSIGNED (op0, op1);
      else
        result = INT_CST_LT (op0, op1);
    }
  else
    return NULL_TREE;

  if (invert)
    result ^= 1;
  return constant_boolean_node (result, type);
}
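
/* The LT/EQ reduction above in miniature: every ordered comparison can
   be phrased with only "less than" and "equal" by swapping operands
   and/or inverting the result.  A stand-alone model (hypothetical,
   not GCC API), under "#if 0":  */
#if 0
#include <assert.h>

static int lt (int a, int b) { return a < b; }
static int eq (int a, int b) { return a == b; }

static void
demo_lt_eq_reduction (int a, int b)
{
  assert ((a > b)  == lt (b, a));    /* GT: swap                 */
  assert ((a >= b) == !lt (a, b));   /* GE: invert               */
  assert ((a <= b) == !lt (b, a));   /* LE: swap and invert      */
  assert ((a != b) == !eq (a, b));   /* NE: invert               */
}
#endif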
/* Build an expression for a cleanup point containing EXPR with type TYPE.
   Don't build a cleanup point expression for EXPR which doesn't have side
   effects.  */

tree
fold_build_cleanup_point_expr (tree type, tree expr)
{
  /* If the expression does not have side effects then we don't have to wrap
     it with a cleanup point expression.  */
  if (!TREE_SIDE_EFFECTS (expr))
    return expr;

  /* If the expression is a return, check whether the expression inside the
     return, or the right hand side of the modify expression inside the
     return, has no side effects.  If either doesn't have side effects we
     don't need to wrap the expression in a cleanup point expression.  Note
     we don't check the left hand side of the modify because it should
     always be a return decl.  */
  if (TREE_CODE (expr) == RETURN_EXPR)
    {
      tree op = TREE_OPERAND (expr, 0);
      if (!op || !TREE_SIDE_EFFECTS (op))
        return expr;
      op = TREE_OPERAND (op, 1);
      if (!TREE_SIDE_EFFECTS (op))
        return expr;
    }

  return build1 (CLEANUP_POINT_EXPR, type, expr);
}
/* Build an expression for the address of T.  Folds away INDIRECT_REF to
   avoid confusing the gimplify process.  */

tree
build_fold_addr_expr_with_type (tree t, tree ptrtype)
{
  /* The size of the object is not relevant when talking about its address.  */
  if (TREE_CODE (t) == WITH_SIZE_EXPR)
    t = TREE_OPERAND (t, 0);

  /* Note: doesn't apply to ALIGN_INDIRECT_REF */
  if (TREE_CODE (t) == INDIRECT_REF
      || TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
    {
      t = TREE_OPERAND (t, 0);
      if (TREE_TYPE (t) != ptrtype)
        t = build1 (NOP_EXPR, ptrtype, t);
    }
  else
    {
      tree base = t;

      while (handled_component_p (base))
        base = TREE_OPERAND (base, 0);
      if (DECL_P (base))
        TREE_ADDRESSABLE (base) = 1;

      t = build1 (ADDR_EXPR, ptrtype, t);
    }

  return t;
}

tree
build_fold_addr_expr (tree t)
{
  return build_fold_addr_expr_with_type (t, build_pointer_type (TREE_TYPE (t)));
}
/* Given a pointer value OP0 and a type TYPE, return a simplified version
   of an indirection through OP0, or NULL_TREE if no simplification is
   possible.  */

static tree
fold_indirect_ref_1 (tree type, tree op0)
{
  tree sub = op0;
  tree subtype;

  STRIP_NOPS (sub);
  subtype = TREE_TYPE (sub);
  if (!POINTER_TYPE_P (subtype))
    return NULL_TREE;

  if (TREE_CODE (sub) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (sub, 0);
      tree optype = TREE_TYPE (op);
      /* *&p => p;  make sure to handle *&"str"[cst] here.  */
      if (type == optype)
        {
          tree fop = fold_read_from_constant_string (op);
          if (fop)
            return fop;
          else
            return op;
        }
      /* *(foo *)&fooarray => fooarray[0] */
      else if (TREE_CODE (optype) == ARRAY_TYPE
               && type == TREE_TYPE (optype))
        {
          tree type_domain = TYPE_DOMAIN (optype);
          tree min_val = size_zero_node;
          if (type_domain && TYPE_MIN_VALUE (type_domain))
            min_val = TYPE_MIN_VALUE (type_domain);
          return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
        }
      /* *(foo *)&complexfoo => __real__ complexfoo */
      else if (TREE_CODE (optype) == COMPLEX_TYPE
               && type == TREE_TYPE (optype))
        return fold_build1 (REALPART_EXPR, type, op);
    }

  /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
  if (TREE_CODE (sub) == PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
    {
      tree op00 = TREE_OPERAND (sub, 0);
      tree op01 = TREE_OPERAND (sub, 1);
      tree op00type;

      STRIP_NOPS (op00);
      op00type = TREE_TYPE (op00);
      if (TREE_CODE (op00) == ADDR_EXPR
          && TREE_CODE (TREE_TYPE (op00type)) == COMPLEX_TYPE
          && type == TREE_TYPE (TREE_TYPE (op00type)))
        {
          tree size = TYPE_SIZE_UNIT (type);
          if (tree_int_cst_equal (size, op01))
            return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (op00, 0));
        }
    }

  /* *(foo *)fooarrptr => (*fooarrptr)[0] */
  if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
      && type == TREE_TYPE (TREE_TYPE (subtype)))
    {
      tree type_domain;
      tree min_val = size_zero_node;
      sub = build_fold_indirect_ref (sub);
      type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
      if (type_domain && TYPE_MIN_VALUE (type_domain))
        min_val = TYPE_MIN_VALUE (type_domain);
      return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
    }

  return NULL_TREE;
}
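
/* The COMPLEX_TYPE simplifications above mirror the C99 layout
   guarantee that a complex value is stored as a two-element array of
   its component type, so element [0] is the real part and element [1]
   the imaginary part.  Illustration only, under "#if 0":  */
#if 0
#include <assert.h>
#include <complex.h>

static void
demo_complex_layout (void)
{
  double _Complex z = 3.0 + 4.0 * I;

  assert (((double *) &z)[0] == 3.0);   /* __real__ z */
  assert (((double *) &z)[1] == 4.0);   /* __imag__ z */
}
#endif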
/* Builds an expression for an indirection through T, simplifying some
   cases.  */

tree
build_fold_indirect_ref (tree t)
{
  tree type = TREE_TYPE (TREE_TYPE (t));
  tree sub = fold_indirect_ref_1 (type, t);

  if (sub)
    return sub;
  else
    return build1 (INDIRECT_REF, type, t);
}

/* Given an INDIRECT_REF T, return either T or a simplified version.  */

tree
fold_indirect_ref (tree t)
{
  tree sub = fold_indirect_ref_1 (TREE_TYPE (t), TREE_OPERAND (t, 0));

  if (sub)
    return sub;
  else
    return t;
}
/* Strip non-trapping, non-side-effecting tree nodes from an expression
   whose result is ignored.  The type of the returned tree need not be
   the same as the original expression.  */

tree
fold_ignored_result (tree t)
{
  if (!TREE_SIDE_EFFECTS (t))
    return integer_zero_node;

  for (;;)
    switch (TREE_CODE_CLASS (TREE_CODE (t)))
      {
      case tcc_unary:
        t = TREE_OPERAND (t, 0);
        break;

      case tcc_binary:
      case tcc_comparison:
        if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
          t = TREE_OPERAND (t, 0);
        else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
          t = TREE_OPERAND (t, 1);
        else
          return t;
        break;

      case tcc_expression:
        switch (TREE_CODE (t))
          {
          case COMPOUND_EXPR:
            if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
              return t;
            t = TREE_OPERAND (t, 0);
            break;

          case COND_EXPR:
            if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
                || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
              return t;
            t = TREE_OPERAND (t, 0);
            break;

          default:
            return t;
          }
        break;

      default:
        return t;
      }
}
/* Return the value of VALUE, rounded up to a multiple of DIVISOR.
   This can only be applied to objects of a sizetype.  */

tree
round_up (tree value, int divisor)
{
  tree div = NULL_TREE;

  gcc_assert (divisor > 0);
  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this when we are not given a const,
     because in that case, this check is more expensive than just
     doing it.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
        return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (divisor == (divisor & -divisor))
    {
      tree t;

      t = build_int_cst (TREE_TYPE (value), divisor - 1);
      value = size_binop (PLUS_EXPR, value, t);
      t = build_int_cst (TREE_TYPE (value), -divisor);
      value = size_binop (BIT_AND_EXPR, value, t);
    }
  else
    {
      if (!div)
        div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop (CEIL_DIV_EXPR, value, div);
      value = size_binop (MULT_EXPR, value, div);
    }

  return value;
}
/* Likewise, but round down.  */

tree
round_down (tree value, int divisor)
{
  tree div = NULL_TREE;

  gcc_assert (divisor > 0);
  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this when we are not given a const,
     because in that case, this check is more expensive than just
     doing it.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
        return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (divisor == (divisor & -divisor))
    {
      tree t;

      t = build_int_cst (TREE_TYPE (value), -divisor);
      value = size_binop (BIT_AND_EXPR, value, t);
    }
  else
    {
      if (!div)
        div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop (FLOOR_DIV_EXPR, value, div);
      value = size_binop (MULT_EXPR, value, div);
    }

  return value;
}
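
/* The power-of-two fast paths in round_up and round_down above are the
   classic mask tricks, shown here on plain unsigned arithmetic.
   Illustration only, under "#if 0":  */
#if 0
#include <assert.h>

static void
demo_rounding_masks (void)
{
  unsigned divisor = 8;    /* power of two */
  unsigned value = 13;

  /* (value + divisor - 1) & -divisor rounds up; value & -divisor
     rounds down.  -divisor is the mask with the low bits cleared.  */
  assert (((value + divisor - 1) & -divisor) == 16);
  assert ((value & -divisor) == 8);
}
#endif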
/* Returns the pointer to the base of the object addressed by EXP and
   extracts the information about the offset of the access, storing it
   to PBITPOS and POFFSET.  */

static tree
split_address_to_core_and_offset (tree exp,
                                  HOST_WIDE_INT *pbitpos, tree *poffset)
{
  tree core;
  enum machine_mode mode;
  int unsignedp, volatilep;
  HOST_WIDE_INT bitsize;

  if (TREE_CODE (exp) == ADDR_EXPR)
    {
      core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
                                  poffset, &mode, &unsignedp, &volatilep,
                                  false);
      core = build_fold_addr_expr (core);
    }
  else
    {
      core = exp;
      *pbitpos = 0;
      *poffset = NULL_TREE;
    }

  return core;
}
/* Returns true if addresses of E1 and E2 differ by a constant, false
   otherwise.  If they do, E1 - E2 is stored in *DIFF.  */

bool
ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
{
  tree core1, core2;
  HOST_WIDE_INT bitpos1, bitpos2;
  tree toffset1, toffset2, tdiff, type;

  core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
  core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);

  if (bitpos1 % BITS_PER_UNIT != 0
      || bitpos2 % BITS_PER_UNIT != 0
      || !operand_equal_p (core1, core2, 0))
    return false;

  if (toffset1 && toffset2)
    {
      type = TREE_TYPE (toffset1);
      if (type != TREE_TYPE (toffset2))
        toffset2 = fold_convert (type, toffset2);

      tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
      if (!cst_and_fits_in_hwi (tdiff))
        return false;

      *diff = int_cst_value (tdiff);
    }
  else if (toffset1 || toffset2)
    {
      /* If only one of the offsets is non-constant, the difference cannot
         be a constant.  */
      return false;
    }
  else
    *diff = 0;

  *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
  return true;
}
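
/* ptr_difference_const in miniature: two addresses differ by a
   constant exactly when they share a core object and their offsets
   differ by a constant.  Illustration only, under "#if 0":  */
#if 0
#include <assert.h>

static void
demo_ptr_difference (void)
{
  int a[10];

  /* Same core (A); offsets 5 * sizeof (int) and 2 * sizeof (int),
     so the difference is the constant 3 elements.  */
  assert (&a[5] - &a[2] == 3);
}
#endif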
/* Simplify the floating point expression EXP when the sign of the
   result is not significant.  Return NULL_TREE if no simplification
   is possible.  */

tree
fold_strip_sign_ops (tree exp)
{
  tree arg0, arg1;

  switch (TREE_CODE (exp))
    {
    case ABS_EXPR:
    case NEGATE_EXPR:
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      return arg0 ? arg0 : TREE_OPERAND (exp, 0);

    case MULT_EXPR:
    case RDIV_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
        return NULL_TREE;
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      if (arg0 != NULL_TREE || arg1 != NULL_TREE)
        return fold_build2 (TREE_CODE (exp), TREE_TYPE (exp),
                            arg0 ? arg0 : TREE_OPERAND (exp, 0),
                            arg1 ? arg1 : TREE_OPERAND (exp, 1));