/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005, 2006 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.  */
/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et. al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */
/* The entry points in this file are fold, size_int_wide, size_binop
   and force_fit_type.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   force_fit_type takes a constant, an overflowable flag and prior
   overflow indicators.  It forces the value to fit the type and sets
   TREE_OVERFLOW and TREE_CONSTANT_OVERFLOW as appropriate.  */
48 #include "coretypes.h"
59 #include "langhooks.h"
/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};
84 static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
85 static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *);
86 static bool negate_mathfn_p (enum built_in_function);
87 static bool negate_expr_p (tree);
88 static tree negate_expr (tree);
89 static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
90 static tree associate_trees (tree, tree, enum tree_code, tree);
91 static tree const_binop (enum tree_code, tree, tree, int);
92 static enum comparison_code comparison_to_compcode (enum tree_code);
93 static enum tree_code compcode_to_comparison (enum comparison_code);
94 static tree combine_comparisons (enum tree_code, enum tree_code,
95 enum tree_code, tree, tree, tree);
96 static int truth_value_p (enum tree_code);
97 static int operand_equal_for_comparison_p (tree, tree, tree);
98 static int twoval_comparison_p (tree, tree *, tree *, int *);
99 static tree eval_subst (tree, tree, tree, tree, tree);
100 static tree pedantic_omit_one_operand (tree, tree, tree);
101 static tree distribute_bit_expr (enum tree_code, tree, tree, tree);
102 static tree make_bit_field_ref (tree, tree, int, int, int);
103 static tree optimize_bit_field_compare (enum tree_code, tree, tree, tree);
104 static tree decode_field_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
105 enum machine_mode *, int *, int *,
107 static int all_ones_mask_p (tree, int);
108 static tree sign_bit_p (tree, tree);
109 static int simple_operand_p (tree);
110 static tree range_binop (enum tree_code, tree, tree, int, tree, int);
111 static tree range_predecessor (tree);
112 static tree range_successor (tree);
113 static tree make_range (tree, int *, tree *, tree *);
114 static tree build_range_check (tree, tree, int, tree, tree);
115 static int merge_ranges (int *, tree *, tree *, int, tree, tree, int, tree,
117 static tree fold_range_test (enum tree_code, tree, tree, tree);
118 static tree fold_cond_expr_with_comparison (tree, tree, tree, tree);
119 static tree unextend (tree, int, int, tree);
120 static tree fold_truthop (enum tree_code, tree, tree, tree);
121 static tree optimize_minmax_comparison (enum tree_code, tree, tree, tree);
122 static tree extract_muldiv (tree, tree, enum tree_code, tree);
123 static tree extract_muldiv_1 (tree, tree, enum tree_code, tree);
124 static int multiple_of_p (tree, tree, tree);
125 static tree fold_binary_op_with_conditional_arg (enum tree_code, tree,
128 static bool fold_real_zero_addition_p (tree, tree, int);
129 static tree fold_mathfn_compare (enum built_in_function, enum tree_code,
131 static tree fold_inf_compare (enum tree_code, tree, tree, tree);
132 static tree fold_div_compare (enum tree_code, tree, tree, tree);
133 static bool reorder_operands_p (tree, tree);
134 static tree fold_negate_const (tree, tree);
135 static tree fold_not_const (tree, tree);
136 static tree fold_relational_const (enum tree_code, tree, tree, tree);
137 static int native_encode_expr (tree, unsigned char *, int);
138 static tree native_interpret_expr (tree, unsigned char *, int);
/* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
   overflow.  Suppose A, B and SUM have the same respective signs as A1, B1,
   and SUM1.  Then this yields nonzero if overflow occurred during the
   addition.

   Overflow occurs if A and B have the same sign, but A and SUM differ in
   sign.  Use `^' to test whether signs differ, and `< 0' to isolate the
   sign.  */
#define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)

/* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
   We do that by representing the two-word integer in 4 words, with only
   HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
   number.  The value of the word is LOWPART + HIGHPART * BASE.  */

#define LOWPART(x) \
  ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
#define HIGHPART(x) \
  ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
#define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)
162 /* Unpack a two-word integer into 4 words.
163 LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
164 WORDS points to the array of HOST_WIDE_INTs. */
167 encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
169 words[0] = LOWPART (low);
170 words[1] = HIGHPART (low);
171 words[2] = LOWPART (hi);
172 words[3] = HIGHPART (hi);
175 /* Pack an array of 4 words into a two-word integer.
176 WORDS points to the array of words.
177 The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces. */
180 decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
183 *low = words[0] + words[1] * BASE;
184 *hi = words[2] + words[3] * BASE;
187 /* T is an INT_CST node. OVERFLOWABLE indicates if we are interested
188 in overflow of the value, when >0 we are only interested in signed
189 overflow, for <0 we are interested in any overflow. OVERFLOWED
190 indicates whether overflow has already occurred. CONST_OVERFLOWED
191 indicates whether constant overflow has already occurred. We force
192 T's value to be within range of T's type (by setting to 0 or 1 all
193 the bits outside the type's range). We set TREE_OVERFLOWED if,
194 OVERFLOWED is nonzero,
195 or OVERFLOWABLE is >0 and signed overflow occurs
196 or OVERFLOWABLE is <0 and any overflow occurs
197 We set TREE_CONSTANT_OVERFLOWED if,
198 CONST_OVERFLOWED is nonzero
199 or we set TREE_OVERFLOWED.
200 We return either the original T, or a copy. */
203 force_fit_type (tree t, int overflowable,
204 bool overflowed, bool overflowed_const)
206 unsigned HOST_WIDE_INT low;
209 int sign_extended_type;
211 gcc_assert (TREE_CODE (t) == INTEGER_CST);
213 low = TREE_INT_CST_LOW (t);
214 high = TREE_INT_CST_HIGH (t);
216 if (POINTER_TYPE_P (TREE_TYPE (t))
217 || TREE_CODE (TREE_TYPE (t)) == OFFSET_TYPE)
220 prec = TYPE_PRECISION (TREE_TYPE (t));
221 /* Size types *are* sign extended. */
222 sign_extended_type = (!TYPE_UNSIGNED (TREE_TYPE (t))
223 || (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
224 && TYPE_IS_SIZETYPE (TREE_TYPE (t))));
226 /* First clear all bits that are beyond the type's precision. */
228 if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
230 else if (prec > HOST_BITS_PER_WIDE_INT)
231 high &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
235 if (prec < HOST_BITS_PER_WIDE_INT)
236 low &= ~((HOST_WIDE_INT) (-1) << prec);
239 if (!sign_extended_type)
240 /* No sign extension */;
241 else if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
242 /* Correct width already. */;
243 else if (prec > HOST_BITS_PER_WIDE_INT)
245 /* Sign extend top half? */
246 if (high & ((unsigned HOST_WIDE_INT)1
247 << (prec - HOST_BITS_PER_WIDE_INT - 1)))
248 high |= (HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT);
250 else if (prec == HOST_BITS_PER_WIDE_INT)
252 if ((HOST_WIDE_INT)low < 0)
257 /* Sign extend bottom half? */
258 if (low & ((unsigned HOST_WIDE_INT)1 << (prec - 1)))
261 low |= (HOST_WIDE_INT)(-1) << prec;
265 /* If the value changed, return a new node. */
266 if (overflowed || overflowed_const
267 || low != TREE_INT_CST_LOW (t) || high != TREE_INT_CST_HIGH (t))
269 t = build_int_cst_wide (TREE_TYPE (t), low, high);
273 || (overflowable > 0 && sign_extended_type))
276 TREE_OVERFLOW (t) = 1;
277 TREE_CONSTANT_OVERFLOW (t) = 1;
279 else if (overflowed_const)
282 TREE_CONSTANT_OVERFLOW (t) = 1;
289 /* Add two doubleword integers with doubleword result.
290 Each argument is given as two `HOST_WIDE_INT' pieces.
291 One argument is L1 and H1; the other, L2 and H2.
292 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
295 add_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
296 unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
297 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
299 unsigned HOST_WIDE_INT l;
303 h = h1 + h2 + (l < l1);
307 return OVERFLOW_SUM_SIGN (h1, h2, h);
310 /* Negate a doubleword integer with doubleword result.
311 Return nonzero if the operation overflows, assuming it's signed.
312 The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
313 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
316 neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
317 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
323 return (*hv & h1) < 0;
333 /* Multiply two doubleword integers with doubleword result.
334 Return nonzero if the operation overflows, assuming it's signed.
335 Each argument is given as two `HOST_WIDE_INT' pieces.
336 One argument is L1 and H1; the other, L2 and H2.
337 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
340 mul_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
341 unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
342 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
344 HOST_WIDE_INT arg1[4];
345 HOST_WIDE_INT arg2[4];
346 HOST_WIDE_INT prod[4 * 2];
347 unsigned HOST_WIDE_INT carry;
349 unsigned HOST_WIDE_INT toplow, neglow;
350 HOST_WIDE_INT tophigh, neghigh;
352 encode (arg1, l1, h1);
353 encode (arg2, l2, h2);
355 memset (prod, 0, sizeof prod);
357 for (i = 0; i < 4; i++)
360 for (j = 0; j < 4; j++)
363 /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000. */
364 carry += arg1[i] * arg2[j];
365 /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF. */
367 prod[k] = LOWPART (carry);
368 carry = HIGHPART (carry);
373 decode (prod, lv, hv); /* This ignores prod[4] through prod[4*2-1] */
375 /* Check for overflow by calculating the top half of the answer in full;
376 it should agree with the low half's sign bit. */
377 decode (prod + 4, &toplow, &tophigh);
380 neg_double (l2, h2, &neglow, &neghigh);
381 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
385 neg_double (l1, h1, &neglow, &neghigh);
386 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
388 return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
391 /* Shift the doubleword integer in L1, H1 left by COUNT places
392 keeping only PREC bits of result.
393 Shift right if COUNT is negative.
394 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
395 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
398 lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
399 HOST_WIDE_INT count, unsigned int prec,
400 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
402 unsigned HOST_WIDE_INT signmask;
406 rshift_double (l1, h1, -count, prec, lv, hv, arith);
410 if (SHIFT_COUNT_TRUNCATED)
413 if (count >= 2 * HOST_BITS_PER_WIDE_INT)
415 /* Shifting by the host word size is undefined according to the
416 ANSI standard, so we must handle this as a special case. */
420 else if (count >= HOST_BITS_PER_WIDE_INT)
422 *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
427 *hv = (((unsigned HOST_WIDE_INT) h1 << count)
428 | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
432 /* Sign extend all bits that are beyond the precision. */
434 signmask = -((prec > HOST_BITS_PER_WIDE_INT
435 ? ((unsigned HOST_WIDE_INT) *hv
436 >> (prec - HOST_BITS_PER_WIDE_INT - 1))
437 : (*lv >> (prec - 1))) & 1);
439 if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
441 else if (prec >= HOST_BITS_PER_WIDE_INT)
443 *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
444 *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
449 *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
450 *lv |= signmask << prec;
454 /* Shift the doubleword integer in L1, H1 right by COUNT places
455 keeping only PREC bits of result. COUNT must be positive.
456 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
457 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
460 rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
461 HOST_WIDE_INT count, unsigned int prec,
462 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
465 unsigned HOST_WIDE_INT signmask;
468 ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
471 if (SHIFT_COUNT_TRUNCATED)
474 if (count >= 2 * HOST_BITS_PER_WIDE_INT)
476 /* Shifting by the host word size is undefined according to the
477 ANSI standard, so we must handle this as a special case. */
481 else if (count >= HOST_BITS_PER_WIDE_INT)
484 *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
488 *hv = (unsigned HOST_WIDE_INT) h1 >> count;
490 | ((unsigned HOST_WIDE_INT) h1 << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
493 /* Zero / sign extend all bits that are beyond the precision. */
495 if (count >= (HOST_WIDE_INT)prec)
500 else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
502 else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
504 *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
505 *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
510 *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
511 *lv |= signmask << (prec - count);
515 /* Rotate the doubleword integer in L1, H1 left by COUNT places
516 keeping only PREC bits of result.
517 Rotate right if COUNT is negative.
518 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
521 lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
522 HOST_WIDE_INT count, unsigned int prec,
523 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
525 unsigned HOST_WIDE_INT s1l, s2l;
526 HOST_WIDE_INT s1h, s2h;
532 lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
533 rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
538 /* Rotate the doubleword integer in L1, H1 left by COUNT places
539 keeping only PREC bits of result. COUNT must be positive.
540 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
543 rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
544 HOST_WIDE_INT count, unsigned int prec,
545 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
547 unsigned HOST_WIDE_INT s1l, s2l;
548 HOST_WIDE_INT s1h, s2h;
554 rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
555 lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
560 /* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
561 for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
562 CODE is a tree code for a kind of division, one of
563 TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
565 It controls how the quotient is rounded to an integer.
566 Return nonzero if the operation overflows.
567 UNS nonzero says do unsigned division. */
570 div_and_round_double (enum tree_code code, int uns,
571 unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
572 HOST_WIDE_INT hnum_orig,
573 unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
574 HOST_WIDE_INT hden_orig,
575 unsigned HOST_WIDE_INT *lquo,
576 HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
580 HOST_WIDE_INT num[4 + 1]; /* extra element for scaling. */
581 HOST_WIDE_INT den[4], quo[4];
583 unsigned HOST_WIDE_INT work;
584 unsigned HOST_WIDE_INT carry = 0;
585 unsigned HOST_WIDE_INT lnum = lnum_orig;
586 HOST_WIDE_INT hnum = hnum_orig;
587 unsigned HOST_WIDE_INT lden = lden_orig;
588 HOST_WIDE_INT hden = hden_orig;
591 if (hden == 0 && lden == 0)
592 overflow = 1, lden = 1;
594 /* Calculate quotient sign and convert operands to unsigned. */
600 /* (minimum integer) / (-1) is the only overflow case. */
601 if (neg_double (lnum, hnum, &lnum, &hnum)
602 && ((HOST_WIDE_INT) lden & hden) == -1)
608 neg_double (lden, hden, &lden, &hden);
612 if (hnum == 0 && hden == 0)
613 { /* single precision */
615 /* This unsigned division rounds toward zero. */
621 { /* trivial case: dividend < divisor */
622 /* hden != 0 already checked. */
629 memset (quo, 0, sizeof quo);
631 memset (num, 0, sizeof num); /* to zero 9th element */
632 memset (den, 0, sizeof den);
634 encode (num, lnum, hnum);
635 encode (den, lden, hden);
637 /* Special code for when the divisor < BASE. */
638 if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
640 /* hnum != 0 already checked. */
641 for (i = 4 - 1; i >= 0; i--)
643 work = num[i] + carry * BASE;
644 quo[i] = work / lden;
650 /* Full double precision division,
651 with thanks to Don Knuth's "Seminumerical Algorithms". */
652 int num_hi_sig, den_hi_sig;
653 unsigned HOST_WIDE_INT quo_est, scale;
655 /* Find the highest nonzero divisor digit. */
656 for (i = 4 - 1;; i--)
663 /* Insure that the first digit of the divisor is at least BASE/2.
664 This is required by the quotient digit estimation algorithm. */
666 scale = BASE / (den[den_hi_sig] + 1);
668 { /* scale divisor and dividend */
670 for (i = 0; i <= 4 - 1; i++)
672 work = (num[i] * scale) + carry;
673 num[i] = LOWPART (work);
674 carry = HIGHPART (work);
679 for (i = 0; i <= 4 - 1; i++)
681 work = (den[i] * scale) + carry;
682 den[i] = LOWPART (work);
683 carry = HIGHPART (work);
684 if (den[i] != 0) den_hi_sig = i;
691 for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
693 /* Guess the next quotient digit, quo_est, by dividing the first
694 two remaining dividend digits by the high order quotient digit.
695 quo_est is never low and is at most 2 high. */
696 unsigned HOST_WIDE_INT tmp;
698 num_hi_sig = i + den_hi_sig + 1;
699 work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
700 if (num[num_hi_sig] != den[den_hi_sig])
701 quo_est = work / den[den_hi_sig];
705 /* Refine quo_est so it's usually correct, and at most one high. */
706 tmp = work - quo_est * den[den_hi_sig];
708 && (den[den_hi_sig - 1] * quo_est
709 > (tmp * BASE + num[num_hi_sig - 2])))
712 /* Try QUO_EST as the quotient digit, by multiplying the
713 divisor by QUO_EST and subtracting from the remaining dividend.
714 Keep in mind that QUO_EST is the I - 1st digit. */
717 for (j = 0; j <= den_hi_sig; j++)
719 work = quo_est * den[j] + carry;
720 carry = HIGHPART (work);
721 work = num[i + j] - LOWPART (work);
722 num[i + j] = LOWPART (work);
723 carry += HIGHPART (work) != 0;
726 /* If quo_est was high by one, then num[i] went negative and
727 we need to correct things. */
728 if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
731 carry = 0; /* add divisor back in */
732 for (j = 0; j <= den_hi_sig; j++)
734 work = num[i + j] + den[j] + carry;
735 carry = HIGHPART (work);
736 num[i + j] = LOWPART (work);
739 num [num_hi_sig] += carry;
742 /* Store the quotient digit. */
747 decode (quo, lquo, hquo);
750 /* If result is negative, make it so. */
752 neg_double (*lquo, *hquo, lquo, hquo);
754 /* Compute trial remainder: rem = num - (quo * den) */
755 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
756 neg_double (*lrem, *hrem, lrem, hrem);
757 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
762 case TRUNC_MOD_EXPR: /* round toward zero */
763 case EXACT_DIV_EXPR: /* for this one, it shouldn't matter */
767 case FLOOR_MOD_EXPR: /* round toward negative infinity */
768 if (quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio < 0 && rem != 0 */
771 add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
779 case CEIL_MOD_EXPR: /* round toward positive infinity */
780 if (!quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio > 0 && rem != 0 */
782 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
790 case ROUND_MOD_EXPR: /* round to closest integer */
792 unsigned HOST_WIDE_INT labs_rem = *lrem;
793 HOST_WIDE_INT habs_rem = *hrem;
794 unsigned HOST_WIDE_INT labs_den = lden, ltwice;
795 HOST_WIDE_INT habs_den = hden, htwice;
797 /* Get absolute values. */
799 neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
801 neg_double (lden, hden, &labs_den, &habs_den);
803 /* If (2 * abs (lrem) >= abs (lden)) */
804 mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
805 labs_rem, habs_rem, <wice, &htwice);
807 if (((unsigned HOST_WIDE_INT) habs_den
808 < (unsigned HOST_WIDE_INT) htwice)
809 || (((unsigned HOST_WIDE_INT) habs_den
810 == (unsigned HOST_WIDE_INT) htwice)
811 && (labs_den < ltwice)))
815 add_double (*lquo, *hquo,
816 (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
819 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
831 /* Compute true remainder: rem = num - (quo * den) */
832 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
833 neg_double (*lrem, *hrem, lrem, hrem);
834 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
838 /* If ARG2 divides ARG1 with zero remainder, carries out the division
839 of type CODE and returns the quotient.
840 Otherwise returns NULL_TREE. */
843 div_if_zero_remainder (enum tree_code code, tree arg1, tree arg2)
845 unsigned HOST_WIDE_INT int1l, int2l;
846 HOST_WIDE_INT int1h, int2h;
847 unsigned HOST_WIDE_INT quol, reml;
848 HOST_WIDE_INT quoh, remh;
849 tree type = TREE_TYPE (arg1);
850 int uns = TYPE_UNSIGNED (type);
852 int1l = TREE_INT_CST_LOW (arg1);
853 int1h = TREE_INT_CST_HIGH (arg1);
854 int2l = TREE_INT_CST_LOW (arg2);
855 int2h = TREE_INT_CST_HIGH (arg2);
857 div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
858 &quol, &quoh, &reml, &remh);
859 if (remh != 0 || reml != 0)
862 return build_int_cst_wide (type, quol, quoh);
865 /* Return true if the built-in mathematical function specified by CODE
866 is odd, i.e. -f(x) == f(-x). */
869 negate_mathfn_p (enum built_in_function code)
873 CASE_FLT_FN (BUILT_IN_ASIN):
874 CASE_FLT_FN (BUILT_IN_ASINH):
875 CASE_FLT_FN (BUILT_IN_ATAN):
876 CASE_FLT_FN (BUILT_IN_ATANH):
877 CASE_FLT_FN (BUILT_IN_CBRT):
878 CASE_FLT_FN (BUILT_IN_SIN):
879 CASE_FLT_FN (BUILT_IN_SINH):
880 CASE_FLT_FN (BUILT_IN_TAN):
881 CASE_FLT_FN (BUILT_IN_TANH):
890 /* Check whether we may negate an integer constant T without causing
894 may_negate_without_overflow_p (tree t)
896 unsigned HOST_WIDE_INT val;
900 gcc_assert (TREE_CODE (t) == INTEGER_CST);
902 type = TREE_TYPE (t);
903 if (TYPE_UNSIGNED (type))
906 prec = TYPE_PRECISION (type);
907 if (prec > HOST_BITS_PER_WIDE_INT)
909 if (TREE_INT_CST_LOW (t) != 0)
911 prec -= HOST_BITS_PER_WIDE_INT;
912 val = TREE_INT_CST_HIGH (t);
915 val = TREE_INT_CST_LOW (t);
916 if (prec < HOST_BITS_PER_WIDE_INT)
917 val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
918 return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
921 /* Determine whether an expression T can be cheaply negated using
922 the function negate_expr. */
925 negate_expr_p (tree t)
932 type = TREE_TYPE (t);
935 switch (TREE_CODE (t))
938 if (TYPE_UNSIGNED (type) || ! flag_trapv)
941 /* Check that -CST will not overflow type. */
942 return may_negate_without_overflow_p (t);
944 return INTEGRAL_TYPE_P (type);
951 return negate_expr_p (TREE_REALPART (t))
952 && negate_expr_p (TREE_IMAGPART (t));
955 if (FLOAT_TYPE_P (type) && !flag_unsafe_math_optimizations)
957 /* -(A + B) -> (-B) - A. */
958 if (negate_expr_p (TREE_OPERAND (t, 1))
959 && reorder_operands_p (TREE_OPERAND (t, 0),
960 TREE_OPERAND (t, 1)))
962 /* -(A + B) -> (-A) - B. */
963 return negate_expr_p (TREE_OPERAND (t, 0));
966 /* We can't turn -(A-B) into B-A when we honor signed zeros. */
967 return (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
968 && reorder_operands_p (TREE_OPERAND (t, 0),
969 TREE_OPERAND (t, 1));
972 if (TYPE_UNSIGNED (TREE_TYPE (t)))
978 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
979 return negate_expr_p (TREE_OPERAND (t, 1))
980 || negate_expr_p (TREE_OPERAND (t, 0));
988 if (TYPE_UNSIGNED (TREE_TYPE (t)) || flag_wrapv)
990 return negate_expr_p (TREE_OPERAND (t, 1))
991 || negate_expr_p (TREE_OPERAND (t, 0));
994 /* Negate -((double)float) as (double)(-float). */
995 if (TREE_CODE (type) == REAL_TYPE)
997 tree tem = strip_float_extensions (t);
999 return negate_expr_p (tem);
1004 /* Negate -f(x) as f(-x). */
1005 if (negate_mathfn_p (builtin_mathfn_code (t)))
1006 return negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1)));
1010 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
1011 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
1013 tree op1 = TREE_OPERAND (t, 1);
1014 if (TREE_INT_CST_HIGH (op1) == 0
1015 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
1016 == TREE_INT_CST_LOW (op1))
1027 /* Given T, an expression, return the negation of T. Allow for T to be
1028 null, in which case return null. */
1031 negate_expr (tree t)
1039 type = TREE_TYPE (t);
1040 STRIP_SIGN_NOPS (t);
1042 switch (TREE_CODE (t))
1044 /* Convert - (~A) to A + 1. */
1046 if (INTEGRAL_TYPE_P (type))
1047 return fold_build2 (PLUS_EXPR, type, TREE_OPERAND (t, 0),
1048 build_int_cst (type, 1));
1052 tem = fold_negate_const (t, type);
1053 if (! TREE_OVERFLOW (tem)
1054 || TYPE_UNSIGNED (type)
1060 tem = fold_negate_const (t, type);
1061 /* Two's complement FP formats, such as c4x, may overflow. */
1062 if (! TREE_OVERFLOW (tem) || ! flag_trapping_math)
1063 return fold_convert (type, tem);
1068 tree rpart = negate_expr (TREE_REALPART (t));
1069 tree ipart = negate_expr (TREE_IMAGPART (t));
1071 if ((TREE_CODE (rpart) == REAL_CST
1072 && TREE_CODE (ipart) == REAL_CST)
1073 || (TREE_CODE (rpart) == INTEGER_CST
1074 && TREE_CODE (ipart) == INTEGER_CST))
1075 return build_complex (type, rpart, ipart);
1080 return fold_convert (type, TREE_OPERAND (t, 0));
1083 if (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
1085 /* -(A + B) -> (-B) - A. */
1086 if (negate_expr_p (TREE_OPERAND (t, 1))
1087 && reorder_operands_p (TREE_OPERAND (t, 0),
1088 TREE_OPERAND (t, 1)))
1090 tem = negate_expr (TREE_OPERAND (t, 1));
1091 tem = fold_build2 (MINUS_EXPR, TREE_TYPE (t),
1092 tem, TREE_OPERAND (t, 0));
1093 return fold_convert (type, tem);
1096 /* -(A + B) -> (-A) - B. */
1097 if (negate_expr_p (TREE_OPERAND (t, 0)))
1099 tem = negate_expr (TREE_OPERAND (t, 0));
1100 tem = fold_build2 (MINUS_EXPR, TREE_TYPE (t),
1101 tem, TREE_OPERAND (t, 1));
1102 return fold_convert (type, tem);
1108 /* - (A - B) -> B - A */
1109 if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
1110 && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
1111 return fold_convert (type,
1112 fold_build2 (MINUS_EXPR, TREE_TYPE (t),
1113 TREE_OPERAND (t, 1),
1114 TREE_OPERAND (t, 0)));
1118 if (TYPE_UNSIGNED (TREE_TYPE (t)))
1124 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
1126 tem = TREE_OPERAND (t, 1);
1127 if (negate_expr_p (tem))
1128 return fold_convert (type,
1129 fold_build2 (TREE_CODE (t), TREE_TYPE (t),
1130 TREE_OPERAND (t, 0),
1131 negate_expr (tem)));
1132 tem = TREE_OPERAND (t, 0);
1133 if (negate_expr_p (tem))
1134 return fold_convert (type,
1135 fold_build2 (TREE_CODE (t), TREE_TYPE (t),
1137 TREE_OPERAND (t, 1)));
1141 case TRUNC_DIV_EXPR:
1142 case ROUND_DIV_EXPR:
1143 case FLOOR_DIV_EXPR:
1145 case EXACT_DIV_EXPR:
1146 if (!TYPE_UNSIGNED (TREE_TYPE (t)) && !flag_wrapv)
1148 tem = TREE_OPERAND (t, 1);
1149 if (negate_expr_p (tem))
1150 return fold_convert (type,
1151 fold_build2 (TREE_CODE (t), TREE_TYPE (t),
1152 TREE_OPERAND (t, 0),
1153 negate_expr (tem)));
1154 tem = TREE_OPERAND (t, 0);
1155 if (negate_expr_p (tem))
1156 return fold_convert (type,
1157 fold_build2 (TREE_CODE (t), TREE_TYPE (t),
1159 TREE_OPERAND (t, 1)));
1164 /* Convert -((double)float) into (double)(-float). */
1165 if (TREE_CODE (type) == REAL_TYPE)
1167 tem = strip_float_extensions (t);
1168 if (tem != t && negate_expr_p (tem))
1169 return fold_convert (type, negate_expr (tem));
1174 /* Negate -f(x) as f(-x). */
1175 if (negate_mathfn_p (builtin_mathfn_code (t))
1176 && negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1))))
1178 tree fndecl, arg, arglist;
1180 fndecl = get_callee_fndecl (t);
1181 arg = negate_expr (TREE_VALUE (TREE_OPERAND (t, 1)));
1182 arglist = build_tree_list (NULL_TREE, arg);
1183 return build_function_call_expr (fndecl, arglist);
1188 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
1189 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
1191 tree op1 = TREE_OPERAND (t, 1);
1192 if (TREE_INT_CST_HIGH (op1) == 0
1193 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
1194 == TREE_INT_CST_LOW (op1))
1196 tree ntype = TYPE_UNSIGNED (type)
1197 ? lang_hooks.types.signed_type (type)
1198 : lang_hooks.types.unsigned_type (type);
1199 tree temp = fold_convert (ntype, TREE_OPERAND (t, 0));
1200 temp = fold_build2 (RSHIFT_EXPR, ntype, temp, op1);
1201 return fold_convert (type, temp);
1210 tem = fold_build1 (NEGATE_EXPR, TREE_TYPE (t), t);
1211 return fold_convert (type, tem);
1214 /* Split a tree IN into a constant, literal and variable parts that could be
1215 combined with CODE to make IN. "constant" means an expression with
1216 TREE_CONSTANT but that isn't an actual constant. CODE must be a
1217 commutative arithmetic operation. Store the constant part into *CONP,
1218 the literal in *LITP and return the variable part. If a part isn't
1219 present, set it to null. If the tree does not decompose in this way,
1220 return the entire tree as the variable part and the other parts as null.
1222 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR. In that
1223 case, we negate an operand that was subtracted. Except if it is a
1224 literal for which we use *MINUS_LITP instead.
1226 If NEGATE_P is true, we are negating all of IN, again except a literal
1227 for which we use *MINUS_LITP instead.
1229 If IN is itself a literal or constant, return it as appropriate.
1231 Note that we do not guarantee that any of the three values will be the
1232 same type as IN, but they will have the same signedness and mode. */
/* NOTE(review): this file is a sampled excerpt of GCC's fold-const.c.
   The leading number on each line is the original file's line number;
   gaps in that numbering (e.g. 1236 -> 1244 below) mean lines were
   elided from this listing, so declarations, braces and some
   statements are missing.  Verify any logic change against the full
   source file, not this view.  */
1235 split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
1236 tree *minus_litp, int negate_p)
1244 /* Strip any conversions that don't change the machine mode or signedness. */
1245 STRIP_SIGN_NOPS (in);
1247 if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST)
1249 else if (TREE_CODE (in) == code
1250 || (! FLOAT_TYPE_P (TREE_TYPE (in))
1251 /* We can associate addition and subtraction together (even
1252 though the C standard doesn't say so) for integers because
1253 the value is not affected. For reals, the value might be
1254 affected, so we can't. */
1255 && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
1256 || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
1258 tree op0 = TREE_OPERAND (in, 0);
1259 tree op1 = TREE_OPERAND (in, 1);
/* neg1_p records that IN was a subtraction, so whatever part OP1 lands
   in (literal, constant or variable) is logically negated.  */
1260 int neg1_p = TREE_CODE (in) == MINUS_EXPR;
1261 int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;
1263 /* First see if either of the operands is a literal, then a constant. */
1264 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
1265 *litp = op0, op0 = 0;
1266 else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST)
1267 *litp = op1, neg_litp_p = neg1_p, op1 = 0;
1269 if (op0 != 0 && TREE_CONSTANT (op0))
1270 *conp = op0, op0 = 0;
1271 else if (op1 != 0 && TREE_CONSTANT (op1))
1272 *conp = op1, neg_conp_p = neg1_p, op1 = 0;
1274 /* If we haven't dealt with either operand, this is not a case we can
1275 decompose. Otherwise, VAR is either of the ones remaining, if any. */
1276 if (op0 != 0 && op1 != 0)
1281 var = op1, neg_var_p = neg1_p;
1283 /* Now do any needed negations. */
/* A negated literal is reported through *MINUS_LITP instead of being
   rewritten, so the caller can emit it as a subtraction.  */
1285 *minus_litp = *litp, *litp = 0;
1287 *conp = negate_expr (*conp);
1289 var = negate_expr (var);
1291 else if (TREE_CONSTANT (in))
/* NEGATE_P handling: swap which of *LITP/*MINUS_LITP carries the
   literal, and negate the constant and variable parts.  */
1299 *minus_litp = *litp, *litp = 0;
1300 else if (*minus_litp)
1301 *litp = *minus_litp, *minus_litp = 0;
1302 *conp = negate_expr (*conp);
1303 var = negate_expr (var);
1309 /* Re-associate trees split by the above function. T1 and T2 are either
1310 expressions to associate or null. Return the new expression, if any. If
1311 we build an operation, do it in TYPE and with CODE. */
/* NOTE(review): sampled excerpt of fold-const.c — gaps in the embedded
   line numbers (e.g. 1314 -> 1321) are elided lines; presumably the
   null-input early returns for T1/T2 live there — confirm against the
   full source.  */
1314 associate_trees (tree t1, tree t2, enum tree_code code, tree type)
1321 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
1322 try to fold this since we will have infinite recursion. But do
1323 deal with any NEGATE_EXPRs. */
1324 if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
1325 || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
1327 if (code == PLUS_EXPR)
/* a + (-b) is emitted as a - b.  build2 (not fold_build2) is used
   deliberately so we do not recurse back into fold.  */
1329 if (TREE_CODE (t1) == NEGATE_EXPR)
1330 return build2 (MINUS_EXPR, type, fold_convert (type, t2),
1331 fold_convert (type, TREE_OPERAND (t1, 0)));
1332 else if (TREE_CODE (t2) == NEGATE_EXPR)
1333 return build2 (MINUS_EXPR, type, fold_convert (type, t1),
1334 fold_convert (type, TREE_OPERAND (t2, 0)));
1335 else if (integer_zerop (t2))
1336 return fold_convert (type, t1);
1338 else if (code == MINUS_EXPR)
1340 if (integer_zerop (t2))
1341 return fold_convert (type, t1);
1344 return build2 (code, type, fold_convert (type, t1),
1345 fold_convert (type, t2));
/* Neither operand can trigger the recursion hazard above, so the
   combined operation may be built with fold_build2.  */
1348 return fold_build2 (code, type, fold_convert (type, t1),
1349 fold_convert (type, t2));
1352 /* Combine two integer constants ARG1 and ARG2 under operation CODE
1353 to produce a new constant. Return NULL_TREE if we don't know how
1354 to evaluate CODE at compile-time.
1356 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
/* NOTE(review): sampled excerpt — gaps in the embedded line numbers are
   elided lines (switch header, case labels, breaks, returns).  Each
   constant is held as a double-word (low, high) HOST_WIDE_INT pair and
   combined with the *_double helper routines.  */
1359 int_const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
1361 unsigned HOST_WIDE_INT int1l, int2l;
1362 HOST_WIDE_INT int1h, int2h;
1363 unsigned HOST_WIDE_INT low;
/* Receivers for the unwanted half of a combined div/mod result.  */
1365 unsigned HOST_WIDE_INT garbagel;
1366 HOST_WIDE_INT garbageh;
1368 tree type = TREE_TYPE (arg1);
1369 int uns = TYPE_UNSIGNED (type);
/* Sizetype overflow is tracked even though the type is unsigned;
   see the (!uns || is_sizetype) tests below.  */
1371 = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
1374 int1l = TREE_INT_CST_LOW (arg1);
1375 int1h = TREE_INT_CST_HIGH (arg1);
1376 int2l = TREE_INT_CST_LOW (arg2);
1377 int2h = TREE_INT_CST_HIGH (arg2);
/* Bitwise ops act on each word independently; no overflow possible.  */
1382 low = int1l | int2l, hi = int1h | int2h;
1386 low = int1l ^ int2l, hi = int1h ^ int2h;
1390 low = int1l & int2l, hi = int1h & int2h;
1396 /* It's unclear from the C standard whether shifts can overflow.
1397 The following code ignores overflow; perhaps a C standard
1398 interpretation ruling is needed. */
1399 lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
1406 lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
1411 overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
/* Subtraction is negate-then-add; the overflow check inspects the
   sign words of the operands and the sum.  */
1415 neg_double (int2l, int2h, &low, &hi);
1416 add_double (int1l, int1h, low, hi, &low, &hi);
1417 overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
1421 overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
1424 case TRUNC_DIV_EXPR:
1425 case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
1426 case EXACT_DIV_EXPR:
1427 /* This is a shortcut for a common special case. */
1428 if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1429 && ! TREE_CONSTANT_OVERFLOW (arg1)
1430 && ! TREE_CONSTANT_OVERFLOW (arg2)
1431 && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
/* Both operands fit in a single non-negative host word, so plain
   host division suffices.  */
1433 if (code == CEIL_DIV_EXPR)
1436 low = int1l / int2l, hi = 0;
1440 /* ... fall through ... */
1442 case ROUND_DIV_EXPR:
/* Division by zero: the consequent is elided here — presumably
   returns NULL_TREE (not foldable); confirm against full source.
   X / 1 and X / X are handled without dividing.  */
1443 if (int2h == 0 && int2l == 0)
1445 if (int2h == 0 && int2l == 1)
1447 low = int1l, hi = int1h;
1450 if (int1l == int2l && int1h == int2h
1451 && ! (int1l == 0 && int1h == 0))
1456 overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
1457 &low, &hi, &garbagel, &garbageh);
1460 case TRUNC_MOD_EXPR:
1461 case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
1462 /* This is a shortcut for a common special case. */
1463 if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1464 && ! TREE_CONSTANT_OVERFLOW (arg1)
1465 && ! TREE_CONSTANT_OVERFLOW (arg2)
1466 && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1468 if (code == CEIL_MOD_EXPR)
1470 low = int1l % int2l, hi = 0;
1474 /* ... fall through ... */
1476 case ROUND_MOD_EXPR:
1477 if (int2h == 0 && int2l == 0)
/* For mod, keep the remainder half of the result and discard the
   quotient into the garbage words.  */
1479 overflow = div_and_round_double (code, uns,
1480 int1l, int1h, int2l, int2h,
1481 &garbagel, &garbageh, &low, &hi);
/* MIN/MAX: double-word compare (unsigned branch first, signed
   below), then select the operand that wins for this CODE.  */
1487 low = (((unsigned HOST_WIDE_INT) int1h
1488 < (unsigned HOST_WIDE_INT) int2h)
1489 || (((unsigned HOST_WIDE_INT) int1h
1490 == (unsigned HOST_WIDE_INT) int2h)
1493 low = (int1h < int2h
1494 || (int1h == int2h && int1l < int2l));
/* LOW now holds "arg1 < arg2"; reuse it to pick the result.  */
1496 if (low == (code == MIN_EXPR))
1497 low = int1l, hi = int1h;
1499 low = int2l, hi = int2h;
1506 t = build_int_cst_wide (TREE_TYPE (arg1), low, hi);
1510 /* Propagate overflow flags ourselves. */
1511 if (((!uns || is_sizetype) && overflow)
1512 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
1515 TREE_OVERFLOW (t) = 1;
1516 TREE_CONSTANT_OVERFLOW (t) = 1;
1518 else if (TREE_CONSTANT_OVERFLOW (arg1) | TREE_CONSTANT_OVERFLOW (arg2))
1521 TREE_CONSTANT_OVERFLOW (t) = 1;
/* NOTRUNC zero path: force_fit_type truncates to the type and sets
   the overflow bits from the inputs and the computed OVERFLOW.  */
1525 t = force_fit_type (t, 1,
1526 ((!uns || is_sizetype) && overflow)
1527 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2),
1528 TREE_CONSTANT_OVERFLOW (arg1)
1529 | TREE_CONSTANT_OVERFLOW (arg2));
1534 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1535 constant. We assume ARG1 and ARG2 have the same data type, or at least
1536 are the same kind of constant and the same machine mode.
1538 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
/* NOTE(review): sampled excerpt — gaps in the embedded line numbers are
   elided lines.  Dispatches on the kind of constant: INTEGER_CST is
   delegated to int_const_binop, REAL_CST uses the real.c software
   arithmetic, COMPLEX_CST is folded componentwise.  */
1541 const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
1546 if (TREE_CODE (arg1) == INTEGER_CST)
1547 return int_const_binop (code, arg1, arg2, notrunc);
1549 if (TREE_CODE (arg1) == REAL_CST)
1551 enum machine_mode mode;
1554 REAL_VALUE_TYPE value;
1555 REAL_VALUE_TYPE result;
1559 /* The following codes are handled by real_arithmetic. */
1574 d1 = TREE_REAL_CST (arg1);
1575 d2 = TREE_REAL_CST (arg2);
1577 type = TREE_TYPE (arg1);
1578 mode = TYPE_MODE (type);
1580 /* Don't perform operation if we honor signaling NaNs and
1581 either operand is a NaN. */
1582 if (HONOR_SNANS (mode)
1583 && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
1586 /* Don't perform operation if it would raise a division
1587 by zero exception. */
1588 if (code == RDIV_EXPR
1589 && REAL_VALUES_EQUAL (d2, dconst0)
1590 && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
1593 /* If either operand is a NaN, just return it. Otherwise, set up
1594 for floating-point trap; we return an overflow. */
1595 if (REAL_VALUE_ISNAN (d1))
1597 else if (REAL_VALUE_ISNAN (d2))
/* real_arithmetic computes in GCC's internal extended format;
   real_convert then rounds to the type's machine mode.  INEXACT
   records whether precision was lost.  */
1600 inexact = real_arithmetic (&value, code, &d1, &d2);
1601 real_convert (&result, mode, &value);
1603 /* Don't constant fold this floating point operation if
1604 the result has overflowed and flag_trapping_math. */
1606 if (flag_trapping_math
1607 && MODE_HAS_INFINITIES (mode)
1608 && REAL_VALUE_ISINF (result)
1609 && !REAL_VALUE_ISINF (d1)
1610 && !REAL_VALUE_ISINF (d2))
1613 /* Don't constant fold this floating point operation if the
1614 result may dependent upon the run-time rounding mode and
1615 flag_rounding_math is set, or if GCC's software emulation
1616 is unable to accurately represent the result. */
1618 if ((flag_rounding_math
1619 || (REAL_MODE_FORMAT_COMPOSITE_P (mode)
1620 && !flag_unsafe_math_optimizations))
1621 && (inexact || !real_identical (&result, &value)))
1624 t = build_real (type, result);
1626 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
1627 TREE_CONSTANT_OVERFLOW (t)
1629 | TREE_CONSTANT_OVERFLOW (arg1)
1630 | TREE_CONSTANT_OVERFLOW (arg2);
1634 if (TREE_CODE (arg1) == COMPLEX_CST)
1636 tree type = TREE_TYPE (arg1);
1637 tree r1 = TREE_REALPART (arg1);
1638 tree i1 = TREE_IMAGPART (arg1);
1639 tree r2 = TREE_REALPART (arg2);
1640 tree i2 = TREE_IMAGPART (arg2);
/* Complex +/- fold componentwise via recursive const_binop.  */
1646 t = build_complex (type,
1647 const_binop (PLUS_EXPR, r1, r2, notrunc),
1648 const_binop (PLUS_EXPR, i1, i2, notrunc));
1652 t = build_complex (type,
1653 const_binop (MINUS_EXPR, r1, r2, notrunc),
1654 const_binop (MINUS_EXPR, i1, i2, notrunc));
/* Multiplication: (r1 + i1*i)(r2 + i2*i)
   = (r1*r2 - i1*i2) + (r1*i2 + i1*r2)*i.  Several inner operand
   lines are elided from this listing.  */
1658 t = build_complex (type,
1659 const_binop (MINUS_EXPR,
1660 const_binop (MULT_EXPR,
1662 const_binop (MULT_EXPR,
1665 const_binop (PLUS_EXPR,
1666 const_binop (MULT_EXPR,
1668 const_binop (MULT_EXPR,
/* Division: multiply by the conjugate of z2 and divide both parts
   by |z2|^2 = r2*r2 + i2*i2.  */
1675 tree t1, t2, real, imag;
1677 = const_binop (PLUS_EXPR,
1678 const_binop (MULT_EXPR, r2, r2, notrunc),
1679 const_binop (MULT_EXPR, i2, i2, notrunc),
1682 t1 = const_binop (PLUS_EXPR,
1683 const_binop (MULT_EXPR, r1, r2, notrunc),
1684 const_binop (MULT_EXPR, i1, i2, notrunc),
1686 t2 = const_binop (MINUS_EXPR,
1687 const_binop (MULT_EXPR, i1, r2, notrunc),
1688 const_binop (MULT_EXPR, r1, i2, notrunc),
/* Integral complex divides truncate; floating ones use RDIV.  */
1691 if (INTEGRAL_TYPE_P (TREE_TYPE (r1)))
1693 real = const_binop (TRUNC_DIV_EXPR, t1, magsquared, notrunc);
1694 imag = const_binop (TRUNC_DIV_EXPR, t2, magsquared, notrunc);
1698 real = const_binop (RDIV_EXPR, t1, magsquared, notrunc);
1699 imag = const_binop (RDIV_EXPR, t2, magsquared, notrunc);
1704 t = build_complex (type, real, imag);
1716 /* Create a size type INT_CST node with NUMBER sign extended. KIND
1717 indicates which particular sizetype to create. */
/* Simple table lookup: KIND indexes sizetype_tab to select the
   sizetype variant for the new constant.  */
1720 size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
1722 return build_int_cst (sizetype_tab[(int) kind], number);
1725 /* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE
1726 is a tree code. The type of the result is taken from the operands.
1727 Both must be the same type integer type and it must be a size type.
1728 If the operands are constant, so is the result. */
/* NOTE(review): sampled excerpt — the consequents of the fast-path
   tests below are elided (presumably "return arg1;" / "return arg0;");
   confirm against the full fold-const.c.  */
1731 size_binop (enum tree_code code, tree arg0, tree arg1)
1733 tree type = TREE_TYPE (arg0);
1735 if (arg0 == error_mark_node || arg1 == error_mark_node)
1736 return error_mark_node;
/* Both operands must share one sizetype flavor.  */
1738 gcc_assert (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
1739 && type == TREE_TYPE (arg1));
1741 /* Handle the special case of two integer constants faster. */
1742 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
1744 /* And some specific cases even faster than that. */
1745 if (code == PLUS_EXPR && integer_zerop (arg0))
1747 else if ((code == MINUS_EXPR || code == PLUS_EXPR)
1748 && integer_zerop (arg1))
1750 else if (code == MULT_EXPR && integer_onep (arg0))
1753 /* Handle general case of two integer constants. */
1754 return int_const_binop (code, arg0, arg1, 0);
1757 return fold_build2 (code, type, arg0, arg1);
1760 /* Given two values, either both of sizetype or both of bitsizetype,
1761 compute the difference between the two values. Return the value
1762 in signed type corresponding to the type of the operands. */
/* NOTE(review): sampled excerpt — the final return below is cut off
   mid-expression (its closing operands are elided).  */
1765 size_diffop (tree arg0, tree arg1)
1767 tree type = TREE_TYPE (arg0);
1770 gcc_assert (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
1771 && type == TREE_TYPE (arg1));
1773 /* If the type is already signed, just do the simple thing. */
1774 if (!TYPE_UNSIGNED (type))
1775 return size_binop (MINUS_EXPR, arg0, arg1);
/* Pick the signed counterpart of the operands' sizetype.  */
1777 ctype = type == bitsizetype ? sbitsizetype : ssizetype;
1779 /* If either operand is not a constant, do the conversions to the signed
1780 type and subtract. The hardware will do the right thing with any
1781 overflow in the subtraction. */
1782 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
1783 return size_binop (MINUS_EXPR, fold_convert (ctype, arg0),
1784 fold_convert (ctype, arg1));
1786 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
1787 Otherwise, subtract the other way, convert to CTYPE (we know that can't
1788 overflow) and negate (which can't either). Special-case a result
1789 of zero while we're here. */
1790 if (tree_int_cst_equal (arg0, arg1))
1791 return build_int_cst (ctype, 0);
1792 else if (tree_int_cst_lt (arg1, arg0))
1793 return fold_convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
1795 return size_binop (MINUS_EXPR, build_int_cst (ctype, 0),
1796 fold_convert (ctype, size_binop (MINUS_EXPR,
1800 /* A subroutine of fold_convert_const handling conversions of an
1801 INTEGER_CST to another integer type. */
1804 fold_convert_const_int_from_int (tree type, tree arg1)
1808 /* Given an integer constant, make new constant with new type,
1809 appropriately sign-extended or truncated. */
1810 t = build_int_cst_wide (type, TREE_INT_CST_LOW (arg1),
1811 TREE_INT_CST_HIGH (arg1));
/* force_fit_type's overflowable argument is suppressed for pointer
   sources; overflow is flagged when a negative value is converted to
   a "more unsigned" type, or was already flagged on ARG1.  */
1813 t = force_fit_type (t,
1814 /* Don't set the overflow when
1815 converting a pointer */
1816 !POINTER_TYPE_P (TREE_TYPE (arg1)),
1817 (TREE_INT_CST_HIGH (arg1) < 0
1818 && (TYPE_UNSIGNED (type)
1819 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
1820 | TREE_OVERFLOW (arg1),
1821 TREE_CONSTANT_OVERFLOW (arg1));
1826 /* A subroutine of fold_convert_const handling conversions a REAL_CST
1827 to an integer type. */
/* NOTE(review): sampled excerpt — elided lines include the switch
   header, some case labels/breaks, and the overflow-flag updates
   referred to by the comments below.  */
1830 fold_convert_const_int_from_real (enum tree_code code, tree type, tree arg1)
1835 /* The following code implements the floating point to integer
1836 conversion rules required by the Java Language Specification,
1837 that IEEE NaNs are mapped to zero and values that overflow
1838 the target precision saturate, i.e. values greater than
1839 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
1840 are mapped to INT_MIN. These semantics are allowed by the
1841 C and C++ standards that simply state that the behavior of
1842 FP-to-integer conversion is unspecified upon overflow. */
1844 HOST_WIDE_INT high, low;
1846 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
/* CODE selects the rounding direction applied before conversion.  */
1850 case FIX_TRUNC_EXPR:
1851 real_trunc (&r, VOIDmode, &x);
1855 real_ceil (&r, VOIDmode, &x);
1858 case FIX_FLOOR_EXPR:
1859 real_floor (&r, VOIDmode, &x);
1862 case FIX_ROUND_EXPR:
1863 real_round (&r, VOIDmode, &x);
1870 /* If R is NaN, return zero and show we have an overflow. */
1871 if (REAL_VALUE_ISNAN (r))
1878 /* See if R is less than the lower bound or greater than the
/* Saturate to TYPE_MIN_VALUE / TYPE_MAX_VALUE on out-of-range.  */
1883 tree lt = TYPE_MIN_VALUE (type);
1884 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
1885 if (REAL_VALUES_LESS (r, l))
1888 high = TREE_INT_CST_HIGH (lt);
1889 low = TREE_INT_CST_LOW (lt);
1895 tree ut = TYPE_MAX_VALUE (type);
1898 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
1899 if (REAL_VALUES_LESS (u, r))
1902 high = TREE_INT_CST_HIGH (ut);
1903 low = TREE_INT_CST_LOW (ut);
/* In-range value: convert the rounded real to a double-word int.  */
1909 REAL_VALUE_TO_INT (&low, &high, r);
1911 t = build_int_cst_wide (type, low, high);
1913 t = force_fit_type (t, -1, overflow | TREE_OVERFLOW (arg1),
1914 TREE_CONSTANT_OVERFLOW (arg1));
1918 /* A subroutine of fold_convert_const handling conversions a REAL_CST
1919 to another floating point type. */
1922 fold_convert_const_real_from_real (tree type, tree arg1)
1924 REAL_VALUE_TYPE value;
/* Round the source value to the target type's mode, then carry the
   overflow flags over from the source constant.  */
1927 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
1928 t = build_real (type, value);
1930 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
1931 TREE_CONSTANT_OVERFLOW (t)
1932 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
1936 /* Attempt to fold type conversion operation CODE of expression ARG1 to
1937 type TYPE. If no simplification can be done return NULL_TREE. */
/* Dispatcher: routes by target-type kind and constant kind to the
   fold_convert_const_* helpers above.  */
1940 fold_convert_const (enum tree_code code, tree type, tree arg1)
1942 if (TREE_TYPE (arg1) == type)
1945 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
1947 if (TREE_CODE (arg1) == INTEGER_CST)
1948 return fold_convert_const_int_from_int (type, arg1);
1949 else if (TREE_CODE (arg1) == REAL_CST)
1950 return fold_convert_const_int_from_real (code, type, arg1);
1952 else if (TREE_CODE (type) == REAL_TYPE)
1954 if (TREE_CODE (arg1) == INTEGER_CST)
1955 return build_real_from_int_cst (type, arg1);
1956 if (TREE_CODE (arg1) == REAL_CST)
1957 return fold_convert_const_real_from_real (type, arg1);
1962 /* Construct a vector of zero elements of vector type TYPE. */
1965 build_zero_vector (tree type)
/* One zero constant of the element type, replicated UNITS times into
   a TREE_LIST, then wrapped as a VECTOR_CST.  */
1970 elem = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
1971 units = TYPE_VECTOR_SUBPARTS (type);
1974 for (i = 0; i < units; i++)
1975 list = tree_cons (NULL_TREE, elem, list);
1976 return build_vector (type, list);
1979 /* Convert expression ARG to type TYPE. Used by the middle-end for
1980 simple conversions in preference to calling the front-end's convert. */
/* NOTE(review): sampled excerpt — several case labels (REAL_TYPE,
   COMPLEX_TYPE, VECTOR_TYPE, VOID_TYPE) and break statements are among
   the elided lines; the structure below is a switch on the target
   type's tree code.  */
1983 fold_convert (tree type, tree arg)
1985 tree orig = TREE_TYPE (arg);
1991 if (TREE_CODE (arg) == ERROR_MARK
1992 || TREE_CODE (type) == ERROR_MARK
1993 || TREE_CODE (orig) == ERROR_MARK)
1994 return error_mark_node;
/* Identical (or language-compatible) main variants need only a NOP.  */
1996 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig)
1997 || lang_hooks.types_compatible_p (TYPE_MAIN_VARIANT (type),
1998 TYPE_MAIN_VARIANT (orig)))
1999 return fold_build1 (NOP_EXPR, type, arg);
2001 switch (TREE_CODE (type))
2003 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2004 case POINTER_TYPE: case REFERENCE_TYPE:
2006 if (TREE_CODE (arg) == INTEGER_CST)
2008 tem = fold_convert_const (NOP_EXPR, type, arg);
2009 if (tem != NULL_TREE)
2012 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2013 || TREE_CODE (orig) == OFFSET_TYPE)
2014 return fold_build1 (NOP_EXPR, type, arg);
/* Complex -> scalar conversion takes the real part first.  */
2015 if (TREE_CODE (orig) == COMPLEX_TYPE)
2017 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2018 return fold_convert (type, tem);
2020 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
2021 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2022 return fold_build1 (NOP_EXPR, type, arg);
/* Target is a floating type (case label elided above).  */
2025 if (TREE_CODE (arg) == INTEGER_CST)
2027 tem = fold_convert_const (FLOAT_EXPR, type, arg);
2028 if (tem != NULL_TREE)
2031 else if (TREE_CODE (arg) == REAL_CST)
2033 tem = fold_convert_const (NOP_EXPR, type, arg);
2034 if (tem != NULL_TREE)
2038 switch (TREE_CODE (orig))
2041 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2042 case POINTER_TYPE: case REFERENCE_TYPE:
2043 return fold_build1 (FLOAT_EXPR, type, arg);
2046 return fold_build1 (NOP_EXPR, type, arg);
2049 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2050 return fold_convert (type, tem);
/* Target is a complex type: convert scalars with a zero imaginary
   part, and convert complex sources componentwise.  */
2057 switch (TREE_CODE (orig))
2060 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2061 case POINTER_TYPE: case REFERENCE_TYPE:
2063 return build2 (COMPLEX_EXPR, type,
2064 fold_convert (TREE_TYPE (type), arg),
2065 fold_convert (TREE_TYPE (type), integer_zero_node));
2070 if (TREE_CODE (arg) == COMPLEX_EXPR)
2072 rpart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 0));
2073 ipart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 1));
2074 return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
/* ARG is used twice below (real and imaginary extraction), so wrap
   it in a SAVE_EXPR to evaluate it only once.  */
2077 arg = save_expr (arg);
2078 rpart = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2079 ipart = fold_build1 (IMAGPART_EXPR, TREE_TYPE (orig), arg);
2080 rpart = fold_convert (TREE_TYPE (type), rpart);
2081 ipart = fold_convert (TREE_TYPE (type), ipart);
2082 return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
/* Target is a vector type: same-size reinterpretation only.  */
2090 if (integer_zerop (arg))
2091 return build_zero_vector (type);
2092 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2093 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2094 || TREE_CODE (orig) == VECTOR_TYPE);
2095 return fold_build1 (VIEW_CONVERT_EXPR, type, arg);
/* Conversion to void discards the value but keeps side effects.  */
2098 return fold_build1 (NOP_EXPR, type, fold_ignored_result (arg));
2105 /* Return false if expr can be assumed not to be an lvalue, true
/* NOTE(review): sampled excerpt — most case labels of this switch and
   its return statements are elided; only a sampling of the lvalue
   tree codes is visible below.  */
2109 maybe_lvalue_p (tree x)
2111 /* We only need to wrap lvalue tree codes. */
2112 switch (TREE_CODE (x))
2123 case ALIGN_INDIRECT_REF:
2124 case MISALIGNED_INDIRECT_REF:
2126 case ARRAY_RANGE_REF:
2132 case PREINCREMENT_EXPR:
2133 case PREDECREMENT_EXPR:
2135 case TRY_CATCH_EXPR:
2136 case WITH_CLEANUP_EXPR:
2147 /* Assume the worst for front-end tree codes. */
2148 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2156 /* Return an expr equal to X but certainly not valid as an lvalue. */
2161 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
/* Expressions that cannot be lvalues are returned unwrapped; others
   get a NON_LVALUE_EXPR wrapper of the same type.  */
2166 if (! maybe_lvalue_p (x))
2168 return build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
2171 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2172 Zero means allow extended lvalues. */
2174 int pedantic_lvalues;
2176 /* When pedantic, return an expr equal to X but certainly not valid as a
2177 pedantic lvalue. Otherwise, return X. */
/* Thin wrapper: only wraps X via non_lvalue when pedantic_lvalues is
   set; the fall-through "return X" line is elided from this listing.  */
2180 pedantic_non_lvalue (tree x)
2182 if (pedantic_lvalues)
2183 return non_lvalue (x);
2188 /* Given a tree comparison code, return the code that is the logical inverse
2189 of the given code. It is not safe to do this for floating-point
2190 comparisons, except for NE_EXPR and EQ_EXPR, so we receive a machine mode
2191 as well: if reversing the comparison is unsafe, return ERROR_MARK. */
/* NOTE(review): sampled excerpt — the switch header, several case
   labels (GT/GE/LT/LE/UN* and the NE/EQ cases) and the trapping-math
   ERROR_MARK return are elided; only some return arms are visible.  */
2194 invert_tree_comparison (enum tree_code code, bool honor_nans)
2196 if (honor_nans && flag_trapping_math)
2206 return honor_nans ? UNLE_EXPR : LE_EXPR;
2208 return honor_nans ? UNLT_EXPR : LT_EXPR;
2210 return honor_nans ? UNGE_EXPR : GE_EXPR;
2212 return honor_nans ? UNGT_EXPR : GT_EXPR;
2226 return UNORDERED_EXPR;
2227 case UNORDERED_EXPR:
2228 return ORDERED_EXPR;
2234 /* Similar, but return the comparison that results if the operands are
2235 swapped. This is safe for floating-point. */
/* NOTE(review): almost the entire body of this function is elided from
   this sampled listing; only one case label survives.  */
2238 swap_tree_comparison (enum tree_code code)
2245 case UNORDERED_EXPR:
2271 /* Convert a comparison tree code from an enum tree_code representation
2272 into a compcode bit-based encoding. This function is the inverse of
2273 compcode_to_comparison. */
/* NOTE(review): sampled excerpt — the switch header and many case
   labels (LT/LE/GT/GE/EQ/NE and the UN* labels matching the visible
   returns) are elided from this listing.  */
2275 static enum comparison_code
2276 comparison_to_compcode (enum tree_code code)
2293 return COMPCODE_ORD;
2294 case UNORDERED_EXPR:
2295 return COMPCODE_UNORD;
2297 return COMPCODE_UNLT;
2299 return COMPCODE_UNEQ;
2301 return COMPCODE_UNLE;
2303 return COMPCODE_UNGT;
2305 return COMPCODE_LTGT;
2307 return COMPCODE_UNGE;
2313 /* Convert a compcode bit-based encoding of a comparison operator back
2314 to GCC's enum tree_code representation. This function is the
2315 inverse of comparison_to_compcode. */
/* NOTE(review): sampled excerpt — only the ORD/UNORD arms of the
   switch are visible; the remaining COMPCODE_* cases are elided.  */
2317 static enum tree_code
2318 compcode_to_comparison (enum comparison_code code)
2335 return ORDERED_EXPR;
2336 case COMPCODE_UNORD:
2337 return UNORDERED_EXPR;
2355 /* Return a tree for the comparison which is the combination of
2356 doing the AND or OR (depending on CODE) of the two operations LCODE
2357 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2358 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2359 if this makes the transformation invalid. */
/* NOTE(review): sampled excerpt — elided lines include the switch
   header, default/gcc_unreachable arm, the !honor_nans guard around
   the UNORD elimination, and several "return NULL_TREE;" statements
   whose conditions are visible below.  */
2362 combine_comparisons (enum tree_code code, enum tree_code lcode,
2363 enum tree_code rcode, tree truth_type,
2364 tree ll_arg, tree lr_arg)
2366 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2367 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2368 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2369 enum comparison_code compcode;
/* The compcode encoding makes AND/OR of comparisons plain bitwise
   AND/OR of the codes.  */
2373 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2374 compcode = lcompcode & rcompcode;
2377 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2378 compcode = lcompcode | rcompcode;
2387 /* Eliminate unordered comparisons, as well as LTGT and ORD
2388 which are not used unless the mode has NaNs. */
2389 compcode &= ~COMPCODE_UNORD;
2390 if (compcode == COMPCODE_LTGT)
2391 compcode = COMPCODE_NE;
2392 else if (compcode == COMPCODE_ORD)
2393 compcode = COMPCODE_TRUE;
2395 else if (flag_trapping_math)
2397 /* Check that the original operation and the optimized ones will trap
2398 under the same condition. */
2399 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2400 && (lcompcode != COMPCODE_EQ)
2401 && (lcompcode != COMPCODE_ORD);
2402 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2403 && (rcompcode != COMPCODE_EQ)
2404 && (rcompcode != COMPCODE_ORD);
2405 bool trap = (compcode & COMPCODE_UNORD) == 0
2406 && (compcode != COMPCODE_EQ)
2407 && (compcode != COMPCODE_ORD);
2409 /* In a short-circuited boolean expression the LHS might be
2410 such that the RHS, if evaluated, will never trap. For
2411 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2412 if neither x nor y is NaN. (This is a mixed blessing: for
2413 example, the expression above will never trap, hence
2414 optimizing it to x < y would be invalid). */
2415 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2416 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2419 /* If the comparison was short-circuited, and only the RHS
2420 trapped, we may now generate a spurious trap. */
2422 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2425 /* If we changed the conditions that cause a trap, we lose. */
2426 if ((ltrap || rtrap) != trap)
/* Degenerate results collapse to boolean constants.  */
2430 if (compcode == COMPCODE_TRUE)
2431 return constant_boolean_node (true, truth_type);
2432 else if (compcode == COMPCODE_FALSE)
2433 return constant_boolean_node (false, truth_type);
2435 return fold_build2 (compcode_to_comparison (compcode),
2436 truth_type, ll_arg, lr_arg);
2439 /* Return nonzero if CODE is a tree code that represents a truth value. */
/* True for any comparison-class code and for the TRUTH_* logical
   operators.  */
2442 truth_value_p (enum tree_code code)
2444 return (TREE_CODE_CLASS (code) == tcc_comparison
2445 || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
2446 || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
2447 || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
2450 /* Return nonzero if two operands (typically of the same tree node)
2451 are necessarily equal. If either argument has side-effects this
2452 function returns zero. FLAGS modifies behavior as follows:
2454 If OEP_ONLY_CONST is set, only return nonzero for constants.
2455 This function tests whether the operands are indistinguishable;
2456 it does not test whether they are equal using C's == operation.
2457 The distinction is important for IEEE floating point, because
2458 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2459 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2461 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2462 even though it may hold multiple values during a function.
2463 This is because a GCC tree node guarantees that nothing else is
2464 executed between the evaluation of its "operands" (which may often
2465 be evaluated in arbitrary order). Hence if the operands themselves
2466 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2467 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2468 unset means assuming isochronic (or instantaneous) tree equivalence.
2469 Unless comparing arbitrary expression trees, such as from different
2470 statements, this flag can usually be left unset.
2472 If OEP_PURE_SAME is set, then pure functions with identical arguments
2473 are considered the same. It is used when the caller has other ways
2474 to ensure that global memory is unchanged in between. */
2477 operand_equal_p (tree arg0, tree arg1, unsigned int flags)
2479 /* If either is ERROR_MARK, they aren't equal. */
2480 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK)
2483 /* If both types don't have the same signedness, then we can't consider
2484 them equal. We must check this before the STRIP_NOPS calls
2485 because they may change the signedness of the arguments. */
2486 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2492 /* In case both args are comparisons but with different comparison
2493 code, try to swap the comparison operands of one arg to produce
2494 a match and compare that variant. */
2495 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2496 && COMPARISON_CLASS_P (arg0)
2497 && COMPARISON_CLASS_P (arg1))
2499 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2501 if (TREE_CODE (arg0) == swap_code)
2502 return operand_equal_p (TREE_OPERAND (arg0, 0),
2503 TREE_OPERAND (arg1, 1), flags)
2504 && operand_equal_p (TREE_OPERAND (arg0, 1),
2505 TREE_OPERAND (arg1, 0), flags);
2508 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2509 /* This is needed for conversions and for COMPONENT_REF.
2510 Might as well play it safe and always test this. */
2511 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2512 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2513 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2516 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2517 We don't care about side effects in that case because the SAVE_EXPR
2518 takes care of that for us. In all other cases, two expressions are
2519 equal if they have no side effects. If we have two identical
2520 expressions with side effects that should be treated the same due
2521 to the only side effects being identical SAVE_EXPR's, that will
2522 be detected in the recursive calls below. */
2523 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2524 && (TREE_CODE (arg0) == SAVE_EXPR
2525 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2528 /* Next handle constant cases, those for which we can return 1 even
2529 if ONLY_CONST is set. */
2530 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2531 switch (TREE_CODE (arg0))
2534 return (! TREE_CONSTANT_OVERFLOW (arg0)
2535 && ! TREE_CONSTANT_OVERFLOW (arg1)
2536 && tree_int_cst_equal (arg0, arg1));
2539 return (! TREE_CONSTANT_OVERFLOW (arg0)
2540 && ! TREE_CONSTANT_OVERFLOW (arg1)
2541 && REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2542 TREE_REAL_CST (arg1)));
2548 if (TREE_CONSTANT_OVERFLOW (arg0)
2549 || TREE_CONSTANT_OVERFLOW (arg1))
2552 v1 = TREE_VECTOR_CST_ELTS (arg0);
2553 v2 = TREE_VECTOR_CST_ELTS (arg1);
2556 if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
2559 v1 = TREE_CHAIN (v1);
2560 v2 = TREE_CHAIN (v2);
2567 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2569 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2573 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2574 && ! memcmp (TREE_STRING_POINTER (arg0),
2575 TREE_STRING_POINTER (arg1),
2576 TREE_STRING_LENGTH (arg0)));
2579 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2585 if (flags & OEP_ONLY_CONST)
2588 /* Define macros to test an operand from arg0 and arg1 for equality and a
2589 variant that allows null and views null as being different from any
2590 non-null value. In the latter case, if either is null, the both
2591 must be; otherwise, do the normal comparison. */
2592 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2593 TREE_OPERAND (arg1, N), flags)
2595 #define OP_SAME_WITH_NULL(N) \
2596 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2597 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2599 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2602 /* Two conversions are equal only if signedness and modes match. */
2603 switch (TREE_CODE (arg0))
2608 case FIX_TRUNC_EXPR:
2609 case FIX_FLOOR_EXPR:
2610 case FIX_ROUND_EXPR:
2611 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2612 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2622 case tcc_comparison:
2624 if (OP_SAME (0) && OP_SAME (1))
2627 /* For commutative ops, allow the other order. */
2628 return (commutative_tree_code (TREE_CODE (arg0))
2629 && operand_equal_p (TREE_OPERAND (arg0, 0),
2630 TREE_OPERAND (arg1, 1), flags)
2631 && operand_equal_p (TREE_OPERAND (arg0, 1),
2632 TREE_OPERAND (arg1, 0), flags));
2635 /* If either of the pointer (or reference) expressions we are
2636 dereferencing contain a side effect, these cannot be equal. */
2637 if (TREE_SIDE_EFFECTS (arg0)
2638 || TREE_SIDE_EFFECTS (arg1))
2641 switch (TREE_CODE (arg0))
2644 case ALIGN_INDIRECT_REF:
2645 case MISALIGNED_INDIRECT_REF:
2651 case ARRAY_RANGE_REF:
2652 /* Operands 2 and 3 may be null. */
2655 && OP_SAME_WITH_NULL (2)
2656 && OP_SAME_WITH_NULL (3));
2659 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
2660 may be NULL when we're called to compare MEM_EXPRs. */
2661 return OP_SAME_WITH_NULL (0)
2663 && OP_SAME_WITH_NULL (2);
2666 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2672 case tcc_expression:
2673 switch (TREE_CODE (arg0))
2676 case TRUTH_NOT_EXPR:
2679 case TRUTH_ANDIF_EXPR:
2680 case TRUTH_ORIF_EXPR:
2681 return OP_SAME (0) && OP_SAME (1);
2683 case TRUTH_AND_EXPR:
2685 case TRUTH_XOR_EXPR:
2686 if (OP_SAME (0) && OP_SAME (1))
2689 /* Otherwise take into account this is a commutative operation. */
2690 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2691 TREE_OPERAND (arg1, 1), flags)
2692 && operand_equal_p (TREE_OPERAND (arg0, 1),
2693 TREE_OPERAND (arg1, 0), flags));
2696 /* If the CALL_EXPRs call different functions, then they
2697 clearly can not be equal. */
2702 unsigned int cef = call_expr_flags (arg0);
2703 if (flags & OEP_PURE_SAME)
2704 cef &= ECF_CONST | ECF_PURE;
2711 /* Now see if all the arguments are the same. operand_equal_p
2712 does not handle TREE_LIST, so we walk the operands here
2713 feeding them to operand_equal_p. */
2714 arg0 = TREE_OPERAND (arg0, 1);
2715 arg1 = TREE_OPERAND (arg1, 1);
2716 while (arg0 && arg1)
2718 if (! operand_equal_p (TREE_VALUE (arg0), TREE_VALUE (arg1),
2722 arg0 = TREE_CHAIN (arg0);
2723 arg1 = TREE_CHAIN (arg1);
2726 /* If we get here and both argument lists are exhausted
2727 then the CALL_EXPRs are equal. */
2728 return ! (arg0 || arg1);
2734 case tcc_declaration:
2735 /* Consider __builtin_sqrt equal to sqrt. */
2736 return (TREE_CODE (arg0) == FUNCTION_DECL
2737 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2738 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2739 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
2746 #undef OP_SAME_WITH_NULL
2749 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2750 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2752 When in doubt, return 0. */
2755 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2757 int unsignedp1, unsignedpo;
2758 tree primarg0, primarg1, primother;
2759 unsigned int correct_width;
2761 if (operand_equal_p (arg0, arg1, 0))
2764 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2765 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2768 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2769 and see if the inner values are the same. This removes any
2770 signedness comparison, which doesn't matter here. */
2771 primarg0 = arg0, primarg1 = arg1;
2772 STRIP_NOPS (primarg0);
2773 STRIP_NOPS (primarg1);
2774 if (operand_equal_p (primarg0, primarg1, 0))
2777 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2778 actual comparison operand, ARG0.
2780 First throw away any conversions to wider types
2781 already present in the operands. */
2783 primarg1 = get_narrower (arg1, &unsignedp1);
2784 primother = get_narrower (other, &unsignedpo);
2786 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2787 if (unsignedp1 == unsignedpo
2788 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2789 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2791 tree type = TREE_TYPE (arg0);
2793 /* Make sure shorter operand is extended the right way
2794 to match the longer operand. */
2795 primarg1 = fold_convert (lang_hooks.types.signed_or_unsigned_type
2796 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2798 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
2805 /* See if ARG is an expression that is either a comparison or is performing
2806 arithmetic on comparisons. The comparisons must only be comparing
2807 two different values, which will be stored in *CVAL1 and *CVAL2; if
2808 they are nonzero it means that some operands have already been found.
2809 No variables may be used anywhere else in the expression except in the
2810 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2811 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2813 If this is true, return 1. Otherwise, return zero. */
/* NOTE(review): this listing has gaps in its embedded numbering
   (e.g. 2812, 2814-2815, 2820, 2823, 2827-2828, 2835-2843, ...), so the
   return type, braces, several case labels and some statements are
   missing below.  Reconcile against the upstream fold-const.c before
   relying on the exact control flow here.  */
2816 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2818 enum tree_code code = TREE_CODE (arg);
2819 enum tree_code_class class = TREE_CODE_CLASS (code);
2821 /* We can handle some of the tcc_expression cases here. */
2822 if (class == tcc_expression && code == TRUTH_NOT_EXPR)
2824 else if (class == tcc_expression
2825 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2826 || code == COMPOUND_EXPR))
2829 else if (class == tcc_expression && code == SAVE_EXPR
2830 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2832 /* If we've already found a CVAL1 or CVAL2, this expression is
2833 too complex to handle. */
2834 if (*cval1 || *cval2)
/* Unary case: recurse into the sole operand.  */
2844 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
/* Binary case: both operands must satisfy the predicate.  */
2847 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2848 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2849 cval1, cval2, save_p));
2854 case tcc_expression:
2855 if (code == COND_EXPR)
2856 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2857 cval1, cval2, save_p)
2858 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2859 cval1, cval2, save_p)
2860 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2861 cval1, cval2, save_p));
2864 case tcc_comparison:
2865 /* First see if we can handle the first operand, then the second. For
2866 the second operand, we know *CVAL1 can't be zero. It must be that
2867 one side of the comparison is each of the values; test for the
2868 case where this isn't true by failing if the two operands
   are the same.  */
2871 if (operand_equal_p (TREE_OPERAND (arg, 0),
2872 TREE_OPERAND (arg, 1), 0))
/* Record or match the first comparison operand against *CVAL1/*CVAL2.  */
2876 *cval1 = TREE_OPERAND (arg, 0);
2877 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2879 else if (*cval2 == 0)
2880 *cval2 = TREE_OPERAND (arg, 0);
2881 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
/* Now do the same for the second comparison operand.  */
2886 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2888 else if (*cval2 == 0)
2889 *cval2 = TREE_OPERAND (arg, 1);
2890 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2902 /* ARG is a tree that is known to contain just arithmetic operations and
2903 comparisons. Evaluate the operations in the tree substituting NEW0 for
2904 any occurrence of OLD0 as an operand of a comparison and likewise for
   NEW1 and OLD1.  */
/* NOTE(review): gaps in the embedded numbering (2905-2907, 2909, 2913,
   2916, 2919-2923, 2936-2944, ...) mean the return type, braces and the
   switch's case labels are missing from this listing; the upstream source
   dispatches SAVE_EXPR to operand 0 and COMPOUND_EXPR to operand 1 in the
   tcc_expression case — TODO confirm against upstream fold-const.c.  */
2908 eval_subst (tree arg, tree old0, tree new0, tree old1, tree new1)
2910 tree type = TREE_TYPE (arg);
2911 enum tree_code code = TREE_CODE (arg);
2912 enum tree_code_class class = TREE_CODE_CLASS (code);
2914 /* We can handle some of the tcc_expression cases here. */
2915 if (class == tcc_expression && code == TRUTH_NOT_EXPR)
2917 else if (class == tcc_expression
2918 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
/* Unary case: rebuild the node with the substituted operand.  */
2924 return fold_build1 (code, type,
2925 eval_subst (TREE_OPERAND (arg, 0),
2926 old0, new0, old1, new1));
/* Binary case: substitute in both operands.  */
2929 return fold_build2 (code, type,
2930 eval_subst (TREE_OPERAND (arg, 0),
2931 old0, new0, old1, new1),
2932 eval_subst (TREE_OPERAND (arg, 1),
2933 old0, new0, old1, new1));
2935 case tcc_expression:
2939 return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);
2942 return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);
/* Ternary (COND_EXPR-style) case: substitute in all three operands.  */
2945 return fold_build3 (code, type,
2946 eval_subst (TREE_OPERAND (arg, 0),
2947 old0, new0, old1, new1),
2948 eval_subst (TREE_OPERAND (arg, 1),
2949 old0, new0, old1, new1),
2950 eval_subst (TREE_OPERAND (arg, 2),
2951 old0, new0, old1, new1));
2955 /* Fall through - ??? */
2957 case tcc_comparison:
2959 tree arg0 = TREE_OPERAND (arg, 0);
2960 tree arg1 = TREE_OPERAND (arg, 1);
2962 /* We need to check both for exact equality and tree equality. The
2963 former will be true if the operand has a side-effect. In that
2964 case, we know the operand occurred exactly once. */
2966 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
2968 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
/* And likewise substitute into the right-hand comparison operand.  */
2971 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
2973 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
2976 return fold_build2 (code, type, arg0, arg1);
2984 /* Return a tree for the case when the result of an expression is RESULT
2985 converted to TYPE and OMITTED was previously an operand of the expression
2986 but is now not needed (e.g., we folded OMITTED * 0).
2988 If OMITTED has side effects, we must evaluate it. Otherwise, just do
2989 the conversion of RESULT to TYPE. */
2992 omit_one_operand (tree type, tree result, tree omitted)
2994 tree t = fold_convert (type, result);
2996 if (TREE_SIDE_EFFECTS (omitted))
2997 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
2999 return non_lvalue (t);
3002 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
3005 pedantic_omit_one_operand (tree type, tree result, tree omitted)
3007 tree t = fold_convert (type, result);
3009 if (TREE_SIDE_EFFECTS (omitted))
3010 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
3012 return pedantic_non_lvalue (t);
3015 /* Return a tree for the case when the result of an expression is RESULT
3016 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3017 of the expression but are now not needed.
3019 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3020 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3021 evaluated before OMITTED2. Otherwise, if neither has side effects,
3022 just do the conversion of RESULT to TYPE. */
3025 omit_two_operands (tree type, tree result, tree omitted1, tree omitted2)
3027 tree t = fold_convert (type, result);
3029 if (TREE_SIDE_EFFECTS (omitted2))
3030 t = build2 (COMPOUND_EXPR, type, omitted2, t);
3031 if (TREE_SIDE_EFFECTS (omitted1))
3032 t = build2 (COMPOUND_EXPR, type, omitted1, t);
3034 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue (t) : t;
3038 /* Return a simplified tree node for the truth-negation of ARG. This
3039 never alters ARG itself. We assume that ARG is an operation that
3040 returns a truth value (0 or 1).
3042 FIXME: one would think we would fold the result, but it causes
3043 problems with the dominator optimizer. */
/* NOTE(review): gaps in the embedded numbering (3044, 3046, 3049, 3051-3052,
   3074-3079, 3081, 3086-3087, 3118-3120, 3131-3133, 3139-3145, 3148-3149,
   3154-3155, 3161-3164, ...) mean the return type, braces, the switch head
   and several case labels (e.g. for the INTEGER_CST, TRUTH_OR_EXPR,
   COND_EXPR, COMPOUND_EXPR, conversion, BIT_AND_EXPR and SAVE_EXPR arms)
   are missing from this listing; reconcile with upstream fold-const.c.  */
3045 invert_truthvalue (tree arg)
3047 tree type = TREE_TYPE (arg);
3048 enum tree_code code = TREE_CODE (arg);
3050 if (code == ERROR_MARK)
3053 /* If this is a comparison, we can simply invert it, except for
3054 floating-point non-equality comparisons, in which case we just
3055 enclose a TRUTH_NOT_EXPR around what we have. */
3057 if (TREE_CODE_CLASS (code) == tcc_comparison)
3059 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
/* Trapping FP comparisons other than [UN]ORDERED and [N]E cannot be
   inverted directly without changing trap behavior.  */
3060 if (FLOAT_TYPE_P (op_type)
3061 && flag_trapping_math
3062 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3063 && code != NE_EXPR && code != EQ_EXPR)
3064 return build1 (TRUTH_NOT_EXPR, type, arg);
3067 code = invert_tree_comparison (code,
3068 HONOR_NANS (TYPE_MODE (op_type)));
3069 if (code == ERROR_MARK)
3070 return build1 (TRUTH_NOT_EXPR, type, arg);
3072 return build2 (code, type,
3073 TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
/* Constant case: a nonzero constant inverts to false, zero to true.  */
3080 return constant_boolean_node (integer_zerop (arg), type);
3082 case TRUTH_AND_EXPR:
3083 return build2 (TRUTH_OR_EXPR, type,
3084 invert_truthvalue (TREE_OPERAND (arg, 0)),
3085 invert_truthvalue (TREE_OPERAND (arg, 1)));
3088 return build2 (TRUTH_AND_EXPR, type,
3089 invert_truthvalue (TREE_OPERAND (arg, 0)),
3090 invert_truthvalue (TREE_OPERAND (arg, 1)));
3092 case TRUTH_XOR_EXPR:
3093 /* Here we can invert either operand. We invert the first operand
3094 unless the second operand is a TRUTH_NOT_EXPR in which case our
3095 result is the XOR of the first operand with the inside of the
3096 negation of the second operand. */
3098 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3099 return build2 (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3100 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3102 return build2 (TRUTH_XOR_EXPR, type,
3103 invert_truthvalue (TREE_OPERAND (arg, 0)),
3104 TREE_OPERAND (arg, 1));
3106 case TRUTH_ANDIF_EXPR:
3107 return build2 (TRUTH_ORIF_EXPR, type,
3108 invert_truthvalue (TREE_OPERAND (arg, 0)),
3109 invert_truthvalue (TREE_OPERAND (arg, 1)));
3111 case TRUTH_ORIF_EXPR:
3112 return build2 (TRUTH_ANDIF_EXPR, type,
3113 invert_truthvalue (TREE_OPERAND (arg, 0)),
3114 invert_truthvalue (TREE_OPERAND (arg, 1)));
3116 case TRUTH_NOT_EXPR:
3117 return TREE_OPERAND (arg, 0);
3121 tree arg1 = TREE_OPERAND (arg, 1);
3122 tree arg2 = TREE_OPERAND (arg, 2);
3123 /* A COND_EXPR may have a throw as one operand, which
3124 then has void type. Just leave void operands
   alone.  */
3126 return build3 (COND_EXPR, type, TREE_OPERAND (arg, 0),
3127 VOID_TYPE_P (TREE_TYPE (arg1))
3128 ? arg1 : invert_truthvalue (arg1),
3129 VOID_TYPE_P (TREE_TYPE (arg2))
3130 ? arg2 : invert_truthvalue (arg2));
/* A COMPOUND_EXPR's truth value is that of its second operand.  */
3134 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
3135 invert_truthvalue (TREE_OPERAND (arg, 1)));
3137 case NON_LVALUE_EXPR:
3138 return invert_truthvalue (TREE_OPERAND (arg, 0));
3141 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3146 return build1 (TREE_CODE (arg), type,
3147 invert_truthvalue (TREE_OPERAND (arg, 0)));
/* (X & 1) inverts to (X == 0) only for a mask of exactly one bit.  */
3150 if (!integer_onep (TREE_OPERAND (arg, 1)))
3152 return build2 (EQ_EXPR, type, arg,
3153 build_int_cst (type, 0));
3156 return build1 (TRUTH_NOT_EXPR, type, arg);
3158 case CLEANUP_POINT_EXPR:
3159 return build1 (CLEANUP_POINT_EXPR, type,
3160 invert_truthvalue (TREE_OPERAND (arg, 0)));
/* Fallback: ARG must already be boolean-typed to be safely wrapped.  */
3165 gcc_assert (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE);
3166 return build1 (TRUTH_NOT_EXPR, type, arg);
3169 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3170 operands are another bit-wise operation with a common input. If so,
3171 distribute the bit operations to save an operation and possibly two if
3172 constants are involved. For example, convert
3173 (A | B) & (A | C) into A | (B & C)
3174 Further simplification will occur if B and C are constants.
3176 If this optimization cannot be done, 0 will be returned. */
3179 distribute_bit_expr (enum tree_code code, tree type, tree arg0, tree arg1)
3184 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3185 || TREE_CODE (arg0) == code
3186 || (TREE_CODE (arg0) != BIT_AND_EXPR
3187 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3190 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3192 common = TREE_OPERAND (arg0, 0);
3193 left = TREE_OPERAND (arg0, 1);
3194 right = TREE_OPERAND (arg1, 1);
3196 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3198 common = TREE_OPERAND (arg0, 0);
3199 left = TREE_OPERAND (arg0, 1);
3200 right = TREE_OPERAND (arg1, 0);
3202 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3204 common = TREE_OPERAND (arg0, 1);
3205 left = TREE_OPERAND (arg0, 0);
3206 right = TREE_OPERAND (arg1, 1);
3208 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3210 common = TREE_OPERAND (arg0, 1);
3211 left = TREE_OPERAND (arg0, 0);
3212 right = TREE_OPERAND (arg1, 0);
3217 return fold_build2 (TREE_CODE (arg0), type, common,
3218 fold_build2 (code, type, left, right));
3221 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3222 with code CODE. This optimization is unsafe. */
3224 distribute_real_division (enum tree_code code, tree type, tree arg0, tree arg1)
3226 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3227 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3229 /* (A / C) +- (B / C) -> (A +- B) / C. */
3231 && operand_equal_p (TREE_OPERAND (arg0, 1),
3232 TREE_OPERAND (arg1, 1), 0))
3233 return fold_build2 (mul0 ? MULT_EXPR : RDIV_EXPR, type,
3234 fold_build2 (code, type,
3235 TREE_OPERAND (arg0, 0),
3236 TREE_OPERAND (arg1, 0)),
3237 TREE_OPERAND (arg0, 1));
3239 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3240 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3241 TREE_OPERAND (arg1, 0), 0)
3242 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3243 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3245 REAL_VALUE_TYPE r0, r1;
3246 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3247 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3249 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3251 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3252 real_arithmetic (&r0, code, &r0, &r1);
3253 return fold_build2 (MULT_EXPR, type,
3254 TREE_OPERAND (arg0, 0),
3255 build_real (type, r0));
3261 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3262 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3265 make_bit_field_ref (tree inner, tree type, int bitsize, int bitpos,
3272 tree size = TYPE_SIZE (TREE_TYPE (inner));
3273 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3274 || POINTER_TYPE_P (TREE_TYPE (inner)))
3275 && host_integerp (size, 0)
3276 && tree_low_cst (size, 0) == bitsize)
3277 return fold_convert (type, inner);
3280 result = build3 (BIT_FIELD_REF, type, inner,
3281 size_int (bitsize), bitsize_int (bitpos));
3283 BIT_FIELD_REF_UNSIGNED (result) = unsignedp;
3288 /* Optimize a bit-field compare.
3290 There are two cases: First is a compare against a constant and the
3291 second is a comparison of two items where the fields are at the same
3292 bit position relative to the start of a chunk (byte, halfword, word)
3293 large enough to contain it. In these cases we can avoid the shift
3294 implicit in bitfield extractions.
3296 For constants, we emit a compare of the shifted constant with the
3297 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3298 compared. For two fields at the same position, we do the ANDs with the
3299 similar mask and compare the result of the ANDs.
3301 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3302 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3303 are the left and right operands of the comparison, respectively.
3305 If the optimization described above can be done, we return the resulting
3306 tree. Otherwise we return zero. */
/* NOTE(review): gaps in the embedded numbering (3307-3308, 3310-3311,
   3320-3322, 3332-3335, 3344-3346, 3355-3356, 3358, 3367, 3369-3370,
   3381-3382, 3384, 3389, 3393-3394, 3399, 3402-3405, 3409, 3411, 3413-3416,
   3420, 3422, 3424-3426, 3429, 3432-3433, 3438-3439, 3442-3443, 3448-3449,
   3452-3453) mean the LHS/RHS parameter line, braces, early "return 0;"
   statements, the TREE_VOLATILE checks and the tail of the final build2
   are missing from this listing; reconcile against upstream fold-const.c
   before editing.  */
3309 optimize_bit_field_compare (enum tree_code code, tree compare_type,
3312 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3313 tree type = TREE_TYPE (lhs);
3314 tree signed_type, unsigned_type;
3315 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3316 enum machine_mode lmode, rmode, nmode;
3317 int lunsignedp, runsignedp;
3318 int lvolatilep = 0, rvolatilep = 0;
3319 tree linner, rinner = NULL_TREE;
3323 /* Get all the information about the extractions being done. If the bit size
3324 if the same as the size of the underlying object, we aren't doing an
3325 extraction at all and so can do nothing. We also don't want to
3326 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3327 then will no longer be able to replace it. */
3328 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3329 &lunsignedp, &lvolatilep, false);
3330 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3331 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
3336 /* If this is not a constant, we can only do something if bit positions,
3337 sizes, and signedness are the same. */
3338 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3339 &runsignedp, &rvolatilep, false);
3341 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3342 || lunsignedp != runsignedp || offset != 0
3343 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
3347 /* See if we can find a mode to refer to this field. We should be able to,
3348 but fail if we can't. */
3349 nmode = get_best_mode (lbitsize, lbitpos,
3350 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3351 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3352 TYPE_ALIGN (TREE_TYPE (rinner))),
3353 word_mode, lvolatilep || rvolatilep);
3354 if (nmode == VOIDmode)
3357 /* Set signed and unsigned types of the precision of this mode for the
   shifts below.  */
3359 signed_type = lang_hooks.types.type_for_mode (nmode, 0);
3360 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3362 /* Compute the bit position and size for the new reference and our offset
3363 within it. If the new reference is the same size as the original, we
3364 won't optimize anything, so return zero. */
3365 nbitsize = GET_MODE_BITSIZE (nmode);
3366 nbitpos = lbitpos & ~ (nbitsize - 1);
3368 if (nbitsize == lbitsize)
/* On big-endian targets the bit position counts from the other end.  */
3371 if (BYTES_BIG_ENDIAN)
3372 lbitpos = nbitsize - lbitsize - lbitpos;
3374 /* Make the mask to be used against the extracted field. */
3375 mask = build_int_cst (unsigned_type, -1);
3376 mask = force_fit_type (mask, 0, false, false);
3377 mask = fold_convert (unsigned_type, mask);
3378 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
3379 mask = const_binop (RSHIFT_EXPR, mask,
3380 size_int (nbitsize - lbitsize - lbitpos), 0);
3383 /* If not comparing with constant, just rework the comparison
   and return.  */
3385 return build2 (code, compare_type,
3386 build2 (BIT_AND_EXPR, unsigned_type,
3387 make_bit_field_ref (linner, unsigned_type,
3388 nbitsize, nbitpos, 1),
3390 build2 (BIT_AND_EXPR, unsigned_type,
3391 make_bit_field_ref (rinner, unsigned_type,
3392 nbitsize, nbitpos, 1),
3395 /* Otherwise, we are handling the constant case. See if the constant is too
3396 big for the field. Warn and return a tree of for 0 (false) if so. We do
3397 this not only for its own sake, but to avoid having to test for this
3398 error case below. If we didn't, we might generate wrong code.
3400 For unsigned fields, the constant shifted right by the field length should
3401 be all zero. For signed fields, the high-order bits should agree with
   the sign bit.  */
3406 if (! integer_zerop (const_binop (RSHIFT_EXPR,
3407 fold_convert (unsigned_type, rhs),
3408 size_int (lbitsize), 0)))
3410 warning (0, "comparison is always %d due to width of bit-field",
3412 return constant_boolean_node (code == NE_EXPR, compare_type);
/* Signed case: the bits above the field must be all zero or all one.  */
3417 tree tem = const_binop (RSHIFT_EXPR, fold_convert (signed_type, rhs),
3418 size_int (lbitsize - 1), 0);
3419 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
3421 warning (0, "comparison is always %d due to width of bit-field",
3423 return constant_boolean_node (code == NE_EXPR, compare_type);
3427 /* Single-bit compares should always be against zero. */
3428 if (lbitsize == 1 && ! integer_zerop (rhs))
3430 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3431 rhs = build_int_cst (type, 0);
3434 /* Make a new bitfield reference, shift the constant over the
3435 appropriate number of bits and mask it with the computed mask
3436 (in case this was a signed field). If we changed it, make a new one. */
3437 lhs = make_bit_field_ref (linner, unsigned_type, nbitsize, nbitpos, 1);
/* Volatile references must keep their side-effect/volatility flags.  */
3440 TREE_SIDE_EFFECTS (lhs) = 1;
3441 TREE_THIS_VOLATILE (lhs) = 1;
3444 rhs = const_binop (BIT_AND_EXPR,
3445 const_binop (LSHIFT_EXPR,
3446 fold_convert (unsigned_type, rhs),
3447 size_int (lbitpos), 0),
3450 return build2 (code, compare_type,
3451 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask),
3455 /* Subroutine for fold_truthop: decode a field reference.
3457 If EXP is a comparison reference, we return the innermost reference.
3459 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3460 set to the starting bit number.
3462 If the innermost field can be completely contained in a mode-sized
3463 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3465 *PVOLATILEP is set to 1 if the any expression encountered is volatile;
3466 otherwise it is not changed.
3468 *PUNSIGNEDP is set to the signedness of the field.
3470 *PMASK is set to the mask used. This is either contained in a
3471 BIT_AND_EXPR or derived from the width of the field.
3473 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3475 Return 0 if this is not a component reference or is one that we can't
3476 do anything with. */
/* NOTE(review): gaps in the embedded numbering (3477-3478, 3483, 3485,
   3487, 3489, 3494-3495, 3503-3504, 3506, 3511-3513, 3519-3520, 3526,
   3530, 3533, 3536, 3538, 3541-3542, 3544-3545) mean the return type,
   braces, several declarations (and_mask, unsigned_type), STRIP_NOPS
   calls, early "return 0;" statements and the final "return inner;" are
   missing from this listing; reconcile against upstream fold-const.c.  */
3479 decode_field_reference (tree exp, HOST_WIDE_INT *pbitsize,
3480 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3481 int *punsignedp, int *pvolatilep,
3482 tree *pmask, tree *pand_mask)
3484 tree outer_type = 0;
3486 tree mask, inner, offset;
3488 unsigned int precision;
3490 /* All the optimizations using this function assume integer fields.
3491 There are problems with FP fields since the type_for_size call
3492 below can fail for, e.g., XFmode. */
3493 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3496 /* We are interested in the bare arrangement of bits, so strip everything
3497 that doesn't affect the machine mode. However, record the type of the
3498 outermost expression if it may matter below. */
3499 if (TREE_CODE (exp) == NOP_EXPR
3500 || TREE_CODE (exp) == CONVERT_EXPR
3501 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3502 outer_type = TREE_TYPE (exp);
/* Peel off an explicit BIT_AND_EXPR mask; only constant masks count.  */
3505 if (TREE_CODE (exp) == BIT_AND_EXPR)
3507 and_mask = TREE_OPERAND (exp, 1);
3508 exp = TREE_OPERAND (exp, 0);
3509 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3510 if (TREE_CODE (and_mask) != INTEGER_CST)
3514 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3515 punsignedp, pvolatilep, false);
3516 if ((inner == exp && and_mask == 0)
3517 || *pbitsize < 0 || offset != 0
3518 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3521 /* If the number of bits in the reference is the same as the bitsize of
3522 the outer type, then the outer type gives the signedness. Otherwise
3523 (in case of a small bitfield) the signedness is unchanged. */
3524 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3525 *punsignedp = TYPE_UNSIGNED (outer_type);
3527 /* Compute the mask to access the bitfield. */
3528 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3529 precision = TYPE_PRECISION (unsigned_type);
/* Build an all-ones constant, then keep only the low *PBITSIZE bits.  */
3531 mask = build_int_cst (unsigned_type, -1);
3532 mask = force_fit_type (mask, 0, false, false);
3534 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3535 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3537 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3539 mask = fold_build2 (BIT_AND_EXPR, unsigned_type,
3540 fold_convert (unsigned_type, and_mask), mask);
3543 *pand_mask = and_mask;
3547 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3551 all_ones_mask_p (tree mask, int size)
3553 tree type = TREE_TYPE (mask);
3554 unsigned int precision = TYPE_PRECISION (type);
3557 tmask = build_int_cst (lang_hooks.types.signed_type (type), -1);
3558 tmask = force_fit_type (tmask, 0, false, false);
3561 tree_int_cst_equal (mask,
3562 const_binop (RSHIFT_EXPR,
3563 const_binop (LSHIFT_EXPR, tmask,
3564 size_int (precision - size),
3566 size_int (precision - size), 0));
3569 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
3570 represents the sign bit of EXP's type. If EXP represents a sign
3571 or zero extension, also test VAL against the unextended type.
3572 The return value is the (sub)expression whose sign bit is VAL,
3573 or NULL_TREE otherwise. */
3576 sign_bit_p (tree exp, tree val)
3578 unsigned HOST_WIDE_INT mask_lo, lo;
3579 HOST_WIDE_INT mask_hi, hi;
3583 /* Tree EXP must have an integral type. */
3584 t = TREE_TYPE (exp);
3585 if (! INTEGRAL_TYPE_P (t))
3588 /* Tree VAL must be an integer constant. */
3589 if (TREE_CODE (val) != INTEGER_CST
3590 || TREE_CONSTANT_OVERFLOW (val))
3593 width = TYPE_PRECISION (t);
3594 if (width > HOST_BITS_PER_WIDE_INT)
3596 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3599 mask_hi = ((unsigned HOST_WIDE_INT) -1
3600 >> (2 * HOST_BITS_PER_WIDE_INT - width));
3606 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3609 mask_lo = ((unsigned HOST_WIDE_INT) -1
3610 >> (HOST_BITS_PER_WIDE_INT - width));
3613 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3614 treat VAL as if it were unsigned. */
3615 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
3616 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
3619 /* Handle extension from a narrower type. */
3620 if (TREE_CODE (exp) == NOP_EXPR
3621 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3622 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3627 /* Subroutine for fold_truthop: determine if an operand is simple enough
3628 to be evaluated unconditionally. */
3631 simple_operand_p (tree exp)
3633 /* Strip any conversions that don't change the machine mode. */
3636 return (CONSTANT_CLASS_P (exp)
3637 || TREE_CODE (exp) == SSA_NAME
3639 && ! TREE_ADDRESSABLE (exp)
3640 && ! TREE_THIS_VOLATILE (exp)
3641 && ! DECL_NONLOCAL (exp)
3642 /* Don't regard global variables as simple. They may be
3643 allocated in ways unknown to the compiler (shared memory,
3644 #pragma weak, etc). */
3645 && ! TREE_PUBLIC (exp)
3646 && ! DECL_EXTERNAL (exp)
3647 /* Loading a static variable is unduly expensive, but global
3648 registers aren't expensive. */
3649 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3652 /* The following functions are subroutines to fold_range_test and allow it to
3653 try to change a logical combination of comparisons into a range test.
3656 X == 2 || X == 3 || X == 4 || X == 5
3660 (unsigned) (X - 2) <= 3
3662 We describe each set of comparisons as being either inside or outside
3663 a range, using a variable named like IN_P, and then describe the
3664 range with a lower and upper bound. If one of the bounds is omitted,
3665 it represents either the highest or lowest value of the type.
3667 In the comments below, we represent a range by two numbers in brackets
3668 preceded by a "+" to designate being inside that range, or a "-" to
3669 designate being outside that range, so the condition can be inverted by
3670 flipping the prefix. An omitted bound is represented by a "-". For
3671 example, "- [-, 10]" means being outside the range starting at the lowest
3672 possible value and ending at 10, in other words, being greater than 10.
3673 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
   always false.
3676 We set up things so that the missing bounds are handled in a consistent
3677 manner so neither a missing bound nor "true" and "false" need to be
3678 handled using a special case. */
3680 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3681 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3682 and UPPER1_P are nonzero if the respective argument is an upper bound
3683 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3684 must be specified for a comparison. ARG1 will be converted to ARG0's
3685 type if both are specified. */
/* Apply CODE to bounds ARG0/ARG1 where a null tree means "unbounded"
   (see the explanatory comment preceding this function).  UPPER[01]_P
   say whether the corresponding null argument is an upper bound.
   NOTE(review): this listing is a line-numbered extract; gaps in the
   embedded numbers (3690-3694, 3699, 3701, 3704, 3706-3710, 3719-3721,
   3723-3724, ...) are lines dropped by extraction -- declarations,
   braces and the switch/case labels for the comparison codes are
   missing, so the body below is not compilable as shown.  */
3688 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3689 tree arg1, int upper1_p)
3695 /* If neither arg represents infinity, do the normal operation.
3696 Else, if not a comparison, return infinity. Else handle the special
3697 comparison rules. Note that most of the cases below won't occur, but
3698 are handled for consistency. */
3700 if (arg0 != 0 && arg1 != 0)
3702 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3703 arg0, fold_convert (TREE_TYPE (arg0), arg1));
3705 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3708 if (TREE_CODE_CLASS (code) != tcc_comparison)
3711 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3712 for neither. In real maths, we cannot assume open ended ranges are
3713 the same. But, this is computer arithmetic, where numbers are finite.
3714 We can therefore make the transformation of any unbounded range with
3715 the value Z, Z being greater than any representable number. This permits
3716 us to treat unbounded ranges as equal. */
3717 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3718 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
/* The assignments below correspond to the six comparison codes; their
   `case' labels were lost in the extraction -- presumably EQ, NE, LT,
   LE, GT, GE in that order (TODO confirm against the full source).  */
3722 result = sgn0 == sgn1;
3725 result = sgn0 != sgn1;
3728 result = sgn0 < sgn1;
3731 result = sgn0 <= sgn1;
3734 result = sgn0 > sgn1;
3737 result = sgn0 >= sgn1;
3743 return constant_boolean_node (result, type);
3746 /* Given EXP, a logical expression, set the range it is testing into
3747 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
3748 actually being tested. *PLOW and *PHIGH will be made of the same type
3749 as the returned expression. If EXP is not a comparison, we will most
3750 likely not be returning a useful value and range. */
/* Derive the range tested by logical expression EXP, storing the
   in/out flag and bounds through *PIN_P, *PLOW, *PHIGH and returning
   the expression actually tested (see the comment above).
   NOTE(review): line-numbered lossy extract -- the enclosing loop
   header, braces, several `case' labels and `break'/`continue'
   statements are missing (gaps such as 3754, 3766-3771, 3788-3791,
   3806-3812, 3856-3862, 4004-4012).  Not compilable as shown.  */
3753 make_range (tree exp, int *pin_p, tree *plow, tree *phigh)
3755 enum tree_code code;
3756 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
3757 tree exp_type = NULL_TREE, arg0_type = NULL_TREE;
3759 tree low, high, n_low, n_high;
3761 /* Start with simply saying "EXP != 0" and then look at the code of EXP
3762 and see if we can refine the range. Some of the cases below may not
3763 happen, but it doesn't seem worth worrying about this. We "continue"
3764 the outer loop when we've changed something; otherwise we "break"
3765 the switch, which will "break" the while. */
3768 low = high = build_int_cst (TREE_TYPE (exp), 0);
3772 code = TREE_CODE (exp);
3773 exp_type = TREE_TYPE (exp);
3775 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
3777 if (TREE_CODE_LENGTH (code) > 0)
3778 arg0 = TREE_OPERAND (exp, 0);
3779 if (TREE_CODE_CLASS (code) == tcc_comparison
3780 || TREE_CODE_CLASS (code) == tcc_unary
3781 || TREE_CODE_CLASS (code) == tcc_binary)
3782 arg0_type = TREE_TYPE (arg0);
3783 if (TREE_CODE_CLASS (code) == tcc_binary
3784 || TREE_CODE_CLASS (code) == tcc_comparison
3785 || (TREE_CODE_CLASS (code) == tcc_expression
3786 && TREE_CODE_LENGTH (code) > 1))
3787 arg1 = TREE_OPERAND (exp, 1);
3792 case TRUTH_NOT_EXPR:
3793 in_p = ! in_p, exp = arg0;
3796 case EQ_EXPR: case NE_EXPR:
3797 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3798 /* We can only do something if the range is testing for zero
3799 and if the second operand is an integer constant. Note that
3800 saying something is "in" the range we make is done by
3801 complementing IN_P since it will set in the initial case of
3802 being not equal to zero; "out" is leaving it alone. */
3803 if (low == 0 || high == 0
3804 || ! integer_zerop (low) || ! integer_zerop (high)
3805 || TREE_CODE (arg1) != INTEGER_CST)
3810 case NE_EXPR: /* - [c, c] */
3813 case EQ_EXPR: /* + [c, c] */
3814 in_p = ! in_p, low = high = arg1;
3816 case GT_EXPR: /* - [-, c] */
3817 low = 0, high = arg1;
3819 case GE_EXPR: /* + [c, -] */
3820 in_p = ! in_p, low = arg1, high = 0;
3822 case LT_EXPR: /* - [c, -] */
3823 low = arg1, high = 0;
3825 case LE_EXPR: /* + [-, c] */
3826 in_p = ! in_p, low = 0, high = arg1;
3832 /* If this is an unsigned comparison, we also know that EXP is
3833 greater than or equal to zero. We base the range tests we make
3834 on that fact, so we record it here so we can parse existing
3835 range tests. We test arg0_type since often the return type
3836 of, e.g. EQ_EXPR, is boolean. */
3837 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
3839 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3841 build_int_cst (arg0_type, 0),
3845 in_p = n_in_p, low = n_low, high = n_high;
3847 /* If the high bound is missing, but we have a nonzero low
3848 bound, reverse the range so it goes from zero to the low bound
3850 if (high == 0 && low && ! integer_zerop (low))
3853 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
3854 integer_one_node, 0);
3855 low = build_int_cst (arg0_type, 0);
3863 /* (-x) IN [a,b] -> x in [-b, -a] */
3864 n_low = range_binop (MINUS_EXPR, exp_type,
3865 build_int_cst (exp_type, 0),
3867 n_high = range_binop (MINUS_EXPR, exp_type,
3868 build_int_cst (exp_type, 0),
3870 low = n_low, high = n_high;
3876 exp = build2 (MINUS_EXPR, exp_type, negate_expr (arg0),
3877 build_int_cst (exp_type, 1));
3880 case PLUS_EXPR: case MINUS_EXPR:
3881 if (TREE_CODE (arg1) != INTEGER_CST)
3884 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
3885 move a constant to the other side. */
3886 if (flag_wrapv && !TYPE_UNSIGNED (arg0_type))
3889 /* If EXP is signed, any overflow in the computation is undefined,
3890 so we don't worry about it so long as our computations on
3891 the bounds don't overflow. For unsigned, overflow is defined
3892 and this is exactly the right thing. */
3893 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3894 arg0_type, low, 0, arg1, 0);
3895 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3896 arg0_type, high, 1, arg1, 0);
3897 if ((n_low != 0 && TREE_OVERFLOW (n_low))
3898 || (n_high != 0 && TREE_OVERFLOW (n_high)))
3901 /* Check for an unsigned range which has wrapped around the maximum
3902 value thus making n_high < n_low, and normalize it. */
3903 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
3905 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
3906 integer_one_node, 0);
3907 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
3908 integer_one_node, 0);
3910 /* If the range is of the form +/- [ x+1, x ], we won't
3911 be able to normalize it. But then, it represents the
3912 whole range or the empty set, so make it
3914 if (tree_int_cst_equal (n_low, low)
3915 && tree_int_cst_equal (n_high, high))
3921 low = n_low, high = n_high;
3926 case NOP_EXPR: case NON_LVALUE_EXPR: case CONVERT_EXPR:
3927 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
3930 if (! INTEGRAL_TYPE_P (arg0_type)
3931 || (low != 0 && ! int_fits_type_p (low, arg0_type))
3932 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
3935 n_low = low, n_high = high;
3938 n_low = fold_convert (arg0_type, n_low);
3941 n_high = fold_convert (arg0_type, n_high);
3944 /* If we're converting arg0 from an unsigned type, to exp,
3945 a signed type, we will be doing the comparison as unsigned.
3946 The tests above have already verified that LOW and HIGH
3949 So we have to ensure that we will handle large unsigned
3950 values the same way that the current signed bounds treat
3953 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
3956 tree equiv_type = lang_hooks.types.type_for_mode
3957 (TYPE_MODE (arg0_type), 1);
3959 /* A range without an upper bound is, naturally, unbounded.
3960 Since convert would have cropped a very large value, use
3961 the max value for the destination type. */
3963 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
3964 : TYPE_MAX_VALUE (arg0_type);
3966 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
3967 high_positive = fold_build2 (RSHIFT_EXPR, arg0_type,
3968 fold_convert (arg0_type,
3970 fold_convert (arg0_type,
3973 /* If the low bound is specified, "and" the range with the
3974 range for which the original unsigned value will be
3978 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3979 1, n_low, n_high, 1,
3980 fold_convert (arg0_type,
3985 in_p = (n_in_p == in_p);
3989 /* Otherwise, "or" the range with the range of the input
3990 that will be interpreted as negative. */
3991 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3992 0, n_low, n_high, 1,
3993 fold_convert (arg0_type,
3998 in_p = (in_p != n_in_p);
4003 low = n_low, high = n_high;
4013 /* If EXP is a constant, we can evaluate whether this is true or false. */
4014 if (TREE_CODE (exp) == INTEGER_CST)
4016 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4018 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4024 *pin_p = in_p, *plow = low, *phigh = high;
4028 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4029 type, TYPE, return an expression to test if EXP is in (or out of, depending
4030 on IN_P) the range. Return 0 if the test couldn't be created. */
/* Build an expression of type TYPE testing whether EXP lies in (or,
   when IN_P is 0, outside) the range [LOW, HIGH]; returns 0 when no
   test can be built (see the comment above).
   NOTE(review): line-numbered lossy extract -- gaps (4034, 4036-4037,
   4044-4048, 4050, 4052-4055, 4058-4059, 4062-4063, ...) dropped
   declarations, braces, the `in_p' handling and several `if'
   conditions, so the body below is incomplete as shown.  */
4033 build_range_check (tree type, tree exp, int in_p, tree low, tree high)
4035 tree etype = TREE_TYPE (exp);
4038 #ifdef HAVE_canonicalize_funcptr_for_compare
4039 /* Disable this optimization for function pointer expressions
4040 on targets that require function pointer canonicalization. */
4041 if (HAVE_canonicalize_funcptr_for_compare
4042 && TREE_CODE (etype) == POINTER_TYPE
4043 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4049 value = build_range_check (type, exp, 1, low, high);
4051 return invert_truthvalue (value);
4056 if (low == 0 && high == 0)
4057 return build_int_cst (type, 1);
4060 return fold_build2 (LE_EXPR, type, exp,
4061 fold_convert (etype, high));
4064 return fold_build2 (GE_EXPR, type, exp,
4065 fold_convert (etype, low));
4067 if (operand_equal_p (low, high, 0))
4068 return fold_build2 (EQ_EXPR, type, exp,
4069 fold_convert (etype, low));
4071 if (integer_zerop (low))
4073 if (! TYPE_UNSIGNED (etype))
4075 etype = lang_hooks.types.unsigned_type (etype);
4076 high = fold_convert (etype, high);
4077 exp = fold_convert (etype, exp);
4079 return build_range_check (type, exp, 1, 0, high);
4082 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4083 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4085 unsigned HOST_WIDE_INT lo;
4089 prec = TYPE_PRECISION (etype);
4090 if (prec <= HOST_BITS_PER_WIDE_INT)
/* lo/hi are set to the max signed value of PREC bits, split across
   the two HOST_WIDE_INT halves of an INTEGER_CST.  */
4093 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
4097 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
4098 lo = (unsigned HOST_WIDE_INT) -1;
4101 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
4103 if (TYPE_UNSIGNED (etype))
4105 etype = lang_hooks.types.signed_type (etype);
4106 exp = fold_convert (etype, exp);
4108 return fold_build2 (GT_EXPR, type, exp,
4109 build_int_cst (etype, 0));
4113 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4114 This requires wrap-around arithmetics for the type of the expression. */
4115 switch (TREE_CODE (etype))
4118 /* There is no requirement that LOW be within the range of ETYPE
4119 if the latter is a subtype. It must, however, be within the base
4120 type of ETYPE. So be sure we do the subtraction in that type. */
4121 if (TREE_TYPE (etype))
4122 etype = TREE_TYPE (etype);
4127 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4128 TYPE_UNSIGNED (etype));
4135 /* If we don't have wrap-around arithmetics upfront, try to force it. */
4136 if (TREE_CODE (etype) == INTEGER_TYPE
4137 && !TYPE_UNSIGNED (etype) && !flag_wrapv)
4139 tree utype, minv, maxv;
4141 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4142 for the type in question, as we rely on this here. */
4143 utype = lang_hooks.types.unsigned_type (etype);
4144 maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
4145 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4146 integer_one_node, 1);
4147 minv = fold_convert (utype, TYPE_MIN_VALUE (etype));
4149 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4156 high = fold_convert (etype, high);
4157 low = fold_convert (etype, low);
4158 exp = fold_convert (etype, exp);
4160 value = const_binop (MINUS_EXPR, high, low, 0);
4162 if (value != 0 && !TREE_OVERFLOW (value))
4163 return build_range_check (type,
4164 fold_build2 (MINUS_EXPR, etype, exp, low),
4165 1, build_int_cst (etype, 0), value);
4170 /* Return the predecessor of VAL in its type, handling the infinite case. */
/* Return VAL - 1 via range_binop; for an integral VAL equal to its
   type's minimum the guarded branch (its body, orig. lines 4179-4180,
   is missing from this extract -- presumably `return 0') handles the
   infinite case described in the comment above.  */
4173 range_predecessor (tree val)
4175 tree type = TREE_TYPE (val);
4177 if (INTEGRAL_TYPE_P (type)
4178 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4181 return range_binop (MINUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4184 /* Return the successor of VAL in its type, handling the infinite case. */
/* Return VAL + 1 via range_binop; for an integral VAL equal to its
   type's maximum the guarded branch (its body, orig. lines 4193-4194,
   is missing from this extract -- presumably `return 0') handles the
   infinite case described in the comment above.  */
4187 range_successor (tree val)
4189 tree type = TREE_TYPE (val);
4191 if (INTEGRAL_TYPE_P (type)
4192 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4195 return range_binop (PLUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4198 /* Given two ranges, see if we can merge them into one. Return 1 if we
4199 can, 0 if we can't. Set the output range into the specified parameters. */
/* Try to merge ranges (IN0_P, LOW0, HIGH0) and (IN1_P, LOW1, HIGH1)
   into one range, storing it through *PIN_P/*PLOW/*PHIGH; returns
   1 on success, 0 otherwise (see the comment above).
   NOTE(review): line-numbered lossy extract -- declarations, braces,
   `if'/`else' headers and `return'/`break' statements fell in the
   numbering gaps (4204-4210, 4221-4222, 4241-4246, 4248, 4250, ...),
   so the body below is incomplete as shown.  */
4202 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4203 tree high0, int in1_p, tree low1, tree high1)
4211 int lowequal = ((low0 == 0 && low1 == 0)
4212 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4213 low0, 0, low1, 0)));
4214 int highequal = ((high0 == 0 && high1 == 0)
4215 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4216 high0, 1, high1, 1)));
4218 /* Make range 0 be the range that starts first, or ends last if they
4219 start at the same value. Swap them if it isn't. */
4220 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4223 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4224 high1, 1, high0, 1))))
4226 temp = in0_p, in0_p = in1_p, in1_p = temp;
4227 tem = low0, low0 = low1, low1 = tem;
4228 tem = high0, high0 = high1, high1 = tem;
4231 /* Now flag two cases, whether the ranges are disjoint or whether the
4232 second range is totally subsumed in the first. Note that the tests
4233 below are simplified by the ones above. */
4234 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4235 high0, 1, low1, 0));
4236 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4237 high1, 1, high0, 1));
4239 /* We now have four cases, depending on whether we are including or
4240 excluding the two ranges. */
4243 /* If they don't overlap, the result is false. If the second range
4244 is a subset it is the result. Otherwise, the range is from the start
4245 of the second to the end of the first. */
4247 in_p = 0, low = high = 0;
4249 in_p = 1, low = low1, high = high1;
4251 in_p = 1, low = low1, high = high0;
4254 else if (in0_p && ! in1_p)
4256 /* If they don't overlap, the result is the first range. If they are
4257 equal, the result is false. If the second range is a subset of the
4258 first, and the ranges begin at the same place, we go from just after
4259 the end of the second range to the end of the first. If the second
4260 range is not a subset of the first, or if it is a subset and both
4261 ranges end at the same place, the range starts at the start of the
4262 first range and ends just before the second range.
4263 Otherwise, we can't describe this as a single range. */
4265 in_p = 1, low = low0, high = high0;
4266 else if (lowequal && highequal)
4267 in_p = 0, low = high = 0;
4268 else if (subset && lowequal)
4270 low = range_successor (high1);
4274 else if (! subset || highequal)
4277 high = range_predecessor (low1);
4284 else if (! in0_p && in1_p)
4286 /* If they don't overlap, the result is the second range. If the second
4287 is a subset of the first, the result is false. Otherwise,
4288 the range starts just after the first range and ends at the
4289 end of the second. */
4291 in_p = 1, low = low1, high = high1;
4292 else if (subset || highequal)
4293 in_p = 0, low = high = 0;
4296 low = range_successor (high0);
4304 /* The case where we are excluding both ranges. Here the complex case
4305 is if they don't overlap. In that case, the only time we have a
4306 range is if they are adjacent. If the second is a subset of the
4307 first, the result is the first. Otherwise, the range to exclude
4308 starts at the beginning of the first range and ends at the end of the
4312 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4313 range_successor (high0),
4315 in_p = 0, low = low0, high = high1;
4318 /* Canonicalize - [min, x] into - [-, x]. */
4319 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4320 switch (TREE_CODE (TREE_TYPE (low0)))
4323 if (TYPE_PRECISION (TREE_TYPE (low0))
4324 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4328 if (tree_int_cst_equal (low0,
4329 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4333 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4334 && integer_zerop (low0))
4341 /* Canonicalize - [x, max] into - [x, -]. */
4342 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4343 switch (TREE_CODE (TREE_TYPE (high1)))
4346 if (TYPE_PRECISION (TREE_TYPE (high1))
4347 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4351 if (tree_int_cst_equal (high1,
4352 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4356 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4357 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4359 integer_one_node, 1)))
4366 /* The ranges might be also adjacent between the maximum and
4367 minimum values of the given type. For
4368 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4369 return + [x + 1, y - 1]. */
4370 if (low0 == 0 && high1 == 0)
4372 low = range_successor (high0);
4373 high = range_predecessor (low1);
4374 if (low == 0 || high == 0)
4384 in_p = 0, low = low0, high = high0;
4386 in_p = 0, low = low0, high = high1;
4389 *pin_p = in_p, *plow = low, *phigh = high;
4394 /* Subroutine of fold, looking inside expressions of the form
4395 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4396 of the COND_EXPR. This function is being used also to optimize
4397 A op B ? C : A, by reversing the comparison first.
4399 Return a folded expression whose code is not a COND_EXPR
4400 anymore, or NULL_TREE if no folding opportunity is found. */
/* Fold A op B ? A : C (ARG0 is the comparison, ARG1/ARG2 the arms)
   into a non-COND_EXPR tree, or return NULL_TREE (see the comment
   above).
   NOTE(review): line-numbered lossy extract -- the switch headers and
   most `case' labels over COMP_CODE fell in the numbering gaps
   (4409-4413, 4446-4449, 4452-4453, 4455-4456, 4458-4461, 4603-4605,
   4609-4610, ...), so the body below is incomplete as shown; the
   surviving comments describe which comparison each fragment serves. */
4403 fold_cond_expr_with_comparison (tree type, tree arg0, tree arg1, tree arg2)
4405 enum tree_code comp_code = TREE_CODE (arg0);
4406 tree arg00 = TREE_OPERAND (arg0, 0);
4407 tree arg01 = TREE_OPERAND (arg0, 1);
4408 tree arg1_type = TREE_TYPE (arg1);
4414 /* If we have A op 0 ? A : -A, consider applying the following
4417 A == 0? A : -A same as -A
4418 A != 0? A : -A same as A
4419 A >= 0? A : -A same as abs (A)
4420 A > 0? A : -A same as abs (A)
4421 A <= 0? A : -A same as -abs (A)
4422 A < 0? A : -A same as -abs (A)
4424 None of these transformations work for modes with signed
4425 zeros. If A is +/-0, the first two transformations will
4426 change the sign of the result (from +0 to -0, or vice
4427 versa). The last four will fix the sign of the result,
4428 even though the original expressions could be positive or
4429 negative, depending on the sign of A.
4431 Note that all these transformations are correct if A is
4432 NaN, since the two alternatives (A and -A) are also NaNs. */
4433 if ((FLOAT_TYPE_P (TREE_TYPE (arg01))
4434 ? real_zerop (arg01)
4435 : integer_zerop (arg01))
4436 && ((TREE_CODE (arg2) == NEGATE_EXPR
4437 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4438 /* In the case that A is of the form X-Y, '-A' (arg2) may
4439 have already been folded to Y-X, check for that. */
4440 || (TREE_CODE (arg1) == MINUS_EXPR
4441 && TREE_CODE (arg2) == MINUS_EXPR
4442 && operand_equal_p (TREE_OPERAND (arg1, 0),
4443 TREE_OPERAND (arg2, 1), 0)
4444 && operand_equal_p (TREE_OPERAND (arg1, 1),
4445 TREE_OPERAND (arg2, 0), 0))))
4450 tem = fold_convert (arg1_type, arg1);
4451 return pedantic_non_lvalue (fold_convert (type, negate_expr (tem)));
4454 return pedantic_non_lvalue (fold_convert (type, arg1));
4457 if (flag_trapping_math)
4462 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4463 arg1 = fold_convert (lang_hooks.types.signed_type
4464 (TREE_TYPE (arg1)), arg1);
4465 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
4466 return pedantic_non_lvalue (fold_convert (type, tem));
4469 if (flag_trapping_math)
4473 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4474 arg1 = fold_convert (lang_hooks.types.signed_type
4475 (TREE_TYPE (arg1)), arg1);
4476 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
4477 return negate_expr (fold_convert (type, tem));
4479 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4483 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4484 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4485 both transformations are correct when A is NaN: A != 0
4486 is then true, and A == 0 is false. */
4488 if (integer_zerop (arg01) && integer_zerop (arg2))
4490 if (comp_code == NE_EXPR)
4491 return pedantic_non_lvalue (fold_convert (type, arg1));
4492 else if (comp_code == EQ_EXPR)
4493 return build_int_cst (type, 0);
4496 /* Try some transformations of A op B ? A : B.
4498 A == B? A : B same as B
4499 A != B? A : B same as A
4500 A >= B? A : B same as max (A, B)
4501 A > B? A : B same as max (B, A)
4502 A <= B? A : B same as min (A, B)
4503 A < B? A : B same as min (B, A)
4505 As above, these transformations don't work in the presence
4506 of signed zeros. For example, if A and B are zeros of
4507 opposite sign, the first two transformations will change
4508 the sign of the result. In the last four, the original
4509 expressions give different results for (A=+0, B=-0) and
4510 (A=-0, B=+0), but the transformed expressions do not.
4512 The first two transformations are correct if either A or B
4513 is a NaN. In the first transformation, the condition will
4514 be false, and B will indeed be chosen. In the case of the
4515 second transformation, the condition A != B will be true,
4516 and A will be chosen.
4518 The conversions to max() and min() are not correct if B is
4519 a number and A is not. The conditions in the original
4520 expressions will be false, so all four give B. The min()
4521 and max() versions would give a NaN instead. */
4522 if (operand_equal_for_comparison_p (arg01, arg2, arg00)
4523 /* Avoid these transformations if the COND_EXPR may be used
4524 as an lvalue in the C++ front-end. PR c++/19199. */
4526 || strcmp (lang_hooks.name, "GNU C++") != 0
4527 || ! maybe_lvalue_p (arg1)
4528 || ! maybe_lvalue_p (arg2)))
4530 tree comp_op0 = arg00;
4531 tree comp_op1 = arg01;
4532 tree comp_type = TREE_TYPE (comp_op0);
4534 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4535 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4545 return pedantic_non_lvalue (fold_convert (type, arg2));
4547 return pedantic_non_lvalue (fold_convert (type, arg1));
4552 /* In C++ a ?: expression can be an lvalue, so put the
4553 operand which will be used if they are equal first
4554 so that we can convert this back to the
4555 corresponding COND_EXPR. */
4556 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4558 comp_op0 = fold_convert (comp_type, comp_op0);
4559 comp_op1 = fold_convert (comp_type, comp_op1);
4560 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
4561 ? fold_build2 (MIN_EXPR, comp_type, comp_op0, comp_op1)
4562 : fold_build2 (MIN_EXPR, comp_type, comp_op1, comp_op0);
4563 return pedantic_non_lvalue (fold_convert (type, tem));
4570 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4572 comp_op0 = fold_convert (comp_type, comp_op0);
4573 comp_op1 = fold_convert (comp_type, comp_op1);
4574 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
4575 ? fold_build2 (MAX_EXPR, comp_type, comp_op0, comp_op1)
4576 : fold_build2 (MAX_EXPR, comp_type, comp_op1, comp_op0);
4577 return pedantic_non_lvalue (fold_convert (type, tem));
4581 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4582 return pedantic_non_lvalue (fold_convert (type, arg2));
4585 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4586 return pedantic_non_lvalue (fold_convert (type, arg1));
4589 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4594 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4595 we might still be able to simplify this. For example,
4596 if C1 is one less or one more than C2, this might have started
4597 out as a MIN or MAX and been transformed by this function.
4598 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
4600 if (INTEGRAL_TYPE_P (type)
4601 && TREE_CODE (arg01) == INTEGER_CST
4602 && TREE_CODE (arg2) == INTEGER_CST)
4606 /* We can replace A with C1 in this case. */
4607 arg1 = fold_convert (type, arg01);
4608 return fold_build3 (COND_EXPR, type, arg0, arg1, arg2);
4611 /* If C1 is C2 + 1, this is min(A, C2). */
4612 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4614 && operand_equal_p (arg01,
4615 const_binop (PLUS_EXPR, arg2,
4616 integer_one_node, 0),
4618 return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
4623 /* If C1 is C2 - 1, this is min(A, C2). */
4624 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4626 && operand_equal_p (arg01,
4627 const_binop (MINUS_EXPR, arg2,
4628 integer_one_node, 0),
4630 return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
4635 /* If C1 is C2 - 1, this is max(A, C2). */
4636 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4638 && operand_equal_p (arg01,
4639 const_binop (MINUS_EXPR, arg2,
4640 integer_one_node, 0),
4642 return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
4647 /* If C1 is C2 + 1, this is max(A, C2). */
4648 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4650 && operand_equal_p (arg01,
4651 const_binop (PLUS_EXPR, arg2,
4652 integer_one_node, 0),
4654 return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
/* Default: prefer non-short-circuit (bitwise) forms of &&/|| when
   branches are costly; targets may pre-define this to override.
   NOTE(review): the matching #endif (orig. line 4670) fell in this
   extract's numbering gap.  */
4668 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
4669 #define LOGICAL_OP_NON_SHORT_CIRCUIT (BRANCH_COST >= 2)
4672 /* EXP is some logical combination of boolean tests. See if we can
4673 merge it into some range test. Return the new tree if so. */
/* Try to merge the boolean combination CODE of OP0 and OP1 into a
   single range test of type TYPE; see the comment above.
   NOTE(review): line-numbered lossy extract -- gaps (4677, 4684-4685,
   4688, 4690, 4697, 4699, 4701, 4703, 4712, 4719-4720, ...) dropped
   declarations, braces and some argument lines, so the body below is
   incomplete as shown.  */
4676 fold_range_test (enum tree_code code, tree type, tree op0, tree op1)
4678 int or_op = (code == TRUTH_ORIF_EXPR
4679 || code == TRUTH_OR_EXPR);
4680 int in0_p, in1_p, in_p;
4681 tree low0, low1, low, high0, high1, high;
4682 tree lhs = make_range (op0, &in0_p, &low0, &high0);
4683 tree rhs = make_range (op1, &in1_p, &low1, &high1);
4686 /* If this is an OR operation, invert both sides; we will invert
4687 again at the end. */
4689 in0_p = ! in0_p, in1_p = ! in1_p;
4691 /* If both expressions are the same, if we can merge the ranges, and we
4692 can build the range test, return it or it inverted. If one of the
4693 ranges is always true or always false, consider it to be the same
4694 expression as the other. */
4695 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
4696 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
4698 && 0 != (tem = (build_range_check (type,
4700 : rhs != 0 ? rhs : integer_zero_node,
4702 return or_op ? invert_truthvalue (tem) : tem;
4704 /* On machines where the branch cost is expensive, if this is a
4705 short-circuited branch and the underlying object on both sides
4706 is the same, make a non-short-circuit operation. */
4707 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
4708 && lhs != 0 && rhs != 0
4709 && (code == TRUTH_ANDIF_EXPR
4710 || code == TRUTH_ORIF_EXPR)
4711 && operand_equal_p (lhs, rhs, 0))
4713 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
4714 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
4715 which cases we can't do this. */
4716 if (simple_operand_p (lhs))
4717 return build2 (code == TRUTH_ANDIF_EXPR
4718 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4721 else if (lang_hooks.decls.global_bindings_p () == 0
4722 && ! CONTAINS_PLACEHOLDER_P (lhs))
4724 tree common = save_expr (lhs);
4726 if (0 != (lhs = build_range_check (type, common,
4727 or_op ? ! in0_p : in0_p,
4729 && (0 != (rhs = build_range_check (type, common,
4730 or_op ? ! in1_p : in1_p,
4732 return build2 (code == TRUTH_ANDIF_EXPR
4733 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4741 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
4742 bit value. Arrange things so the extra bits will be set to zero if and
4743 only if C is signed-extended to its full width. If MASK is nonzero,
4744 it is an INTEGER_CST that should be AND'ed with the extra bits. */
/* Arrange the P-bit constant C so its extra (above-P) bits are set
   exactly when C sign-extends; AND the extra bits with MASK if given
   (see the comment above).
   NOTE(review): line-numbered lossy extract -- gaps (4748, 4751-4752,
   4754-4755, 4758, 4761, 4770, 4773, 4779) dropped the declaration of
   `temp', a `return c;' style early-out after the p==modesize test,
   and the MASK null-check around the BIT_AND_EXPR; incomplete as
   shown.  */
4747 unextend (tree c, int p, int unsignedp, tree mask)
4749 tree type = TREE_TYPE (c);
4750 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
4753 if (p == modesize || unsignedp)
4756 /* We work by getting just the sign bit into the low-order bit, then
4757 into the high-order bit, then sign-extend. We then XOR that value
4759 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
4760 temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
4762 /* We must use a signed type in order to get an arithmetic right shift.
4763 However, we must also avoid introducing accidental overflows, so that
4764 a subsequent call to integer_zerop will work. Hence we must
4765 do the type conversion here. At this point, the constant is either
4766 zero or one, and the conversion to a signed type can never overflow.
4767 We could get an overflow if this conversion is done anywhere else. */
4768 if (TYPE_UNSIGNED (type))
4769 temp = fold_convert (lang_hooks.types.signed_type (type), temp);
4771 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
4772 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
4774 temp = const_binop (BIT_AND_EXPR, temp,
4775 fold_convert (TREE_TYPE (c), mask), 0);
4776 /* If necessary, convert the type back to match the type of C. */
4777 if (TYPE_UNSIGNED (type))
4778 temp = fold_convert (type, temp);
4780 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
4783 /* Find ways of folding logical expressions of LHS and RHS:
4784 Try to merge two comparisons to the same innermost item.
4785 Look for range tests like "ch >= '0' && ch <= '9'".
4786 Look for combinations of simple terms on machines with expensive branches
4787 and evaluate the RHS unconditionally.
4789 For example, if we have p->a == 2 && p->b == 4 and we can make an
4790 object large enough to span both A and B, we can do this with a comparison
4791 against the object ANDed with the a mask.
4793 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
4794 operations to do this with one comparison.
4796 We check for both normal comparisons and the BIT_AND_EXPRs made by this
4797 function and the one above.
4799 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
4800 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
4802 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
4805 We return the simplified tree or 0 if no optimization is possible. */
4808 fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs)
4810 /* If this is the "or" of two comparisons, we can do something if
4811 the comparisons are NE_EXPR. If this is the "and", we can do something
4812 if the comparisons are EQ_EXPR. I.e.,
4813 (a->b == 2 && a->c == 4) can become (a->new == NEW).
4815 WANTED_CODE is this operation code. For single bit fields, we can
4816 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
4817 comparison for one-bit fields. */
4819 enum tree_code wanted_code;
4820 enum tree_code lcode, rcode;
4821 tree ll_arg, lr_arg, rl_arg, rr_arg;
4822 tree ll_inner, lr_inner, rl_inner, rr_inner;
4823 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
4824 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
4825 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
4826 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
4827 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
4828 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
4829 enum machine_mode lnmode, rnmode;
4830 tree ll_mask, lr_mask, rl_mask, rr_mask;
4831 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
4832 tree l_const, r_const;
4833 tree lntype, rntype, result;
4834 int first_bit, end_bit;
4837 /* Start by getting the comparison codes. Fail if anything is volatile.
4838 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
4839 it were surrounded with a NE_EXPR. */
4841 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
4844 lcode = TREE_CODE (lhs);
4845 rcode = TREE_CODE (rhs);
4847 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
4849 lhs = build2 (NE_EXPR, truth_type, lhs,
4850 build_int_cst (TREE_TYPE (lhs), 0));
4854 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
4856 rhs = build2 (NE_EXPR, truth_type, rhs,
4857 build_int_cst (TREE_TYPE (rhs), 0));
4861 if (TREE_CODE_CLASS (lcode) != tcc_comparison
4862 || TREE_CODE_CLASS (rcode) != tcc_comparison)
4865 ll_arg = TREE_OPERAND (lhs, 0);
4866 lr_arg = TREE_OPERAND (lhs, 1);
4867 rl_arg = TREE_OPERAND (rhs, 0);
4868 rr_arg = TREE_OPERAND (rhs, 1);
4870 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
4871 if (simple_operand_p (ll_arg)
4872 && simple_operand_p (lr_arg))
4875 if (operand_equal_p (ll_arg, rl_arg, 0)
4876 && operand_equal_p (lr_arg, rr_arg, 0))
4878 result = combine_comparisons (code, lcode, rcode,
4879 truth_type, ll_arg, lr_arg);
4883 else if (operand_equal_p (ll_arg, rr_arg, 0)
4884 && operand_equal_p (lr_arg, rl_arg, 0))
4886 result = combine_comparisons (code, lcode,
4887 swap_tree_comparison (rcode),
4888 truth_type, ll_arg, lr_arg);
4894 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
4895 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
4897 /* If the RHS can be evaluated unconditionally and its operands are
4898 simple, it wins to evaluate the RHS unconditionally on machines
4899 with expensive branches. In this case, this isn't a comparison
4900 that can be merged. Avoid doing this if the RHS is a floating-point
4901 comparison since those can trap. */
4903 if (BRANCH_COST >= 2
4904 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
4905 && simple_operand_p (rl_arg)
4906 && simple_operand_p (rr_arg))
4908 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
4909 if (code == TRUTH_OR_EXPR
4910 && lcode == NE_EXPR && integer_zerop (lr_arg)
4911 && rcode == NE_EXPR && integer_zerop (rr_arg)
4912 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
4913 return build2 (NE_EXPR, truth_type,
4914 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
4916 build_int_cst (TREE_TYPE (ll_arg), 0));
4918 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
4919 if (code == TRUTH_AND_EXPR
4920 && lcode == EQ_EXPR && integer_zerop (lr_arg)
4921 && rcode == EQ_EXPR && integer_zerop (rr_arg)
4922 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
4923 return build2 (EQ_EXPR, truth_type,
4924 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
4926 build_int_cst (TREE_TYPE (ll_arg), 0));
4928 if (LOGICAL_OP_NON_SHORT_CIRCUIT)
4929 return build2 (code, truth_type, lhs, rhs);
4932 /* See if the comparisons can be merged. Then get all the parameters for
4935 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
4936 || (rcode != EQ_EXPR && rcode != NE_EXPR))
4940 ll_inner = decode_field_reference (ll_arg,
4941 &ll_bitsize, &ll_bitpos, &ll_mode,
4942 &ll_unsignedp, &volatilep, &ll_mask,
4944 lr_inner = decode_field_reference (lr_arg,
4945 &lr_bitsize, &lr_bitpos, &lr_mode,
4946 &lr_unsignedp, &volatilep, &lr_mask,
4948 rl_inner = decode_field_reference (rl_arg,
4949 &rl_bitsize, &rl_bitpos, &rl_mode,
4950 &rl_unsignedp, &volatilep, &rl_mask,
4952 rr_inner = decode_field_reference (rr_arg,
4953 &rr_bitsize, &rr_bitpos, &rr_mode,
4954 &rr_unsignedp, &volatilep, &rr_mask,
4957 /* It must be true that the inner operation on the lhs of each
4958 comparison must be the same if we are to be able to do anything.
4959 Then see if we have constants. If not, the same must be true for
4961 if (volatilep || ll_inner == 0 || rl_inner == 0
4962 || ! operand_equal_p (ll_inner, rl_inner, 0))
4965 if (TREE_CODE (lr_arg) == INTEGER_CST
4966 && TREE_CODE (rr_arg) == INTEGER_CST)
4967 l_const = lr_arg, r_const = rr_arg;
4968 else if (lr_inner == 0 || rr_inner == 0
4969 || ! operand_equal_p (lr_inner, rr_inner, 0))
4972 l_const = r_const = 0;
4974 /* If either comparison code is not correct for our logical operation,
4975 fail. However, we can convert a one-bit comparison against zero into
4976 the opposite comparison against that bit being set in the field. */
4978 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
4979 if (lcode != wanted_code)
4981 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
4983 /* Make the left operand unsigned, since we are only interested
4984 in the value of one bit. Otherwise we are doing the wrong
4993 /* This is analogous to the code for l_const above. */
4994 if (rcode != wanted_code)
4996 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5005 /* After this point all optimizations will generate bit-field
5006 references, which we might not want. */
5007 if (! lang_hooks.can_use_bit_fields_p ())
5010 /* See if we can find a mode that contains both fields being compared on
5011 the left. If we can't, fail. Otherwise, update all constants and masks
5012 to be relative to a field of that size. */
5013 first_bit = MIN (ll_bitpos, rl_bitpos);
5014 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5015 lnmode = get_best_mode (end_bit - first_bit, first_bit,
5016 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5018 if (lnmode == VOIDmode)
5021 lnbitsize = GET_MODE_BITSIZE (lnmode);
5022 lnbitpos = first_bit & ~ (lnbitsize - 1);
5023 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5024 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5026 if (BYTES_BIG_ENDIAN)
5028 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5029 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5032 ll_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, ll_mask),
5033 size_int (xll_bitpos), 0);
5034 rl_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, rl_mask),
5035 size_int (xrl_bitpos), 0);
5039 l_const = fold_convert (lntype, l_const);
5040 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5041 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
5042 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5043 fold_build1 (BIT_NOT_EXPR,
5047 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5049 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5054 r_const = fold_convert (lntype, r_const);
5055 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5056 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
5057 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5058 fold_build1 (BIT_NOT_EXPR,
5062 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5064 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5068 /* If the right sides are not constant, do the same for it. Also,
5069 disallow this optimization if a size or signedness mismatch occurs
5070 between the left and right sides. */
5073 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5074 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5075 /* Make sure the two fields on the right
5076 correspond to the left without being swapped. */
5077 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5080 first_bit = MIN (lr_bitpos, rr_bitpos);
5081 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5082 rnmode = get_best_mode (end_bit - first_bit, first_bit,
5083 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5085 if (rnmode == VOIDmode)
5088 rnbitsize = GET_MODE_BITSIZE (rnmode);
5089 rnbitpos = first_bit & ~ (rnbitsize - 1);
5090 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5091 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5093 if (BYTES_BIG_ENDIAN)
5095 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5096 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5099 lr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, lr_mask),
5100 size_int (xlr_bitpos), 0);
5101 rr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, rr_mask),
5102 size_int (xrr_bitpos), 0);
5104 /* Make a mask that corresponds to both fields being compared.
5105 Do this for both items being compared. If the operands are the
5106 same size and the bits being compared are in the same position
5107 then we can do this by masking both and comparing the masked
5109 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
5110 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
5111 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5113 lhs = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
5114 ll_unsignedp || rl_unsignedp);
5115 if (! all_ones_mask_p (ll_mask, lnbitsize))
5116 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5118 rhs = make_bit_field_ref (lr_inner, rntype, rnbitsize, rnbitpos,
5119 lr_unsignedp || rr_unsignedp);
5120 if (! all_ones_mask_p (lr_mask, rnbitsize))
5121 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5123 return build2 (wanted_code, truth_type, lhs, rhs);
5126 /* There is still another way we can do something: If both pairs of
5127 fields being compared are adjacent, we may be able to make a wider
5128 field containing them both.
5130 Note that we still must mask the lhs/rhs expressions. Furthermore,
5131 the mask must be shifted to account for the shift done by
5132 make_bit_field_ref. */
5133 if ((ll_bitsize + ll_bitpos == rl_bitpos
5134 && lr_bitsize + lr_bitpos == rr_bitpos)
5135 || (ll_bitpos == rl_bitpos + rl_bitsize
5136 && lr_bitpos == rr_bitpos + rr_bitsize))
5140 lhs = make_bit_field_ref (ll_inner, lntype, ll_bitsize + rl_bitsize,
5141 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5142 rhs = make_bit_field_ref (lr_inner, rntype, lr_bitsize + rr_bitsize,
5143 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5145 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5146 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
5147 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5148 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);
5150 /* Convert to the smaller type before masking out unwanted bits. */
5152 if (lntype != rntype)
5154 if (lnbitsize > rnbitsize)
5156 lhs = fold_convert (rntype, lhs);
5157 ll_mask = fold_convert (rntype, ll_mask);
5160 else if (lnbitsize < rnbitsize)
5162 rhs = fold_convert (lntype, rhs);
5163 lr_mask = fold_convert (lntype, lr_mask);
5168 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5169 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5171 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5172 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5174 return build2 (wanted_code, truth_type, lhs, rhs);
5180 /* Handle the case of comparisons with constants. If there is something in
5181 common between the masks, those bits of the constants must be the same.
5182 If not, the condition is always false. Test for this to avoid generating
5183 incorrect code below. */
5184 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
5185 if (! integer_zerop (result)
5186 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
5187 const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
5189 if (wanted_code == NE_EXPR)
5191 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5192 return constant_boolean_node (true, truth_type);
5196 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5197 return constant_boolean_node (false, truth_type);
5201 /* Construct the expression we will return. First get the component
5202 reference we will make. Unless the mask is all ones the width of
5203 that field, perform the mask operation. Then compare with the
5205 result = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
5206 ll_unsignedp || rl_unsignedp);
5208 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
5209 if (! all_ones_mask_p (ll_mask, lnbitsize))
5210 result = build2 (BIT_AND_EXPR, lntype, result, ll_mask);
5212 return build2 (wanted_code, truth_type, result,
5213 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
5216 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
/* Purpose: simplify a comparison (CODE) whose left operand OP0 is a
   MIN_EXPR/MAX_EXPR of an expression and an integer constant, and whose
   right operand OP1 is another integer constant.  TYPE is the type of
   the comparison result.  Only EQ_EXPR and GT_EXPR are handled directly;
   the remaining comparison codes are reduced to those via recursion.
   NOTE(review): this excerpt omits some original lines (declarations of
   arg0/minmax_const/inner, braces, the `switch (code)` header and the
   EQ_EXPR/GT_EXPR case labels), so the visible text is not the complete
   function body — confirm against the full file before editing.  */
5220 optimize_minmax_comparison (enum tree_code code, tree type, tree op0, tree op1)
5223   enum tree_code op_code;
5224   tree comp_const = op1;
5226   int consts_equal, consts_lt;
5229   STRIP_SIGN_NOPS (arg0);
5231   op_code = TREE_CODE (arg0);
5232   minmax_const = TREE_OPERAND (arg0, 1);
/* consts_equal / consts_lt classify the min/max constant relative to the
   comparison constant; every transformation below branches on them.  */
5233   consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5234   consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5235   inner = TREE_OPERAND (arg0, 0);
5237   /* If something does not permit us to optimize, return the original tree.  */
5238   if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5239       || TREE_CODE (comp_const) != INTEGER_CST
5240       || TREE_CONSTANT_OVERFLOW (comp_const)
5241       || TREE_CODE (minmax_const) != INTEGER_CST
5242       || TREE_CONSTANT_OVERFLOW (minmax_const))
5245   /* Now handle all the various comparison codes.  We only handle EQ_EXPR
5246      and GT_EXPR, doing the rest with recursive calls using logical
5250     case NE_EXPR:  case LT_EXPR:  case LE_EXPR:
5252       /* FIXME: We should be able to invert code without building a
5253          scratch tree node, but doing so would require us to
5254          duplicate a part of invert_truthvalue here.  */
/* NE/LT/LE: invert to EQ/GE/GT, recurse, then invert the result back.  */
5255       tree tem = invert_truthvalue (build2 (code, type, op0, op1));
5256       tem = optimize_minmax_comparison (TREE_CODE (tem),
5258                                         TREE_OPERAND (tem, 0),
5259                                         TREE_OPERAND (tem, 1));
5260       return invert_truthvalue (tem);
/* GE is decomposed as (== comp_const) || (> comp_const), each handled
   by a recursive call.  (The GE_EXPR case label is not visible in this
   excerpt.)  */
5265         fold_build2 (TRUTH_ORIF_EXPR, type,
5266                      optimize_minmax_comparison
5267                      (EQ_EXPR, type, arg0, comp_const),
5268                      optimize_minmax_comparison
5269                      (GT_EXPR, type, arg0, comp_const));
/* EQ_EXPR case: the worked examples in the comments use MAX/MIN (X, 0)
   but the code is general in minmax_const.  */
5272       if (op_code == MAX_EXPR && consts_equal)
5273         /* MAX (X, 0) == 0  ->  X <= 0  */
5274         return fold_build2 (LE_EXPR, type, inner, comp_const);
5276       else if (op_code == MAX_EXPR && consts_lt)
5277         /* MAX (X, 0) == 5  ->  X == 5  */
5278         return fold_build2 (EQ_EXPR, type, inner, comp_const);
5280       else if (op_code == MAX_EXPR)
5281         /* MAX (X, 0) == -1  ->  false  */
5282         return omit_one_operand (type, integer_zero_node, inner);
5284       else if (consts_equal)
5285         /* MIN (X, 0) == 0  ->  X >= 0  */
5286         return fold_build2 (GE_EXPR, type, inner, comp_const);
5289         /* MIN (X, 0) == 5  ->  false  */
5290         return omit_one_operand (type, integer_zero_node, inner);
5293         /* MIN (X, 0) == -1  ->  X == -1  */
5294         return fold_build2 (EQ_EXPR, type, inner, comp_const);
/* GT_EXPR case (label not visible in this excerpt).  */
5297       if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5298         /* MAX (X, 0) > 0  ->  X > 0
5299            MAX (X, 0) > 5  ->  X > 5  */
5300         return fold_build2 (GT_EXPR, type, inner, comp_const);
5302       else if (op_code == MAX_EXPR)
5303         /* MAX (X, 0) > -1  ->  true  */
5304         return omit_one_operand (type, integer_one_node, inner);
5306       else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5307         /* MIN (X, 0) > 0  ->  false
5308            MIN (X, 0) > 5  ->  false  */
5309         return omit_one_operand (type, integer_zero_node, inner);
5312         /* MIN (X, 0) > -1  ->  X > -1  */
5313         return fold_build2 (GT_EXPR, type, inner, comp_const);
5320 /* T is an integer expression that is being multiplied, divided, or taken a
5321    modulus (CODE says which and what kind of divide or modulus) by a
5322    constant C.  See if we can eliminate that operation by folding it with
5323    other operations already in T.  WIDE_TYPE, if non-null, is a type that
5324    should be used for the computation if wider than our type.
5326    For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5327    (X * 2) + (Y * 4).  We must, however, be assured that either the original
5328    expression would not overflow or that overflow is undefined for the type
5329    in the language in question.
5331    We also canonicalize (X + 7) * 4 into X * 4 + 28 in the hope that either
5332    the machine has a multiply-accumulate insn or that this is part of an
5333    addressing calculation.
5335    If we return a non-null expression, it is an equivalent form of the
5336    original computation, but need not be in the original type.  */
/* Thin wrapper around extract_muldiv_1.  NOTE(review): the excerpt omits
   the static depth counter and its increment/decrement and the final
   return; the visible comment says recursion is capped at three levels —
   confirm against the full file.  */
5339 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type)
5341   /* To avoid exponential search depth, refuse to allow recursion past
5342      three levels.  Beyond that (1) it's highly unlikely that we'll find
5343      something interesting and (2) we've probably processed it before
5344      when we built the inner expression.  */
/* Delegate the actual folding to the worker function.  */
5353   ret = extract_muldiv_1 (t, c, code, wide_type);
/* Worker for extract_muldiv: dispatch on TREE_CODE (t) and try to fold
   the mul/div/mod by C into T's existing arithmetic.  CTYPE is the
   computation type — WIDE_TYPE when it is strictly wider than T's type,
   else T's own type.  Returns a folded tree or (in paths not fully
   visible here) a null tree on failure.
   NOTE(review): this excerpt omits many original lines (the `switch
   (tcode)` header, several case labels such as INTEGER_CST/ABS_EXPR,
   braces, `break`s and failure returns), so the visible text is not the
   complete function body — confirm against the full file before editing.  */
5360 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type)
5362   tree type = TREE_TYPE (t);
5363   enum tree_code tcode = TREE_CODE (t);
5364   tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5365                                    > GET_MODE_SIZE (TYPE_MODE (type)))
5366                 ? wide_type : type);
5368   int same_p = tcode == code;
5369   tree op0 = NULL_TREE, op1 = NULL_TREE;
5371   /* Don't deal with constants of zero here; they confuse the code below.  */
5372   if (integer_zerop (c))
5375   if (TREE_CODE_CLASS (tcode) == tcc_unary)
5376     op0 = TREE_OPERAND (t, 0);
5378   if (TREE_CODE_CLASS (tcode) == tcc_binary)
5379     op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5381   /* Note that we need not handle conditional operations here since fold
5382      already handles those cases.  So just do arithmetic here.  */
/* (Presumably the INTEGER_CST case of the dispatch switch — the case
   label is not visible in this excerpt.)  */
5386       /* For a constant, we can always simplify if we are a multiply
5387          or (for divide and modulus) if it is a multiple of our constant.  */
5388       if (code == MULT_EXPR
5389           || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
5390         return const_binop (code, fold_convert (ctype, t),
5391                             fold_convert (ctype, c), 0);
5394     case CONVERT_EXPR:  case NON_LVALUE_EXPR:  case NOP_EXPR:
5395       /* If op0 is an expression ...  */
5396       if ((COMPARISON_CLASS_P (op0)
5397            || UNARY_CLASS_P (op0)
5398            || BINARY_CLASS_P (op0)
5399            || EXPRESSION_CLASS_P (op0))
5400           /* ... and is unsigned, and its type is smaller than ctype,
5401              then we cannot pass through as widening.  */
5402           && ((TYPE_UNSIGNED (TREE_TYPE (op0))
5403                && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
5404                      && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
5405                && (GET_MODE_SIZE (TYPE_MODE (ctype))
5406                    > GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0)))))
5407               /* ... or this is a truncation (t is narrower than op0),
5408                  then we cannot pass through this narrowing.  */
5409               || (GET_MODE_SIZE (TYPE_MODE (type))
5410                   < GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0))))
5411               /* ... or signedness changes for division or modulus,
5412                  then we cannot pass through this conversion.  */
5413               || (code != MULT_EXPR
5414                   && (TYPE_UNSIGNED (ctype)
5415                       != TYPE_UNSIGNED (TREE_TYPE (op0))))))
5418       /* Pass the constant down and see if we can make a simplification.  If
5419          we can, replace this expression with the inner simplification for
5420          possible later conversion to our or some other type.  */
5421       if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5422           && TREE_CODE (t2) == INTEGER_CST
5423           && ! TREE_CONSTANT_OVERFLOW (t2)
5424           && (0 != (t1 = extract_muldiv (op0, t2, code,
5426                                          ? ctype : NULL_TREE))))
/* (Presumably the ABS_EXPR case — the case label is not visible in this
   excerpt.)  */
5431       /* If widening the type changes it from signed to unsigned, then we
5432          must avoid building ABS_EXPR itself as unsigned.  */
5433       if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
5435           tree cstype = (*lang_hooks.types.signed_type) (ctype);
5436           if ((t1 = extract_muldiv (op0, c, code, cstype)) != 0)
5438               t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
5439               return fold_convert (ctype, t1);
/* Fall back to recursing in CTYPE directly when signedness is unchanged.  */
5445       if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
5446         return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
5449     case MIN_EXPR:  case MAX_EXPR:
5450       /* If widening the type changes the signedness, then we can't perform
5451          this optimization as that changes the result.  */
5452       if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5455       /* MIN (a, b) / 5 -> MIN (a / 5, b / 5)  */
5456       if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0
5457           && (t2 = extract_muldiv (op1, c, code, wide_type)) != 0)
/* Dividing/multiplying by a negative constant flips the ordering, so
   MIN and MAX must be exchanged.  */
5459           if (tree_int_cst_sgn (c) < 0)
5460             tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5462           return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5463                               fold_convert (ctype, t2));
5467     case LSHIFT_EXPR:  case RSHIFT_EXPR:
5468       /* If the second operand is constant, this is a multiplication
5469          or floor division, by a power of two, so we can treat it that
5470          way unless the multiplier or divisor overflows.  Signed
5471          left-shift overflow is implementation-defined rather than
5472          undefined in C90, so do not convert signed left shift into
5474       if (TREE_CODE (op1) == INTEGER_CST
5475           && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
5476           /* const_binop may not detect overflow correctly,
5477              so check for it explicitly here.  */
5478           && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
5479           && TREE_INT_CST_HIGH (op1) == 0
5480           && 0 != (t1 = fold_convert (ctype,
5481                                       const_binop (LSHIFT_EXPR,
5484           && ! TREE_OVERFLOW (t1))
/* Rewrite x << n as x * 2^n (or x >> n as x floor-div 2^n) and retry.  */
5485         return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5486                                        ? MULT_EXPR : FLOOR_DIV_EXPR,
5487                                        ctype, fold_convert (ctype, op0), t1),
5488                                c, code, wide_type);
5491     case PLUS_EXPR:  case MINUS_EXPR:
5492       /* See if we can eliminate the operation on both sides.  If we can, we
5493          can return a new PLUS or MINUS.  If we can't, the only remaining
5494          cases where we can do anything are if the second operand is a
5496       t1 = extract_muldiv (op0, c, code, wide_type);
5497       t2 = extract_muldiv (op1, c, code, wide_type);
5498       if (t1 != 0 && t2 != 0
5499           && (code == MULT_EXPR
5500               /* If not multiplication, we can only do this if both operands
5501                  are divisible by c.  */
5502               || (multiple_of_p (ctype, op0, c)
5503                   && multiple_of_p (ctype, op1, c))))
5504         return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5505                             fold_convert (ctype, t2));
5507       /* If this was a subtraction, negate OP1 and set it to be an addition.
5508          This simplifies the logic below.  */
5509       if (tcode == MINUS_EXPR)
5510         tcode = PLUS_EXPR, op1 = negate_expr (op1);
5512       if (TREE_CODE (op1) != INTEGER_CST)
5515       /* If either OP1 or C are negative, this optimization is not safe for
5516          some of the division and remainder types while for others we need
5517          to change the code.  */
5518       if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
/* CEIL and FLOOR division swap roles when signs are involved; other
   rounding modes (except plain multiply) are abandoned.  */
5520           if (code == CEIL_DIV_EXPR)
5521             code = FLOOR_DIV_EXPR;
5522           else if (code == FLOOR_DIV_EXPR)
5523             code = CEIL_DIV_EXPR;
5524           else if (code != MULT_EXPR
5525                    && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5529       /* If it's a multiply or a division/modulus operation of a multiple
5530          of our constant, do the operation and verify it doesn't overflow.  */
5531       if (code == MULT_EXPR
5532           || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5534           op1 = const_binop (code, fold_convert (ctype, op1),
5535                              fold_convert (ctype, c), 0);
5536           /* We allow the constant to overflow with wrapping semantics.  */
5538               || (TREE_OVERFLOW (op1) && ! flag_wrapv))
5544       /* If we have an unsigned type is not a sizetype, we cannot widen
5545          the operation since it will change the result if the original
5546          computation overflowed.  */
5547       if (TYPE_UNSIGNED (ctype)
5548           && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
5552       /* If we were able to eliminate our operation from the first side,
5553          apply our operation to the second side and reform the PLUS.  */
5554       if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5555         return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
5557       /* The last case is if we are a multiply.  In that case, we can
5558          apply the distributive law to commute the multiply and addition
5559          if the multiplication of the constants doesn't overflow.  */
5560       if (code == MULT_EXPR)
5561         return fold_build2 (tcode, ctype,
5562                             fold_build2 (code, ctype,
5563                                          fold_convert (ctype, op0),
5564                                          fold_convert (ctype, c)),
/* (Presumably the MULT_EXPR / TRUNC_MOD_EXPR group of cases — the case
   labels are not visible in this excerpt.)  */
5570       /* We have a special case here if we are doing something like
5571          (C * 8) % 4 since we know that's zero.  */
5572       if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
5573            || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
5574           && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
5575           && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5576         return omit_one_operand (type, integer_zero_node, op0);
5578       /* ... fall through ...  */
5580     case TRUNC_DIV_EXPR:  case CEIL_DIV_EXPR:  case FLOOR_DIV_EXPR:
5581     case ROUND_DIV_EXPR:  case EXACT_DIV_EXPR:
5582       /* If we can extract our operation from the LHS, do so and return a
5583          new operation.  Likewise for the RHS from a MULT_EXPR.  Otherwise,
5584          do something only if the second operand is a constant.  */
5586           && (t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
5587         return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5588                             fold_convert (ctype, op1));
5589       else if (tcode == MULT_EXPR && code == MULT_EXPR
5590                && (t1 = extract_muldiv (op1, c, code, wide_type)) != 0)
5591         return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5592                             fold_convert (ctype, t1));
5593       else if (TREE_CODE (op1) != INTEGER_CST)
5596       /* If these are the same operation types, we can associate them
5597          assuming no overflow.  */
5599           && 0 != (t1 = const_binop (MULT_EXPR, fold_convert (ctype, op1),
5600                                      fold_convert (ctype, c), 0))
5601           && ! TREE_OVERFLOW (t1))
5602         return fold_build2 (tcode, ctype, fold_convert (ctype, op0), t1);
5604       /* If these operations "cancel" each other, we have the main
5605          optimizations of this pass, which occur when either constant is a
5606          multiple of the other, in which case we replace this with either an
5607          operation or CODE or TCODE.
5609          If we have an unsigned type that is not a sizetype, we cannot do
5610          this since it will change the result if the original computation
5612       if ((! TYPE_UNSIGNED (ctype)
5613            || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
5615           && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
5616               || (tcode == MULT_EXPR
5617                   && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
5618                   && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR)))
/* op1 divisible by c: keep TCODE and divide op1; c divisible by op1:
   keep CODE and divide c.  */
5620           if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5621             return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5622                                 fold_convert (ctype,
5623                                               const_binop (TRUNC_DIV_EXPR,
5625           else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
5626             return fold_build2 (code, ctype, fold_convert (ctype, op0),
5627                                 fold_convert (ctype,
5628                                               const_binop (TRUNC_DIV_EXPR,
5640 /* Return a node which has the indicated constant VALUE (either 0 or
5641    1), and is of the indicated TYPE.  */
/* The shared nodes integer_one_node/integer_zero_node and
   boolean_true_node/boolean_false_node are reused for the two common
   types; any other TYPE gets a freshly built INTEGER_CST.
   NOTE(review): excerpt omits the return-type line and braces.  */
5644 constant_boolean_node (int value, tree type)
5646   if (type == integer_type_node)
5647     return value ? integer_one_node : integer_zero_node;
5648   else if (type == boolean_type_node)
5649     return value ? boolean_true_node : boolean_false_node;
5651     return build_int_cst (type, value);
5655 /* Return true if expr looks like an ARRAY_REF and set base and
5656    offset to the appropriate trees.  If there is no offset,
5657    offset is set to NULL_TREE.  Base will be canonicalized to
5658    something you can get the element type from using
5659    TREE_TYPE (TREE_TYPE (base)).  Offset will be the offset
5660    in bytes to the base.  */
/* Three canonical forms are recognized: PLUS_EXPR of a recognized base
   plus a byte offset, ADDR_EXPR (of an ARRAY_REF or of an array that
   decays to a pointer), and a pointer-typed variable.
   NOTE(review): excerpt omits braces, some `return` statements and the
   final failure return, so the visible text is incomplete.  */
5663 extract_array_ref (tree expr, tree *base, tree *offset)
5665   /* One canonical form is a PLUS_EXPR with the first
5666      argument being an ADDR_EXPR with a possible NOP_EXPR
5668   if (TREE_CODE (expr) == PLUS_EXPR)
5670       tree op0 = TREE_OPERAND (expr, 0);
5671       tree inner_base, dummy1;
5672       /* Strip NOP_EXPRs here because the C frontends and/or
5673          folders present us (int *)&x.a + 4B possibly.  */
/* Recurse on the first operand; on success, fold the two offsets
   together (or use the PLUS's second operand when the inner had none).  */
5675       if (extract_array_ref (op0, &inner_base, &dummy1))
5678           if (dummy1 == NULL_TREE)
5679             *offset = TREE_OPERAND (expr, 1);
5681             *offset = fold_build2 (PLUS_EXPR, TREE_TYPE (expr),
5682                                    dummy1, TREE_OPERAND (expr, 1));
5686   /* Other canonical form is an ADDR_EXPR of an ARRAY_REF,
5687      which we transform into an ADDR_EXPR with appropriate
5688      offset.  For other arguments to the ADDR_EXPR we assume
5689      zero offset and as such do not care about the ADDR_EXPR
5690      type and strip possible nops from it.  */
5691   else if (TREE_CODE (expr) == ADDR_EXPR)
5693       tree op0 = TREE_OPERAND (expr, 0);
5694       if (TREE_CODE (op0) == ARRAY_REF)
/* &a[i]: base is the array, offset is i scaled by the element size.  */
5696           tree idx = TREE_OPERAND (op0, 1);
5697           *base = TREE_OPERAND (op0, 0);
5698           *offset = fold_build2 (MULT_EXPR, TREE_TYPE (idx), idx,
5699                                  array_ref_element_size (op0));
5703           /* Handle array-to-pointer decay as &a.  */
5704           if (TREE_CODE (TREE_TYPE (op0)) == ARRAY_TYPE)
5705             *base = TREE_OPERAND (expr, 0);
5708           *offset = NULL_TREE;
5712   /* The next canonical form is a VAR_DECL with POINTER_TYPE.  */
5713   else if (SSA_VAR_P (expr)
5714            && TREE_CODE (TREE_TYPE (expr)) == POINTER_TYPE)
5717       *offset = NULL_TREE;
5725 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
5726    Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'.  Here
5727    CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
5728    expression, and ARG to `a'.  If COND_FIRST_P is nonzero, then the
5729    COND is the first argument to CODE; otherwise (as in the example
5730    given here), it is the second argument.  TYPE is the type of the
5731    original expression.  Return NULL_TREE if no simplification is
/* NOTE(review): excerpt omits braces, the `else` introducing the
   comparison branch, and some guard `return` lines; the visible text is
   not the complete function body.  */
5735 fold_binary_op_with_conditional_arg (enum tree_code code,
5736                                      tree type, tree op0, tree op1,
5737                                      tree cond, tree arg, int cond_first_p)
/* cond_type/arg_type pick out the declared operand types according to
   which side of CODE the conditional sits on.  */
5739   tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
5740   tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
5741   tree test, true_value, false_value;
5742   tree lhs = NULL_TREE;
5743   tree rhs = NULL_TREE;
5745   /* This transformation is only worthwhile if we don't have to wrap
5746      arg in a SAVE_EXPR, and the operation can be simplified on at least
5747      one of the branches once its pushed inside the COND_EXPR.  */
5748   if (!TREE_CONSTANT (arg))
5751   if (TREE_CODE (cond) == COND_EXPR)
/* COND is a real ?: expression — reuse its test and arms directly.  */
5753       test = TREE_OPERAND (cond, 0);
5754       true_value = TREE_OPERAND (cond, 1);
5755       false_value = TREE_OPERAND (cond, 2);
5756       /* If this operand throws an expression, then it does not make
5757          sense to try to perform a logical or arithmetic operation
5759       if (VOID_TYPE_P (TREE_TYPE (true_value)))
5761       if (VOID_TYPE_P (TREE_TYPE (false_value)))
/* Otherwise COND is a comparison: its arms are the boolean constants
   true and false of the comparison's own type.  */
5766       tree testtype = TREE_TYPE (cond);
5768       true_value = constant_boolean_node (true, testtype);
5769       false_value = constant_boolean_node (false, testtype);
5772   arg = fold_convert (arg_type, arg);
5775       true_value = fold_convert (cond_type, true_value);
/* Apply CODE to each arm, keeping the original operand order given by
   cond_first_p.  */
5777         lhs = fold_build2 (code, type, true_value, arg);
5779         lhs = fold_build2 (code, type, arg, true_value);
5783       false_value = fold_convert (cond_type, false_value);
5785         rhs = fold_build2 (code, type, false_value, arg);
5787         rhs = fold_build2 (code, type, arg, false_value);
5790   test = fold_build3 (COND_EXPR, type, test, lhs, rhs);
5791   return fold_convert (type, test);
5795 /* Subroutine of fold() that checks for the addition of +/- 0.0.
5797    If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
5798    TYPE, X + ADDEND is the same as X.  If NEGATE, return true if X -
5799    ADDEND is the same as X.
5801    X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
5802    and finite.  The problematic cases are when X is zero, and its mode
5803    has signed zeros.  In the case of rounding towards -infinity,
5804    X - 0 is not the same as X because 0 - 0 is -0.  In other rounding
5805    modes, X + 0 is not the same as X because -0 + 0 is 0.  */
/* NOTE(review): excerpt omits the return-type line, braces and the early
   `return false/true;` lines following each guard.  */
5808 fold_real_zero_addition_p (tree type, tree addend, int negate)
/* Only a literal +/-0.0 addend can possibly qualify.  */
5810   if (!real_zerop (addend))
5813   /* Don't allow the fold with -fsignaling-nans.  */
5814   if (HONOR_SNANS (TYPE_MODE (type)))
5817   /* Allow the fold if zeros aren't signed, or their sign isn't important.  */
5818   if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
5821   /* Treat x + -0 as x - 0 and x - -0 as x + 0.  */
5822   if (TREE_CODE (addend) == REAL_CST
5823       && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
5826   /* The mode has signed zeros, and we have to honor their sign.
5827      In this situation, there is only one case we can return true for.
5828      X - 0 is the same as X unless rounding towards -infinity is
5830   return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
5833 /* Subroutine of fold() that checks comparisons of built-in math
5834 functions against real constants.
5836 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
5837 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
5838 is the type of the result and ARG0 and ARG1 are the operands of the
5839 comparison. ARG1 must be a TREE_REAL_CST.
5841 The function returns the constant folded tree if a simplification
5842 can be made, and NULL_TREE otherwise. */
5845 fold_mathfn_compare (enum built_in_function fcode, enum tree_code code,
5846 tree type, tree arg0, tree arg1)
5850 if (BUILTIN_SQRT_P (fcode))
/* ARG0 is the sqrt call; pull out its (single) argument x and the
   machine mode of the comparison.  NOTE(review): the old-style
   TREE_OPERAND (arg0, 1) arglist access — presumably a CALL_EXPR;
   confirm against the callers.  */
5852 tree arg = TREE_VALUE (TREE_OPERAND (arg0, 1));
5853 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
5855 c = TREE_REAL_CST (arg1);
5856 if (REAL_VALUE_NEGATIVE (c))
5858 /* sqrt(x) == y, sqrt(x) < y and sqrt(x) <= y are all false, if y is negative. */
5859 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
5860 return omit_one_operand (type, integer_zero_node, arg);
5862 /* sqrt(x) > y is always true, if y is negative and we
5863 don't care about NaNs, i.e. negative values of x. */
5864 if (code == NE_EXPR || !HONOR_NANS (mode))
5865 return omit_one_operand (type, integer_one_node, arg);
5867 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
5868 return fold_build2 (GE_EXPR, type, arg,
5869 build_real (TREE_TYPE (arg), dconst0));
5871 else if (code == GT_EXPR || code == GE_EXPR)
/* For sqrt(x) > y with y >= 0, compare x against c2 = y*y,
   rounded in the comparison mode.  */
5875 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
5876 real_convert (&c2, mode, &c2);
5878 if (REAL_VALUE_ISINF (c2))
5880 /* sqrt(x) > y is x == +Inf, when y is very large. */
5881 if (HONOR_INFINITIES (mode))
5882 return fold_build2 (EQ_EXPR, type, arg,
5883 build_real (TREE_TYPE (arg), c2));
5885 /* sqrt(x) > y is always false, when y is very large
5886 and we don't care about infinities. */
5887 return omit_one_operand (type, integer_zero_node, arg);
5890 /* sqrt(x) > c is the same as x > c*c. */
5891 return fold_build2 (code, type, arg,
5892 build_real (TREE_TYPE (arg), c2));
5894 else if (code == LT_EXPR || code == LE_EXPR)
5898 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
5899 real_convert (&c2, mode, &c2);
5901 if (REAL_VALUE_ISINF (c2))
5903 /* sqrt(x) < y is always true, when y is a very large
5904 value and we don't care about NaNs or Infinities. */
5905 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
5906 return omit_one_operand (type, integer_one_node, arg);
5908 /* sqrt(x) < y is x != +Inf when y is very large and we
5909 don't care about NaNs. */
5910 if (! HONOR_NANS (mode))
5911 return fold_build2 (NE_EXPR, type, arg,
5912 build_real (TREE_TYPE (arg), c2));
5914 /* sqrt(x) < y is x >= 0 when y is very large and we
5915 don't care about Infinities. */
5916 if (! HONOR_INFINITIES (mode))
5917 return fold_build2 (GE_EXPR, type, arg,
5918 build_real (TREE_TYPE (arg), dconst0));
5920 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
/* Bail out when ARG cannot be wrapped in a SAVE_EXPR (at global
   binding scope, or containing a PLACEHOLDER_EXPR).  */
5921 if (lang_hooks.decls.global_bindings_p () != 0
5922 || CONTAINS_PLACEHOLDER_P (arg))
5925 arg = save_expr (arg);
5926 return fold_build2 (TRUTH_ANDIF_EXPR, type,
5927 fold_build2 (GE_EXPR, type, arg,
5928 build_real (TREE_TYPE (arg),
5930 fold_build2 (NE_EXPR, type, arg,
5931 build_real (TREE_TYPE (arg),
5935 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
5936 if (! HONOR_NANS (mode))
5937 return fold_build2 (code, type, arg,
5938 build_real (TREE_TYPE (arg), c2));
5940 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
5941 if (lang_hooks.decls.global_bindings_p () == 0
5942 && ! CONTAINS_PLACEHOLDER_P (arg))
5944 arg = save_expr (arg);
5945 return fold_build2 (TRUTH_ANDIF_EXPR, type,
5946 fold_build2 (GE_EXPR, type, arg,
5947 build_real (TREE_TYPE (arg),
5949 fold_build2 (code, type, arg,
5950 build_real (TREE_TYPE (arg),
5959 /* Subroutine of fold() that optimizes comparisons against Infinities,
5960 either +Inf or -Inf.
5962 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
5963 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
5964 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
5966 The function returns the constant folded tree if a simplification
5967 can be made, and NULL_TREE otherwise. */
5970 fold_inf_compare (enum tree_code code, tree type, tree arg0, tree arg1)
5972 enum machine_mode mode;
5973 REAL_VALUE_TYPE max;
5977 mode = TYPE_MODE (TREE_TYPE (arg0));
5979 /* For negative infinity swap the sense of the comparison.  All the
   cases below are then written in terms of +Inf.  */
5980 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
5982 code = swap_tree_comparison (code);
5987 /* x > +Inf is always false, if we ignore sNANs. */
5988 if (HONOR_SNANS (mode))
5990 return omit_one_operand (type, integer_zero_node, arg0);
5993 /* x <= +Inf is always true, if we don't care about NaNs. */
5994 if (! HONOR_NANS (mode))
5995 return omit_one_operand (type, integer_one_node, arg0);
5997 /* x <= +Inf is the same as x == x, i.e. isfinite(x). */
5998 if (lang_hooks.decls.global_bindings_p () == 0
5999 && ! CONTAINS_PLACEHOLDER_P (arg0))
6001 arg0 = save_expr (arg0);
6002 return fold_build2 (EQ_EXPR, type, arg0, arg0);
6008 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6009 real_maxval (&max, neg, mode);
6010 return fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
6011 arg0, build_real (TREE_TYPE (arg0), max));
6014 /* x < +Inf is always equal to x <= DBL_MAX. */
6015 real_maxval (&max, neg, mode);
6016 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
6017 arg0, build_real (TREE_TYPE (arg0), max));
6020 /* x != +Inf is always equal to !(x > DBL_MAX). */
6021 real_maxval (&max, neg, mode);
6022 if (! HONOR_NANS (mode))
6023 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
6024 arg0, build_real (TREE_TYPE (arg0), max));
6026 /* The transformation below creates non-gimple code and thus is
6027 not appropriate if we are in gimple form. */
6031 temp = fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
6032 arg0, build_real (TREE_TYPE (arg0), max));
6033 return fold_build1 (TRUTH_NOT_EXPR, type, temp);
6042 /* Subroutine of fold() that optimizes comparisons of a division by
6043 a nonzero integer constant against an integer constant, i.e.
   X/C1 op C2.
6046 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6047 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6048 are the operands of the comparison. ARG1 must be an INTEGER_CST.
6050 The function returns the constant folded tree if a simplification
6051 can be made, and NULL_TREE otherwise. */
6054 fold_div_compare (enum tree_code code, tree type, tree arg0, tree arg1)
6056 tree prod, tmp, hi, lo;
6057 tree arg00 = TREE_OPERAND (arg0, 0);
6058 tree arg01 = TREE_OPERAND (arg0, 1);
6059 unsigned HOST_WIDE_INT lpart;
6060 HOST_WIDE_INT hpart;
/* Compute PROD = C1 * C2, tracking overflow explicitly; then derive
   the range [LO, HI] of numerators X for which X/C1 == C2, so the
   comparison can be rewritten as a range check on X.  */
6064 /* We have to do this the hard way to detect unsigned overflow.
6065 prod = int_const_binop (MULT_EXPR, arg01, arg1, 0); */
6066 overflow = mul_double (TREE_INT_CST_LOW (arg01),
6067 TREE_INT_CST_HIGH (arg01),
6068 TREE_INT_CST_LOW (arg1),
6069 TREE_INT_CST_HIGH (arg1), &lpart, &hpart);
6070 prod = build_int_cst_wide (TREE_TYPE (arg00), lpart, hpart);
6071 prod = force_fit_type (prod, -1, overflow, false);
6072 neg_overflow = false;
6074 if (TYPE_UNSIGNED (TREE_TYPE (arg0)))
6076 tmp = int_const_binop (MINUS_EXPR, arg01, integer_one_node, 0);
6079 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0). */
6080 overflow = add_double (TREE_INT_CST_LOW (prod),
6081 TREE_INT_CST_HIGH (prod),
6082 TREE_INT_CST_LOW (tmp),
6083 TREE_INT_CST_HIGH (tmp),
6085 hi = build_int_cst_wide (TREE_TYPE (arg00), lpart, hpart);
6086 hi = force_fit_type (hi, -1, overflow | TREE_OVERFLOW (prod),
6087 TREE_CONSTANT_OVERFLOW (prod));
6089 else if (tree_int_cst_sgn (arg01) >= 0)
/* Signed division by a positive C1: the bounds depend on the sign
   of C2 because the division truncates towards zero.  */
6091 tmp = int_const_binop (MINUS_EXPR, arg01, integer_one_node, 0);
6092 switch (tree_int_cst_sgn (arg1))
6095 neg_overflow = true;
6096 lo = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6101 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6106 hi = int_const_binop (PLUS_EXPR, prod, tmp, 0);
6116 /* A negative divisor reverses the relational operators. */
6117 code = swap_tree_comparison (code);
6119 tmp = int_const_binop (PLUS_EXPR, arg01, integer_one_node, 0);
6120 switch (tree_int_cst_sgn (arg1))
6123 hi = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6128 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6133 neg_overflow = true;
6134 lo = int_const_binop (PLUS_EXPR, prod, tmp, 0);
/* Emit the rewritten comparison; an overflowed bound means the
   corresponding half of the range check is vacuous.  */
6146 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6147 return omit_one_operand (type, integer_zero_node, arg00);
6148 if (TREE_OVERFLOW (hi))
6149 return fold_build2 (GE_EXPR, type, arg00, lo);
6150 if (TREE_OVERFLOW (lo))
6151 return fold_build2 (LE_EXPR, type, arg00, hi);
6152 return build_range_check (type, arg00, 1, lo, hi);
6155 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6156 return omit_one_operand (type, integer_one_node, arg00);
6157 if (TREE_OVERFLOW (hi))
6158 return fold_build2 (LT_EXPR, type, arg00, lo);
6159 if (TREE_OVERFLOW (lo))
6160 return fold_build2 (GT_EXPR, type, arg00, hi);
6161 return build_range_check (type, arg00, 0, lo, hi);
6164 if (TREE_OVERFLOW (lo))
6166 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6167 return omit_one_operand (type, tmp, arg00);
6169 return fold_build2 (LT_EXPR, type, arg00, lo);
6172 if (TREE_OVERFLOW (hi))
6174 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6175 return omit_one_operand (type, tmp, arg00);
6177 return fold_build2 (LE_EXPR, type, arg00, hi);
6180 if (TREE_OVERFLOW (hi))
6182 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6183 return omit_one_operand (type, tmp, arg00);
6185 return fold_build2 (GT_EXPR, type, arg00, hi);
6188 if (TREE_OVERFLOW (lo))
6190 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6191 return omit_one_operand (type, tmp, arg00);
6193 return fold_build2 (GE_EXPR, type, arg00, lo);
6203 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6204 equality/inequality test, then return a simplified form of the test
6205 using a sign test. Otherwise return NULL. TYPE is the desired
   result type.  */
6209 fold_single_bit_test_into_sign_test (enum tree_code code, tree arg0, tree arg1,
6212 /* If this is testing a single bit, we can optimize the test. */
6213 if ((code == NE_EXPR || code == EQ_EXPR)
6214 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6215 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6217 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6218 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6219 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6221 if (arg00 != NULL_TREE
6222 /* This is only a win if casting to a signed type is cheap,
6223 i.e. when arg00's type is not a partial mode. */
6224 && TYPE_PRECISION (TREE_TYPE (arg00))
6225 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
6227 tree stype = lang_hooks.types.signed_type (TREE_TYPE (arg00));
6228 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6229 result_type, fold_convert (stype, arg00),
6230 build_int_cst (stype, 0));
6237 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6238 equality/inequality test, then return a simplified form of
6239 the test using shifts and logical operations. Otherwise return
6240 NULL. TYPE is the desired result type. */
6243 fold_single_bit_test (enum tree_code code, tree arg0, tree arg1,
6246 /* If this is testing a single bit, we can optimize the test. */
6247 if ((code == NE_EXPR || code == EQ_EXPR)
6248 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6249 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6251 tree inner = TREE_OPERAND (arg0, 0);
6252 tree type = TREE_TYPE (arg0);
6253 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6254 enum machine_mode operand_mode = TYPE_MODE (type);
6256 tree signed_type, unsigned_type, intermediate_type;
6259 /* First, see if we can fold the single bit test into a sign-bit
   test.  */
6261 tem = fold_single_bit_test_into_sign_test (code, arg0, arg1,
6266 /* Otherwise we have (A & C) != 0 where C is a single bit,
6267 convert that into ((A >> C2) & 1). Where C2 = log2(C).
6268 Similarly for (A & C) == 0. */
6270 /* If INNER is a right shift of a constant and it plus BITNUM does
6271 not overflow, adjust BITNUM and INNER. */
6272 if (TREE_CODE (inner) == RSHIFT_EXPR
6273 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6274 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
6275 && bitnum < TYPE_PRECISION (type)
6276 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
6277 bitnum - TYPE_PRECISION (type)))
6279 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
6280 inner = TREE_OPERAND (inner, 0);
6283 /* If we are going to be able to omit the AND below, we must do our
6284 operations as unsigned. If we must use the AND, we have a choice.
6285 Normally unsigned is faster, but for some machines signed is. */
6286 #ifdef LOAD_EXTEND_OP
6287 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6288 && !flag_syntax_only) ? 0 : 1;
6293 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6294 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6295 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6296 inner = fold_convert (intermediate_type, inner);
/* Shift the tested bit down to bit 0.  */
6299 inner = build2 (RSHIFT_EXPR, intermediate_type,
6300 inner, size_int (bitnum));
6302 if (code == EQ_EXPR)
6303 inner = fold_build2 (BIT_XOR_EXPR, intermediate_type,
6304 inner, integer_one_node);
6306 /* Put the AND last so it can combine with more things. */
6307 inner = build2 (BIT_AND_EXPR, intermediate_type,
6308 inner, integer_one_node);
6310 /* Make sure to return the proper type. */
6311 inner = fold_convert (result_type, inner);
6318 /* Check whether we are allowed to reorder operands arg0 and arg1,
6319 such that the evaluation of arg1 occurs before arg0. */
6322 reorder_operands_p (tree arg0, tree arg1)
/* Without a language-mandated evaluation order, or when either
   operand is constant, reordering is always safe; otherwise both
   operands must be free of side effects.  */
6324 if (! flag_evaluation_order)
6326 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6328 return ! TREE_SIDE_EFFECTS (arg0)
6329 && ! TREE_SIDE_EFFECTS (arg1);
6332 /* Test whether it is preferable to swap two operands, ARG0 and
6333 ARG1, for example because ARG0 is an integer constant and ARG1
6334 isn't. If REORDER is true, only recommend swapping if we can
6335 evaluate the operands in reverse order. */
6338 tree_swap_operands_p (tree arg0, tree arg1, bool reorder)
6340 STRIP_SIGN_NOPS (arg0);
6341 STRIP_SIGN_NOPS (arg1);
/* Constants belong in the second operand: check each constant kind
   on ARG1 first (keep current order), then on ARG0 (swap).  */
6343 if (TREE_CODE (arg1) == INTEGER_CST)
6345 if (TREE_CODE (arg0) == INTEGER_CST)
6348 if (TREE_CODE (arg1) == REAL_CST)
6350 if (TREE_CODE (arg0) == REAL_CST)
6353 if (TREE_CODE (arg1) == COMPLEX_CST)
6355 if (TREE_CODE (arg0) == COMPLEX_CST)
6358 if (TREE_CONSTANT (arg1))
6360 if (TREE_CONSTANT (arg0))
6366 if (reorder && flag_evaluation_order
6367 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6375 /* It is preferable to swap two SSA_NAME to ensure a canonical form
6376 for commutative and comparison operators. Ensuring a canonical
6377 form allows the optimizers to find additional redundancies without
6378 having to explicitly check for both orderings. */
6379 if (TREE_CODE (arg0) == SSA_NAME
6380 && TREE_CODE (arg1) == SSA_NAME
6381 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6387 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6388 ARG0 is extended to a wider type. */
6391 fold_widened_comparison (enum tree_code code, tree type, tree arg0, tree arg1)
6393 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6395 tree shorter_type, outer_type;
/* Nothing to do unless ARG0 really is a widening of a narrower
   value.  */
6399 if (arg0_unw == arg0)
6401 shorter_type = TREE_TYPE (arg0_unw);
6403 #ifdef HAVE_canonicalize_funcptr_for_compare
6404 /* Disable this optimization if we're casting a function pointer
6405 type on targets that require function pointer canonicalization. */
6406 if (HAVE_canonicalize_funcptr_for_compare
6407 && TREE_CODE (shorter_type) == POINTER_TYPE
6408 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6412 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6415 arg1_unw = get_unwidened (arg1, shorter_type);
6417 /* If possible, express the comparison in the shorter mode. */
6418 if ((code == EQ_EXPR || code == NE_EXPR
6419 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6420 && (TREE_TYPE (arg1_unw) == shorter_type
6421 || (TREE_CODE (arg1_unw) == INTEGER_CST
6422 && (TREE_CODE (shorter_type) == INTEGER_TYPE
6423 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
6424 && int_fits_type_p (arg1_unw, shorter_type))))
6425 return fold_build2 (code, type, arg0_unw,
6426 fold_convert (shorter_type, arg1_unw));
6428 if (TREE_CODE (arg1_unw) != INTEGER_CST
6429 || TREE_CODE (shorter_type) != INTEGER_TYPE
6430 || !int_fits_type_p (arg1_unw, shorter_type))
6433 /* If we are comparing with the integer that does not fit into the range
6434 of the shorter type, the result is known. */
6435 outer_type = TREE_TYPE (arg1_unw);
6436 min = lower_bound_in_type (outer_type, shorter_type);
6437 max = upper_bound_in_type (outer_type, shorter_type);
/* ABOVE: constant lies above the shorter type's range;
   BELOW: constant lies below it.  */
6439 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6441 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6448 return omit_one_operand (type, integer_zero_node, arg0);
6453 return omit_one_operand (type, integer_one_node, arg0);
6459 return omit_one_operand (type, integer_one_node, arg0);
6461 return omit_one_operand (type, integer_zero_node, arg0);
6466 return omit_one_operand (type, integer_zero_node, arg0);
6468 return omit_one_operand (type, integer_one_node, arg0);
6477 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
6478 ARG0 just the signedness is changed. */
6481 fold_sign_changed_comparison (enum tree_code code, tree type,
6482 tree arg0, tree arg1)
6484 tree arg0_inner, tmp;
6485 tree inner_type, outer_type;
6487 if (TREE_CODE (arg0) != NOP_EXPR
6488 && TREE_CODE (arg0) != CONVERT_EXPR)
6491 outer_type = TREE_TYPE (arg0);
6492 arg0_inner = TREE_OPERAND (arg0, 0);
6493 inner_type = TREE_TYPE (arg0_inner);
6495 #ifdef HAVE_canonicalize_funcptr_for_compare
6496 /* Disable this optimization if we're casting a function pointer
6497 type on targets that require function pointer canonicalization. */
6498 if (HAVE_canonicalize_funcptr_for_compare
6499 && TREE_CODE (inner_type) == POINTER_TYPE
6500 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
/* The conversion must not change the precision — only the
   signedness may differ.  */
6504 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
6507 if (TREE_CODE (arg1) != INTEGER_CST
6508 && !((TREE_CODE (arg1) == NOP_EXPR
6509 || TREE_CODE (arg1) == CONVERT_EXPR)
6510 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type)
6513 if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
/* Re-express the constant ARG1 in the inner type, preserving any
   overflow flags it already carried.  */
6518 if (TREE_CODE (arg1) == INTEGER_CST)
6520 tmp = build_int_cst_wide (inner_type,
6521 TREE_INT_CST_LOW (arg1),
6522 TREE_INT_CST_HIGH (arg1));
6523 arg1 = force_fit_type (tmp, 0,
6524 TREE_OVERFLOW (arg1),
6525 TREE_CONSTANT_OVERFLOW (arg1));
6528 arg1 = fold_convert (inner_type, arg1);
6530 return fold_build2 (code, type, arg0_inner, arg1);
6533 /* Tries to replace &a[idx] CODE s * delta with &a[idx CODE delta], if s is
6534 step of the array. Reconstructs s and delta in the case of s * delta
6535 being an integer constant (and thus already folded).
6536 ADDR is the address. OP1 is the multiplicative expression.
6537 If the function succeeds, the new address expression is returned. Otherwise
6538 NULL_TREE is returned. */
6541 try_move_mult_to_index (enum tree_code code, tree addr, tree op1)
6543 tree s, delta, step;
6544 tree ref = TREE_OPERAND (addr, 0), pref;
6548 /* Canonicalize op1 into a possibly non-constant delta
6549 and an INTEGER_CST s. */
6550 if (TREE_CODE (op1) == MULT_EXPR)
6552 tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);
6557 if (TREE_CODE (arg0) == INTEGER_CST)
6562 else if (TREE_CODE (arg1) == INTEGER_CST)
6570 else if (TREE_CODE (op1) == INTEGER_CST)
6577 /* Simulate we are delta * 1. */
6579 s = integer_one_node;
/* Walk down the component references looking for an ARRAY_REF whose
   element size matches S (or evenly divides DELTA).  */
6582 for (;; ref = TREE_OPERAND (ref, 0))
6584 if (TREE_CODE (ref) == ARRAY_REF)
6586 itype = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
6590 step = array_ref_element_size (ref);
6591 if (TREE_CODE (step) != INTEGER_CST)
6596 if (! tree_int_cst_equal (step, s))
6601 /* Try if delta is a multiple of step. */
6602 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, delta, step);
6611 if (!handled_component_p (ref))
6615 /* We found the suitable array reference. So copy everything up to it,
6616 and replace the index. */
6618 pref = TREE_OPERAND (addr, 0);
6619 ret = copy_node (pref);
6624 pref = TREE_OPERAND (pref, 0);
6625 TREE_OPERAND (pos, 0) = copy_node (pref);
6626 pos = TREE_OPERAND (pos, 0);
/* Fold DELTA into the array index in the index type ITYPE.  */
6629 TREE_OPERAND (pos, 1) = fold_build2 (code, itype,
6630 fold_convert (itype,
6631 TREE_OPERAND (pos, 1)),
6632 fold_convert (itype, delta));
6634 return fold_build1 (ADDR_EXPR, TREE_TYPE (addr), ret);
6638 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6639 means A >= Y && A != MAX, but in this case we know that
6640 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
6643 fold_to_nonsharp_ineq_using_bound (tree ineq, tree bound)
6645 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
/* Extract A from the bound, which must be A < X or X > A.  */
6647 if (TREE_CODE (bound) == LT_EXPR)
6648 a = TREE_OPERAND (bound, 0);
6649 else if (TREE_CODE (bound) == GT_EXPR)
6650 a = TREE_OPERAND (bound, 1);
6654 typea = TREE_TYPE (a);
6655 if (!INTEGRAL_TYPE_P (typea)
6656 && !POINTER_TYPE_P (typea))
/* Extract A1 (expected to be A + 1) and Y from the inequality,
   which must be Y < A1 or A1 > Y.  */
6659 if (TREE_CODE (ineq) == LT_EXPR)
6661 a1 = TREE_OPERAND (ineq, 1);
6662 y = TREE_OPERAND (ineq, 0);
6664 else if (TREE_CODE (ineq) == GT_EXPR)
6666 a1 = TREE_OPERAND (ineq, 0);
6667 y = TREE_OPERAND (ineq, 1);
6672 if (TREE_TYPE (a1) != typea)
/* The transformation is only valid when A1 - A folds to exactly 1.  */
6675 diff = fold_build2 (MINUS_EXPR, typea, a1, a);
6676 if (!integer_onep (diff))
6679 return fold_build2 (GE_EXPR, type, a, y);
6682 /* Fold a sum or difference of at least one multiplication.
6683 Returns the folded tree or NULL if no simplification could be made. */
6686 fold_plusminus_mult_expr (enum tree_code code, tree type, tree arg0, tree arg1)
6688 tree arg00, arg01, arg10, arg11;
6689 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
6691 /* (A * C) +- (B * C) -> (A+-B) * C.
6692 (A * C) +- A -> A * (C+-1).
6693 We are most concerned about the case where C is a constant,
6694 but other combinations show up during loop reduction. Since
6695 it is not difficult, try all four possibilities. */
6697 if (TREE_CODE (arg0) == MULT_EXPR)
6699 arg00 = TREE_OPERAND (arg0, 0);
6700 arg01 = TREE_OPERAND (arg0, 1);
/* A bare operand counts as a multiplication by 1.  */
6705 if (!FLOAT_TYPE_P (type))
6706 arg01 = build_int_cst (type, 1);
6708 arg01 = build_real (type, dconst1);
6710 if (TREE_CODE (arg1) == MULT_EXPR)
6712 arg10 = TREE_OPERAND (arg1, 0);
6713 arg11 = TREE_OPERAND (arg1, 1);
6718 if (!FLOAT_TYPE_P (type))
6719 arg11 = build_int_cst (type, 1);
6721 arg11 = build_real (type, dconst1);
/* Find the common multiplicand SAME among the four factors.  */
6725 if (operand_equal_p (arg01, arg11, 0))
6726 same = arg01, alt0 = arg00, alt1 = arg10;
6727 else if (operand_equal_p (arg00, arg10, 0))
6728 same = arg00, alt0 = arg01, alt1 = arg11;
6729 else if (operand_equal_p (arg00, arg11, 0))
6730 same = arg00, alt0 = arg01, alt1 = arg10;
6731 else if (operand_equal_p (arg01, arg10, 0))
6732 same = arg01, alt0 = arg00, alt1 = arg11;
6734 /* No identical multiplicands; see if we can find a common
6735 power-of-two factor in non-power-of-two multiplies. This
6736 can help in multi-dimensional array access. */
6737 else if (host_integerp (arg01, 0)
6738 && host_integerp (arg11, 0))
6740 HOST_WIDE_INT int01, int11, tmp;
6743 int01 = TREE_INT_CST_LOW (arg01);
6744 int11 = TREE_INT_CST_LOW (arg11);
6746 /* Move min of absolute values to int11. */
6747 if ((int01 >= 0 ? int01 : -int01)
6748 < (int11 >= 0 ? int11 : -int11))
6750 tmp = int01, int01 = int11, int11 = tmp;
6751 alt0 = arg00, arg00 = arg10, arg10 = alt0;
6758 if (exact_log2 (int11) > 0 && int01 % int11 == 0)
6760 alt0 = fold_build2 (MULT_EXPR, TREE_TYPE (arg00), arg00,
6761 build_int_cst (TREE_TYPE (arg00),
6766 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
/* Rewrite as (ALT0 code ALT1) * SAME.  */
6771 return fold_build2 (MULT_EXPR, type,
6772 fold_build2 (code, type,
6773 fold_convert (type, alt0),
6774 fold_convert (type, alt1)),
6775 fold_convert (type, same));
6780 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
6781 specified by EXPR into the buffer PTR of length LEN bytes.
6782 Return the number of bytes placed in the buffer, or zero
   upon failure.  */
6786 native_encode_int (tree expr, unsigned char *ptr, int len)
6788 tree type = TREE_TYPE (expr);
6789 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
6790 int byte, offset, word, words;
6791 unsigned char value;
6793 if (total_bytes > len)
6795 words = total_bytes / UNITS_PER_WORD;
/* Emit one byte at a time, mapping each host-side byte position to
   its target-endian position in the buffer.  */
6797 for (byte = 0; byte < total_bytes; byte++)
6799 int bitpos = byte * BITS_PER_UNIT;
6800 if (bitpos < HOST_BITS_PER_WIDE_INT)
6801 value = (unsigned char) (TREE_INT_CST_LOW (expr) >> bitpos);
6803 value = (unsigned char) (TREE_INT_CST_HIGH (expr)
6804 >> (bitpos - HOST_BITS_PER_WIDE_INT));
6806 if (total_bytes > UNITS_PER_WORD)
6808 word = byte / UNITS_PER_WORD;
6809 if (WORDS_BIG_ENDIAN)
6810 word = (words - 1) - word;
6811 offset = word * UNITS_PER_WORD;
6812 if (BYTES_BIG_ENDIAN)
6813 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
6815 offset += byte % UNITS_PER_WORD;
6818 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
6819 ptr[offset] = value;
6825 /* Subroutine of native_encode_expr. Encode the REAL_CST
6826 specified by EXPR into the buffer PTR of length LEN bytes.
6827 Return the number of bytes placed in the buffer, or zero
   upon failure.  */
6831 native_encode_real (tree expr, unsigned char *ptr, int len)
6833 tree type = TREE_TYPE (expr);
6834 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
6835 int byte, offset, word, words;
6836 unsigned char value;
6838 /* There are always 32 bits in each long, no matter the size of
6839 the host's long. We handle floating point representations with
   up to 192 bits.  */
6843 if (total_bytes > len)
6845 words = total_bytes / UNITS_PER_WORD;
6847 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
/* Copy the target image out of TMP (32 bits per element) into the
   buffer in target byte order.  */
6849 for (byte = 0; byte < total_bytes; byte++)
6851 int bitpos = byte * BITS_PER_UNIT;
6852 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
6854 if (total_bytes > UNITS_PER_WORD)
6856 word = byte / UNITS_PER_WORD;
6857 if (FLOAT_WORDS_BIG_ENDIAN)
6858 word = (words - 1) - word;
6859 offset = word * UNITS_PER_WORD;
6860 if (BYTES_BIG_ENDIAN)
6861 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
6863 offset += byte % UNITS_PER_WORD;
6866 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
6867 ptr[offset] = value;
6872 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
6873 specified by EXPR into the buffer PTR of length LEN bytes.
6874 Return the number of bytes placed in the buffer, or zero
   upon failure.  */
6878 native_encode_complex (tree expr, unsigned char *ptr, int len)
/* Encode the real part first, then the imaginary part directly
   after it.  */
6883 part = TREE_REALPART (expr);
6884 rsize = native_encode_expr (part, ptr, len);
6887 part = TREE_IMAGPART (expr);
6888 isize = native_encode_expr (part, ptr+rsize, len-rsize);
6891 return rsize + isize;
6895 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
6896 specified by EXPR into the buffer PTR of length LEN bytes.
6897 Return the number of bytes placed in the buffer, or zero
   upon failure.  */
6901 native_encode_vector (tree expr, unsigned char *ptr, int len)
6903 int i, size, offset, count;
6904 tree elem, elements;
/* Encode each vector element in turn; trailing elements missing
   from the constant's chain are encoded as zero bytes.  */
6908 elements = TREE_VECTOR_CST_ELTS (expr);
6909 count = TYPE_VECTOR_SUBPARTS (TREE_TYPE (expr));
6910 for (i = 0; i < count; i++)
6914 elem = TREE_VALUE (elements);
6915 elements = TREE_CHAIN (elements);
6922 size = native_encode_expr (elem, ptr+offset, len-offset);
6928 if (offset + size > len)
6930 memset (ptr+offset, 0, size);
6940 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
6941 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
6942 buffer PTR of length LEN bytes. Return the number of bytes
6943 placed in the buffer, or zero upon failure. */
6946 native_encode_expr (tree expr, unsigned char *ptr, int len)
/* Dispatch on the constant's tree code to the matching encoder.  */
6948 switch (TREE_CODE (expr))
6951 return native_encode_int (expr, ptr, len);
6954 return native_encode_real (expr, ptr, len);
6957 return native_encode_complex (expr, ptr, len);
6960 return native_encode_vector (expr, ptr, len);
6968 /* Subroutine of native_interpret_expr. Interpret the contents of
6969 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
6970 If the buffer cannot be interpreted, return NULL_TREE. */
6973 native_interpret_int (tree type, unsigned char *ptr, int len)
6975 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
6976 int byte, offset, word, words;
6977 unsigned char value;
/* NOTE(review): "unsigned int HOST_WIDE_INT" looks malformed —
   presumably this should read "unsigned HOST_WIDE_INT" (as used in
   the shifts below); verify against the repository copy.  */
6978 unsigned int HOST_WIDE_INT lo = 0;
6979 HOST_WIDE_INT hi = 0;
6981 if (total_bytes > len)
6983 if (total_bytes * BITS_PER_UNIT > 2 * HOST_BITS_PER_WIDE_INT)
6985 words = total_bytes / UNITS_PER_WORD;
/* Reassemble the low/high halves from the target-ordered bytes —
   the inverse of the mapping in native_encode_int.  */
6987 for (byte = 0; byte < total_bytes; byte++)
6989 int bitpos = byte * BITS_PER_UNIT;
6990 if (total_bytes > UNITS_PER_WORD)
6992 word = byte / UNITS_PER_WORD;
6993 if (WORDS_BIG_ENDIAN)
6994 word = (words - 1) - word;
6995 offset = word * UNITS_PER_WORD;
6996 if (BYTES_BIG_ENDIAN)
6997 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
6999 offset += byte % UNITS_PER_WORD;
7002 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7003 value = ptr[offset];
7005 if (bitpos < HOST_BITS_PER_WIDE_INT)
7006 lo |= (unsigned HOST_WIDE_INT) value << bitpos;
7008 hi |= (unsigned HOST_WIDE_INT) value
7009 << (bitpos - HOST_BITS_PER_WIDE_INT);
7012 return force_fit_type (build_int_cst_wide (type, lo, hi),
7017 /* Subroutine of native_interpret_expr. Interpret the contents of
7018 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7019 If the buffer cannot be interpreted, return NULL_TREE. */
7022 native_interpret_real (tree type, unsigned char *ptr, int len)
7024 enum machine_mode mode = TYPE_MODE (type);
7025 int total_bytes = GET_MODE_SIZE (mode);
7026 int byte, offset, word, words;
7027 unsigned char value;
7028 /* There are always 32 bits in each long, no matter the size of
7029 the host's long. We handle floating point representations with
   up to 192 bits.  */
7034 total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7035 if (total_bytes > len || total_bytes > 24)
7037 words = total_bytes / UNITS_PER_WORD;
/* Gather the target-ordered bytes back into TMP (32 bits per
   element) — the inverse of native_encode_real.  */
7039 memset (tmp, 0, sizeof (tmp));
7040 for (byte = 0; byte < total_bytes; byte++)
7042 int bitpos = byte * BITS_PER_UNIT;
7043 if (total_bytes > UNITS_PER_WORD)
7045 word = byte / UNITS_PER_WORD;
7046 if (FLOAT_WORDS_BIG_ENDIAN)
7047 word = (words - 1) - word;
7048 offset = word * UNITS_PER_WORD;
7049 if (BYTES_BIG_ENDIAN)
7050 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7052 offset += byte % UNITS_PER_WORD;
7055 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7056 value = ptr[offset];
7058 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7061 real_from_target (&r, tmp, mode);
7062 return build_real (type, r);
7066 /* Subroutine of native_interpret_expr. Interpret the contents of
7067 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7068 If the buffer cannot be interpreted, return NULL_TREE. */
7071 native_interpret_complex (tree type, unsigned char *ptr, int len)
7073 tree etype, rpart, ipart;
/* The real part occupies the first SIZE bytes, the imaginary part
   the next SIZE bytes.  */
7076 etype = TREE_TYPE (type);
7077 size = GET_MODE_SIZE (TYPE_MODE (etype));
7080 rpart = native_interpret_expr (etype, ptr, size);
7083 ipart = native_interpret_expr (etype, ptr+size, size);
7086 return build_complex (type, rpart, ipart);
7090 /* Subroutine of native_interpret_expr. Interpret the contents of
7091 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7092 If the buffer cannot be interpreted, return NULL_TREE. */
7095 native_interpret_vector (tree type, unsigned char *ptr, int len)
7097 tree etype, elem, elements;
7100 etype = TREE_TYPE (type);
7101 size = GET_MODE_SIZE (TYPE_MODE (etype));
7102 count = TYPE_VECTOR_SUBPARTS (type);
7103 if (size * count > len)
/* Build the element chain back-to-front so the resulting list is in
   ascending element order.  */
7106 elements = NULL_TREE;
7107 for (i = count - 1; i >= 0; i--)
7109 elem = native_interpret_expr (etype, ptr+(i*size), size);
7112 elements = tree_cons (NULL_TREE, elem, elements);
7114 return build_vector (type, elements);
7118 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7119 the buffer PTR of length LEN as a constant of type TYPE. For
7120 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7121 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7122 return NULL_TREE. */
7125 native_interpret_expr (tree type, unsigned char *ptr, int len)
/* Dispatch on the requested type's tree code to the matching
   decoder.  */
7127 switch (TREE_CODE (type))
7132 return native_interpret_int (type, ptr, len);
7135 return native_interpret_real (type, ptr, len);
7138 return native_interpret_complex (type, ptr, len);
7141 return native_interpret_vector (type, ptr, len);
7149 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7150 TYPE at compile-time. If we're unable to perform the conversion
7151 return NULL_TREE. */
7154 fold_view_convert_expr (tree type, tree expr)
7156 /* We support up to 512-bit values (for V8DFmode). */
7157 unsigned char buffer[64];
7160 /* Check that the host and target are sane. */
7161 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
/* Round-trip: serialize EXPR to the target byte image, then decode
   that image as a constant of TYPE.  */
7164 len = native_encode_expr (expr, buffer, sizeof (buffer));
7168 return native_interpret_expr (type, buffer, len);
7172 /* Fold a unary expression of code CODE and type TYPE with operand
7173 OP0.  Return the folded expression if folding is successful.
7174 Otherwise, return NULL_TREE. */
/* NOTE(review): this listing elides many original source lines (the
   embedded line numbers jump).  Declarations, braces, case labels and
   some statements are missing; the comments added below describe only
   what is visible and hedge where the elisions leave the logic
   ambiguous.  */
7177 fold_unary (enum tree_code code, tree type, tree op0)
7181 enum tree_code_class kind = TREE_CODE_CLASS (code);
/* Only genuine one-operand expression codes may be folded here.  */
7183 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7184 && TREE_CODE_LENGTH (code) == 1);
/* For conversions and ABS, signedness of the operand matters, so only
   sign-preserving NOPs are stripped (see comment at 7192).  */
7189 if (code == NOP_EXPR || code == CONVERT_EXPR
7190 || code == FLOAT_EXPR || code == ABS_EXPR)
7192 /* Don't use STRIP_NOPS, because signedness of argument type
7194 STRIP_SIGN_NOPS (arg0);
7198 /* Strip any conversions that don't change the mode.  This
7199 is safe for every expression, except for a comparison
7200 expression because its signedness is derived from its
7203 Note that this is done as an internal manipulation within
7204 the constant folder, in order to find the simplest
7205 representation of the arguments so that their form can be
7206 studied.  In any cases, the appropriate type conversions
7207 should be put back in the tree that will get out of the
/* Distribute a unary operation into COMPOUND_EXPR and COND_EXPR
   operands so the folder can simplify the resulting sub-trees.  */
7213 if (TREE_CODE_CLASS (code) == tcc_unary)
7215 if (TREE_CODE (arg0) == COMPOUND_EXPR)
/* (a, b) OP  ->  (a, b OP): keep the side effect, fold the value.  */
7216 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7217 fold_build1 (code, type, TREE_OPERAND (arg0, 1)));
7218 else if (TREE_CODE (arg0) == COND_EXPR)
/* Push the operation into both arms of the conditional, skipping
   void arms (e.g. from statement expressions).  */
7220 tree arg01 = TREE_OPERAND (arg0, 1);
7221 tree arg02 = TREE_OPERAND (arg0, 2);
7222 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7223 arg01 = fold_build1 (code, type, arg01);
7224 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7225 arg02 = fold_build1 (code, type, arg02);
7226 tem = fold_build3 (COND_EXPR, type, TREE_OPERAND (arg0, 0),
7229 /* If this was a conversion, and all we did was to move into
7230 inside the COND_EXPR, bring it back out.  But leave it if
7231 it is a conversion from integer to integer and the
7232 result precision is no wider than a word since such a
7233 conversion is cheap and may be optimized away by combine,
7234 while it couldn't if it were outside the COND_EXPR.  Then return
7235 so we don't get into an infinite recursion loop taking the
7236 conversion out and then back in. */
7238 if ((code == NOP_EXPR || code == CONVERT_EXPR
7239 || code == NON_LVALUE_EXPR)
7240 && TREE_CODE (tem) == COND_EXPR
7241 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7242 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7243 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7244 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
/* Both arms must convert from the same type for the hoist to be
   well-typed.  */
7245 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7246 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7247 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7249 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7250 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7251 || flag_syntax_only))
7252 tem = build1 (code, type,
7254 TREE_TYPE (TREE_OPERAND
7255 (TREE_OPERAND (tem, 1), 0)),
7256 TREE_OPERAND (tem, 0),
7257 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7258 TREE_OPERAND (TREE_OPERAND (tem, 2), 0)));
7261 else if (COMPARISON_CLASS_P (arg0))
/* A comparison already yields 0/1; for a boolean result just retype
   the comparison node in place.  */
7263 if (TREE_CODE (type) == BOOLEAN_TYPE)
7265 arg0 = copy_node (arg0);
7266 TREE_TYPE (arg0) = type;
/* Otherwise rewrite OP(cmp) as cmp ? OP(1) : OP(0) for non-integer
   result types (the integer-type path is elided here).  */
7269 else if (TREE_CODE (type) != INTEGER_TYPE)
7270 return fold_build3 (COND_EXPR, type, arg0,
7271 fold_build1 (code, type,
7273 fold_build1 (code, type,
7274 integer_zero_node));
/* Conversion codes (the NOP_EXPR/CONVERT_EXPR/FLOAT_EXPR labels are
   elided above these FIX_* labels in this listing).  */
7283 case FIX_TRUNC_EXPR:
7285 case FIX_FLOOR_EXPR:
7286 case FIX_ROUND_EXPR:
/* Conversion to the same type is a no-op (the return is elided).  */
7287 if (TREE_TYPE (op0) == type)
7290 /* If we have (type) (a CMP b) and type is an integral type, return
7291 new expression involving the new type. */
7292 if (COMPARISON_CLASS_P (op0) && INTEGRAL_TYPE_P (type))
7293 return fold_build2 (TREE_CODE (op0), type, TREE_OPERAND (op0, 0),
7294 TREE_OPERAND (op0, 1));
7296 /* Handle cases of two conversions in a row. */
7297 if (TREE_CODE (op0) == NOP_EXPR
7298 || TREE_CODE (op0) == CONVERT_EXPR)
/* Classify the innermost, intermediate and final types by kind,
   precision and signedness, then decide below whether either
   conversion can be dropped.  */
7300 tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
7301 tree inter_type = TREE_TYPE (op0);
7302 int inside_int = INTEGRAL_TYPE_P (inside_type);
7303 int inside_ptr = POINTER_TYPE_P (inside_type);
7304 int inside_float = FLOAT_TYPE_P (inside_type);
7305 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
7306 unsigned int inside_prec = TYPE_PRECISION (inside_type);
7307 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
7308 int inter_int = INTEGRAL_TYPE_P (inter_type);
7309 int inter_ptr = POINTER_TYPE_P (inter_type);
7310 int inter_float = FLOAT_TYPE_P (inter_type);
7311 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
7312 unsigned int inter_prec = TYPE_PRECISION (inter_type);
7313 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
7314 int final_int = INTEGRAL_TYPE_P (type);
7315 int final_ptr = POINTER_TYPE_P (type);
7316 int final_float = FLOAT_TYPE_P (type);
7317 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
7318 unsigned int final_prec = TYPE_PRECISION (type);
7319 int final_unsignedp = TYPE_UNSIGNED (type);
7321 /* In addition to the cases of two conversions in a row
7322 handled below, if we are converting something to its own
7323 type via an object of identical or wider precision, neither
7324 conversion is needed. */
7325 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
7326 && (((inter_int || inter_ptr) && final_int)
7327 || (inter_float && final_float))
7328 && inter_prec >= final_prec)
7329 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7331 /* Likewise, if the intermediate and final types are either both
7332 float or both integer, we don't need the middle conversion if
7333 it is wider than the final type and doesn't change the signedness
7334 (for integers).  Avoid this if the final type is a pointer
7335 since then we sometimes need the inner conversion.  Likewise if
7336 the outer has a precision not equal to the size of its mode. */
7337 if ((((inter_int || inter_ptr) && (inside_int || inside_ptr))
7338 || (inter_float && inside_float)
7339 || (inter_vec && inside_vec))
7340 && inter_prec >= inside_prec
7341 && (inter_float || inter_vec
7342 || inter_unsignedp == inside_unsignedp)
7343 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
7344 && TYPE_MODE (type) == TYPE_MODE (inter_type))
7346 && (! final_vec || inter_prec == inside_prec))
7347 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7349 /* If we have a sign-extension of a zero-extended value, we can
7350 replace that by a single zero-extension. */
7351 if (inside_int && inter_int && final_int
7352 && inside_prec < inter_prec && inter_prec < final_prec
7353 && inside_unsignedp && !inter_unsignedp)
7354 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7356 /* Two conversions in a row are not needed unless:
7357 - some conversion is floating-point (overstrict for now), or
7358 - some conversion is a vector (overstrict for now), or
7359 - the intermediate type is narrower than both initial and
7361 - the intermediate type and innermost type differ in signedness,
7362 and the outermost type is wider than the intermediate, or
7363 - the initial type is a pointer type and the precisions of the
7364 intermediate and final types differ, or
7365 - the final type is a pointer type and the precisions of the
7366 initial and intermediate types differ.
7367 - the final type is a pointer type and the initial type not
7368 - the initial type is a pointer to an array and the final type
7370 if (! inside_float && ! inter_float && ! final_float
7371 && ! inside_vec && ! inter_vec && ! final_vec
7372 && (inter_prec >= inside_prec || inter_prec >= final_prec)
7373 && ! (inside_int && inter_int
7374 && inter_unsignedp != inside_unsignedp
7375 && inter_prec < final_prec)
7376 && ((inter_unsignedp && inter_prec > inside_prec)
7377 == (final_unsignedp && final_prec > inter_prec))
7378 && ! (inside_ptr && inter_prec != final_prec)
7379 && ! (final_ptr && inside_prec != inter_prec)
7380 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
7381 && TYPE_MODE (type) == TYPE_MODE (inter_type))
7382 && final_ptr == inside_ptr
7384 && TREE_CODE (TREE_TYPE (inside_type)) == ARRAY_TYPE
7385 && TREE_CODE (TREE_TYPE (type)) != ARRAY_TYPE))
7386 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7389 /* Handle (T *)&A.B.C for A being of type T and B and C
7390 living at offset zero.  This occurs frequently in
7391 C++ upcasting and then accessing the base. */
7392 if (TREE_CODE (op0) == ADDR_EXPR
7393 && POINTER_TYPE_P (type)
7394 && handled_component_p (TREE_OPERAND (op0, 0)))
7396 HOST_WIDE_INT bitsize, bitpos;
7398 enum machine_mode mode;
7399 int unsignedp, volatilep;
7400 tree base = TREE_OPERAND (op0, 0);
/* Decompose the component reference into base + bit offset.  */
7401 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
7402 &mode, &unsignedp, &volatilep, false);
7403 /* If the reference was to a (constant) zero offset, we can use
7404 the address of the base if it has the same base type
7405 as the result type. */
7406 if (! offset && bitpos == 0
7407 && TYPE_MAIN_VARIANT (TREE_TYPE (type))
7408 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
7409 return fold_convert (type, build_fold_addr_expr (base));
7412 if (TREE_CODE (op0) == MODIFY_EXPR
7413 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
7414 /* Detect assigning a bitfield. */
7415 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
7416 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
7418 /* Don't leave an assignment inside a conversion
7419 unless assigning a bitfield. */
7420 tem = fold_build1 (code, type, TREE_OPERAND (op0, 1));
7421 /* First do the assignment, then return converted constant. */
7422 tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
/* Suppress "value computed is not used" style diagnostics on the
   synthesized compound expression.  */
7423 TREE_NO_WARNING (tem) = 1;
7424 TREE_USED (tem) = 1;
7428 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7429 constants (if x has signed type, the sign bit cannot be set
7430 in c).  This folds extension into the BIT_AND_EXPR. */
7431 if (INTEGRAL_TYPE_P (type)
7432 && TREE_CODE (type) != BOOLEAN_TYPE
7433 && TREE_CODE (op0) == BIT_AND_EXPR
7434 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
/* NOTE(review): the declaration of `and` (presumably `tree and = op0;`)
   is elided in this listing — confirm against the full source.  */
7437 tree and0 = TREE_OPERAND (and, 0), and1 = TREE_OPERAND (and, 1);
/* The transform is safe outright when the AND is unsigned or when we
   are narrowing rather than extending.  */
7440 if (TYPE_UNSIGNED (TREE_TYPE (and))
7441 || (TYPE_PRECISION (type)
7442 <= TYPE_PRECISION (TREE_TYPE (and))))
7444 else if (TYPE_PRECISION (TREE_TYPE (and1))
7445 <= HOST_BITS_PER_WIDE_INT
7446 && host_integerp (and1, 1))
7448 unsigned HOST_WIDE_INT cst;
7450 cst = tree_low_cst (and1, 1);
/* Keep only the sign bit and above; the transform is valid when the
   mask cannot set the sign bit of the narrower type.  */
7451 cst &= (HOST_WIDE_INT) -1
7452 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
7453 change = (cst == 0);
7454 #ifdef LOAD_EXTEND_OP
/* On targets that implicitly extend memory loads, retry with the
   unsigned variant of the operand type.  */
7456 && !flag_syntax_only
7457 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
7460 tree uns = lang_hooks.types.unsigned_type (TREE_TYPE (and0));
7461 and0 = fold_convert (uns, and0);
7462 and1 = fold_convert (uns, and1);
/* Rebuild the mask constant in the destination type, preserving any
   overflow flags from the original constant.  */
7468 tem = build_int_cst_wide (type, TREE_INT_CST_LOW (and1),
7469 TREE_INT_CST_HIGH (and1));
7470 tem = force_fit_type (tem, 0, TREE_OVERFLOW (and1),
7471 TREE_CONSTANT_OVERFLOW (and1));
7472 return fold_build2 (BIT_AND_EXPR, type,
7473 fold_convert (type, and0), tem);
7477 /* Convert (T1)((T2)X op Y) into (T1)X op Y, for pointer types T1 and
7478 T2 being pointers to types of the same size. */
7479 if (POINTER_TYPE_P (type)
7480 && BINARY_CLASS_P (arg0)
7481 && TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
7482 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 0))))
7484 tree arg00 = TREE_OPERAND (arg0, 0);
/* NOTE(review): the declaration of t0 (presumably `tree t0 = type;`)
   is elided in this listing.  */
7486 tree t1 = TREE_TYPE (arg00);
7487 tree tt0 = TREE_TYPE (t0);
7488 tree tt1 = TREE_TYPE (t1);
7489 tree s0 = TYPE_SIZE (tt0);
7490 tree s1 = TYPE_SIZE (tt1);
/* Only fold when both pointed-to sizes are known and equal.  */
7492 if (s0 && s1 && operand_equal_p (s0, s1, OEP_ONLY_CONST))
7493 return build2 (TREE_CODE (arg0), t0, fold_convert (t0, arg00),
7494 TREE_OPERAND (arg0, 1));
7497 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
7498 of the same precision, and X is a integer type not narrower than
7499 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
7500 if (INTEGRAL_TYPE_P (type)
7501 && TREE_CODE (op0) == BIT_NOT_EXPR
7502 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7503 && (TREE_CODE (TREE_OPERAND (op0, 0)) == NOP_EXPR
7504 || TREE_CODE (TREE_OPERAND (op0, 0)) == CONVERT_EXPR)
7505 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
7507 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
7508 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7509 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
7510 return fold_build1 (BIT_NOT_EXPR, type, fold_convert (type, tem));
/* Fall back to constant folding of the conversion itself.  */
7513 tem = fold_convert_const (code, type, arg0);
7514 return tem ? tem : NULL_TREE;
7516 case VIEW_CONVERT_EXPR:
/* Nested view-converts collapse to a single one.  */
7517 if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
7518 return fold_build1 (VIEW_CONVERT_EXPR, type, TREE_OPERAND (op0, 0));
7519 return fold_view_convert_expr (type, op0);
/* NEGATE_EXPR (its case label is elided in this listing).  */
7522 if (negate_expr_p (arg0))
7523 return fold_convert (type, negate_expr (arg0));
/* ABS_EXPR (case label elided).  */
7527 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
7528 return fold_abs_const (arg0, type);
7529 else if (TREE_CODE (arg0) == NEGATE_EXPR)
7530 return fold_build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0));
7531 /* Convert fabs((double)float) into (double)fabsf(float). */
7532 else if (TREE_CODE (arg0) == NOP_EXPR
7533 && TREE_CODE (type) == REAL_TYPE)
7535 tree targ0 = strip_float_extensions (arg0);
7537 return fold_convert (type, fold_build1 (ABS_EXPR,
7541 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
7542 else if (tree_expr_nonnegative_p (arg0) || TREE_CODE (arg0) == ABS_EXPR)
7545 /* Strip sign ops from argument. */
7546 if (TREE_CODE (type) == REAL_TYPE)
7548 tem = fold_strip_sign_ops (arg0);
7550 return fold_build1 (ABS_EXPR, type, fold_convert (type, tem));
/* CONJ_EXPR (case label elided): conjugate of a non-complex value is
   the value itself; otherwise negate the imaginary part.  */
7555 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7556 return fold_convert (type, arg0);
7557 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
7558 return build2 (COMPLEX_EXPR, type,
7559 TREE_OPERAND (arg0, 0),
7560 negate_expr (TREE_OPERAND (arg0, 1)));
7561 else if (TREE_CODE (arg0) == COMPLEX_CST)
7562 return build_complex (type, TREE_REALPART (arg0),
7563 negate_expr (TREE_IMAGPART (arg0)));
/* conj distributes over +/-.  */
7564 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7565 return fold_build2 (TREE_CODE (arg0), type,
7566 fold_build1 (CONJ_EXPR, type,
7567 TREE_OPERAND (arg0, 0)),
7568 fold_build1 (CONJ_EXPR, type,
7569 TREE_OPERAND (arg0, 1)));
7570 else if (TREE_CODE (arg0) == CONJ_EXPR)
7571 return TREE_OPERAND (arg0, 0);
/* BIT_NOT_EXPR (case label elided).  */
7575 if (TREE_CODE (arg0) == INTEGER_CST)
7576 return fold_not_const (arg0, type);
7577 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
7578 return TREE_OPERAND (arg0, 0);
7579 /* Convert ~ (-A) to A - 1. */
7580 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
7581 return fold_build2 (MINUS_EXPR, type, TREE_OPERAND (arg0, 0),
7582 build_int_cst (type, 1));
7583 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
7584 else if (INTEGRAL_TYPE_P (type)
7585 && ((TREE_CODE (arg0) == MINUS_EXPR
7586 && integer_onep (TREE_OPERAND (arg0, 1)))
7587 || (TREE_CODE (arg0) == PLUS_EXPR
7588 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
7589 return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));
7590 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
7591 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
7592 && (tem = fold_unary (BIT_NOT_EXPR, type,
7594 TREE_OPERAND (arg0, 0)))))
7595 return fold_build2 (BIT_XOR_EXPR, type, tem,
7596 fold_convert (type, TREE_OPERAND (arg0, 1)));
7597 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
7598 && (tem = fold_unary (BIT_NOT_EXPR, type,
7600 TREE_OPERAND (arg0, 1)))))
7601 return fold_build2 (BIT_XOR_EXPR, type,
7602 fold_convert (type, TREE_OPERAND (arg0, 0)), tem);
7606 case TRUTH_NOT_EXPR:
7607 /* The argument to invert_truthvalue must have Boolean type. */
7608 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
7609 arg0 = fold_convert (boolean_type_node, arg0);
7611 /* Note that the operand of this must be an int
7612 and its values must be 0 or 1.
7613 ("true" is a fixed value perhaps depending on the language,
7614 but we don't handle values other than 1 correctly yet.) */
7615 tem = invert_truthvalue (arg0);
7616 /* Avoid infinite recursion. */
7617 if (TREE_CODE (tem) == TRUTH_NOT_EXPR)
7619 return fold_convert (type, tem);
/* REALPART_EXPR (case label elided): real part of a non-complex value
   is the value itself.  */
7622 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7624 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
7625 return omit_one_operand (type, TREE_OPERAND (arg0, 0),
7626 TREE_OPERAND (arg0, 1));
7627 else if (TREE_CODE (arg0) == COMPLEX_CST)
7628 return TREE_REALPART (arg0);
/* realpart distributes over +/-.  */
7629 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7630 return fold_build2 (TREE_CODE (arg0), type,
7631 fold_build1 (REALPART_EXPR, type,
7632 TREE_OPERAND (arg0, 0)),
7633 fold_build1 (REALPART_EXPR, type,
7634 TREE_OPERAND (arg0, 1)));
/* IMAGPART_EXPR (case label elided): imaginary part of a non-complex
   value is zero.  */
7638 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7639 return fold_convert (type, integer_zero_node);
7640 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
7641 return omit_one_operand (type, TREE_OPERAND (arg0, 1),
7642 TREE_OPERAND (arg0, 0));
7643 else if (TREE_CODE (arg0) == COMPLEX_CST)
7644 return TREE_IMAGPART (arg0);
/* imagpart distributes over +/-.  */
7645 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7646 return fold_build2 (TREE_CODE (arg0), type,
7647 fold_build1 (IMAGPART_EXPR, type,
7648 TREE_OPERAND (arg0, 0)),
7649 fold_build1 (IMAGPART_EXPR, type,
7650 TREE_OPERAND (arg0, 1)));
7655 } /* switch (code) */
7658 /* Fold a binary expression of code CODE and type TYPE with operands
7659 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
7660 Return the folded expression if folding is successful.  Otherwise,
7661 return NULL_TREE. */
7664 fold_minmax (enum tree_code code, tree type, tree op0, tree op1)
7666 enum tree_code compl_code;
/* COMPL_CODE is the complementary extremum; any CODE other than
   MIN_EXPR/MAX_EXPR is not handled here (the bail-out line for that
   case is elided in this listing).  */
7668 if (code == MIN_EXPR)
7669 compl_code = MAX_EXPR;
7670 else if (code == MAX_EXPR)
7671 compl_code = MIN_EXPR;
7675 /* MIN (MAX (a, b), b) == b. */
7676 if (TREE_CODE (op0) == compl_code
7677 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
7678 return omit_one_operand (type, op1, TREE_OPERAND (op0, 0));
7680 /* MIN (MAX (b, a), b) == b. */
7681 if (TREE_CODE (op0) == compl_code
7682 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
/* reorder_operands_p guards against reordering side effects.  */
7683 && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
7684 return omit_one_operand (type, op1, TREE_OPERAND (op0, 1));
7686 /* MIN (a, MAX (a, b)) == a. */
7687 if (TREE_CODE (op1) == compl_code
7688 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
7689 && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
7690 return omit_one_operand (type, op0, TREE_OPERAND (op1, 1));
7692 /* MIN (a, MAX (b, a)) == a. */
7693 if (TREE_CODE (op1) == compl_code
7694 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
7695 && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
7696 return omit_one_operand (type, op0, TREE_OPERAND (op1, 0));
7701 /* Subroutine of fold_binary.  This routine performs all of the
7702 transformations that are common to the equality/inequality
7703 operators (EQ_EXPR and NE_EXPR) and the ordering operators
7704 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR).  Callers other than
7705 fold_binary should call fold_binary.  Fold a comparison with
7706 tree code CODE and type TYPE with operands OP0 and OP1.  Return
7707 the folded comparison or NULL_TREE. */
/* NOTE(review): this listing elides many original source lines (the
   embedded line numbers jump), so some declarations, braces, case
   labels and statements are missing; added comments describe only the
   visible code.  */
7710 fold_comparison (enum tree_code code, tree type, tree op0, tree op1)
7712 tree arg0, arg1, tem;
/* Strip only sign-preserving conversions: a comparison's semantics
   depend on operand signedness.  */
7717 STRIP_SIGN_NOPS (arg0);
7718 STRIP_SIGN_NOPS (arg1);
/* Fold a comparison of two constants outright if possible.  */
7720 tem = fold_relational_const (code, type, arg0, arg1);
7721 if (tem != NULL_TREE)
7724 /* If one arg is a real or integer constant, put it last. */
7725 if (tree_swap_operands_p (arg0, arg1, true))
7726 return fold_build2 (swap_tree_comparison (code), type, op1, op0);
7728 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1. */
7729 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7730 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7731 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
/* Only valid when overflow is undefined (signed, no -fwrapv/-ftrapv).  */
7732 && !TYPE_UNSIGNED (TREE_TYPE (arg1))
7733 && !(flag_wrapv || flag_trapv))
7734 && (TREE_CODE (arg1) == INTEGER_CST
7735 && !TREE_OVERFLOW (arg1)))
7737 tree const1 = TREE_OPERAND (arg0, 1);
7739 tree variable = TREE_OPERAND (arg0, 0);
/* For X - C1 the constant moves to the other side with + sign.  */
7742 lhs_add = TREE_CODE (arg0) != PLUS_EXPR;
7744 lhs = fold_build2 (lhs_add ? PLUS_EXPR : MINUS_EXPR,
7745 TREE_TYPE (arg1), const2, const1);
/* Give up if folding the constants overflowed.  */
7746 if (TREE_CODE (lhs) == TREE_CODE (arg1)
7747 && (TREE_CODE (lhs) != INTEGER_CST
7748 || !TREE_OVERFLOW (lhs)))
7749 return fold_build2 (code, type, variable, lhs);
7752 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
/* Compare in the narrowest common float type the operands were
   extended from.  */
7754 tree targ0 = strip_float_extensions (arg0);
7755 tree targ1 = strip_float_extensions (arg1);
7756 tree newtype = TREE_TYPE (targ0);
7758 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
7759 newtype = TREE_TYPE (targ1);
7761 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
7762 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
7763 return fold_build2 (code, type, fold_convert (newtype, targ0),
7764 fold_convert (newtype, targ1));
7766 /* (-a) CMP (-b) -> b CMP a */
7767 if (TREE_CODE (arg0) == NEGATE_EXPR
7768 && TREE_CODE (arg1) == NEGATE_EXPR)
7769 return fold_build2 (code, type, TREE_OPERAND (arg1, 0),
7770 TREE_OPERAND (arg0, 0));
7772 if (TREE_CODE (arg1) == REAL_CST)
7774 REAL_VALUE_TYPE cst;
7775 cst = TREE_REAL_CST (arg1);
7777 /* (-a) CMP CST -> a swap(CMP) (-CST) */
7778 if (TREE_CODE (arg0) == NEGATE_EXPR)
7779 return fold_build2 (swap_tree_comparison (code), type,
7780 TREE_OPERAND (arg0, 0),
7781 build_real (TREE_TYPE (arg1),
7782 REAL_VALUE_NEGATE (cst)));
7784 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
7785 /* a CMP (-0) -> a CMP 0 */
7786 if (REAL_VALUE_MINUS_ZERO (cst))
7787 return fold_build2 (code, type, arg0,
7788 build_real (TREE_TYPE (arg1), dconst0));
7790 /* x != NaN is always true, other ops are always false. */
7791 if (REAL_VALUE_ISNAN (cst)
7792 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
7794 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
/* omit_one_operand preserves arg0's side effects.  */
7795 return omit_one_operand (type, tem, arg0);
7798 /* Fold comparisons against infinity. */
7799 if (REAL_VALUE_ISINF (cst))
7801 tem = fold_inf_compare (code, type, arg0, arg1);
7802 if (tem != NULL_TREE)
7807 /* If this is a comparison of a real constant with a PLUS_EXPR
7808 or a MINUS_EXPR of a real constant, we can convert it into a
7809 comparison with a revised real constant as long as no overflow
7810 occurs when unsafe_math_optimizations are enabled. */
7811 if (flag_unsafe_math_optimizations
7812 && TREE_CODE (arg1) == REAL_CST
7813 && (TREE_CODE (arg0) == PLUS_EXPR
7814 || TREE_CODE (arg0) == MINUS_EXPR)
7815 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
7816 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
7817 ? MINUS_EXPR : PLUS_EXPR,
7818 arg1, TREE_OPERAND (arg0, 1), 0))
7819 && ! TREE_CONSTANT_OVERFLOW (tem))
7820 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
7822 /* Likewise, we can simplify a comparison of a real constant with
7823 a MINUS_EXPR whose first operand is also a real constant, i.e.
7824 (c1 - x) < c2 becomes x > c1-c2. */
7825 if (flag_unsafe_math_optimizations
7826 && TREE_CODE (arg1) == REAL_CST
7827 && TREE_CODE (arg0) == MINUS_EXPR
7828 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
7829 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
7831 && ! TREE_CONSTANT_OVERFLOW (tem))
7832 return fold_build2 (swap_tree_comparison (code), type,
7833 TREE_OPERAND (arg0, 1), tem);
7835 /* Fold comparisons against built-in math functions. */
7836 if (TREE_CODE (arg1) == REAL_CST
7837 && flag_unsafe_math_optimizations
7838 && ! flag_errno_math)
7840 enum built_in_function fcode = builtin_mathfn_code (arg0);
7842 if (fcode != END_BUILTINS)
7844 tem = fold_mathfn_compare (fcode, code, type, arg0, arg1);
7845 if (tem != NULL_TREE)
7851 /* Convert foo++ == CONST into ++foo == CONST + INCR. */
7852 if (TREE_CONSTANT (arg1)
7853 && (TREE_CODE (arg0) == POSTINCREMENT_EXPR
7854 || TREE_CODE (arg0) == POSTDECREMENT_EXPR)
7855 /* This optimization is invalid for ordered comparisons
7856 if CONST+INCR overflows or if foo+incr might overflow.
7857 This optimization is invalid for floating point due to rounding.
7858 For pointer types we assume overflow doesn't happen. */
7859 && (POINTER_TYPE_P (TREE_TYPE (arg0))
7860 || (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
7861 && (code == EQ_EXPR || code == NE_EXPR))))
7863 tree varop, newconst;
7865 if (TREE_CODE (arg0) == POSTINCREMENT_EXPR)
7867 newconst = fold_build2 (PLUS_EXPR, TREE_TYPE (arg0),
7868 arg1, TREE_OPERAND (arg0, 1));
7869 varop = build2 (PREINCREMENT_EXPR, TREE_TYPE (arg0),
7870 TREE_OPERAND (arg0, 0),
7871 TREE_OPERAND (arg0, 1));
/* POSTDECREMENT case (the else line is elided in this listing).  */
7875 newconst = fold_build2 (MINUS_EXPR, TREE_TYPE (arg0),
7876 arg1, TREE_OPERAND (arg0, 1));
7877 varop = build2 (PREDECREMENT_EXPR, TREE_TYPE (arg0),
7878 TREE_OPERAND (arg0, 0),
7879 TREE_OPERAND (arg0, 1));
7883 /* If VAROP is a reference to a bitfield, we must mask
7884 the constant by the width of the field. */
7885 if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
7886 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (varop, 0), 1))
7887 && host_integerp (DECL_SIZE (TREE_OPERAND
7888 (TREE_OPERAND (varop, 0), 1)), 1))
7890 tree fielddecl = TREE_OPERAND (TREE_OPERAND (varop, 0), 1);
7891 HOST_WIDE_INT size = tree_low_cst (DECL_SIZE (fielddecl), 1);
7892 tree folded_compare, shift;
7894 /* First check whether the comparison would come out
7895 always the same.  If we don't do that we would
7896 change the meaning with the masking. */
7897 folded_compare = fold_build2 (code, type,
7898 TREE_OPERAND (varop, 0), arg1);
7899 if (TREE_CODE (folded_compare) == INTEGER_CST)
7900 return omit_one_operand (type, folded_compare, varop);
/* Mask NEWCONST to the field width via a left/right shift pair.  */
7902 shift = build_int_cst (NULL_TREE,
7903 TYPE_PRECISION (TREE_TYPE (varop)) - size);
7904 shift = fold_convert (TREE_TYPE (varop), shift);
7905 newconst = fold_build2 (LSHIFT_EXPR, TREE_TYPE (varop),
7907 newconst = fold_build2 (RSHIFT_EXPR, TREE_TYPE (varop),
7911 return fold_build2 (code, type, varop, newconst);
7914 if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
7915 && (TREE_CODE (arg0) == NOP_EXPR
7916 || TREE_CODE (arg0) == CONVERT_EXPR))
7918 /* If we are widening one operand of an integer comparison,
7919 see if the other operand is similarly being widened.  Perhaps we
7920 can do the comparison in the narrower type. */
7921 tem = fold_widened_comparison (code, type, arg0, arg1);
7925 /* Or if we are changing signedness. */
7926 tem = fold_sign_changed_comparison (code, type, arg0, arg1);
7931 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
7932 constant, we can simplify it. */
7933 if (TREE_CODE (arg1) == INTEGER_CST
7934 && (TREE_CODE (arg0) == MIN_EXPR
7935 || TREE_CODE (arg0) == MAX_EXPR)
7936 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
7938 tem = optimize_minmax_comparison (code, type, op0, op1);
7943 /* Simplify comparison of something with itself.  (For IEEE
7944 floating-point, we can only do some of these simplifications.) */
7945 if (operand_equal_p (arg0, arg1, 0))
/* The switch on CODE and most case labels are elided here; EQ/GE/LE
   of x with itself is true unless NaNs are possible.  */
7950 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
7951 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
7952 return constant_boolean_node (1, type);
7957 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
7958 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
7959 return constant_boolean_node (1, type);
/* With NaNs, x >= x / x <= x degrades to x == x.  */
7960 return fold_build2 (EQ_EXPR, type, arg0, arg1);
7963 /* For NE, we can only do this simplification if integer
7964 or we don't honor IEEE floating point NaNs. */
7965 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
7966 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
7968 /* ... fall through ... */
/* NE/LT/GT of x with itself is false.  */
7971 return constant_boolean_node (0, type);
7977 /* If we are comparing an expression that just has comparisons
7978 of two integer values, arithmetic expressions of those comparisons,
7979 and constants, we can simplify it.  There are only three cases
7980 to check: the two values can either be equal, the first can be
7981 greater, or the second can be greater.  Fold the expression for
7982 those three values.  Since each value must be 0 or 1, we have
7983 eight possibilities, each of which corresponds to the constant 0
7984 or 1 or one of the six possible comparisons.
7986 This handles common cases like (a > b) == 0 but also handles
7987 expressions like ((x > y) - (y > x)) > 0, which supposedly
7988 occur in macroized code. */
7990 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
7992 tree cval1 = 0, cval2 = 0;
/* twoval_comparison_p extracts the two compared values into
   cval1/cval2 (save_p declaration is elided in this listing).  */
7995 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
7996 /* Don't handle degenerate cases here; they should already
7997 have been handled anyway. */
7998 && cval1 != 0 && cval2 != 0
7999 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
8000 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
8001 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
8002 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
8003 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
8004 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
8005 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
8007 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
8008 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
8010 /* We can't just pass T to eval_subst in case cval1 or cval2
8011 was the same as ARG1. */
/* Evaluate the comparison for cval1>cval2, cval1==cval2 and
   cval1<cval2 (the assigned variable names are partly elided).  */
8014 = fold_build2 (code, type,
8015 eval_subst (arg0, cval1, maxval,
8019 = fold_build2 (code, type,
8020 eval_subst (arg0, cval1, maxval,
8024 = fold_build2 (code, type,
8025 eval_subst (arg0, cval1, minval,
8029 /* All three of these results should be 0 or 1.  Confirm they are.
8030 Then use those values to select the proper code to use. */
8032 if (TREE_CODE (high_result) == INTEGER_CST
8033 && TREE_CODE (equal_result) == INTEGER_CST
8034 && TREE_CODE (low_result) == INTEGER_CST)
8036 /* Make a 3-bit mask with the high-order bit being the
8037 value for `>', the next for '=', and the low for '<'. */
8038 switch ((integer_onep (high_result) * 4)
8039 + (integer_onep (equal_result) * 2)
8040 + integer_onep (low_result))
/* Mask 0: always false (cases 1..6 map to the six comparison codes;
   those case labels are elided in this listing).  */
8044 return omit_one_operand (type, integer_zero_node, arg0);
/* Mask 7: always true.  */
8065 return omit_one_operand (type, integer_one_node, arg0);
/* If eval_subst flagged the need, wrap in SAVE_EXPR to avoid
   re-evaluating side effects.  */
8069 return save_expr (build2 (code, type, cval1, cval2));
8070 return fold_build2 (code, type, cval1, cval2);
8075 /* Fold a comparison of the address of COMPONENT_REFs with the same
8076 type and component to a comparison of the address of the base
8077 object.  In short, &x->a OP &y->a to x OP y and
8078 &x->a OP &y.a to x OP &y */
8079 if (TREE_CODE (arg0) == ADDR_EXPR
8080 && TREE_CODE (TREE_OPERAND (arg0, 0)) == COMPONENT_REF
8081 && TREE_CODE (arg1) == ADDR_EXPR
8082 && TREE_CODE (TREE_OPERAND (arg1, 0)) == COMPONENT_REF)
8084 tree cref0 = TREE_OPERAND (arg0, 0);
8085 tree cref1 = TREE_OPERAND (arg1, 0);
/* Same FIELD_DECL on both sides means the same offset applies.  */
8086 if (TREE_OPERAND (cref0, 1) == TREE_OPERAND (cref1, 1))
8088 tree op0 = TREE_OPERAND (cref0, 0);
8089 tree op1 = TREE_OPERAND (cref1, 0);
8090 return fold_build2 (code, type,
8091 build_fold_addr_expr (op0),
8092 build_fold_addr_expr (op1));
8096 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
8097 into a single range test. */
8098 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
8099 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
8100 && TREE_CODE (arg1) == INTEGER_CST
8101 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8102 && !integer_zerop (TREE_OPERAND (arg0, 1))
8103 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8104 && !TREE_OVERFLOW (arg1))
8106 tem = fold_div_compare (code, type, arg0, arg1);
8107 if (tem != NULL_TREE)
8114 /* Fold a binary expression of code CODE and type TYPE with operands
8115 OP0 and OP1. Return the folded expression if folding is
8116 successful. Otherwise, return NULL_TREE. */
8119 fold_binary (enum tree_code code, tree type, tree op0, tree op1)
8121 enum tree_code_class kind = TREE_CODE_CLASS (code);
8122 tree arg0, arg1, tem;
8123 tree t1 = NULL_TREE;
8125 gcc_assert (IS_EXPR_CODE_CLASS (kind)
8126 && TREE_CODE_LENGTH (code) == 2
8128 && op1 != NULL_TREE);
8133 /* Strip any conversions that don't change the mode. This is
8134 safe for every expression, except for a comparison expression
8135 because its signedness is derived from its operands. So, in
8136 the latter case, only strip conversions that don't change the
8139 Note that this is done as an internal manipulation within the
8140 constant folder, in order to find the simplest representation
8141 of the arguments so that their form can be studied. In any
8142 cases, the appropriate type conversions should be put back in
8143 the tree that will get out of the constant folder. */
8145 if (kind == tcc_comparison)
8147 STRIP_SIGN_NOPS (arg0);
8148 STRIP_SIGN_NOPS (arg1);
8156 /* Note that TREE_CONSTANT isn't enough: static var addresses are
8157 constant but we can't do arithmetic on them. */
8158 if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
8159 || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
8160 || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
8161 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST))
8163 if (kind == tcc_binary)
8164 tem = const_binop (code, arg0, arg1, 0);
8165 else if (kind == tcc_comparison)
8166 tem = fold_relational_const (code, type, arg0, arg1);
8170 if (tem != NULL_TREE)
8172 if (TREE_TYPE (tem) != type)
8173 tem = fold_convert (type, tem);
8178 /* If this is a commutative operation, and ARG0 is a constant, move it
8179 to ARG1 to reduce the number of tests below. */
8180 if (commutative_tree_code (code)
8181 && tree_swap_operands_p (arg0, arg1, true))
8182 return fold_build2 (code, type, op1, op0);
8184 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
8186 First check for cases where an arithmetic operation is applied to a
8187 compound, conditional, or comparison operation. Push the arithmetic
8188 operation inside the compound or conditional to see if any folding
8189 can then be done. Convert comparison to conditional for this purpose.
8190 This also optimizes non-constant cases that used to be done in
8193 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
8194 one of the operands is a comparison and the other is a comparison, a
8195 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
8196 code below would make the expression more complex. Change it to a
8197 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
8198 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
8200 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
8201 || code == EQ_EXPR || code == NE_EXPR)
8202 && ((truth_value_p (TREE_CODE (arg0))
8203 && (truth_value_p (TREE_CODE (arg1))
8204 || (TREE_CODE (arg1) == BIT_AND_EXPR
8205 && integer_onep (TREE_OPERAND (arg1, 1)))))
8206 || (truth_value_p (TREE_CODE (arg1))
8207 && (truth_value_p (TREE_CODE (arg0))
8208 || (TREE_CODE (arg0) == BIT_AND_EXPR
8209 && integer_onep (TREE_OPERAND (arg0, 1)))))))
8211 tem = fold_build2 (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
8212 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
8215 fold_convert (boolean_type_node, arg0),
8216 fold_convert (boolean_type_node, arg1));
8218 if (code == EQ_EXPR)
8219 tem = invert_truthvalue (tem);
8221 return fold_convert (type, tem);
8224 if (TREE_CODE_CLASS (code) == tcc_binary
8225 || TREE_CODE_CLASS (code) == tcc_comparison)
8227 if (TREE_CODE (arg0) == COMPOUND_EXPR)
8228 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
8229 fold_build2 (code, type,
8230 TREE_OPERAND (arg0, 1), op1));
8231 if (TREE_CODE (arg1) == COMPOUND_EXPR
8232 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
8233 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
8234 fold_build2 (code, type,
8235 op0, TREE_OPERAND (arg1, 1)));
8237 if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
8239 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
8241 /*cond_first_p=*/1);
8242 if (tem != NULL_TREE)
8246 if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1))
8248 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
8250 /*cond_first_p=*/0);
8251 if (tem != NULL_TREE)
8259 /* A + (-B) -> A - B */
8260 if (TREE_CODE (arg1) == NEGATE_EXPR)
8261 return fold_build2 (MINUS_EXPR, type,
8262 fold_convert (type, arg0),
8263 fold_convert (type, TREE_OPERAND (arg1, 0)));
8264 /* (-A) + B -> B - A */
8265 if (TREE_CODE (arg0) == NEGATE_EXPR
8266 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
8267 return fold_build2 (MINUS_EXPR, type,
8268 fold_convert (type, arg1),
8269 fold_convert (type, TREE_OPERAND (arg0, 0)));
8270 /* Convert ~A + 1 to -A. */
8271 if (INTEGRAL_TYPE_P (type)
8272 && TREE_CODE (arg0) == BIT_NOT_EXPR
8273 && integer_onep (arg1))
8274 return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));
8276 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the
8278 if ((TREE_CODE (arg0) == MULT_EXPR
8279 || TREE_CODE (arg1) == MULT_EXPR)
8280 && (!FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
8282 tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1);
8287 if (! FLOAT_TYPE_P (type))
8289 if (integer_zerop (arg1))
8290 return non_lvalue (fold_convert (type, arg0));
8292 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
8293 with a constant, and the two constants have no bits in common,
8294 we should treat this as a BIT_IOR_EXPR since this may produce more
8296 if (TREE_CODE (arg0) == BIT_AND_EXPR
8297 && TREE_CODE (arg1) == BIT_AND_EXPR
8298 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8299 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
8300 && integer_zerop (const_binop (BIT_AND_EXPR,
8301 TREE_OPERAND (arg0, 1),
8302 TREE_OPERAND (arg1, 1), 0)))
8304 code = BIT_IOR_EXPR;
8308 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
8309 (plus (plus (mult) (mult)) (foo)) so that we can
8310 take advantage of the factoring cases below. */
8311 if (((TREE_CODE (arg0) == PLUS_EXPR
8312 || TREE_CODE (arg0) == MINUS_EXPR)
8313 && TREE_CODE (arg1) == MULT_EXPR)
8314 || ((TREE_CODE (arg1) == PLUS_EXPR
8315 || TREE_CODE (arg1) == MINUS_EXPR)
8316 && TREE_CODE (arg0) == MULT_EXPR))
8318 tree parg0, parg1, parg, marg;
8319 enum tree_code pcode;
8321 if (TREE_CODE (arg1) == MULT_EXPR)
8322 parg = arg0, marg = arg1;
8324 parg = arg1, marg = arg0;
8325 pcode = TREE_CODE (parg);
8326 parg0 = TREE_OPERAND (parg, 0);
8327 parg1 = TREE_OPERAND (parg, 1);
8331 if (TREE_CODE (parg0) == MULT_EXPR
8332 && TREE_CODE (parg1) != MULT_EXPR)
8333 return fold_build2 (pcode, type,
8334 fold_build2 (PLUS_EXPR, type,
8335 fold_convert (type, parg0),
8336 fold_convert (type, marg)),
8337 fold_convert (type, parg1));
8338 if (TREE_CODE (parg0) != MULT_EXPR
8339 && TREE_CODE (parg1) == MULT_EXPR)
8340 return fold_build2 (PLUS_EXPR, type,
8341 fold_convert (type, parg0),
8342 fold_build2 (pcode, type,
8343 fold_convert (type, marg),
8348 /* Try replacing &a[i1] + c * i2 with &a[i1 + i2], if c is step
8349 of the array. Loop optimizer sometimes produces this type of
8351 if (TREE_CODE (arg0) == ADDR_EXPR)
8353 tem = try_move_mult_to_index (PLUS_EXPR, arg0, arg1);
8355 return fold_convert (type, tem);
8357 else if (TREE_CODE (arg1) == ADDR_EXPR)
8359 tem = try_move_mult_to_index (PLUS_EXPR, arg1, arg0);
8361 return fold_convert (type, tem);
8366 /* See if ARG1 is zero and X + ARG1 reduces to X. */
8367 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
8368 return non_lvalue (fold_convert (type, arg0));
8370 /* Likewise if the operands are reversed. */
8371 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
8372 return non_lvalue (fold_convert (type, arg1));
8374 /* Convert X + -C into X - C. */
8375 if (TREE_CODE (arg1) == REAL_CST
8376 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
8378 tem = fold_negate_const (arg1, type);
8379 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
8380 return fold_build2 (MINUS_EXPR, type,
8381 fold_convert (type, arg0),
8382 fold_convert (type, tem));
8385 if (flag_unsafe_math_optimizations
8386 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
8387 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
8388 && (tem = distribute_real_division (code, type, arg0, arg1)))
8391 /* Convert x+x into x*2.0. */
8392 if (operand_equal_p (arg0, arg1, 0)
8393 && SCALAR_FLOAT_TYPE_P (type))
8394 return fold_build2 (MULT_EXPR, type, arg0,
8395 build_real (type, dconst2));
8397 /* Convert a + (b*c + d*e) into (a + b*c) + d*e. */
8398 if (flag_unsafe_math_optimizations
8399 && TREE_CODE (arg1) == PLUS_EXPR
8400 && TREE_CODE (arg0) != MULT_EXPR)
8402 tree tree10 = TREE_OPERAND (arg1, 0);
8403 tree tree11 = TREE_OPERAND (arg1, 1);
8404 if (TREE_CODE (tree11) == MULT_EXPR
8405 && TREE_CODE (tree10) == MULT_EXPR)
8408 tree0 = fold_build2 (PLUS_EXPR, type, arg0, tree10);
8409 return fold_build2 (PLUS_EXPR, type, tree0, tree11);
8412 /* Convert (b*c + d*e) + a into b*c + (d*e +a). */
8413 if (flag_unsafe_math_optimizations
8414 && TREE_CODE (arg0) == PLUS_EXPR
8415 && TREE_CODE (arg1) != MULT_EXPR)
8417 tree tree00 = TREE_OPERAND (arg0, 0);
8418 tree tree01 = TREE_OPERAND (arg0, 1);
8419 if (TREE_CODE (tree01) == MULT_EXPR
8420 && TREE_CODE (tree00) == MULT_EXPR)
8423 tree0 = fold_build2 (PLUS_EXPR, type, tree01, arg1);
8424 return fold_build2 (PLUS_EXPR, type, tree00, tree0);
8430 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
8431 is a rotate of A by C1 bits. */
8432 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
8433 is a rotate of A by B bits. */
8435 enum tree_code code0, code1;
8436 code0 = TREE_CODE (arg0);
8437 code1 = TREE_CODE (arg1);
8438 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
8439 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
8440 && operand_equal_p (TREE_OPERAND (arg0, 0),
8441 TREE_OPERAND (arg1, 0), 0)
8442 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
8444 tree tree01, tree11;
8445 enum tree_code code01, code11;
8447 tree01 = TREE_OPERAND (arg0, 1);
8448 tree11 = TREE_OPERAND (arg1, 1);
8449 STRIP_NOPS (tree01);
8450 STRIP_NOPS (tree11);
8451 code01 = TREE_CODE (tree01);
8452 code11 = TREE_CODE (tree11);
8453 if (code01 == INTEGER_CST
8454 && code11 == INTEGER_CST
8455 && TREE_INT_CST_HIGH (tree01) == 0
8456 && TREE_INT_CST_HIGH (tree11) == 0
8457 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
8458 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
8459 return build2 (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
8460 code0 == LSHIFT_EXPR ? tree01 : tree11);
8461 else if (code11 == MINUS_EXPR)
8463 tree tree110, tree111;
8464 tree110 = TREE_OPERAND (tree11, 0);
8465 tree111 = TREE_OPERAND (tree11, 1);
8466 STRIP_NOPS (tree110);
8467 STRIP_NOPS (tree111);
8468 if (TREE_CODE (tree110) == INTEGER_CST
8469 && 0 == compare_tree_int (tree110,
8471 (TREE_TYPE (TREE_OPERAND
8473 && operand_equal_p (tree01, tree111, 0))
8474 return build2 ((code0 == LSHIFT_EXPR
8477 type, TREE_OPERAND (arg0, 0), tree01);
8479 else if (code01 == MINUS_EXPR)
8481 tree tree010, tree011;
8482 tree010 = TREE_OPERAND (tree01, 0);
8483 tree011 = TREE_OPERAND (tree01, 1);
8484 STRIP_NOPS (tree010);
8485 STRIP_NOPS (tree011);
8486 if (TREE_CODE (tree010) == INTEGER_CST
8487 && 0 == compare_tree_int (tree010,
8489 (TREE_TYPE (TREE_OPERAND
8491 && operand_equal_p (tree11, tree011, 0))
8492 return build2 ((code0 != LSHIFT_EXPR
8495 type, TREE_OPERAND (arg0, 0), tree11);
8501 /* In most languages, can't associate operations on floats through
8502 parentheses. Rather than remember where the parentheses were, we
8503 don't associate floats at all, unless the user has specified
8504 -funsafe-math-optimizations. */
8506 if (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
8508 tree var0, con0, lit0, minus_lit0;
8509 tree var1, con1, lit1, minus_lit1;
8511 /* Split both trees into variables, constants, and literals. Then
8512 associate each group together, the constants with literals,
8513 then the result with variables. This increases the chances of
8514 literals being recombined later and of generating relocatable
8515 expressions for the sum of a constant and literal. */
8516 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
8517 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
8518 code == MINUS_EXPR);
8520 /* Only do something if we found more than two objects. Otherwise,
8521 nothing has changed and we risk infinite recursion. */
8522 if (2 < ((var0 != 0) + (var1 != 0)
8523 + (con0 != 0) + (con1 != 0)
8524 + (lit0 != 0) + (lit1 != 0)
8525 + (minus_lit0 != 0) + (minus_lit1 != 0)))
8527 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
8528 if (code == MINUS_EXPR)
8531 var0 = associate_trees (var0, var1, code, type);
8532 con0 = associate_trees (con0, con1, code, type);
8533 lit0 = associate_trees (lit0, lit1, code, type);
8534 minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type);
8536 /* Preserve the MINUS_EXPR if the negative part of the literal is
8537 greater than the positive part. Otherwise, the multiplicative
8538 folding code (i.e. extract_muldiv) may be fooled in case
8539 unsigned constants are subtracted, like in the following
8540 example: ((X*2 + 4) - 8U)/2. */
8541 if (minus_lit0 && lit0)
8543 if (TREE_CODE (lit0) == INTEGER_CST
8544 && TREE_CODE (minus_lit0) == INTEGER_CST
8545 && tree_int_cst_lt (lit0, minus_lit0))
8547 minus_lit0 = associate_trees (minus_lit0, lit0,
8553 lit0 = associate_trees (lit0, minus_lit0,
8561 return fold_convert (type,
8562 associate_trees (var0, minus_lit0,
8566 con0 = associate_trees (con0, minus_lit0,
8568 return fold_convert (type,
8569 associate_trees (var0, con0,
8574 con0 = associate_trees (con0, lit0, code, type);
8575 return fold_convert (type, associate_trees (var0, con0,
8583 /* A - (-B) -> A + B */
8584 if (TREE_CODE (arg1) == NEGATE_EXPR)
8585 return fold_build2 (PLUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0));
8586 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
8587 if (TREE_CODE (arg0) == NEGATE_EXPR
8588 && (FLOAT_TYPE_P (type)
8589 || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv))
8590 && negate_expr_p (arg1)
8591 && reorder_operands_p (arg0, arg1))
8592 return fold_build2 (MINUS_EXPR, type, negate_expr (arg1),
8593 TREE_OPERAND (arg0, 0));
8594 /* Convert -A - 1 to ~A. */
8595 if (INTEGRAL_TYPE_P (type)
8596 && TREE_CODE (arg0) == NEGATE_EXPR
8597 && integer_onep (arg1))
8598 return fold_build1 (BIT_NOT_EXPR, type,
8599 fold_convert (type, TREE_OPERAND (arg0, 0)));
8601 /* Convert -1 - A to ~A. */
8602 if (INTEGRAL_TYPE_P (type)
8603 && integer_all_onesp (arg0))
8604 return fold_build1 (BIT_NOT_EXPR, type, arg1);
8606 if (! FLOAT_TYPE_P (type))
8608 if (integer_zerop (arg0))
8609 return negate_expr (fold_convert (type, arg1));
8610 if (integer_zerop (arg1))
8611 return non_lvalue (fold_convert (type, arg0));
8613 /* Fold A - (A & B) into ~B & A. */
8614 if (!TREE_SIDE_EFFECTS (arg0)
8615 && TREE_CODE (arg1) == BIT_AND_EXPR)
8617 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
8618 return fold_build2 (BIT_AND_EXPR, type,
8619 fold_build1 (BIT_NOT_EXPR, type,
8620 TREE_OPERAND (arg1, 0)),
8622 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8623 return fold_build2 (BIT_AND_EXPR, type,
8624 fold_build1 (BIT_NOT_EXPR, type,
8625 TREE_OPERAND (arg1, 1)),
8629 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
8630 any power of 2 minus 1. */
8631 if (TREE_CODE (arg0) == BIT_AND_EXPR
8632 && TREE_CODE (arg1) == BIT_AND_EXPR
8633 && operand_equal_p (TREE_OPERAND (arg0, 0),
8634 TREE_OPERAND (arg1, 0), 0))
8636 tree mask0 = TREE_OPERAND (arg0, 1);
8637 tree mask1 = TREE_OPERAND (arg1, 1);
8638 tree tem = fold_build1 (BIT_NOT_EXPR, type, mask0);
8640 if (operand_equal_p (tem, mask1, 0))
8642 tem = fold_build2 (BIT_XOR_EXPR, type,
8643 TREE_OPERAND (arg0, 0), mask1);
8644 return fold_build2 (MINUS_EXPR, type, tem, mask1);
8649 /* See if ARG1 is zero and X - ARG1 reduces to X. */
8650 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
8651 return non_lvalue (fold_convert (type, arg0));
8653 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
8654 ARG0 is zero and X + ARG0 reduces to X, since that would mean
8655 (-ARG1 + ARG0) reduces to -ARG1. */
8656 else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
8657 return negate_expr (fold_convert (type, arg1));
8659 /* Fold &x - &x. This can happen from &x.foo - &x.
8660 This is unsafe for certain floats even in non-IEEE formats.
8661 In IEEE, it is unsafe because it does wrong for NaNs.
8662 Also note that operand_equal_p is always false if an operand
8665 if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
8666 && operand_equal_p (arg0, arg1, 0))
8667 return fold_convert (type, integer_zero_node);
8669 /* A - B -> A + (-B) if B is easily negatable. */
8670 if (negate_expr_p (arg1)
8671 && ((FLOAT_TYPE_P (type)
8672 /* Avoid this transformation if B is a positive REAL_CST. */
8673 && (TREE_CODE (arg1) != REAL_CST
8674 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
8675 || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv)))
8676 return fold_build2 (PLUS_EXPR, type,
8677 fold_convert (type, arg0),
8678 fold_convert (type, negate_expr (arg1)));
8680 /* Try folding difference of addresses. */
8684 if ((TREE_CODE (arg0) == ADDR_EXPR
8685 || TREE_CODE (arg1) == ADDR_EXPR)
8686 && ptr_difference_const (arg0, arg1, &diff))
8687 return build_int_cst_type (type, diff);
8690 /* Fold &a[i] - &a[j] to i-j. */
8691 if (TREE_CODE (arg0) == ADDR_EXPR
8692 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
8693 && TREE_CODE (arg1) == ADDR_EXPR
8694 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
8696 tree aref0 = TREE_OPERAND (arg0, 0);
8697 tree aref1 = TREE_OPERAND (arg1, 0);
8698 if (operand_equal_p (TREE_OPERAND (aref0, 0),
8699 TREE_OPERAND (aref1, 0), 0))
8701 tree op0 = fold_convert (type, TREE_OPERAND (aref0, 1));
8702 tree op1 = fold_convert (type, TREE_OPERAND (aref1, 1));
8703 tree esz = array_ref_element_size (aref0);
8704 tree diff = build2 (MINUS_EXPR, type, op0, op1);
8705 return fold_build2 (MULT_EXPR, type, diff,
8706 fold_convert (type, esz));
8711 /* Try replacing &a[i1] - c * i2 with &a[i1 - i2], if c is step
8712 of the array. Loop optimizer sometimes produces this type of
8714 if (TREE_CODE (arg0) == ADDR_EXPR)
8716 tem = try_move_mult_to_index (MINUS_EXPR, arg0, arg1);
8718 return fold_convert (type, tem);
8721 if (flag_unsafe_math_optimizations
8722 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
8723 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
8724 && (tem = distribute_real_division (code, type, arg0, arg1)))
8727 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the
8729 if ((TREE_CODE (arg0) == MULT_EXPR
8730 || TREE_CODE (arg1) == MULT_EXPR)
8731 && (!FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
8733 tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1);
8741 /* (-A) * (-B) -> A * B */
8742 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
8743 return fold_build2 (MULT_EXPR, type,
8744 TREE_OPERAND (arg0, 0),
8745 negate_expr (arg1));
8746 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
8747 return fold_build2 (MULT_EXPR, type,
8749 TREE_OPERAND (arg1, 0));
8751 if (! FLOAT_TYPE_P (type))
8753 if (integer_zerop (arg1))
8754 return omit_one_operand (type, arg1, arg0);
8755 if (integer_onep (arg1))
8756 return non_lvalue (fold_convert (type, arg0));
8757 /* Transform x * -1 into -x. */
8758 if (integer_all_onesp (arg1))
8759 return fold_convert (type, negate_expr (arg0));
8761 /* (a * (1 << b)) is (a << b) */
8762 if (TREE_CODE (arg1) == LSHIFT_EXPR
8763 && integer_onep (TREE_OPERAND (arg1, 0)))
8764 return fold_build2 (LSHIFT_EXPR, type, arg0,
8765 TREE_OPERAND (arg1, 1));
8766 if (TREE_CODE (arg0) == LSHIFT_EXPR
8767 && integer_onep (TREE_OPERAND (arg0, 0)))
8768 return fold_build2 (LSHIFT_EXPR, type, arg1,
8769 TREE_OPERAND (arg0, 1));
8771 if (TREE_CODE (arg1) == INTEGER_CST
8772 && 0 != (tem = extract_muldiv (op0,
8773 fold_convert (type, arg1),
8775 return fold_convert (type, tem);
8780 /* Maybe fold x * 0 to 0. The expressions aren't the same
8781 when x is NaN, since x * 0 is also NaN. Nor are they the
8782 same in modes with signed zeros, since multiplying a
8783 negative value by 0 gives -0, not +0. */
8784 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
8785 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
8786 && real_zerop (arg1))
8787 return omit_one_operand (type, arg1, arg0);
8788 /* In IEEE floating point, x*1 is not equivalent to x for snans. */
8789 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
8790 && real_onep (arg1))
8791 return non_lvalue (fold_convert (type, arg0));
8793 /* Transform x * -1.0 into -x. */
8794 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
8795 && real_minus_onep (arg1))
8796 return fold_convert (type, negate_expr (arg0));
8798 /* Convert (C1/X)*C2 into (C1*C2)/X. */
8799 if (flag_unsafe_math_optimizations
8800 && TREE_CODE (arg0) == RDIV_EXPR
8801 && TREE_CODE (arg1) == REAL_CST
8802 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
8804 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
8807 return fold_build2 (RDIV_EXPR, type, tem,
8808 TREE_OPERAND (arg0, 1));
8811 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
8812 if (operand_equal_p (arg0, arg1, 0))
8814 tree tem = fold_strip_sign_ops (arg0);
8815 if (tem != NULL_TREE)
8817 tem = fold_convert (type, tem);
8818 return fold_build2 (MULT_EXPR, type, tem, tem);
8822 if (flag_unsafe_math_optimizations)
8824 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
8825 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
8827 /* Optimizations of root(...)*root(...). */
8828 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
8830 tree rootfn, arg, arglist;
8831 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
8832 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
8834 /* Optimize sqrt(x)*sqrt(x) as x. */
8835 if (BUILTIN_SQRT_P (fcode0)
8836 && operand_equal_p (arg00, arg10, 0)
8837 && ! HONOR_SNANS (TYPE_MODE (type)))
8840 /* Optimize root(x)*root(y) as root(x*y). */
8841 rootfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
8842 arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
8843 arglist = build_tree_list (NULL_TREE, arg);
8844 return build_function_call_expr (rootfn, arglist);
8847 /* Optimize expN(x)*expN(y) as expN(x+y). */
8848 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
8850 tree expfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
8851 tree arg = fold_build2 (PLUS_EXPR, type,
8852 TREE_VALUE (TREE_OPERAND (arg0, 1)),
8853 TREE_VALUE (TREE_OPERAND (arg1, 1)));
8854 tree arglist = build_tree_list (NULL_TREE, arg);
8855 return build_function_call_expr (expfn, arglist);
8858 /* Optimizations of pow(...)*pow(...). */
8859 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
8860 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
8861 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
8863 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
8864 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
8866 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
8867 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
8870 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
8871 if (operand_equal_p (arg01, arg11, 0))
8873 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
8874 tree arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
8875 tree arglist = tree_cons (NULL_TREE, arg,
8876 build_tree_list (NULL_TREE,
8878 return build_function_call_expr (powfn, arglist);
8881 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
8882 if (operand_equal_p (arg00, arg10, 0))
8884 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
8885 tree arg = fold_build2 (PLUS_EXPR, type, arg01, arg11);
8886 tree arglist = tree_cons (NULL_TREE, arg00,
8887 build_tree_list (NULL_TREE,
8889 return build_function_call_expr (powfn, arglist);
8893 /* Optimize tan(x)*cos(x) as sin(x). */
8894 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
8895 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
8896 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
8897 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
8898 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
8899 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
8900 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
8901 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
8903 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
8905 if (sinfn != NULL_TREE)
8906 return build_function_call_expr (sinfn,
8907 TREE_OPERAND (arg0, 1));
8910 /* Optimize x*pow(x,c) as pow(x,c+1). */
8911 if (fcode1 == BUILT_IN_POW
8912 || fcode1 == BUILT_IN_POWF
8913 || fcode1 == BUILT_IN_POWL)
8915 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
8916 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
8918 if (TREE_CODE (arg11) == REAL_CST
8919 && ! TREE_CONSTANT_OVERFLOW (arg11)
8920 && operand_equal_p (arg0, arg10, 0))
8922 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
8926 c = TREE_REAL_CST (arg11);
8927 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
8928 arg = build_real (type, c);
8929 arglist = build_tree_list (NULL_TREE, arg);
8930 arglist = tree_cons (NULL_TREE, arg0, arglist);
8931 return build_function_call_expr (powfn, arglist);
8935 /* Optimize pow(x,c)*x as pow(x,c+1). */
8936 if (fcode0 == BUILT_IN_POW
8937 || fcode0 == BUILT_IN_POWF
8938 || fcode0 == BUILT_IN_POWL)
8940 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
8941 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
8943 if (TREE_CODE (arg01) == REAL_CST
8944 && ! TREE_CONSTANT_OVERFLOW (arg01)
8945 && operand_equal_p (arg1, arg00, 0))
8947 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
8951 c = TREE_REAL_CST (arg01);
8952 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
8953 arg = build_real (type, c);
8954 arglist = build_tree_list (NULL_TREE, arg);
8955 arglist = tree_cons (NULL_TREE, arg1, arglist);
8956 return build_function_call_expr (powfn, arglist);
8960 /* Optimize x*x as pow(x,2.0), which is expanded as x*x. */
8962 && operand_equal_p (arg0, arg1, 0))
8964 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
8968 tree arg = build_real (type, dconst2);
8969 tree arglist = build_tree_list (NULL_TREE, arg);
8970 arglist = tree_cons (NULL_TREE, arg0, arglist);
8971 return build_function_call_expr (powfn, arglist);
8980 if (integer_all_onesp (arg1))
8981 return omit_one_operand (type, arg1, arg0);
8982 if (integer_zerop (arg1))
8983 return non_lvalue (fold_convert (type, arg0));
8984 if (operand_equal_p (arg0, arg1, 0))
8985 return non_lvalue (fold_convert (type, arg0));
8988 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8989 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8991 t1 = build_int_cst (type, -1);
8992 t1 = force_fit_type (t1, 0, false, false);
8993 return omit_one_operand (type, t1, arg1);
8997 if (TREE_CODE (arg1) == BIT_NOT_EXPR
8998 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9000 t1 = build_int_cst (type, -1);
9001 t1 = force_fit_type (t1, 0, false, false);
9002 return omit_one_operand (type, t1, arg0);
9005 /* Canonicalize (X & C1) | C2. */
9006 if (TREE_CODE (arg0) == BIT_AND_EXPR
9007 && TREE_CODE (arg1) == INTEGER_CST
9008 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9010 unsigned HOST_WIDE_INT hi1, lo1, hi2, lo2, mlo, mhi;
9011 int width = TYPE_PRECISION (type);
9012 hi1 = TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1));
9013 lo1 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
9014 hi2 = TREE_INT_CST_HIGH (arg1);
9015 lo2 = TREE_INT_CST_LOW (arg1);
9017 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
9018 if ((hi1 & hi2) == hi1 && (lo1 & lo2) == lo1)
9019 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
9021 if (width > HOST_BITS_PER_WIDE_INT)
9023 mhi = (unsigned HOST_WIDE_INT) -1
9024 >> (2 * HOST_BITS_PER_WIDE_INT - width);
9030 mlo = (unsigned HOST_WIDE_INT) -1
9031 >> (HOST_BITS_PER_WIDE_INT - width);
9034 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
9035 if ((~(hi1 | hi2) & mhi) == 0 && (~(lo1 | lo2) & mlo) == 0)
9036 return fold_build2 (BIT_IOR_EXPR, type,
9037 TREE_OPERAND (arg0, 0), arg1);
9039 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2. */
9042 if ((hi1 & ~hi2) != hi1 || (lo1 & ~lo2) != lo1)
9043 return fold_build2 (BIT_IOR_EXPR, type,
9044 fold_build2 (BIT_AND_EXPR, type,
9045 TREE_OPERAND (arg0, 0),
9046 build_int_cst_wide (type,
9052 /* (X & Y) | Y is (X, Y). */
9053 if (TREE_CODE (arg0) == BIT_AND_EXPR
9054 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9055 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
9056 /* (X & Y) | X is (Y, X). */
9057 if (TREE_CODE (arg0) == BIT_AND_EXPR
9058 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
9059 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
9060 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 1));
9061 /* X | (X & Y) is (Y, X). */
9062 if (TREE_CODE (arg1) == BIT_AND_EXPR
9063 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
9064 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
9065 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 1));
9066 /* X | (Y & X) is (Y, X). */
9067 if (TREE_CODE (arg1) == BIT_AND_EXPR
9068 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
9069 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9070 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 0));
9072 t1 = distribute_bit_expr (code, type, arg0, arg1);
9073 if (t1 != NULL_TREE)
9076 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
9078 This results in more efficient code for machines without a NAND
9079 instruction. Combine will canonicalize to the first form
9080 which will allow use of NAND instructions provided by the
9081 backend if they exist. */
9082 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9083 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9085 return fold_build1 (BIT_NOT_EXPR, type,
9086 build2 (BIT_AND_EXPR, type,
9087 TREE_OPERAND (arg0, 0),
9088 TREE_OPERAND (arg1, 0)));
9091 /* See if this can be simplified into a rotate first. If that
9092 is unsuccessful continue in the association code. */
9096 if (integer_zerop (arg1))
9097 return non_lvalue (fold_convert (type, arg0));
9098 if (integer_all_onesp (arg1))
9099 return fold_build1 (BIT_NOT_EXPR, type, arg0);
9100 if (operand_equal_p (arg0, arg1, 0))
9101 return omit_one_operand (type, integer_zero_node, arg0);
9104 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9105 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9107 t1 = build_int_cst (type, -1);
9108 t1 = force_fit_type (t1, 0, false, false);
9109 return omit_one_operand (type, t1, arg1);
9113 if (TREE_CODE (arg1) == BIT_NOT_EXPR
9114 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9116 t1 = build_int_cst (type, -1);
9117 t1 = force_fit_type (t1, 0, false, false);
9118 return omit_one_operand (type, t1, arg0);
9121 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
9122 with a constant, and the two constants have no bits in common,
9123 we should treat this as a BIT_IOR_EXPR since this may produce more
9125 if (TREE_CODE (arg0) == BIT_AND_EXPR
9126 && TREE_CODE (arg1) == BIT_AND_EXPR
9127 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9128 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9129 && integer_zerop (const_binop (BIT_AND_EXPR,
9130 TREE_OPERAND (arg0, 1),
9131 TREE_OPERAND (arg1, 1), 0)))
9133 code = BIT_IOR_EXPR;
9137 /* (X | Y) ^ X -> Y & ~ X*/
9138 if (TREE_CODE (arg0) == BIT_IOR_EXPR
9139 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9141 tree t2 = TREE_OPERAND (arg0, 1);
9142 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
9144 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
9145 fold_convert (type, t1));
9149 /* (Y | X) ^ X -> Y & ~ X*/
9150 if (TREE_CODE (arg0) == BIT_IOR_EXPR
9151 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9153 tree t2 = TREE_OPERAND (arg0, 0);
9154 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
9156 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
9157 fold_convert (type, t1));
9161 /* X ^ (X | Y) -> Y & ~ X*/
9162 if (TREE_CODE (arg1) == BIT_IOR_EXPR
9163 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
9165 tree t2 = TREE_OPERAND (arg1, 1);
9166 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
9168 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
9169 fold_convert (type, t1));
9173 /* X ^ (Y | X) -> Y & ~ X*/
9174 if (TREE_CODE (arg1) == BIT_IOR_EXPR
9175 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
9177 tree t2 = TREE_OPERAND (arg1, 0);
9178 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
9180 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
9181 fold_convert (type, t1));
9185 /* Convert ~X ^ ~Y to X ^ Y. */
9186 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9187 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9188 return fold_build2 (code, type,
9189 fold_convert (type, TREE_OPERAND (arg0, 0)),
9190 fold_convert (type, TREE_OPERAND (arg1, 0)));
9192 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
9193 if (TREE_CODE (arg0) == BIT_AND_EXPR
9194 && integer_onep (TREE_OPERAND (arg0, 1))
9195 && integer_onep (arg1))
9196 return fold_build2 (EQ_EXPR, type, arg0,
9197 build_int_cst (TREE_TYPE (arg0), 0));
9199 /* Fold (X & Y) ^ Y as ~X & Y. */
9200 if (TREE_CODE (arg0) == BIT_AND_EXPR
9201 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9203 tem = fold_convert (type, TREE_OPERAND (arg0, 0));
9204 return fold_build2 (BIT_AND_EXPR, type,
9205 fold_build1 (BIT_NOT_EXPR, type, tem),
9206 fold_convert (type, arg1));
9208 /* Fold (X & Y) ^ X as ~Y & X. */
9209 if (TREE_CODE (arg0) == BIT_AND_EXPR
9210 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
9211 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
9213 tem = fold_convert (type, TREE_OPERAND (arg0, 1));
9214 return fold_build2 (BIT_AND_EXPR, type,
9215 fold_build1 (BIT_NOT_EXPR, type, tem),
9216 fold_convert (type, arg1));
9218 /* Fold X ^ (X & Y) as X & ~Y. */
9219 if (TREE_CODE (arg1) == BIT_AND_EXPR
9220 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9222 tem = fold_convert (type, TREE_OPERAND (arg1, 1));
9223 return fold_build2 (BIT_AND_EXPR, type,
9224 fold_convert (type, arg0),
9225 fold_build1 (BIT_NOT_EXPR, type, tem));
9227 /* Fold X ^ (Y & X) as ~Y & X. */
9228 if (TREE_CODE (arg1) == BIT_AND_EXPR
9229 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
9230 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9232 tem = fold_convert (type, TREE_OPERAND (arg1, 0));
9233 return fold_build2 (BIT_AND_EXPR, type,
9234 fold_build1 (BIT_NOT_EXPR, type, tem),
9235 fold_convert (type, arg0));
9238 /* See if this can be simplified into a rotate first. If that
9239 is unsuccessful continue in the association code. */
9243 if (integer_all_onesp (arg1))
9244 return non_lvalue (fold_convert (type, arg0));
9245 if (integer_zerop (arg1))
9246 return omit_one_operand (type, arg1, arg0);
9247 if (operand_equal_p (arg0, arg1, 0))
9248 return non_lvalue (fold_convert (type, arg0));
9250 /* ~X & X is always zero. */
9251 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9252 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9253 return omit_one_operand (type, integer_zero_node, arg1);
9255 /* X & ~X is always zero. */
9256 if (TREE_CODE (arg1) == BIT_NOT_EXPR
9257 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9258 return omit_one_operand (type, integer_zero_node, arg0);
9260 /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2). */
9261 if (TREE_CODE (arg0) == BIT_IOR_EXPR
9262 && TREE_CODE (arg1) == INTEGER_CST
9263 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9264 return fold_build2 (BIT_IOR_EXPR, type,
9265 fold_build2 (BIT_AND_EXPR, type,
9266 TREE_OPERAND (arg0, 0), arg1),
9267 fold_build2 (BIT_AND_EXPR, type,
9268 TREE_OPERAND (arg0, 1), arg1));
9270 /* (X | Y) & Y is (X, Y). */
9271 if (TREE_CODE (arg0) == BIT_IOR_EXPR
9272 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9273 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
9274 /* (X | Y) & X is (Y, X). */
9275 if (TREE_CODE (arg0) == BIT_IOR_EXPR
9276 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
9277 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
9278 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 1));
9279 /* X & (X | Y) is (Y, X). */
9280 if (TREE_CODE (arg1) == BIT_IOR_EXPR
9281 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
9282 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
9283 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 1));
9284 /* X & (Y | X) is (Y, X). */
9285 if (TREE_CODE (arg1) == BIT_IOR_EXPR
9286 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
9287 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9288 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 0));
9290 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
9291 if (TREE_CODE (arg0) == BIT_XOR_EXPR
9292 && integer_onep (TREE_OPERAND (arg0, 1))
9293 && integer_onep (arg1))
9295 tem = TREE_OPERAND (arg0, 0);
9296 return fold_build2 (EQ_EXPR, type,
9297 fold_build2 (BIT_AND_EXPR, TREE_TYPE (tem), tem,
9298 build_int_cst (TREE_TYPE (tem), 1)),
9299 build_int_cst (TREE_TYPE (tem), 0));
9301 /* Fold ~X & 1 as (X & 1) == 0. */
9302 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9303 && integer_onep (arg1))
9305 tem = TREE_OPERAND (arg0, 0);
9306 return fold_build2 (EQ_EXPR, type,
9307 fold_build2 (BIT_AND_EXPR, TREE_TYPE (tem), tem,
9308 build_int_cst (TREE_TYPE (tem), 1)),
9309 build_int_cst (TREE_TYPE (tem), 0));
9312 /* Fold (X ^ Y) & Y as ~X & Y. */
9313 if (TREE_CODE (arg0) == BIT_XOR_EXPR
9314 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9316 tem = fold_convert (type, TREE_OPERAND (arg0, 0));
9317 return fold_build2 (BIT_AND_EXPR, type,
9318 fold_build1 (BIT_NOT_EXPR, type, tem),
9319 fold_convert (type, arg1));
9321 /* Fold (X ^ Y) & X as ~Y & X. */
9322 if (TREE_CODE (arg0) == BIT_XOR_EXPR
9323 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
9324 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
9326 tem = fold_convert (type, TREE_OPERAND (arg0, 1));
9327 return fold_build2 (BIT_AND_EXPR, type,
9328 fold_build1 (BIT_NOT_EXPR, type, tem),
9329 fold_convert (type, arg1));
9331 /* Fold X & (X ^ Y) as X & ~Y. */
9332 if (TREE_CODE (arg1) == BIT_XOR_EXPR
9333 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9335 tem = fold_convert (type, TREE_OPERAND (arg1, 1));
9336 return fold_build2 (BIT_AND_EXPR, type,
9337 fold_convert (type, arg0),
9338 fold_build1 (BIT_NOT_EXPR, type, tem));
9340 /* Fold X & (Y ^ X) as ~Y & X. */
9341 if (TREE_CODE (arg1) == BIT_XOR_EXPR
9342 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
9343 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9345 tem = fold_convert (type, TREE_OPERAND (arg1, 0));
9346 return fold_build2 (BIT_AND_EXPR, type,
9347 fold_build1 (BIT_NOT_EXPR, type, tem),
9348 fold_convert (type, arg0));
9351 t1 = distribute_bit_expr (code, type, arg0, arg1);
9352 if (t1 != NULL_TREE)
9354 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
9355 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
9356 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
9359 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
9361 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
9362 && (~TREE_INT_CST_LOW (arg1)
9363 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
9364 return fold_convert (type, TREE_OPERAND (arg0, 0));
9367 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
9369 This results in more efficient code for machines without a NOR
9370 instruction. Combine will canonicalize to the first form
9371 which will allow use of NOR instructions provided by the
9372 backend if they exist. */
9373 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9374 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9376 return fold_build1 (BIT_NOT_EXPR, type,
9377 build2 (BIT_IOR_EXPR, type,
9378 TREE_OPERAND (arg0, 0),
9379 TREE_OPERAND (arg1, 0)));
9385 /* Don't touch a floating-point divide by zero unless the mode
9386 of the constant can represent infinity. */
9387 if (TREE_CODE (arg1) == REAL_CST
9388 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
9389 && real_zerop (arg1))
9392 /* Optimize A / A to 1.0 if we don't care about
9393 NaNs or Infinities. Skip the transformation
9394 for non-real operands. */
9395 if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
9396 && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
9397 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
9398 && operand_equal_p (arg0, arg1, 0))
9400 tree r = build_real (TREE_TYPE (arg0), dconst1);
9402 return omit_two_operands (type, r, arg0, arg1);
9405 /* The complex version of the above A / A optimization. */
9406 if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
9407 && operand_equal_p (arg0, arg1, 0))
9409 tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
9410 if (! HONOR_NANS (TYPE_MODE (elem_type))
9411 && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
9413 tree r = build_real (elem_type, dconst1);
9414 /* omit_two_operands will call fold_convert for us. */
9415 return omit_two_operands (type, r, arg0, arg1);
9419 /* (-A) / (-B) -> A / B */
9420 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
9421 return fold_build2 (RDIV_EXPR, type,
9422 TREE_OPERAND (arg0, 0),
9423 negate_expr (arg1));
9424 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
9425 return fold_build2 (RDIV_EXPR, type,
9427 TREE_OPERAND (arg1, 0));
9429 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
9430 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9431 && real_onep (arg1))
9432 return non_lvalue (fold_convert (type, arg0));
9434 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
9435 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9436 && real_minus_onep (arg1))
9437 return non_lvalue (fold_convert (type, negate_expr (arg0)));
9439 /* If ARG1 is a constant, we can convert this to a multiply by the
9440 reciprocal. This does not have the same rounding properties,
9441 so only do this if -funsafe-math-optimizations. We can actually
9442 always safely do it if ARG1 is a power of two, but it's hard to
9443 tell if it is or not in a portable manner. */
9444 if (TREE_CODE (arg1) == REAL_CST)
9446 if (flag_unsafe_math_optimizations
9447 && 0 != (tem = const_binop (code, build_real (type, dconst1),
9449 return fold_build2 (MULT_EXPR, type, arg0, tem);
9450 /* Find the reciprocal if optimizing and the result is exact. */
9454 r = TREE_REAL_CST (arg1);
9455 if (exact_real_inverse (TYPE_MODE(TREE_TYPE(arg0)), &r))
9457 tem = build_real (type, r);
9458 return fold_build2 (MULT_EXPR, type,
9459 fold_convert (type, arg0), tem);
9463 /* Convert A/B/C to A/(B*C). */
9464 if (flag_unsafe_math_optimizations
9465 && TREE_CODE (arg0) == RDIV_EXPR)
9466 return fold_build2 (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
9467 fold_build2 (MULT_EXPR, type,
9468 TREE_OPERAND (arg0, 1), arg1));
9470 /* Convert A/(B/C) to (A/B)*C. */
9471 if (flag_unsafe_math_optimizations
9472 && TREE_CODE (arg1) == RDIV_EXPR)
9473 return fold_build2 (MULT_EXPR, type,
9474 fold_build2 (RDIV_EXPR, type, arg0,
9475 TREE_OPERAND (arg1, 0)),
9476 TREE_OPERAND (arg1, 1));
9478 /* Convert C1/(X*C2) into (C1/C2)/X. */
9479 if (flag_unsafe_math_optimizations
9480 && TREE_CODE (arg1) == MULT_EXPR
9481 && TREE_CODE (arg0) == REAL_CST
9482 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
9484 tree tem = const_binop (RDIV_EXPR, arg0,
9485 TREE_OPERAND (arg1, 1), 0);
9487 return fold_build2 (RDIV_EXPR, type, tem,
9488 TREE_OPERAND (arg1, 0));
9491 if (flag_unsafe_math_optimizations)
9493 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
9494 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
9496 /* Optimize sin(x)/cos(x) as tan(x). */
9497 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
9498 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
9499 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
9500 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
9501 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
9503 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
9505 if (tanfn != NULL_TREE)
9506 return build_function_call_expr (tanfn,
9507 TREE_OPERAND (arg0, 1));
9510 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
9511 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
9512 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
9513 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
9514 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
9515 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
9517 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
9519 if (tanfn != NULL_TREE)
9521 tree tmp = TREE_OPERAND (arg0, 1);
9522 tmp = build_function_call_expr (tanfn, tmp);
9523 return fold_build2 (RDIV_EXPR, type,
9524 build_real (type, dconst1), tmp);
9528 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
9529 NaNs or Infinities. */
9530 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
9531 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
9532 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
9534 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
9535 tree arg01 = TREE_VALUE (TREE_OPERAND (arg1, 1));
9537 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
9538 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
9539 && operand_equal_p (arg00, arg01, 0))
9541 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
9543 if (cosfn != NULL_TREE)
9544 return build_function_call_expr (cosfn,
9545 TREE_OPERAND (arg0, 1));
9549 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
9550 NaNs or Infinities. */
9551 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
9552 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
9553 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
9555 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
9556 tree arg01 = TREE_VALUE (TREE_OPERAND (arg1, 1));
9558 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
9559 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
9560 && operand_equal_p (arg00, arg01, 0))
9562 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
9564 if (cosfn != NULL_TREE)
9566 tree tmp = TREE_OPERAND (arg0, 1);
9567 tmp = build_function_call_expr (cosfn, tmp);
9568 return fold_build2 (RDIV_EXPR, type,
9569 build_real (type, dconst1),
9575 /* Optimize pow(x,c)/x as pow(x,c-1). */
9576 if (fcode0 == BUILT_IN_POW
9577 || fcode0 == BUILT_IN_POWF
9578 || fcode0 == BUILT_IN_POWL)
9580 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
9581 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0, 1)));
9582 if (TREE_CODE (arg01) == REAL_CST
9583 && ! TREE_CONSTANT_OVERFLOW (arg01)
9584 && operand_equal_p (arg1, arg00, 0))
9586 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
9590 c = TREE_REAL_CST (arg01);
9591 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
9592 arg = build_real (type, c);
9593 arglist = build_tree_list (NULL_TREE, arg);
9594 arglist = tree_cons (NULL_TREE, arg1, arglist);
9595 return build_function_call_expr (powfn, arglist);
9599 /* Optimize x/expN(y) into x*expN(-y). */
9600 if (BUILTIN_EXPONENT_P (fcode1))
9602 tree expfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
9603 tree arg = negate_expr (TREE_VALUE (TREE_OPERAND (arg1, 1)));
9604 tree arglist = build_tree_list (NULL_TREE,
9605 fold_convert (type, arg));
9606 arg1 = build_function_call_expr (expfn, arglist);
9607 return fold_build2 (MULT_EXPR, type, arg0, arg1);
9610 /* Optimize x/pow(y,z) into x*pow(y,-z). */
9611 if (fcode1 == BUILT_IN_POW
9612 || fcode1 == BUILT_IN_POWF
9613 || fcode1 == BUILT_IN_POWL)
9615 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
9616 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
9617 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1, 1)));
9618 tree neg11 = fold_convert (type, negate_expr (arg11));
9619 tree arglist = tree_cons(NULL_TREE, arg10,
9620 build_tree_list (NULL_TREE, neg11));
9621 arg1 = build_function_call_expr (powfn, arglist);
9622 return fold_build2 (MULT_EXPR, type, arg0, arg1);
9627 case TRUNC_DIV_EXPR:
9628 case FLOOR_DIV_EXPR:
9629 /* Simplify A / (B << N) where A and B are positive and B is
9630 a power of 2, to A >> (N + log2(B)). */
9631 if (TREE_CODE (arg1) == LSHIFT_EXPR
9632 && (TYPE_UNSIGNED (type) || tree_expr_nonnegative_p (arg0)))
9634 tree sval = TREE_OPERAND (arg1, 0);
9635 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
9637 tree sh_cnt = TREE_OPERAND (arg1, 1);
9638 unsigned long pow2 = exact_log2 (TREE_INT_CST_LOW (sval));
9640 sh_cnt = fold_build2 (PLUS_EXPR, TREE_TYPE (sh_cnt),
9641 sh_cnt, build_int_cst (NULL_TREE, pow2));
9642 return fold_build2 (RSHIFT_EXPR, type,
9643 fold_convert (type, arg0), sh_cnt);
9648 case ROUND_DIV_EXPR:
9650 case EXACT_DIV_EXPR:
9651 if (integer_onep (arg1))
9652 return non_lvalue (fold_convert (type, arg0));
9653 if (integer_zerop (arg1))
9656 if (!TYPE_UNSIGNED (type)
9657 && TREE_CODE (arg1) == INTEGER_CST
9658 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
9659 && TREE_INT_CST_HIGH (arg1) == -1)
9660 return fold_convert (type, negate_expr (arg0));
9662 /* Convert -A / -B to A / B when the type is signed and overflow is
9664 if (!TYPE_UNSIGNED (type) && !flag_wrapv
9665 && TREE_CODE (arg0) == NEGATE_EXPR
9666 && negate_expr_p (arg1))
9667 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
9668 negate_expr (arg1));
9669 if (!TYPE_UNSIGNED (type) && !flag_wrapv
9670 && TREE_CODE (arg1) == NEGATE_EXPR
9671 && negate_expr_p (arg0))
9672 return fold_build2 (code, type, negate_expr (arg0),
9673 TREE_OPERAND (arg1, 0));
9675 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
9676 operation, EXACT_DIV_EXPR.
9678 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
9679 At one time others generated faster code, it's not clear if they do
9680 after the last round to changes to the DIV code in expmed.c. */
9681 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
9682 && multiple_of_p (type, arg0, arg1))
9683 return fold_build2 (EXACT_DIV_EXPR, type, arg0, arg1);
9685 if (TREE_CODE (arg1) == INTEGER_CST
9686 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE)))
9687 return fold_convert (type, tem);
9692 case FLOOR_MOD_EXPR:
9693 case ROUND_MOD_EXPR:
9694 case TRUNC_MOD_EXPR:
9695 /* X % 1 is always zero, but be sure to preserve any side
9697 if (integer_onep (arg1))
9698 return omit_one_operand (type, integer_zero_node, arg0);
9700 /* X % 0, return X % 0 unchanged so that we can get the
9701 proper warnings and errors. */
9702 if (integer_zerop (arg1))
9705 /* 0 % X is always zero, but be sure to preserve any side
9706 effects in X. Place this after checking for X == 0. */
9707 if (integer_zerop (arg0))
9708 return omit_one_operand (type, integer_zero_node, arg1);
9710 /* X % -1 is zero. */
9711 if (!TYPE_UNSIGNED (type)
9712 && TREE_CODE (arg1) == INTEGER_CST
9713 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
9714 && TREE_INT_CST_HIGH (arg1) == -1)
9715 return omit_one_operand (type, integer_zero_node, arg0);
9717 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
9718 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
9719 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
9720 && (TYPE_UNSIGNED (type) || tree_expr_nonnegative_p (arg0)))
9723 /* Also optimize A % (C << N) where C is a power of 2,
9724 to A & ((C << N) - 1). */
9725 if (TREE_CODE (arg1) == LSHIFT_EXPR)
9726 c = TREE_OPERAND (arg1, 0);
9728 if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
9730 tree mask = fold_build2 (MINUS_EXPR, TREE_TYPE (arg1),
9731 arg1, integer_one_node);
9732 return fold_build2 (BIT_AND_EXPR, type,
9733 fold_convert (type, arg0),
9734 fold_convert (type, mask));
9738 /* X % -C is the same as X % C. */
9739 if (code == TRUNC_MOD_EXPR
9740 && !TYPE_UNSIGNED (type)
9741 && TREE_CODE (arg1) == INTEGER_CST
9742 && !TREE_CONSTANT_OVERFLOW (arg1)
9743 && TREE_INT_CST_HIGH (arg1) < 0
9745 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
9746 && !sign_bit_p (arg1, arg1))
9747 return fold_build2 (code, type, fold_convert (type, arg0),
9748 fold_convert (type, negate_expr (arg1)));
9750 /* X % -Y is the same as X % Y. */
9751 if (code == TRUNC_MOD_EXPR
9752 && !TYPE_UNSIGNED (type)
9753 && TREE_CODE (arg1) == NEGATE_EXPR
9755 return fold_build2 (code, type, fold_convert (type, arg0),
9756 fold_convert (type, TREE_OPERAND (arg1, 0)));
9758 if (TREE_CODE (arg1) == INTEGER_CST
9759 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE)))
9760 return fold_convert (type, tem);
9766 if (integer_all_onesp (arg0))
9767 return omit_one_operand (type, arg0, arg1);
9771 /* Optimize -1 >> x for arithmetic right shifts. */
9772 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type))
9773 return omit_one_operand (type, arg0, arg1);
9774 /* ... fall through ... */
9778 if (integer_zerop (arg1))
9779 return non_lvalue (fold_convert (type, arg0));
9780 if (integer_zerop (arg0))
9781 return omit_one_operand (type, arg0, arg1);
9783 /* Since negative shift count is not well-defined,
9784 don't try to compute it in the compiler. */
9785 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
9788 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
9789 if (TREE_CODE (arg0) == code && host_integerp (arg1, false)
9790 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
9791 && host_integerp (TREE_OPERAND (arg0, 1), false)
9792 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
9794 HOST_WIDE_INT low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
9795 + TREE_INT_CST_LOW (arg1));
9797 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
9798 being well defined. */
9799 if (low >= TYPE_PRECISION (type))
9801 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
9802 low = low % TYPE_PRECISION (type);
9803 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
9804 return build_int_cst (type, 0);
9806 low = TYPE_PRECISION (type) - 1;
9809 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
9810 build_int_cst (type, low));
9813 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
9814 into x & ((unsigned)-1 >> c) for unsigned types. */
9815 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
9816 || (TYPE_UNSIGNED (type)
9817 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
9818 && host_integerp (arg1, false)
9819 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
9820 && host_integerp (TREE_OPERAND (arg0, 1), false)
9821 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
9823 HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
9824 HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
9830 arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));
9832 lshift = build_int_cst (type, -1);
9833 lshift = int_const_binop (code, lshift, arg1, 0);
9835 return fold_build2 (BIT_AND_EXPR, type, arg00, lshift);
9839 /* Rewrite an LROTATE_EXPR by a constant into an
9840 RROTATE_EXPR by a new constant. */
9841 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
9843 tree tem = build_int_cst (NULL_TREE,
9844 GET_MODE_BITSIZE (TYPE_MODE (type)));
9845 tem = fold_convert (TREE_TYPE (arg1), tem);
9846 tem = const_binop (MINUS_EXPR, tem, arg1, 0);
9847 return fold_build2 (RROTATE_EXPR, type, arg0, tem);
9850 /* If we have a rotate of a bit operation with the rotate count and
9851 the second operand of the bit operation both constant,
9852 permute the two operations. */
9853 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
9854 && (TREE_CODE (arg0) == BIT_AND_EXPR
9855 || TREE_CODE (arg0) == BIT_IOR_EXPR
9856 || TREE_CODE (arg0) == BIT_XOR_EXPR)
9857 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9858 return fold_build2 (TREE_CODE (arg0), type,
9859 fold_build2 (code, type,
9860 TREE_OPERAND (arg0, 0), arg1),
9861 fold_build2 (code, type,
9862 TREE_OPERAND (arg0, 1), arg1));
9864 /* Two consecutive rotates adding up to the width of the mode can
9866 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
9867 && TREE_CODE (arg0) == RROTATE_EXPR
9868 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9869 && TREE_INT_CST_HIGH (arg1) == 0
9870 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
9871 && ((TREE_INT_CST_LOW (arg1)
9872 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
9873 == (unsigned int) GET_MODE_BITSIZE (TYPE_MODE (type))))
9874 return TREE_OPERAND (arg0, 0);
9879 if (operand_equal_p (arg0, arg1, 0))
9880 return omit_one_operand (type, arg0, arg1);
9881 if (INTEGRAL_TYPE_P (type)
9882 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
9883 return omit_one_operand (type, arg1, arg0);
9884 tem = fold_minmax (MIN_EXPR, type, arg0, arg1);
9890 if (operand_equal_p (arg0, arg1, 0))
9891 return omit_one_operand (type, arg0, arg1);
9892 if (INTEGRAL_TYPE_P (type)
9893 && TYPE_MAX_VALUE (type)
9894 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
9895 return omit_one_operand (type, arg1, arg0);
9896 tem = fold_minmax (MAX_EXPR, type, arg0, arg1);
9901 case TRUTH_ANDIF_EXPR:
9902 /* Note that the operands of this must be ints
9903 and their values must be 0 or 1.
9904 ("true" is a fixed value perhaps depending on the language.) */
9905 /* If first arg is constant zero, return it. */
9906 if (integer_zerop (arg0))
9907 return fold_convert (type, arg0);
9908 case TRUTH_AND_EXPR:
9909 /* If either arg is constant true, drop it. */
9910 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
9911 return non_lvalue (fold_convert (type, arg1));
9912 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
9913 /* Preserve sequence points. */
9914 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
9915 return non_lvalue (fold_convert (type, arg0));
9916 /* If second arg is constant zero, result is zero, but first arg
9917 must be evaluated. */
9918 if (integer_zerop (arg1))
9919 return omit_one_operand (type, arg1, arg0);
9920 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
9921 case will be handled here. */
9922 if (integer_zerop (arg0))
9923 return omit_one_operand (type, arg0, arg1);
9925 /* !X && X is always false. */
9926 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
9927 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9928 return omit_one_operand (type, integer_zero_node, arg1);
9929 /* X && !X is always false. */
9930 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
9931 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9932 return omit_one_operand (type, integer_zero_node, arg0);
9934 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
9935 means A >= Y && A != MAX, but in this case we know that
9938 if (!TREE_SIDE_EFFECTS (arg0)
9939 && !TREE_SIDE_EFFECTS (arg1))
9941 tem = fold_to_nonsharp_ineq_using_bound (arg0, arg1);
9942 if (tem && !operand_equal_p (tem, arg0, 0))
9943 return fold_build2 (code, type, tem, arg1);
9945 tem = fold_to_nonsharp_ineq_using_bound (arg1, arg0);
9946 if (tem && !operand_equal_p (tem, arg1, 0))
9947 return fold_build2 (code, type, arg0, tem);
9951 /* We only do these simplifications if we are optimizing. */
9955 /* Check for things like (A || B) && (A || C). We can convert this
9956 to A || (B && C). Note that either operator can be any of the four
9957 truth and/or operations and the transformation will still be
9958 valid. Also note that we only care about order for the
9959 ANDIF and ORIF operators. If B contains side effects, this
9960 might change the truth-value of A. */
9961 if (TREE_CODE (arg0) == TREE_CODE (arg1)
9962 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
9963 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
9964 || TREE_CODE (arg0) == TRUTH_AND_EXPR
9965 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
9966 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
9968 tree a00 = TREE_OPERAND (arg0, 0);
9969 tree a01 = TREE_OPERAND (arg0, 1);
9970 tree a10 = TREE_OPERAND (arg1, 0);
9971 tree a11 = TREE_OPERAND (arg1, 1);
9972 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
9973 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
9974 && (code == TRUTH_AND_EXPR
9975 || code == TRUTH_OR_EXPR));
9977 if (operand_equal_p (a00, a10, 0))
9978 return fold_build2 (TREE_CODE (arg0), type, a00,
9979 fold_build2 (code, type, a01, a11));
9980 else if (commutative && operand_equal_p (a00, a11, 0))
9981 return fold_build2 (TREE_CODE (arg0), type, a00,
9982 fold_build2 (code, type, a01, a10));
9983 else if (commutative && operand_equal_p (a01, a10, 0))
9984 return fold_build2 (TREE_CODE (arg0), type, a01,
9985 fold_build2 (code, type, a00, a11));
9987 /* This case if tricky because we must either have commutative
9988 operators or else A10 must not have side-effects. */
9990 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
9991 && operand_equal_p (a01, a11, 0))
9992 return fold_build2 (TREE_CODE (arg0), type,
9993 fold_build2 (code, type, a00, a10),
9997 /* See if we can build a range comparison. */
9998 if (0 != (tem = fold_range_test (code, type, op0, op1)))
10001 /* Check for the possibility of merging component references. If our
10002 lhs is another similar operation, try to merge its rhs with our
10003 rhs. Then try to merge our lhs and rhs. */
10004 if (TREE_CODE (arg0) == code
10005 && 0 != (tem = fold_truthop (code, type,
10006 TREE_OPERAND (arg0, 1), arg1)))
10007 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
10009 if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
10014 case TRUTH_ORIF_EXPR:
10015 /* Note that the operands of this must be ints
10016 and their values must be 0 or true.
10017 ("true" is a fixed value perhaps depending on the language.) */
10018 /* If first arg is constant true, return it. */
10019 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10020 return fold_convert (type, arg0);
10021 case TRUTH_OR_EXPR:
10022 /* If either arg is constant zero, drop it. */
10023 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
10024 return non_lvalue (fold_convert (type, arg1));
10025 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
10026 /* Preserve sequence points. */
10027 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
10028 return non_lvalue (fold_convert (type, arg0));
10029 /* If second arg is constant true, result is true, but we must
10030 evaluate first arg. */
10031 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
10032 return omit_one_operand (type, arg1, arg0);
10033 /* Likewise for first arg, but note this only occurs here for
10035 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10036 return omit_one_operand (type, arg0, arg1);
10038 /* !X || X is always true. */
10039 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10040 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10041 return omit_one_operand (type, integer_one_node, arg1);
10042 /* X || !X is always true. */
10043 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10044 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10045 return omit_one_operand (type, integer_one_node, arg0);
10049 case TRUTH_XOR_EXPR:
10050 /* If the second arg is constant zero, drop it. */
10051 if (integer_zerop (arg1))
10052 return non_lvalue (fold_convert (type, arg0));
10053 /* If the second arg is constant true, this is a logical inversion. */
10054 if (integer_onep (arg1))
10056 /* Only call invert_truthvalue if operand is a truth value. */
10057 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
10058 tem = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg0), arg0);
10060 tem = invert_truthvalue (arg0);
10061 return non_lvalue (fold_convert (type, tem));
10063 /* Identical arguments cancel to zero. */
10064 if (operand_equal_p (arg0, arg1, 0))
10065 return omit_one_operand (type, integer_zero_node, arg0);
10067 /* !X ^ X is always true. */
10068 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10069 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10070 return omit_one_operand (type, integer_one_node, arg1);
10072 /* X ^ !X is always true. */
10073 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10074 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10075 return omit_one_operand (type, integer_one_node, arg0);
10081 tem = fold_comparison (code, type, op0, op1);
10082 if (tem != NULL_TREE)
10085 /* bool_var != 0 becomes bool_var. */
10086 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
10087 && code == NE_EXPR)
10088 return non_lvalue (fold_convert (type, arg0));
10090 /* bool_var == 1 becomes bool_var. */
10091 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
10092 && code == EQ_EXPR)
10093 return non_lvalue (fold_convert (type, arg0));
10095 /* bool_var != 1 becomes !bool_var. */
10096 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
10097 && code == NE_EXPR)
10098 return fold_build1 (TRUTH_NOT_EXPR, type, arg0);
10100 /* bool_var == 0 becomes !bool_var. */
10101 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
10102 && code == EQ_EXPR)
10103 return fold_build1 (TRUTH_NOT_EXPR, type, arg0);
10105 /* ~a != C becomes a != ~C where C is a constant. Likewise for ==. */
10106 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10107 && TREE_CODE (arg1) == INTEGER_CST)
10108 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
10109 fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
10112 /* If this is an equality comparison of the address of a non-weak
10113 object against zero, then we know the result. */
10114 if (TREE_CODE (arg0) == ADDR_EXPR
10115 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
10116 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
10117 && integer_zerop (arg1))
10118 return constant_boolean_node (code != EQ_EXPR, type);
10120 /* If this is an equality comparison of the address of two non-weak,
10121 unaliased symbols neither of which are extern (since we do not
10122 have access to attributes for externs), then we know the result. */
10123 if (TREE_CODE (arg0) == ADDR_EXPR
10124 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
10125 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
10126 && ! lookup_attribute ("alias",
10127 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
10128 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
10129 && TREE_CODE (arg1) == ADDR_EXPR
10130 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
10131 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
10132 && ! lookup_attribute ("alias",
10133 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
10134 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
10136 /* We know that we're looking at the address of two
10137 non-weak, unaliased, static _DECL nodes.
10139 It is both wasteful and incorrect to call operand_equal_p
10140 to compare the two ADDR_EXPR nodes. It is wasteful in that
10141 all we need to do is test pointer equality for the arguments
10142 to the two ADDR_EXPR nodes. It is incorrect to use
10143 operand_equal_p as that function is NOT equivalent to a
10144 C equality test. It can in fact return false for two
10145 objects which would test as equal using the C equality
10147 bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
10148 return constant_boolean_node (equal
10149 ? code == EQ_EXPR : code != EQ_EXPR,
10153 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
10154 a MINUS_EXPR of a constant, we can convert it into a comparison with
10155 a revised constant as long as no overflow occurs. */
10156 if (TREE_CODE (arg1) == INTEGER_CST
10157 && (TREE_CODE (arg0) == PLUS_EXPR
10158 || TREE_CODE (arg0) == MINUS_EXPR)
10159 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10160 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
10161 ? MINUS_EXPR : PLUS_EXPR,
10162 arg1, TREE_OPERAND (arg0, 1), 0))
10163 && ! TREE_CONSTANT_OVERFLOW (tem))
10164 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
10166 /* Similarly for a NEGATE_EXPR. */
10167 if (TREE_CODE (arg0) == NEGATE_EXPR
10168 && TREE_CODE (arg1) == INTEGER_CST
10169 && 0 != (tem = negate_expr (arg1))
10170 && TREE_CODE (tem) == INTEGER_CST
10171 && ! TREE_CONSTANT_OVERFLOW (tem))
10172 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
10174 /* If we have X - Y == 0, we can convert that to X == Y and similarly
10175 for !=. Don't do this for ordered comparisons due to overflow. */
10176 if (TREE_CODE (arg0) == MINUS_EXPR
10177 && integer_zerop (arg1))
10178 return fold_build2 (code, type,
10179 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
10181 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
10182 if (TREE_CODE (arg0) == ABS_EXPR
10183 && (integer_zerop (arg1) || real_zerop (arg1)))
10184 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), arg1);
10186 /* If this is an EQ or NE comparison with zero and ARG0 is
10187 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
10188 two operations, but the latter can be done in one less insn
10189 on machines that have only two-operand insns or on which a
10190 constant cannot be the first operand. */
10191 if (TREE_CODE (arg0) == BIT_AND_EXPR
10192 && integer_zerop (arg1))
10194 tree arg00 = TREE_OPERAND (arg0, 0);
10195 tree arg01 = TREE_OPERAND (arg0, 1);
10196 if (TREE_CODE (arg00) == LSHIFT_EXPR
10197 && integer_onep (TREE_OPERAND (arg00, 0)))
10199 fold_build2 (code, type,
10200 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
10201 build2 (RSHIFT_EXPR, TREE_TYPE (arg00),
10202 arg01, TREE_OPERAND (arg00, 1)),
10203 fold_convert (TREE_TYPE (arg0),
10204 integer_one_node)),
10206 else if (TREE_CODE (TREE_OPERAND (arg0, 1)) == LSHIFT_EXPR
10207 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 1), 0)))
10209 fold_build2 (code, type,
10210 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
10211 build2 (RSHIFT_EXPR, TREE_TYPE (arg01),
10212 arg00, TREE_OPERAND (arg01, 1)),
10213 fold_convert (TREE_TYPE (arg0),
10214 integer_one_node)),
10218 /* If this is an NE or EQ comparison of zero against the result of a
10219 signed MOD operation whose second operand is a power of 2, make
10220 the MOD operation unsigned since it is simpler and equivalent. */
10221 if (integer_zerop (arg1)
10222 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
10223 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
10224 || TREE_CODE (arg0) == CEIL_MOD_EXPR
10225 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
10226 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
10227 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10229 tree newtype = lang_hooks.types.unsigned_type (TREE_TYPE (arg0));
10230 tree newmod = fold_build2 (TREE_CODE (arg0), newtype,
10231 fold_convert (newtype,
10232 TREE_OPERAND (arg0, 0)),
10233 fold_convert (newtype,
10234 TREE_OPERAND (arg0, 1)));
10236 return fold_build2 (code, type, newmod,
10237 fold_convert (newtype, arg1));
10240 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
10241 C1 is a valid shift constant, and C2 is a power of two, i.e.
10243 if (TREE_CODE (arg0) == BIT_AND_EXPR
10244 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
10245 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
10247 && integer_pow2p (TREE_OPERAND (arg0, 1))
10248 && integer_zerop (arg1))
10250 tree itype = TREE_TYPE (arg0);
10251 unsigned HOST_WIDE_INT prec = TYPE_PRECISION (itype);
10252 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
10254 /* Check for a valid shift count. */
10255 if (TREE_INT_CST_HIGH (arg001) == 0
10256 && TREE_INT_CST_LOW (arg001) < prec)
10258 tree arg01 = TREE_OPERAND (arg0, 1);
10259 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
10260 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
10261 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
10262 can be rewritten as (X & (C2 << C1)) != 0. */
10263 if ((log2 + TREE_INT_CST_LOW (arg01)) < prec)
10265 tem = fold_build2 (LSHIFT_EXPR, itype, arg01, arg001);
10266 tem = fold_build2 (BIT_AND_EXPR, itype, arg000, tem);
10267 return fold_build2 (code, type, tem, arg1);
10269 /* Otherwise, for signed (arithmetic) shifts,
10270 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
10271 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
10272 else if (!TYPE_UNSIGNED (itype))
10273 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
10274 arg000, build_int_cst (itype, 0));
10275 /* Otherwise, for unsigned (logical) shifts,
10276 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
10277 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
10279 return omit_one_operand (type,
10280 code == EQ_EXPR ? integer_one_node
10281 : integer_zero_node,
10286 /* If this is an NE comparison of zero with an AND of one, remove the
10287 comparison since the AND will give the correct value. */
10288 if (code == NE_EXPR
10289 && integer_zerop (arg1)
10290 && TREE_CODE (arg0) == BIT_AND_EXPR
10291 && integer_onep (TREE_OPERAND (arg0, 1)))
10292 return fold_convert (type, arg0);
10294 /* If we have (A & C) == C where C is a power of 2, convert this into
10295 (A & C) != 0. Similarly for NE_EXPR. */
10296 if (TREE_CODE (arg0) == BIT_AND_EXPR
10297 && integer_pow2p (TREE_OPERAND (arg0, 1))
10298 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10299 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
10300 arg0, fold_convert (TREE_TYPE (arg0),
10301 integer_zero_node));
10303 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
10304 bit, then fold the expression into A < 0 or A >= 0. */
10305 tem = fold_single_bit_test_into_sign_test (code, arg0, arg1, type);
10309 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
10310 Similarly for NE_EXPR. */
10311 if (TREE_CODE (arg0) == BIT_AND_EXPR
10312 && TREE_CODE (arg1) == INTEGER_CST
10313 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10315 tree notc = fold_build1 (BIT_NOT_EXPR,
10316 TREE_TYPE (TREE_OPERAND (arg0, 1)),
10317 TREE_OPERAND (arg0, 1));
10318 tree dandnotc = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
10320 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
10321 if (integer_nonzerop (dandnotc))
10322 return omit_one_operand (type, rslt, arg0);
10325 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
10326 Similarly for NE_EXPR. */
10327 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10328 && TREE_CODE (arg1) == INTEGER_CST
10329 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10331 tree notd = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
10332 tree candnotd = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
10333 TREE_OPERAND (arg0, 1), notd);
10334 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
10335 if (integer_nonzerop (candnotd))
10336 return omit_one_operand (type, rslt, arg0);
10339 /* If this is a comparison of a field, we may be able to simplify it. */
10340 if (((TREE_CODE (arg0) == COMPONENT_REF
10341 && lang_hooks.can_use_bit_fields_p ())
10342 || TREE_CODE (arg0) == BIT_FIELD_REF)
10343 /* Handle the constant case even without -O
10344 to make sure the warnings are given. */
10345 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
10347 t1 = optimize_bit_field_compare (code, type, arg0, arg1);
10352 /* Optimize comparisons of strlen vs zero to a compare of the
10353 first character of the string vs zero. To wit,
10354 strlen(ptr) == 0 => *ptr == 0
10355 strlen(ptr) != 0 => *ptr != 0
10356 Other cases should reduce to one of these two (or a constant)
10357 due to the return value of strlen being unsigned. */
10358 if (TREE_CODE (arg0) == CALL_EXPR
10359 && integer_zerop (arg1))
10361 tree fndecl = get_callee_fndecl (arg0);
10365 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
10366 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
10367 && (arglist = TREE_OPERAND (arg0, 1))
10368 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE
10369 && ! TREE_CHAIN (arglist))
10371 tree iref = build_fold_indirect_ref (TREE_VALUE (arglist));
10372 return fold_build2 (code, type, iref,
10373 build_int_cst (TREE_TYPE (iref), 0));
10377 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
10378 of X. Similarly fold (X >> C) == 0 into X >= 0. */
10379 if (TREE_CODE (arg0) == RSHIFT_EXPR
10380 && integer_zerop (arg1)
10381 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10383 tree arg00 = TREE_OPERAND (arg0, 0);
10384 tree arg01 = TREE_OPERAND (arg0, 1);
10385 tree itype = TREE_TYPE (arg00);
10386 if (TREE_INT_CST_HIGH (arg01) == 0
10387 && TREE_INT_CST_LOW (arg01)
10388 == (unsigned HOST_WIDE_INT) (TYPE_PRECISION (itype) - 1))
10390 if (TYPE_UNSIGNED (itype))
10392 itype = lang_hooks.types.signed_type (itype);
10393 arg00 = fold_convert (itype, arg00);
10395 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
10396 type, arg00, build_int_cst (itype, 0));
10400 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
10401 if (integer_zerop (arg1)
10402 && TREE_CODE (arg0) == BIT_XOR_EXPR)
10403 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
10404 TREE_OPERAND (arg0, 1));
10406 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
10407 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10408 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10409 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
10410 build_int_cst (TREE_TYPE (arg1), 0));
10411 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
10412 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10413 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10414 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10415 return fold_build2 (code, type, TREE_OPERAND (arg0, 1),
10416 build_int_cst (TREE_TYPE (arg1), 0));
10418 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
10419 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10420 && TREE_CODE (arg1) == INTEGER_CST
10421 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10422 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
10423 fold_build2 (BIT_XOR_EXPR, TREE_TYPE (arg1),
10424 TREE_OPERAND (arg0, 1), arg1));
10426 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
10427 (X & C) == 0 when C is a single bit. */
10428 if (TREE_CODE (arg0) == BIT_AND_EXPR
10429 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
10430 && integer_zerop (arg1)
10431 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10433 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
10434 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
10435 TREE_OPERAND (arg0, 1));
10436 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
10440 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
10441 constant C is a power of two, i.e. a single bit. */
10442 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10443 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
10444 && integer_zerop (arg1)
10445 && integer_pow2p (TREE_OPERAND (arg0, 1))
10446 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
10447 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
10449 tree arg00 = TREE_OPERAND (arg0, 0);
10450 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
10451 arg00, build_int_cst (TREE_TYPE (arg00), 0));
10454 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
10455 when C is a power of two, i.e. a single bit. */
10456 if (TREE_CODE (arg0) == BIT_AND_EXPR
10457 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
10458 && integer_zerop (arg1)
10459 && integer_pow2p (TREE_OPERAND (arg0, 1))
10460 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
10461 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
10463 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
10464 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg000),
10465 arg000, TREE_OPERAND (arg0, 1));
10466 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
10467 tem, build_int_cst (TREE_TYPE (tem), 0));
10470 /* If this is a comparison of two exprs that look like an
10471 ARRAY_REF of the same object, then we can fold this to a
10472 comparison of the two offsets. This is only safe for
10473 EQ_EXPR and NE_EXPR because of overflow issues. */
10475 tree base0, offset0, base1, offset1;
10477 if (extract_array_ref (arg0, &base0, &offset0)
10478 && extract_array_ref (arg1, &base1, &offset1)
10479 && operand_equal_p (base0, base1, 0))
10481 /* Handle no offsets on both sides specially. */
10482 if (offset0 == NULL_TREE && offset1 == NULL_TREE)
10483 return fold_build2 (code, type, integer_zero_node,
10484 integer_zero_node);
10486 if (!offset0 || !offset1
10487 || TREE_TYPE (offset0) == TREE_TYPE (offset1))
10489 if (offset0 == NULL_TREE)
10490 offset0 = build_int_cst (TREE_TYPE (offset1), 0);
10491 if (offset1 == NULL_TREE)
10492 offset1 = build_int_cst (TREE_TYPE (offset0), 0);
10493 return fold_build2 (code, type, offset0, offset1);
10498 if (integer_zerop (arg1)
10499 && tree_expr_nonzero_p (arg0))
10501 tree res = constant_boolean_node (code==NE_EXPR, type);
10502 return omit_one_operand (type, res, arg0);
10510 tem = fold_comparison (code, type, op0, op1);
10511 if (tem != NULL_TREE)
10514 /* Transform comparisons of the form X +- C CMP X. */
10515 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
10516 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10517 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
10518 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
10519 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10520 && !TYPE_UNSIGNED (TREE_TYPE (arg1))
10521 && !(flag_wrapv || flag_trapv))))
10523 tree arg01 = TREE_OPERAND (arg0, 1);
10524 enum tree_code code0 = TREE_CODE (arg0);
10527 if (TREE_CODE (arg01) == REAL_CST)
10528 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
10530 is_positive = tree_int_cst_sgn (arg01);
10532 /* (X - c) > X becomes false. */
10533 if (code == GT_EXPR
10534 && ((code0 == MINUS_EXPR && is_positive >= 0)
10535 || (code0 == PLUS_EXPR && is_positive <= 0)))
10536 return constant_boolean_node (0, type);
10538 /* Likewise (X + c) < X becomes false. */
10539 if (code == LT_EXPR
10540 && ((code0 == PLUS_EXPR && is_positive >= 0)
10541 || (code0 == MINUS_EXPR && is_positive <= 0)))
10542 return constant_boolean_node (0, type);
10544 /* Convert (X - c) <= X to true. */
10545 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
10547 && ((code0 == MINUS_EXPR && is_positive >= 0)
10548 || (code0 == PLUS_EXPR && is_positive <= 0)))
10549 return constant_boolean_node (1, type);
10551 /* Convert (X + c) >= X to true. */
10552 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
10554 && ((code0 == PLUS_EXPR && is_positive >= 0)
10555 || (code0 == MINUS_EXPR && is_positive <= 0)))
10556 return constant_boolean_node (1, type);
10558 if (TREE_CODE (arg01) == INTEGER_CST)
10560 /* Convert X + c > X and X - c < X to true for integers. */
10561 if (code == GT_EXPR
10562 && ((code0 == PLUS_EXPR && is_positive > 0)
10563 || (code0 == MINUS_EXPR && is_positive < 0)))
10564 return constant_boolean_node (1, type);
10566 if (code == LT_EXPR
10567 && ((code0 == MINUS_EXPR && is_positive > 0)
10568 || (code0 == PLUS_EXPR && is_positive < 0)))
10569 return constant_boolean_node (1, type);
10571 /* Convert X + c <= X and X - c >= X to false for integers. */
10572 if (code == LE_EXPR
10573 && ((code0 == PLUS_EXPR && is_positive > 0)
10574 || (code0 == MINUS_EXPR && is_positive < 0)))
10575 return constant_boolean_node (0, type);
10577 if (code == GE_EXPR
10578 && ((code0 == MINUS_EXPR && is_positive > 0)
10579 || (code0 == PLUS_EXPR && is_positive < 0)))
10580 return constant_boolean_node (0, type);
10584 /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
10585 This transformation affects the cases which are handled in later
10586 optimizations involving comparisons with non-negative constants. */
10587 if (TREE_CODE (arg1) == INTEGER_CST
10588 && TREE_CODE (arg0) != INTEGER_CST
10589 && tree_int_cst_sgn (arg1) > 0)
10591 if (code == GE_EXPR)
10593 arg1 = const_binop (MINUS_EXPR, arg1,
10594 build_int_cst (TREE_TYPE (arg1), 1), 0);
10595 return fold_build2 (GT_EXPR, type, arg0,
10596 fold_convert (TREE_TYPE (arg0), arg1));
10598 if (code == LT_EXPR)
10600 arg1 = const_binop (MINUS_EXPR, arg1,
10601 build_int_cst (TREE_TYPE (arg1), 1), 0);
10602 return fold_build2 (LE_EXPR, type, arg0,
10603 fold_convert (TREE_TYPE (arg0), arg1));
10607 /* Comparisons with the highest or lowest possible integer of
10608 the specified size will have known values. */
10610 int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg1)));
10612 if (TREE_CODE (arg1) == INTEGER_CST
10613 && ! TREE_CONSTANT_OVERFLOW (arg1)
10614 && width <= 2 * HOST_BITS_PER_WIDE_INT
10615 && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10616 || POINTER_TYPE_P (TREE_TYPE (arg1))))
10618 HOST_WIDE_INT signed_max_hi;
10619 unsigned HOST_WIDE_INT signed_max_lo;
10620 unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;
10622 if (width <= HOST_BITS_PER_WIDE_INT)
10624 signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
10629 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
10631 max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
10637 max_lo = signed_max_lo;
10638 min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
10644 width -= HOST_BITS_PER_WIDE_INT;
10645 signed_max_lo = -1;
10646 signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
10651 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
10653 max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
10658 max_hi = signed_max_hi;
10659 min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
10663 if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
10664 && TREE_INT_CST_LOW (arg1) == max_lo)
10668 return omit_one_operand (type, integer_zero_node, arg0);
10671 return fold_build2 (EQ_EXPR, type, arg0, arg1);
10674 return omit_one_operand (type, integer_one_node, arg0);
10677 return fold_build2 (NE_EXPR, type, arg0, arg1);
10679 /* The GE_EXPR and LT_EXPR cases above are not normally
10680 reached because of previous transformations. */
10685 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
10687 && TREE_INT_CST_LOW (arg1) == max_lo - 1)
10691 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
10692 return fold_build2 (EQ_EXPR, type, arg0, arg1);
10694 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
10695 return fold_build2 (NE_EXPR, type, arg0, arg1);
10699 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
10701 && TREE_INT_CST_LOW (arg1) == min_lo)
10705 return omit_one_operand (type, integer_zero_node, arg0);
10708 return fold_build2 (EQ_EXPR, type, arg0, arg1);
10711 return omit_one_operand (type, integer_one_node, arg0);
10714 return fold_build2 (NE_EXPR, type, op0, op1);
10719 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
10721 && TREE_INT_CST_LOW (arg1) == min_lo + 1)
10725 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
10726 return fold_build2 (NE_EXPR, type, arg0, arg1);
10728 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
10729 return fold_build2 (EQ_EXPR, type, arg0, arg1);
10734 else if (!in_gimple_form
10735 && TREE_INT_CST_HIGH (arg1) == signed_max_hi
10736 && TREE_INT_CST_LOW (arg1) == signed_max_lo
10737 && TYPE_UNSIGNED (TREE_TYPE (arg1))
10738 /* signed_type does not work on pointer types. */
10739 && INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
10741 /* The following case also applies to X < signed_max+1
10742 and X >= signed_max+1 because of previous transformations. */
10743 if (code == LE_EXPR || code == GT_EXPR)
10746 st0 = lang_hooks.types.signed_type (TREE_TYPE (arg0));
10747 st1 = lang_hooks.types.signed_type (TREE_TYPE (arg1));
10748 return fold_build2 (code == LE_EXPR ? GE_EXPR: LT_EXPR,
10749 type, fold_convert (st0, arg0),
10750 build_int_cst (st1, 0));
10756 /* If we are comparing an ABS_EXPR with a constant, we can
10757 convert all the cases into explicit comparisons, but they may
10758 well not be faster than doing the ABS and one comparison.
10759 But ABS (X) <= C is a range comparison, which becomes a subtraction
10760 and a comparison, and is probably faster. */
10761 if (code == LE_EXPR
10762 && TREE_CODE (arg1) == INTEGER_CST
10763 && TREE_CODE (arg0) == ABS_EXPR
10764 && ! TREE_SIDE_EFFECTS (arg0)
10765 && (0 != (tem = negate_expr (arg1)))
10766 && TREE_CODE (tem) == INTEGER_CST
10767 && ! TREE_CONSTANT_OVERFLOW (tem))
10768 return fold_build2 (TRUTH_ANDIF_EXPR, type,
10769 build2 (GE_EXPR, type,
10770 TREE_OPERAND (arg0, 0), tem),
10771 build2 (LE_EXPR, type,
10772 TREE_OPERAND (arg0, 0), arg1));
10774 /* Convert ABS_EXPR<x> >= 0 to true. */
10775 if (code == GE_EXPR
10776 && tree_expr_nonnegative_p (arg0)
10777 && (integer_zerop (arg1)
10778 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10779 && real_zerop (arg1))))
10780 return omit_one_operand (type, integer_one_node, arg0);
10782 /* Convert ABS_EXPR<x> < 0 to false. */
10783 if (code == LT_EXPR
10784 && tree_expr_nonnegative_p (arg0)
10785 && (integer_zerop (arg1) || real_zerop (arg1)))
10786 return omit_one_operand (type, integer_zero_node, arg0);
10788 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
10789 and similarly for >= into !=. */
10790 if ((code == LT_EXPR || code == GE_EXPR)
10791 && TYPE_UNSIGNED (TREE_TYPE (arg0))
10792 && TREE_CODE (arg1) == LSHIFT_EXPR
10793 && integer_onep (TREE_OPERAND (arg1, 0)))
10794 return build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
10795 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
10796 TREE_OPERAND (arg1, 1)),
10797 build_int_cst (TREE_TYPE (arg0), 0));
10799 if ((code == LT_EXPR || code == GE_EXPR)
10800 && TYPE_UNSIGNED (TREE_TYPE (arg0))
10801 && (TREE_CODE (arg1) == NOP_EXPR
10802 || TREE_CODE (arg1) == CONVERT_EXPR)
10803 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
10804 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
10806 build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
10807 fold_convert (TREE_TYPE (arg0),
10808 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
10809 TREE_OPERAND (TREE_OPERAND (arg1, 0),
10811 build_int_cst (TREE_TYPE (arg0), 0));
10815 case UNORDERED_EXPR:
10823 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
10825 t1 = fold_relational_const (code, type, arg0, arg1);
10826 if (t1 != NULL_TREE)
10830 /* If the first operand is NaN, the result is constant. */
10831 if (TREE_CODE (arg0) == REAL_CST
10832 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
10833 && (code != LTGT_EXPR || ! flag_trapping_math))
10835 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
10836 ? integer_zero_node
10837 : integer_one_node;
10838 return omit_one_operand (type, t1, arg1);
10841 /* If the second operand is NaN, the result is constant. */
10842 if (TREE_CODE (arg1) == REAL_CST
10843 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
10844 && (code != LTGT_EXPR || ! flag_trapping_math))
10846 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
10847 ? integer_zero_node
10848 : integer_one_node;
10849 return omit_one_operand (type, t1, arg0);
10852 /* Simplify unordered comparison of something with itself. */
10853 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
10854 && operand_equal_p (arg0, arg1, 0))
10855 return constant_boolean_node (1, type);
10857 if (code == LTGT_EXPR
10858 && !flag_trapping_math
10859 && operand_equal_p (arg0, arg1, 0))
10860 return constant_boolean_node (0, type);
10862 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
10864 tree targ0 = strip_float_extensions (arg0);
10865 tree targ1 = strip_float_extensions (arg1);
10866 tree newtype = TREE_TYPE (targ0);
10868 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
10869 newtype = TREE_TYPE (targ1);
10871 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
10872 return fold_build2 (code, type, fold_convert (newtype, targ0),
10873 fold_convert (newtype, targ1));
10878 case COMPOUND_EXPR:
10879 /* When pedantic, a compound expression can be neither an lvalue
10880 nor an integer constant expression. */
10881 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
10883 /* Don't let (0, 0) be null pointer constant. */
10884 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
10885 : fold_convert (type, arg1);
10886 return pedantic_non_lvalue (tem);
10889 if ((TREE_CODE (arg0) == REAL_CST
10890 && TREE_CODE (arg1) == REAL_CST)
10891 || (TREE_CODE (arg0) == INTEGER_CST
10892 && TREE_CODE (arg1) == INTEGER_CST))
10893 return build_complex (type, arg0, arg1);
10897 /* An ASSERT_EXPR should never be passed to fold_binary. */
10898 gcc_unreachable ();
10902 } /* switch (code) */
10905 /* Callback for walk_tree, looking for LABEL_EXPR.
10906 Returns tree TP if it is LABEL_EXPR. Otherwise it returns NULL_TREE.
10907 Do not check the sub-tree of GOTO_EXPR. */
10910 contains_label_1 (tree *tp,
10911 int *walk_subtrees,
10912 void *data ATTRIBUTE_UNUSED)
/* Dispatch on the tree code of the node currently being walked.
   NOTE(review): the case labels of this switch are elided in this view;
   per the header comment the callback returns *tp when the node is a
   LABEL_EXPR and NULL_TREE otherwise -- confirm against the full file.  */
10914 switch (TREE_CODE (*tp))
/* Clearing *walk_subtrees tells walk_tree not to descend into this
   node's operands; per the header comment this is done for GOTO_EXPR so
   the label operand of a goto is not reported as a contained label.  */
10919 *walk_subtrees = 0;
10926 /* Checks whether the sub-tree ST contains a label LABEL_EXPR which is
10927 accessible from outside the sub-tree. Returns true if such a
10928 label is found, false otherwise. */
10931 contains_label_p (tree st)
/* walk_tree applies contains_label_1 to every node of ST and yields the
   first non-NULL callback result (the offending LABEL_EXPR) or
   NULL_TREE; comparing against NULL_TREE turns that into a truth
   value, so callers get a simple yes/no answer.  */
10933 return (walk_tree (&st, contains_label_1 , NULL, NULL) != NULL_TREE);
10936 /* Fold a ternary expression of code CODE and type TYPE with operands
10937 OP0, OP1, and OP2. Return the folded expression if folding is
10938 successful. Otherwise, return NULL_TREE. */
10941 fold_ternary (enum tree_code code, tree type, tree op0, tree op1, tree op2)
10944 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
10945 enum tree_code_class kind = TREE_CODE_CLASS (code);
10947 gcc_assert (IS_EXPR_CODE_CLASS (kind)
10948 && TREE_CODE_LENGTH (code) == 3);
10950 /* Strip any conversions that don't change the mode. This is safe
10951 for every expression, except for a comparison expression because
10952 its signedness is derived from its operands. So, in the latter
10953 case, only strip conversions that don't change the signedness.
10955 Note that this is done as an internal manipulation within the
10956 constant folder, in order to find the simplest representation of
10957 the arguments so that their form can be studied. In any cases,
10958 the appropriate type conversions should be put back in the tree
10959 that will get out of the constant folder. */
10974 case COMPONENT_REF:
10975 if (TREE_CODE (arg0) == CONSTRUCTOR
10976 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
10978 unsigned HOST_WIDE_INT idx;
10980 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
10987 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
10988 so all simple results must be passed through pedantic_non_lvalue. */
10989 if (TREE_CODE (arg0) == INTEGER_CST)
10991 tree unused_op = integer_zerop (arg0) ? op1 : op2;
10992 tem = integer_zerop (arg0) ? op2 : op1;
10993 /* Only optimize constant conditions when the selected branch
10994 has the same type as the COND_EXPR. This avoids optimizing
10995 away "c ? x : throw", where the throw has a void type.
10996 Avoid throwing away that operand which contains label. */
10997 if ((!TREE_SIDE_EFFECTS (unused_op)
10998 || !contains_label_p (unused_op))
10999 && (! VOID_TYPE_P (TREE_TYPE (tem))
11000 || VOID_TYPE_P (type)))
11001 return pedantic_non_lvalue (tem);
11004 if (operand_equal_p (arg1, op2, 0))
11005 return pedantic_omit_one_operand (type, arg1, arg0);
11007 /* If we have A op B ? A : C, we may be able to convert this to a
11008 simpler expression, depending on the operation and the values
11009 of B and C. Signed zeros prevent all of these transformations,
11010 for reasons given above each one.
11012 Also try swapping the arguments and inverting the conditional. */
11013 if (COMPARISON_CLASS_P (arg0)
11014 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
11015 arg1, TREE_OPERAND (arg0, 1))
11016 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
11018 tem = fold_cond_expr_with_comparison (type, arg0, op1, op2);
11023 if (COMPARISON_CLASS_P (arg0)
11024 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
11026 TREE_OPERAND (arg0, 1))
11027 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
11029 tem = invert_truthvalue (arg0);
11030 if (COMPARISON_CLASS_P (tem))
11032 tem = fold_cond_expr_with_comparison (type, tem, op2, op1);
11038 /* If the second operand is simpler than the third, swap them
11039 since that produces better jump optimization results. */
11040 if (truth_value_p (TREE_CODE (arg0))
11041 && tree_swap_operands_p (op1, op2, false))
11043 /* See if this can be inverted. If it can't, possibly because
11044 it was a floating-point inequality comparison, don't do
11046 tem = invert_truthvalue (arg0);
11048 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
11049 return fold_build3 (code, type, tem, op2, op1);
11052 /* Convert A ? 1 : 0 to simply A. */
11053 if (integer_onep (op1)
11054 && integer_zerop (op2)
11055 /* If we try to convert OP0 to our type, the
11056 call to fold will try to move the conversion inside
11057 a COND, which will recurse. In that case, the COND_EXPR
11058 is probably the best choice, so leave it alone. */
11059 && type == TREE_TYPE (arg0))
11060 return pedantic_non_lvalue (arg0);
11062 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
11063 over COND_EXPR in cases such as floating point comparisons. */
11064 if (integer_zerop (op1)
11065 && integer_onep (op2)
11066 && truth_value_p (TREE_CODE (arg0)))
11067 return pedantic_non_lvalue (fold_convert (type,
11068 invert_truthvalue (arg0)));
11070 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
11071 if (TREE_CODE (arg0) == LT_EXPR
11072 && integer_zerop (TREE_OPERAND (arg0, 1))
11073 && integer_zerop (op2)
11074 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
11075 return fold_convert (type, fold_build2 (BIT_AND_EXPR,
11076 TREE_TYPE (tem), tem, arg1));
11078 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
11079 already handled above. */
11080 if (TREE_CODE (arg0) == BIT_AND_EXPR
11081 && integer_onep (TREE_OPERAND (arg0, 1))
11082 && integer_zerop (op2)
11083 && integer_pow2p (arg1))
11085 tree tem = TREE_OPERAND (arg0, 0);
11087 if (TREE_CODE (tem) == RSHIFT_EXPR
11088 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
11089 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
11090 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
11091 return fold_build2 (BIT_AND_EXPR, type,
11092 TREE_OPERAND (tem, 0), arg1);
11095 /* A & N ? N : 0 is simply A & N if N is a power of two. This
11096 is probably obsolete because the first operand should be a
11097 truth value (that's why we have the two cases above), but let's
11098 leave it in until we can confirm this for all front-ends. */
11099 if (integer_zerop (op2)
11100 && TREE_CODE (arg0) == NE_EXPR
11101 && integer_zerop (TREE_OPERAND (arg0, 1))
11102 && integer_pow2p (arg1)
11103 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
11104 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
11105 arg1, OEP_ONLY_CONST))
11106 return pedantic_non_lvalue (fold_convert (type,
11107 TREE_OPERAND (arg0, 0)));
11109 /* Convert A ? B : 0 into A && B if A and B are truth values. */
11110 if (integer_zerop (op2)
11111 && truth_value_p (TREE_CODE (arg0))
11112 && truth_value_p (TREE_CODE (arg1)))
11113 return fold_build2 (TRUTH_ANDIF_EXPR, type,
11114 fold_convert (type, arg0),
11117 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
11118 if (integer_onep (op2)
11119 && truth_value_p (TREE_CODE (arg0))
11120 && truth_value_p (TREE_CODE (arg1)))
11122 /* Only perform transformation if ARG0 is easily inverted. */
11123 tem = invert_truthvalue (arg0);
11124 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
11125 return fold_build2 (TRUTH_ORIF_EXPR, type,
11126 fold_convert (type, tem),
11130 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
11131 if (integer_zerop (arg1)
11132 && truth_value_p (TREE_CODE (arg0))
11133 && truth_value_p (TREE_CODE (op2)))
11135 /* Only perform transformation if ARG0 is easily inverted. */
11136 tem = invert_truthvalue (arg0);
11137 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
11138 return fold_build2 (TRUTH_ANDIF_EXPR, type,
11139 fold_convert (type, tem),
11143 /* Convert A ? 1 : B into A || B if A and B are truth values. */
11144 if (integer_onep (arg1)
11145 && truth_value_p (TREE_CODE (arg0))
11146 && truth_value_p (TREE_CODE (op2)))
11147 return fold_build2 (TRUTH_ORIF_EXPR, type,
11148 fold_convert (type, arg0),
11154 /* Check for a built-in function. */
11155 if (TREE_CODE (op0) == ADDR_EXPR
11156 && TREE_CODE (TREE_OPERAND (op0, 0)) == FUNCTION_DECL
11157 && DECL_BUILT_IN (TREE_OPERAND (op0, 0)))
11158 return fold_builtin (TREE_OPERAND (op0, 0), op1, false);
11161 case BIT_FIELD_REF:
11162 if (TREE_CODE (arg0) == VECTOR_CST
11163 && type == TREE_TYPE (TREE_TYPE (arg0))
11164 && host_integerp (arg1, 1)
11165 && host_integerp (op2, 1))
11167 unsigned HOST_WIDE_INT width = tree_low_cst (arg1, 1);
11168 unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);
11171 && simple_cst_equal (arg1, TYPE_SIZE (type)) == 1
11172 && (idx % width) == 0
11173 && (idx = idx / width)
11174 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
11176 tree elements = TREE_VECTOR_CST_ELTS (arg0);
11177 while (idx-- > 0 && elements)
11178 elements = TREE_CHAIN (elements);
11180 return TREE_VALUE (elements);
11182 return fold_convert (type, integer_zero_node);
11189 } /* switch (code) */
/* NOTE(review): this is a sampled, line-numbered listing; the numeric
   prefixes are part of the listing and several original lines (including
   the `fold (tree expr)' header around lines 11203-11207) are elided.  */
11192 /* Perform constant folding and related simplification of EXPR.
11193 The related simplifications include x*1 => x, x*0 => 0, etc.,
11194 and application of the associative law.
11195 NOP_EXPR conversions may be removed freely (as long as we
11196 are careful not to change the type of the overall expression).
11197 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
11198 but we can constant-fold them if they have constant operands. */
/* Under fold checking, the real worker is fold_1; `fold' becomes a
   checksumming wrapper (defined further below in this file).  */
11200 #ifdef ENABLE_FOLD_CHECKING
11201 # define fold(x) fold_1 (x)
11202 static tree fold_1 (tree);
11208 const tree t = expr;
11209 enum tree_code code = TREE_CODE (t);
11210 enum tree_code_class kind = TREE_CODE_CLASS (code);
11213 /* Return right away if a constant. */
11214 if (kind == tcc_constant)
11217 if (IS_EXPR_CODE_CLASS (kind))
11219 tree type = TREE_TYPE (t);
11220 tree op0, op1, op2;
/* Dispatch on operand count: one operand -> fold_unary, two ->
   fold_binary, three -> fold_ternary.  Each returns the folded tree
   or NULL_TREE, in which case the original EXPR is returned as-is.  */
11222 switch (TREE_CODE_LENGTH (code))
11225 op0 = TREE_OPERAND (t, 0);
11226 tem = fold_unary (code, type, op0);
11227 return tem ? tem : expr;
11229 op0 = TREE_OPERAND (t, 0);
11230 op1 = TREE_OPERAND (t, 1);
11231 tem = fold_binary (code, type, op0, op1);
11232 return tem ? tem : expr;
11234 op0 = TREE_OPERAND (t, 0);
11235 op1 = TREE_OPERAND (t, 1);
11236 op2 = TREE_OPERAND (t, 2);
11237 tem = fold_ternary (code, type, op0, op1, op2);
11238 return tem ? tem : expr;
/* Presumably a CONST_DECL case — folds to the decl's initial value;
   the surrounding case label is elided from this listing.  */
11247 return fold (DECL_INITIAL (t));
11251 } /* switch (code) */
/* NOTE(review): sampled listing fragment.  When --enable-checking=fold
   is configured, `fold' is redefined to this wrapper: it MD5-checksums
   EXPR before and after calling fold_1 and aborts if fold mutated its
   input in place.  The wrapper's own header/locals (ret, ht) and the
   htab_delete/return tail are elided here.  */
11254 #ifdef ENABLE_FOLD_CHECKING
11257 static void fold_checksum_tree (tree, struct md5_ctx *, htab_t);
11258 static void fold_check_failed (tree, tree);
11259 void print_fold_checksum (tree);
11261 /* When --enable-checking=fold, compute a digest of expr before
11262 and after actual fold call to see if fold did not accidentally
11263 change original expr. */
11269 struct md5_ctx ctx;
11270 unsigned char checksum_before[16], checksum_after[16];
/* Pointer-identity hash table used to break cycles while walking the
   tree during checksumming.  */
11273 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
11274 md5_init_ctx (&ctx);
11275 fold_checksum_tree (expr, &ctx, ht);
11276 md5_finish_ctx (&ctx, checksum_before);
11279 ret = fold_1 (expr);
11281 md5_init_ctx (&ctx);
11282 fold_checksum_tree (expr, &ctx, ht);
11283 md5_finish_ctx (&ctx, checksum_after);
/* Any byte difference means fold_1 modified the original tree.  */
11286 if (memcmp (checksum_before, checksum_after, 16))
11287 fold_check_failed (expr, ret);
/* Debug helper: print the MD5 checksum of EXPR (as computed by
   fold_checksum_tree) to stderr as 32 hex digits plus a newline.
   NOTE(review): sampled listing — the htab_t local and the trailing
   htab_delete are elided.  */
11293 print_fold_checksum (tree expr)
11295 struct md5_ctx ctx;
11296 unsigned char checksum[16], cnt;
11299 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
11300 md5_init_ctx (&ctx);
11301 fold_checksum_tree (expr, &ctx, ht);
11302 md5_finish_ctx (&ctx, checksum);
11304 for (cnt = 0; cnt < 16; ++cnt)
11305 fprintf (stderr, "%02x", checksum[cnt]);
11306 putc ('\n', stderr);
/* Report (via internal_error, which does not return) that fold
   modified the tree it was given.  Both parameters exist only for
   debugger inspection, hence ATTRIBUTE_UNUSED.  */
11310 fold_check_failed (tree expr ATTRIBUTE_UNUSED, tree ret ATTRIBUTE_UNUSED)
11312 internal_error ("fold check: original tree changed by fold")
/* Recursively fold the bytes of EXPR and everything it references into
   the MD5 context CTX.  HT is a pointer-identity hash table used to
   visit each node only once (guards against cycles).  Fields that fold
   is allowed to change (DECL_ASSEMBLER_NAME, TYPE_POINTER_TO, cached
   values, ...) are masked out by checksumming a scrubbed stack copy.
   NOTE(review): sampled listing — braces, several case labels, the
   early-return on an occupied hash slot, and the recursive_label are
   elided.  */
11316 fold_checksum_tree (tree expr, struct md5_ctx *ctx, htab_t ht)
11319 enum tree_code code;
11320 struct tree_function_decl buf;
/* BUF must be big enough to hold a scrubbed copy of any node we copy
   into it below; tree_function_decl is the largest such node.  */
11325 gcc_assert ((sizeof (struct tree_exp) + 5 * sizeof (tree)
11326 <= sizeof (struct tree_function_decl))
11327 && sizeof (struct tree_type) <= sizeof (struct tree_function_decl));
11330 slot = htab_find_slot (ht, expr, INSERT);
11334 code = TREE_CODE (expr);
11335 if (TREE_CODE_CLASS (code) == tcc_declaration
11336 && DECL_ASSEMBLER_NAME_SET_P (expr))
11338 /* Allow DECL_ASSEMBLER_NAME to be modified. */
11339 memcpy ((char *) &buf, expr, tree_size (expr))
11340 expr = (tree) &buf;
11341 SET_DECL_ASSEMBLER_NAME (expr, NULL);
11343 else if (TREE_CODE_CLASS (code) == tcc_type
11344 && (TYPE_POINTER_TO (expr) || TYPE_REFERENCE_TO (expr)
11345 || TYPE_CACHED_VALUES_P (expr)
11346 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)))
11348 /* Allow these fields to be modified. */
11349 memcpy ((char *) &buf, expr, tree_size (expr));
11350 expr = (tree) &buf;
11351 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr) = 0;
11352 TYPE_POINTER_TO (expr) = NULL;
11353 TYPE_REFERENCE_TO (expr) = NULL;
11354 if (TYPE_CACHED_VALUES_P (expr))
11356 TYPE_CACHED_VALUES_P (expr) = 0;
11357 TYPE_CACHED_VALUES (expr) = NULL;
/* Hash the (possibly scrubbed) node bytes, then recurse into the
   fields the raw bytes do not cover.  */
11360 md5_process_bytes (expr, tree_size (expr), ctx);
11361 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
11362 if (TREE_CODE_CLASS (code) != tcc_type
11363 && TREE_CODE_CLASS (code) != tcc_declaration
11364 && code != TREE_LIST)
11365 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
11366 switch (TREE_CODE_CLASS (code))
11372 md5_process_bytes (TREE_STRING_POINTER (expr),
11373 TREE_STRING_LENGTH (expr), ctx);
11376 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
11377 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
11380 fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
11386 case tcc_exceptional:
/* TREE_LIST: walk purpose/value, then iterate down the chain via
   goto rather than recursing on every link.  */
11390 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
11391 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
11392 expr = TREE_CHAIN (expr);
11393 goto recursive_label;
11396 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
11397 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
11403 case tcc_expression:
11404 case tcc_reference:
11405 case tcc_comparison:
11408 case tcc_statement:
11409 len = TREE_CODE_LENGTH (code);
11410 for (i = 0; i < len; ++i)
11411 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
11413 case tcc_declaration:
11414 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
11415 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
11416 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
11418 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
11419 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
11420 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
11421 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
11422 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
11424 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
11425 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
11427 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
11429 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
11430 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
11431 fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
/* tcc_type: hash the type's out-of-line fields too.  */
11435 if (TREE_CODE (expr) == ENUMERAL_TYPE)
11436 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
11437 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
11438 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
11439 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
11440 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
11441 if (INTEGRAL_TYPE_P (expr)
11442 || SCALAR_FLOAT_TYPE_P (expr))
11444 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
11445 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
11447 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
11448 if (TREE_CODE (expr) == RECORD_TYPE
11449 || TREE_CODE (expr) == UNION_TYPE
11450 || TREE_CODE (expr) == QUAL_UNION_TYPE)
11451 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
11452 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
11461 /* Fold a unary tree expression with code CODE of type TYPE with an
11462 operand OP0. Return a folded expression if successful. Otherwise,
11463 return a tree expression with code CODE of type TYPE with an
/* (continuation of the comment above: "...operand OP0" — elided in
   this sampled listing).  Under ENABLE_FOLD_CHECKING, OP0 is
   checksummed before and after to verify fold_unary did not mutate
   its argument.  */
11467 fold_build1_stat (enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
11470 #ifdef ENABLE_FOLD_CHECKING
11471 unsigned char checksum_before[16], checksum_after[16];
11472 struct md5_ctx ctx;
11475 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
11476 md5_init_ctx (&ctx);
11477 fold_checksum_tree (op0, &ctx, ht);
11478 md5_finish_ctx (&ctx, checksum_before);
/* Try to fold; if fold_unary returns NULL_TREE, build the raw node.  */
11482 tem = fold_unary (code, type, op0);
11484 tem = build1_stat (code, type, op0 PASS_MEM_STAT);
11486 #ifdef ENABLE_FOLD_CHECKING
11487 md5_init_ctx (&ctx);
11488 fold_checksum_tree (op0, &ctx, ht);
11489 md5_finish_ctx (&ctx, checksum_after);
11492 if (memcmp (checksum_before, checksum_after, 16))
11493 fold_check_failed (op0, tem);
11498 /* Fold a binary tree expression with code CODE of type TYPE with
11499 operands OP0 and OP1. Return a folded expression if successful.
11500 Otherwise, return a tree expression with code CODE of type TYPE
11501 with operands OP0 and OP1. */
/* Under ENABLE_FOLD_CHECKING each operand is independently
   checksummed before and after folding.  */
11504 fold_build2_stat (enum tree_code code, tree type, tree op0, tree op1
11508 #ifdef ENABLE_FOLD_CHECKING
11509 unsigned char checksum_before_op0[16],
11510 checksum_before_op1[16],
11511 checksum_after_op0[16],
11512 checksum_after_op1[16];
11513 struct md5_ctx ctx;
11516 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
11517 md5_init_ctx (&ctx);
11518 fold_checksum_tree (op0, &ctx, ht);
11519 md5_finish_ctx (&ctx, checksum_before_op0);
11522 md5_init_ctx (&ctx);
11523 fold_checksum_tree (op1, &ctx, ht);
11524 md5_finish_ctx (&ctx, checksum_before_op1);
/* Fold if possible, otherwise build the raw binary node.  */
11528 tem = fold_binary (code, type, op0, op1);
11530 tem = build2_stat (code, type, op0, op1 PASS_MEM_STAT);
11532 #ifdef ENABLE_FOLD_CHECKING
11533 md5_init_ctx (&ctx);
11534 fold_checksum_tree (op0, &ctx, ht);
11535 md5_finish_ctx (&ctx, checksum_after_op0);
11538 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
11539 fold_check_failed (op0, tem);
11541 md5_init_ctx (&ctx);
11542 fold_checksum_tree (op1, &ctx, ht);
11543 md5_finish_ctx (&ctx, checksum_after_op1);
11546 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
11547 fold_check_failed (op1, tem);
11552 /* Fold a ternary tree expression with code CODE of type TYPE with
11553 operands OP0, OP1, and OP2. Return a folded expression if
11554 successful. Otherwise, return a tree expression with code CODE of
11555 type TYPE with operands OP0, OP1, and OP2. */
/* Same before/after checksum discipline as fold_build2_stat, extended
   to the third operand.  */
11558 fold_build3_stat (enum tree_code code, tree type, tree op0, tree op1, tree op2
11562 #ifdef ENABLE_FOLD_CHECKING
11563 unsigned char checksum_before_op0[16],
11564 checksum_before_op1[16],
11565 checksum_before_op2[16],
11566 checksum_after_op0[16],
11567 checksum_after_op1[16],
11568 checksum_after_op2[16];
11569 struct md5_ctx ctx;
11572 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
11573 md5_init_ctx (&ctx);
11574 fold_checksum_tree (op0, &ctx, ht);
11575 md5_finish_ctx (&ctx, checksum_before_op0);
11578 md5_init_ctx (&ctx);
11579 fold_checksum_tree (op1, &ctx, ht);
11580 md5_finish_ctx (&ctx, checksum_before_op1);
11583 md5_init_ctx (&ctx);
11584 fold_checksum_tree (op2, &ctx, ht);
11585 md5_finish_ctx (&ctx, checksum_before_op2);
/* Fold if possible, otherwise build the raw ternary node.  */
11589 tem = fold_ternary (code, type, op0, op1, op2);
11591 tem = build3_stat (code, type, op0, op1, op2 PASS_MEM_STAT);
11593 #ifdef ENABLE_FOLD_CHECKING
11594 md5_init_ctx (&ctx);
11595 fold_checksum_tree (op0, &ctx, ht);
11596 md5_finish_ctx (&ctx, checksum_after_op0);
11599 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
11600 fold_check_failed (op0, tem);
11602 md5_init_ctx (&ctx);
11603 fold_checksum_tree (op1, &ctx, ht);
11604 md5_finish_ctx (&ctx, checksum_after_op1);
11607 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
11608 fold_check_failed (op1, tem);
11610 md5_init_ctx (&ctx);
11611 fold_checksum_tree (op2, &ctx, ht);
11612 md5_finish_ctx (&ctx, checksum_after_op2);
11615 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
11616 fold_check_failed (op2, tem);
11621 /* Perform constant folding and related simplification of initializer
11622 expression EXPR. These behave identically to "fold_buildN" but ignore
11623 potential run-time traps and exceptions that fold must preserve. */
/* START_FOLD_INIT saves and clears the trap-related flags so that
   folding an initializer may assume no run-time traps; END_FOLD_INIT
   restores them.  (flag_trapv = 0 presumably appears on the elided
   line 11633.)  */
11625 #define START_FOLD_INIT \
11626 int saved_signaling_nans = flag_signaling_nans;\
11627 int saved_trapping_math = flag_trapping_math;\
11628 int saved_rounding_math = flag_rounding_math;\
11629 int saved_trapv = flag_trapv;\
11630 flag_signaling_nans = 0;\
11631 flag_trapping_math = 0;\
11632 flag_rounding_math = 0;\
11635 #define END_FOLD_INIT \
11636 flag_signaling_nans = saved_signaling_nans;\
11637 flag_trapping_math = saved_trapping_math;\
11638 flag_rounding_math = saved_rounding_math;\
11639 flag_trapv = saved_trapv
/* Each fold_buildN_initializer wraps fold_buildN between
   START_FOLD_INIT and END_FOLD_INIT (the macro invocations and
   `return result;' lines are elided in this listing).  */
11642 fold_build1_initializer (enum tree_code code, tree type, tree op)
11647 result = fold_build1 (code, type, op);
11654 fold_build2_initializer (enum tree_code code, tree type, tree op0, tree op1)
11659 result = fold_build2 (code, type, op0, op1);
11666 fold_build3_initializer (enum tree_code code, tree type, tree op0, tree op1,
11672 result = fold_build3 (code, type, op0, op1, op2);
11678 #undef START_FOLD_INIT
11679 #undef END_FOLD_INIT
11681 /* Determine if first argument is a multiple of second argument. Return 0 if
11682 it is not, or we cannot easily determined it to be.
11684 An example of the sort of thing we care about (at this point; this routine
11685 could surely be made more general, and expanded to do what the *_DIV_EXPR's
11686 fold cases do now) is discovering that
11688 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
11694 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
11696 This code also handles discovering that
11698 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
11700 is a multiple of 8 so we don't have to worry about dealing with a
11701 possible remainder.
11703 Note that we *look* inside a SAVE_EXPR only to determine how it was
11704 calculated; it is not safe for fold to do much of anything else with the
11705 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
11706 at run time. For example, the latter example above *cannot* be implemented
11707 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
11708 evaluation time of the original SAVE_EXPR is not necessarily the same at
11709 the time the new expression is evaluated. The only optimization of this
11710 sort that would be valid is changing
11712 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
11716 SAVE_EXPR (I) * SAVE_EXPR (J)
11718 (where the same SAVE_EXPR (J) is used in the original and the
11719 transformed version). */
/* NOTE(review): sampled listing — case labels (BIT_AND_EXPR,
   MULT_EXPR, PLUS/MINUS, LSHIFT_EXPR, NOP/CONVERT, SAVE_EXPR,
   INTEGER_CST, presumably) and some returns are elided; the comments
   below hedge accordingly.  */
11722 multiple_of_p (tree type, tree top, tree bottom)
/* Trivially, anything is a multiple of itself.  */
11724 if (operand_equal_p (top, bottom, 0))
11727 if (TREE_CODE (type) != INTEGER_TYPE)
11730 switch (TREE_CODE (top))
11733 /* Bitwise and provides a power of two multiple. If the mask is
11734 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
11735 if (!integer_pow2p (bottom))
/* MULT: a product is a multiple if either factor is.  */
11740 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
11741 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
/* PLUS/MINUS: a sum/difference is a multiple only if both sides are.  */
11745 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
11746 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
/* Left shift by a constant: rewrite as multiplication by 1<<N and
   recurse, but only if the shift count cannot overflow.  */
11749 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
11753 op1 = TREE_OPERAND (top, 1);
11754 /* const_binop may not detect overflow correctly,
11755 so check for it explicitly here. */
11756 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
11757 > TREE_INT_CST_LOW (op1)
11758 && TREE_INT_CST_HIGH (op1) == 0
11759 && 0 != (t1 = fold_convert (type,
11760 const_binop (LSHIFT_EXPR,
11763 && ! TREE_OVERFLOW (t1))
11764 return multiple_of_p (type, t1, bottom);
11769 /* Can't handle conversions from non-integral or wider integral type. */
11770 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
11771 || (TYPE_PRECISION (type)
11772 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
11775 /* .. fall through ... */
/* SAVE_EXPR (and narrowing conversions that fell through): look at
   the wrapped operand.  */
11778 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
/* INTEGER_CST: only decide for constant BOTTOM, and refuse the
   unsigned case when either constant is negative.  */
11781 if (TREE_CODE (bottom) != INTEGER_CST
11782 || (TYPE_UNSIGNED (type)
11783 && (tree_int_cst_sgn (top) < 0
11784 || tree_int_cst_sgn (bottom) < 0)))
11786 return integer_zerop (const_binop (TRUNC_MOD_EXPR,
11794 /* Return true if `t' is known to be non-negative. */
/* NOTE(review): sampled listing — the function header, braces, and
   most case labels are elided; the case-identifying comments below
   are inferred from the visible bodies and should be confirmed
   against the full source.  */
11797 tree_expr_nonnegative_p (tree t)
11799 if (t == error_mark_node)
/* Unsigned types are trivially non-negative.  */
11802 if (TYPE_UNSIGNED (TREE_TYPE (t)))
11805 switch (TREE_CODE (t))
11808 /* Query VRP to see if it has recorded any information about
11809 the range of this object. */
11810 return ssa_name_nonnegative_p (t);
11813 /* We can't return 1 if flag_wrapv is set because
11814 ABS_EXPR<INT_MIN> = INT_MIN. */
11815 if (!(flag_wrapv && INTEGRAL_TYPE_P (TREE_TYPE (t))))
11820 return tree_int_cst_sgn (t) >= 0;
11823 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
/* PLUS_EXPR (presumably): for floats, nonneg + nonneg is nonneg.  */
11826 if (FLOAT_TYPE_P (TREE_TYPE (t)))
11827 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
11828 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
11830 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
11831 both unsigned and at least 2 bits shorter than the result. */
11832 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
11833 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
11834 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
11836 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
11837 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
11838 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
11839 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
11841 unsigned int prec = MAX (TYPE_PRECISION (inner1),
11842 TYPE_PRECISION (inner2)) + 1;
11843 return prec < TYPE_PRECISION (TREE_TYPE (t));
/* MULT_EXPR (presumably).  */
11849 if (FLOAT_TYPE_P (TREE_TYPE (t)))
11851 /* x * x for floating point x is always non-negative. */
11852 if (operand_equal_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1), 0))
11854 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
11855 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
11858 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
11859 both unsigned and their total bits is shorter than the result. */
11860 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
11861 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
11862 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
11864 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
11865 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
11866 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
11867 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
11868 return TYPE_PRECISION (inner1) + TYPE_PRECISION (inner2)
11869 < TYPE_PRECISION (TREE_TYPE (t));
/* BIT_IOR / MAX (presumably): nonneg if either operand is.  */
11875 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
11876 || tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
/* Division: nonneg if both operands are.  */
11882 case TRUNC_DIV_EXPR:
11883 case CEIL_DIV_EXPR:
11884 case FLOOR_DIV_EXPR:
11885 case ROUND_DIV_EXPR:
11886 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
11887 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
11889 case TRUNC_MOD_EXPR:
11890 case CEIL_MOD_EXPR:
11891 case FLOOR_MOD_EXPR:
11892 case ROUND_MOD_EXPR:
11894 case NON_LVALUE_EXPR:
/* Modulus and value-preserving wrappers: sign follows operand 0.  */
11896 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
11898 case COMPOUND_EXPR:
/* The value of a compound expression is its second operand.  */
11900 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
/* BIND_EXPR (presumably): value is the last expression of the body.  */
11903 return tree_expr_nonnegative_p (expr_last (TREE_OPERAND (t, 1)));
/* COND_EXPR (presumably): both arms must be non-negative.  */
11906 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1))
11907 && tree_expr_nonnegative_p (TREE_OPERAND (t, 2));
/* NOP_EXPR (presumably): reason about the conversion.  */
11911 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
11912 tree outer_type = TREE_TYPE (t);
11914 if (TREE_CODE (outer_type) == REAL_TYPE)
11916 if (TREE_CODE (inner_type) == REAL_TYPE)
11917 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
11918 if (TREE_CODE (inner_type) == INTEGER_TYPE)
11920 if (TYPE_UNSIGNED (inner_type))
11922 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
11925 else if (TREE_CODE (outer_type) == INTEGER_TYPE)
11927 if (TREE_CODE (inner_type) == REAL_TYPE)
11928 return tree_expr_nonnegative_p (TREE_OPERAND (t,0));
11929 if (TREE_CODE (inner_type) == INTEGER_TYPE)
11930 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
11931 && TYPE_UNSIGNED (inner_type);
/* TARGET_EXPR: inspect what the initializer stores into the slot.  */
11938 tree temp = TARGET_EXPR_SLOT (t);
11939 t = TARGET_EXPR_INITIAL (t);
11941 /* If the initializer is non-void, then it's a normal expression
11942 that will be assigned to the slot. */
11943 if (!VOID_TYPE_P (t))
11944 return tree_expr_nonnegative_p (t);
11946 /* Otherwise, the initializer sets the slot in some way. One common
11947 way is an assignment statement at the end of the initializer. */
11950 if (TREE_CODE (t) == BIND_EXPR)
11951 t = expr_last (BIND_EXPR_BODY (t));
11952 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
11953 || TREE_CODE (t) == TRY_CATCH_EXPR)
11954 t = expr_last (TREE_OPERAND (t, 0));
11955 else if (TREE_CODE (t) == STATEMENT_LIST)
11960 if (TREE_CODE (t) == MODIFY_EXPR
11961 && TREE_OPERAND (t, 0) == temp)
11962 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
/* CALL_EXPR: known math builtins with provably non-negative results.  */
11969 tree fndecl = get_callee_fndecl (t);
11970 tree arglist = TREE_OPERAND (t, 1);
11971 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
11972 switch (DECL_FUNCTION_CODE (fndecl))
11974 CASE_FLT_FN (BUILT_IN_ACOS):
11975 CASE_FLT_FN (BUILT_IN_ACOSH):
11976 CASE_FLT_FN (BUILT_IN_CABS):
11977 CASE_FLT_FN (BUILT_IN_COSH):
11978 CASE_FLT_FN (BUILT_IN_ERFC):
11979 CASE_FLT_FN (BUILT_IN_EXP):
11980 CASE_FLT_FN (BUILT_IN_EXP10):
11981 CASE_FLT_FN (BUILT_IN_EXP2):
11982 CASE_FLT_FN (BUILT_IN_FABS):
11983 CASE_FLT_FN (BUILT_IN_FDIM):
11984 CASE_FLT_FN (BUILT_IN_HYPOT):
11985 CASE_FLT_FN (BUILT_IN_POW10):
11986 CASE_INT_FN (BUILT_IN_FFS):
11987 CASE_INT_FN (BUILT_IN_PARITY):
11988 CASE_INT_FN (BUILT_IN_POPCOUNT):
11992 CASE_FLT_FN (BUILT_IN_SQRT):
11993 /* sqrt(-0.0) is -0.0. */
11994 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (t))))
11996 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
11998 CASE_FLT_FN (BUILT_IN_ASINH):
11999 CASE_FLT_FN (BUILT_IN_ATAN):
12000 CASE_FLT_FN (BUILT_IN_ATANH):
12001 CASE_FLT_FN (BUILT_IN_CBRT):
12002 CASE_FLT_FN (BUILT_IN_CEIL):
12003 CASE_FLT_FN (BUILT_IN_ERF):
12004 CASE_FLT_FN (BUILT_IN_EXPM1):
12005 CASE_FLT_FN (BUILT_IN_FLOOR):
12006 CASE_FLT_FN (BUILT_IN_FMOD):
12007 CASE_FLT_FN (BUILT_IN_FREXP):
12008 CASE_FLT_FN (BUILT_IN_LCEIL):
12009 CASE_FLT_FN (BUILT_IN_LDEXP):
12010 CASE_FLT_FN (BUILT_IN_LFLOOR):
12011 CASE_FLT_FN (BUILT_IN_LLCEIL):
12012 CASE_FLT_FN (BUILT_IN_LLFLOOR):
12013 CASE_FLT_FN (BUILT_IN_LLRINT):
12014 CASE_FLT_FN (BUILT_IN_LLROUND):
12015 CASE_FLT_FN (BUILT_IN_LRINT):
12016 CASE_FLT_FN (BUILT_IN_LROUND):
12017 CASE_FLT_FN (BUILT_IN_MODF):
12018 CASE_FLT_FN (BUILT_IN_NEARBYINT):
12019 CASE_FLT_FN (BUILT_IN_POW):
12020 CASE_FLT_FN (BUILT_IN_RINT):
12021 CASE_FLT_FN (BUILT_IN_ROUND):
12022 CASE_FLT_FN (BUILT_IN_SIGNBIT):
12023 CASE_FLT_FN (BUILT_IN_SINH):
12024 CASE_FLT_FN (BUILT_IN_TANH):
12025 CASE_FLT_FN (BUILT_IN_TRUNC):
12026 /* True if the 1st argument is nonnegative. */
12027 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
12029 CASE_FLT_FN (BUILT_IN_FMAX):
12030 /* True if the 1st OR 2nd arguments are nonnegative. */
12031 return tree_expr_nonnegative_p (TREE_VALUE (arglist))
12032 || tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
12034 CASE_FLT_FN (BUILT_IN_FMIN):
12035 /* True if the 1st AND 2nd arguments are nonnegative. */
12036 return tree_expr_nonnegative_p (TREE_VALUE (arglist))
12037 && tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
12039 CASE_FLT_FN (BUILT_IN_COPYSIGN):
12040 /* True if the 2nd argument is nonnegative. */
12041 return tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
12048 /* ... fall through ... */
12051 if (truth_value_p (TREE_CODE (t)))
12052 /* Truth values evaluate to 0 or 1, which is nonnegative. */
12056 /* We don't know sign of `t', so be conservative and return false. */
12060 /* Return true when T is an address and is known to be nonzero.
12061 For floating point we further ensure that T is not denormal.
12062 Similar logic is present in nonzero_address in rtlanal.h. */
/* NOTE(review): sampled listing — case labels (ABS_EXPR, INTEGER_CST,
   PLUS_EXPR, MULT_EXPR, NOP_EXPR, ADDR_EXPR, COND_EXPR, MIN/MAX,
   BIT_IOR_EXPR, CALL_EXPR, presumably) and braces are elided.  */
12065 tree_expr_nonzero_p (tree t)
12067 tree type = TREE_TYPE (t);
12069 /* Doing something useful for floating point would need more work. */
12070 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
12073 switch (TREE_CODE (t))
12076 /* Query VRP to see if it has recorded any information about
12077 the range of this object. */
12078 return ssa_name_nonzero_p (t);
/* ABS_EXPR (presumably): |x| nonzero iff x nonzero.  */
12081 return tree_expr_nonzero_p (TREE_OPERAND (t, 0));
12084 /* We used to test for !integer_zerop here. This does not work correctly
12085 if TREE_CONSTANT_OVERFLOW (t). */
12086 return (TREE_INT_CST_LOW (t) != 0
12087 || TREE_INT_CST_HIGH (t) != 0);
/* PLUS_EXPR (presumably): sound only without signed wraparound.  */
12090 if (!TYPE_UNSIGNED (type) && !flag_wrapv)
12092 /* With the presence of negative values it is hard
12093 to say something. */
12094 if (!tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
12095 || !tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
12097 /* One of operands must be positive and the other non-negative. */
12098 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
12099 || tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
/* MULT_EXPR (presumably): nonzero * nonzero is nonzero absent wrap.  */
12104 if (!TYPE_UNSIGNED (type) && !flag_wrapv)
12106 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
12107 && tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
/* NOP_EXPR (presumably): a widening conversion preserves nonzero.  */
12113 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
12114 tree outer_type = TREE_TYPE (t);
12116 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
12117 && tree_expr_nonzero_p (TREE_OPERAND (t, 0)));
/* ADDR_EXPR: the address of a non-weak decl or a constant is nonzero.  */
12123 tree base = get_base_address (TREE_OPERAND (t, 0));
12128 /* Weak declarations may link to NULL. */
12129 if (VAR_OR_FUNCTION_DECL_P (base))
12130 return !DECL_WEAK (base);
12132 /* Constants are never weak. */
12133 if (CONSTANT_CLASS_P (base))
/* COND_EXPR (presumably): both arms must be nonzero.  */
12140 return (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
12141 && tree_expr_nonzero_p (TREE_OPERAND (t, 2)));
/* MIN_EXPR (presumably): both operands must be nonzero.  */
12144 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
12145 && tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
/* MAX_EXPR (presumably).  */
12148 if (tree_expr_nonzero_p (TREE_OPERAND (t, 0)))
12150 /* When both operands are nonzero, then MAX must be too. */
12151 if (tree_expr_nonzero_p (TREE_OPERAND (t, 1)))
12154 /* MAX where operand 0 is positive is positive. */
12155 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
12157 /* MAX where operand 1 is positive is positive. */
12158 else if (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
12159 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
12163 case COMPOUND_EXPR:
/* Value-forwarding wrappers delegate to the value operand.  */
12166 return tree_expr_nonzero_p (TREE_OPERAND (t, 1));
12169 case NON_LVALUE_EXPR:
12170 return tree_expr_nonzero_p (TREE_OPERAND (t, 0));
/* BIT_IOR_EXPR (presumably): nonzero if either operand is.  */
12173 return tree_expr_nonzero_p (TREE_OPERAND (t, 1))
12174 || tree_expr_nonzero_p (TREE_OPERAND (t, 0));
/* CALL_EXPR (presumably): alloca never returns a null pointer.  */
12177 return alloca_call_p (t);
12185 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
12186 attempt to fold the expression to a constant without modifying TYPE,
12189 If the expression could be simplified to a constant, then return
12190 the constant. If the expression would not be simplified to a
12191 constant, then return NULL_TREE. */
/* Thin wrapper over fold_binary that keeps the result only when it is
   TREE_CONSTANT.  */
12194 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
12196 tree tem = fold_binary (code, type, op0, op1);
12197 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
12200 /* Given the components of a unary expression CODE, TYPE and OP0,
12201 attempt to fold the expression to a constant without modifying
12204 If the expression could be simplified to a constant, then return
12205 the constant. If the expression would not be simplified to a
12206 constant, then return NULL_TREE. */
/* Thin wrapper over fold_unary that keeps the result only when it is
   TREE_CONSTANT.  */
12209 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
12211 tree tem = fold_unary (code, type, op0);
12212 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
12215 /* If EXP represents referencing an element in a constant string
12216 (either via pointer arithmetic or array indexing), return the
12217 tree representing the value accessed, otherwise return NULL. */
/* NOTE(review): sampled listing — the locals (string, index), the
   ARRAY_REF else-branch header, and the final guard's opening lines
   are partially elided.  */
12220 fold_read_from_constant_string (tree exp)
12222 if (TREE_CODE (exp) == INDIRECT_REF || TREE_CODE (exp) == ARRAY_REF)
12224 tree exp1 = TREE_OPERAND (exp, 0);
/* INDIRECT_REF: string_constant decomposes *(str + index).  */
12228 if (TREE_CODE (exp) == INDIRECT_REF)
12229 string = string_constant (exp1, &index);
/* ARRAY_REF (else branch, presumably): normalize the index against
   the array's lower bound.  */
12232 tree low_bound = array_ref_low_bound (exp);
12233 index = fold_convert (sizetype, TREE_OPERAND (exp, 1));
12235 /* Optimize the special-case of a zero lower bound.
12237 We convert the low_bound to sizetype to avoid some problems
12238 with constant folding. (E.g. suppose the lower bound is 1,
12239 and its mode is QI. Without the conversion,l (ARRAY
12240 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
12241 +INDEX), which becomes (ARRAY+255+INDEX). Opps!) */
12242 if (! integer_zerop (low_bound))
12243 index = size_diffop (index, fold_convert (sizetype, low_bound));
/* Only fold when we really have a STRING_CST, a constant in-bounds
   index, and single-byte integer elements.  */
12249 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
12250 && TREE_CODE (string) == STRING_CST
12251 && TREE_CODE (index) == INTEGER_CST
12252 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
12253 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
12255 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
12256 return fold_convert (TREE_TYPE (exp),
12257 build_int_cst (NULL_TREE,
12258 (TREE_STRING_POINTER (string)
12259 [TREE_INT_CST_LOW (index)])));
12264 /* Return the tree for neg (ARG0) when ARG0 is known to be either
12265 an integer constant or real constant.
12267 TYPE is the type of the result. */
12270 fold_negate_const (tree arg0, tree type)
12272 tree t = NULL_TREE;
12274 switch (TREE_CODE (arg0))
12278 unsigned HOST_WIDE_INT low;
12279 HOST_WIDE_INT high;
12280 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
12281 TREE_INT_CST_HIGH (arg0),
12283 t = build_int_cst_wide (type, low, high);
12284 t = force_fit_type (t, 1,
12285 (overflow | TREE_OVERFLOW (arg0))
12286 && !TYPE_UNSIGNED (type),
12287 TREE_CONSTANT_OVERFLOW (arg0));
12292 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
12296 gcc_unreachable ();
12302 /* Return the tree for abs (ARG0) when ARG0 is known to be either
12303 an integer constant or real constant.
12305 TYPE is the type of the result. */
12308 fold_abs_const (tree arg0, tree type)
12310 tree t = NULL_TREE;
12312 switch (TREE_CODE (arg0))
12315 /* If the value is unsigned, then the absolute value is
12316 the same as the ordinary value. */
12317 if (TYPE_UNSIGNED (type))
12319 /* Similarly, if the value is non-negative. */
12320 else if (INT_CST_LT (integer_minus_one_node, arg0))
12322 /* If the value is negative, then the absolute value is
12326 unsigned HOST_WIDE_INT low;
12327 HOST_WIDE_INT high;
12328 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
12329 TREE_INT_CST_HIGH (arg0),
12331 t = build_int_cst_wide (type, low, high);
12332 t = force_fit_type (t, -1, overflow | TREE_OVERFLOW (arg0),
12333 TREE_CONSTANT_OVERFLOW (arg0));
12338 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
12339 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
12345 gcc_unreachable ();
12351 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
12352 constant. TYPE is the type of the result. */
12355 fold_not_const (tree arg0, tree type)
12357 tree t = NULL_TREE;
12359 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
12361 t = build_int_cst_wide (type,
12362 ~ TREE_INT_CST_LOW (arg0),
12363 ~ TREE_INT_CST_HIGH (arg0));
12364 t = force_fit_type (t, 0, TREE_OVERFLOW (arg0),
12365 TREE_CONSTANT_OVERFLOW (arg0));
12370 /* Given CODE, a relational operator, the target type, TYPE and two
12371 constant operands OP0 and OP1, return the result of the
12372 relational operation. If the result is not a compile time
12373 constant, then return NULL_TREE. */
12376 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
12378 int result, invert;
12380 /* From here on, the only cases we handle are when the result is
12381 known to be a constant. */
12383 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
12385 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
12386 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
12388 /* Handle the cases where either operand is a NaN. */
12389 if (real_isnan (c0) || real_isnan (c1))
12399 case UNORDERED_EXPR:
12413 if (flag_trapping_math)
12419 gcc_unreachable ();
12422 return constant_boolean_node (result, type);
12425 return constant_boolean_node (real_compare (code, c0, c1), type);
12428 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
12430 To compute GT, swap the arguments and do LT.
12431 To compute GE, do LT and invert the result.
12432 To compute LE, swap the arguments, do LT and invert the result.
12433 To compute NE, do EQ and invert the result.
12435 Therefore, the code below must handle only EQ and LT. */
12437 if (code == LE_EXPR || code == GT_EXPR)
12442 code = swap_tree_comparison (code);
12445 /* Note that it is safe to invert for real values here because we
12446 have already handled the one case that it matters. */
12449 if (code == NE_EXPR || code == GE_EXPR)
12452 code = invert_tree_comparison (code, false);
12455 /* Compute a result for LT or EQ if args permit;
12456 Otherwise return T. */
12457 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
12459 if (code == EQ_EXPR)
12460 result = tree_int_cst_equal (op0, op1);
12461 else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
12462 result = INT_CST_LT_UNSIGNED (op0, op1);
12464 result = INT_CST_LT (op0, op1);
12471 return constant_boolean_node (result, type);
12474 /* Build an expression for the a clean point containing EXPR with type TYPE.
12475 Don't build a cleanup point expression for EXPR which don't have side
12479 fold_build_cleanup_point_expr (tree type, tree expr)
12481 /* If the expression does not have side effects then we don't have to wrap
12482 it with a cleanup point expression. */
12483 if (!TREE_SIDE_EFFECTS (expr))
12486 /* If the expression is a return, check to see if the expression inside the
12487 return has no side effects or the right hand side of the modify expression
12488 inside the return. If either don't have side effects set we don't need to
12489 wrap the expression in a cleanup point expression. Note we don't check the
12490 left hand side of the modify because it should always be a return decl. */
12491 if (TREE_CODE (expr) == RETURN_EXPR)
12493 tree op = TREE_OPERAND (expr, 0);
12494 if (!op || !TREE_SIDE_EFFECTS (op))
12496 op = TREE_OPERAND (op, 1);
12497 if (!TREE_SIDE_EFFECTS (op))
12501 return build1 (CLEANUP_POINT_EXPR, type, expr);
12504 /* Build an expression for the address of T. Folds away INDIRECT_REF to
12505 avoid confusing the gimplify process. */
12508 build_fold_addr_expr_with_type (tree t, tree ptrtype)
12510 /* The size of the object is not relevant when talking about its address. */
12511 if (TREE_CODE (t) == WITH_SIZE_EXPR)
12512 t = TREE_OPERAND (t, 0);
12514 /* Note: doesn't apply to ALIGN_INDIRECT_REF */
12515 if (TREE_CODE (t) == INDIRECT_REF
12516 || TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
12518 t = TREE_OPERAND (t, 0);
12519 if (TREE_TYPE (t) != ptrtype)
12520 t = build1 (NOP_EXPR, ptrtype, t);
12526 while (handled_component_p (base))
12527 base = TREE_OPERAND (base, 0);
12529 TREE_ADDRESSABLE (base) = 1;
12531 t = build1 (ADDR_EXPR, ptrtype, t);
12538 build_fold_addr_expr (tree t)
12540 return build_fold_addr_expr_with_type (t, build_pointer_type (TREE_TYPE (t)));
12543 /* Given a pointer value OP0 and a type TYPE, return a simplified version
12544 of an indirection through OP0, or NULL_TREE if no simplification is
12548 fold_indirect_ref_1 (tree type, tree op0)
12554 subtype = TREE_TYPE (sub);
12555 if (!POINTER_TYPE_P (subtype))
12558 if (TREE_CODE (sub) == ADDR_EXPR)
12560 tree op = TREE_OPERAND (sub, 0);
12561 tree optype = TREE_TYPE (op);
12562 /* *&p => p; make sure to handle *&"str"[cst] here. */
12563 if (type == optype)
12565 tree fop = fold_read_from_constant_string (op);
12571 /* *(foo *)&fooarray => fooarray[0] */
12572 else if (TREE_CODE (optype) == ARRAY_TYPE
12573 && type == TREE_TYPE (optype))
12575 tree type_domain = TYPE_DOMAIN (optype);
12576 tree min_val = size_zero_node;
12577 if (type_domain && TYPE_MIN_VALUE (type_domain))
12578 min_val = TYPE_MIN_VALUE (type_domain);
12579 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
12581 /* *(foo *)&complexfoo => __real__ complexfoo */
12582 else if (TREE_CODE (optype) == COMPLEX_TYPE
12583 && type == TREE_TYPE (optype))
12584 return fold_build1 (REALPART_EXPR, type, op);
12587 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
12588 if (TREE_CODE (sub) == PLUS_EXPR
12589 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
12591 tree op00 = TREE_OPERAND (sub, 0);
12592 tree op01 = TREE_OPERAND (sub, 1);
12596 op00type = TREE_TYPE (op00);
12597 if (TREE_CODE (op00) == ADDR_EXPR
12598 && TREE_CODE (TREE_TYPE (op00type)) == COMPLEX_TYPE
12599 && type == TREE_TYPE (TREE_TYPE (op00type)))
12601 tree size = TYPE_SIZE_UNIT (type);
12602 if (tree_int_cst_equal (size, op01))
12603 return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (op00, 0));
12607 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
12608 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
12609 && type == TREE_TYPE (TREE_TYPE (subtype)))
12612 tree min_val = size_zero_node;
12613 sub = build_fold_indirect_ref (sub);
12614 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
12615 if (type_domain && TYPE_MIN_VALUE (type_domain))
12616 min_val = TYPE_MIN_VALUE (type_domain);
12617 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
12623 /* Builds an expression for an indirection through T, simplifying some
12627 build_fold_indirect_ref (tree t)
12629 tree type = TREE_TYPE (TREE_TYPE (t));
12630 tree sub = fold_indirect_ref_1 (type, t);
12635 return build1 (INDIRECT_REF, type, t);
12638 /* Given an INDIRECT_REF T, return either T or a simplified version. */
12641 fold_indirect_ref (tree t)
12643 tree sub = fold_indirect_ref_1 (TREE_TYPE (t), TREE_OPERAND (t, 0));
12651 /* Strip non-trapping, non-side-effecting tree nodes from an expression
12652 whose result is ignored. The type of the returned tree need not be
12653 the same as the original expression. */
12656 fold_ignored_result (tree t)
12658 if (!TREE_SIDE_EFFECTS (t))
12659 return integer_zero_node;
12662 switch (TREE_CODE_CLASS (TREE_CODE (t)))
12665 t = TREE_OPERAND (t, 0);
12669 case tcc_comparison:
12670 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
12671 t = TREE_OPERAND (t, 0);
12672 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
12673 t = TREE_OPERAND (t, 1);
12678 case tcc_expression:
12679 switch (TREE_CODE (t))
12681 case COMPOUND_EXPR:
12682 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
12684 t = TREE_OPERAND (t, 0);
12688 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
12689 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
12691 t = TREE_OPERAND (t, 0);
12704 /* Return the value of VALUE, rounded up to a multiple of DIVISOR.
12705 This can only be applied to objects of a sizetype. */
12708 round_up (tree value, int divisor)
12710 tree div = NULL_TREE;
12712 gcc_assert (divisor > 0);
12716 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
12717 have to do anything. Only do this when we are not given a const,
12718 because in that case, this check is more expensive than just
12720 if (TREE_CODE (value) != INTEGER_CST)
12722 div = build_int_cst (TREE_TYPE (value), divisor);
12724 if (multiple_of_p (TREE_TYPE (value), value, div))
12728 /* If divisor is a power of two, simplify this to bit manipulation. */
12729 if (divisor == (divisor & -divisor))
12733 t = build_int_cst (TREE_TYPE (value), divisor - 1);
12734 value = size_binop (PLUS_EXPR, value, t);
12735 t = build_int_cst (TREE_TYPE (value), -divisor);
12736 value = size_binop (BIT_AND_EXPR, value, t);
12741 div = build_int_cst (TREE_TYPE (value), divisor);
12742 value = size_binop (CEIL_DIV_EXPR, value, div);
12743 value = size_binop (MULT_EXPR, value, div);
12749 /* Likewise, but round down. */
12752 round_down (tree value, int divisor)
12754 tree div = NULL_TREE;
12756 gcc_assert (divisor > 0);
12760 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
12761 have to do anything. Only do this when we are not given a const,
12762 because in that case, this check is more expensive than just
12764 if (TREE_CODE (value) != INTEGER_CST)
12766 div = build_int_cst (TREE_TYPE (value), divisor);
12768 if (multiple_of_p (TREE_TYPE (value), value, div))
12772 /* If divisor is a power of two, simplify this to bit manipulation. */
12773 if (divisor == (divisor & -divisor))
12777 t = build_int_cst (TREE_TYPE (value), -divisor);
12778 value = size_binop (BIT_AND_EXPR, value, t);
12783 div = build_int_cst (TREE_TYPE (value), divisor);
12784 value = size_binop (FLOOR_DIV_EXPR, value, div);
12785 value = size_binop (MULT_EXPR, value, div);
12791 /* Returns the pointer to the base of the object addressed by EXP and
12792 extracts the information about the offset of the access, storing it
12793 to PBITPOS and POFFSET. */
12796 split_address_to_core_and_offset (tree exp,
12797 HOST_WIDE_INT *pbitpos, tree *poffset)
12800 enum machine_mode mode;
12801 int unsignedp, volatilep;
12802 HOST_WIDE_INT bitsize;
12804 if (TREE_CODE (exp) == ADDR_EXPR)
12806 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
12807 poffset, &mode, &unsignedp, &volatilep,
12809 core = build_fold_addr_expr (core);
12815 *poffset = NULL_TREE;
12821 /* Returns true if addresses of E1 and E2 differ by a constant, false
12822 otherwise. If they do, E1 - E2 is stored in *DIFF. */
12825 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
12828 HOST_WIDE_INT bitpos1, bitpos2;
12829 tree toffset1, toffset2, tdiff, type;
12831 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
12832 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
12834 if (bitpos1 % BITS_PER_UNIT != 0
12835 || bitpos2 % BITS_PER_UNIT != 0
12836 || !operand_equal_p (core1, core2, 0))
12839 if (toffset1 && toffset2)
12841 type = TREE_TYPE (toffset1);
12842 if (type != TREE_TYPE (toffset2))
12843 toffset2 = fold_convert (type, toffset2);
12845 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
12846 if (!cst_and_fits_in_hwi (tdiff))
12849 *diff = int_cst_value (tdiff);
12851 else if (toffset1 || toffset2)
12853 /* If only one of the offsets is non-constant, the difference cannot
12860 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
12864 /* Simplify the floating point expression EXP when the sign of the
12865 result is not significant. Return NULL_TREE if no simplification
12869 fold_strip_sign_ops (tree exp)
12873 switch (TREE_CODE (exp))
12877 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
12878 return arg0 ? arg0 : TREE_OPERAND (exp, 0);
12882 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
12884 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
12885 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
12886 if (arg0 != NULL_TREE || arg1 != NULL_TREE)
12887 return fold_build2 (TREE_CODE (exp), TREE_TYPE (exp),
12888 arg0 ? arg0 : TREE_OPERAND (exp, 0),
12889 arg1 ? arg1 : TREE_OPERAND (exp, 1));