1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
22 /*@@ This file should be rewritten to use an arbitrary precision
23 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
24 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
25 @@ The routines that translate from the ap rep should
26 @@ warn if precision et. al. is lost.
27 @@ This would also make life easier when this technology is used
28 @@ for cross-compilers. */
30 /* The entry points in this file are fold, size_int_wide, size_binop
33 fold takes a tree as argument and returns a simplified tree.
35 size_binop takes a tree code for an arithmetic operation
36 and two operands that are trees, and produces a tree for the
37 result, assuming the type comes from `sizetype'.
39 size_int takes an integer value, and creates a tree constant
40 with type from `sizetype'.
42 force_fit_type takes a constant and prior overflow indicator, and
43 forces the value to fit the type. It returns an overflow indicator. */
47 #include "coretypes.h"
58 #include "langhooks.h"
61 /* The following constants represent a bit based encoding of GCC's
62 comparison operators. This encoding simplifies transformations
63 on relational comparison operators, such as AND and OR. */
64 enum comparison_code {
/* NOTE(review): the enumerator list of comparison_code is missing from
   this dump (the embedded numbering jumps from 64 to 83) -- restore it
   from the upstream file before compiling.  */
/* Forward declarations for the file-local helpers defined below.  Some
   prototypes are also missing continuation lines (decode_field_reference,
   merge_ranges, fold_binary_op_with_conditional_arg, fold_mathfn_compare,
   fold_relational_hi_lo) -- see the numbering gaps.  */
83 static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
84 static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *);
85 static bool negate_mathfn_p (enum built_in_function);
86 static bool negate_expr_p (tree);
87 static tree negate_expr (tree);
88 static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
89 static tree associate_trees (tree, tree, enum tree_code, tree);
90 static tree const_binop (enum tree_code, tree, tree, int);
91 static hashval_t size_htab_hash (const void *);
92 static int size_htab_eq (const void *, const void *);
93 static tree fold_convert_const (enum tree_code, tree, tree);
94 static enum tree_code invert_tree_comparison (enum tree_code, bool);
95 static enum comparison_code comparison_to_compcode (enum tree_code);
96 static enum tree_code compcode_to_comparison (enum comparison_code);
97 static tree combine_comparisons (enum tree_code, enum tree_code,
98 enum tree_code, tree, tree, tree);
99 static int truth_value_p (enum tree_code);
100 static int operand_equal_for_comparison_p (tree, tree, tree);
101 static int twoval_comparison_p (tree, tree *, tree *, int *);
102 static tree eval_subst (tree, tree, tree, tree, tree);
103 static tree pedantic_omit_one_operand (tree, tree, tree);
104 static tree distribute_bit_expr (enum tree_code, tree, tree, tree);
105 static tree make_bit_field_ref (tree, tree, int, int, int);
106 static tree optimize_bit_field_compare (enum tree_code, tree, tree, tree);
107 static tree decode_field_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
108 enum machine_mode *, int *, int *,
110 static int all_ones_mask_p (tree, int);
111 static tree sign_bit_p (tree, tree);
112 static int simple_operand_p (tree);
113 static tree range_binop (enum tree_code, tree, tree, int, tree, int);
114 static tree make_range (tree, int *, tree *, tree *);
115 static tree build_range_check (tree, tree, int, tree, tree);
116 static int merge_ranges (int *, tree *, tree *, int, tree, tree, int, tree,
118 static tree fold_range_test (tree);
119 static tree fold_cond_expr_with_comparison (tree, tree, tree, tree);
120 static tree unextend (tree, int, int, tree);
121 static tree fold_truthop (enum tree_code, tree, tree, tree);
122 static tree optimize_minmax_comparison (tree);
123 static tree extract_muldiv (tree, tree, enum tree_code, tree);
124 static tree extract_muldiv_1 (tree, tree, enum tree_code, tree);
125 static int multiple_of_p (tree, tree, tree);
126 static tree constant_boolean_node (int, tree);
127 static tree fold_binary_op_with_conditional_arg (enum tree_code, tree, tree,
129 static bool fold_real_zero_addition_p (tree, tree, int);
130 static tree fold_mathfn_compare (enum built_in_function, enum tree_code,
132 static tree fold_inf_compare (enum tree_code, tree, tree, tree);
133 static tree fold_div_compare (enum tree_code, tree, tree, tree);
134 static bool reorder_operands_p (tree, tree);
135 static tree fold_negate_const (tree, tree);
136 static tree fold_not_const (tree, tree);
137 static tree fold_relational_const (enum tree_code, tree, tree, tree);
138 static tree fold_relational_hi_lo (enum tree_code *, const tree,
140 static bool tree_expr_nonzero_p (tree);
142 /* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
143 overflow. Suppose A, B and SUM have the same respective signs as A1, B1,
144 and SUM1. Then this yields nonzero if overflow occurred during the
147 Overflow occurs if A and B have the same sign, but A and SUM differ in
148 sign. Use `^' to test whether signs differ, and `< 0' to isolate the
/* Evaluates to a negative (hence nonzero) value exactly when A and B have
   equal sign bits but SUM's sign bit differs from theirs; relies on the
   two's complement arithmetic stated above.  */
150 #define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
/* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
   We do that by representing the two-word integer in 4 words, with only
   HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
   number.  The value of the word is LOWPART + HIGHPART * BASE.  */

/* Extract the low half-word of X.  */
#define LOWPART(x) \
  ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
/* Extract the high half-word of X.  */
#define HIGHPART(x) \
  ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
/* The weight of a high half-word: 2 ** (HOST_BITS_PER_WIDE_INT / 2).  */
#define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)
163 /* Unpack a two-word integer into 4 words.
164 LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
165 WORDS points to the array of HOST_WIDE_INTs. */
168 encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
170 words[0] = LOWPART (low);
171 words[1] = HIGHPART (low);
172 words[2] = LOWPART (hi);
173 words[3] = HIGHPART (hi);
176 /* Pack an array of 4 words into a two-word integer.
177 WORDS points to the array of words.
178 The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces. */
181 decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
184 *low = words[0] + words[1] * BASE;
185 *hi = words[2] + words[3] * BASE;
188 /* Make the integer constant T valid for its type by setting to 0 or 1 all
189 the bits in the constant that don't belong in the type.
191 Return 1 if a signed overflow occurs, 0 otherwise. If OVERFLOW is
192 nonzero, a signed overflow has already occurred in calculating T, so
196 force_fit_type (tree t, int overflow)
198 unsigned HOST_WIDE_INT low;
202 if (TREE_CODE (t) == REAL_CST)
204 /* ??? Used to check for overflow here via CHECK_FLOAT_TYPE.
205 Consider doing it via real_convert now. */
209 else if (TREE_CODE (t) != INTEGER_CST)
212 low = TREE_INT_CST_LOW (t);
213 high = TREE_INT_CST_HIGH (t);
215 if (POINTER_TYPE_P (TREE_TYPE (t))
216 || TREE_CODE (TREE_TYPE (t)) == OFFSET_TYPE)
219 prec = TYPE_PRECISION (TREE_TYPE (t));
221 /* First clear all bits that are beyond the type's precision. */
223 if (prec == 2 * HOST_BITS_PER_WIDE_INT)
225 else if (prec > HOST_BITS_PER_WIDE_INT)
226 TREE_INT_CST_HIGH (t)
227 &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
230 TREE_INT_CST_HIGH (t) = 0;
231 if (prec < HOST_BITS_PER_WIDE_INT)
232 TREE_INT_CST_LOW (t) &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
235 /* Unsigned types do not suffer sign extension or overflow unless they
237 if (TYPE_UNSIGNED (TREE_TYPE (t))
238 && ! (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
239 && TYPE_IS_SIZETYPE (TREE_TYPE (t))))
242 /* If the value's sign bit is set, extend the sign. */
243 if (prec != 2 * HOST_BITS_PER_WIDE_INT
244 && (prec > HOST_BITS_PER_WIDE_INT
245 ? 0 != (TREE_INT_CST_HIGH (t)
247 << (prec - HOST_BITS_PER_WIDE_INT - 1)))
248 : 0 != (TREE_INT_CST_LOW (t)
249 & ((unsigned HOST_WIDE_INT) 1 << (prec - 1)))))
251 /* Value is negative:
252 set to 1 all the bits that are outside this type's precision. */
253 if (prec > HOST_BITS_PER_WIDE_INT)
254 TREE_INT_CST_HIGH (t)
255 |= ((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
258 TREE_INT_CST_HIGH (t) = -1;
259 if (prec < HOST_BITS_PER_WIDE_INT)
260 TREE_INT_CST_LOW (t) |= ((unsigned HOST_WIDE_INT) (-1) << prec);
264 /* Return nonzero if signed overflow occurred. */
266 ((overflow | (low ^ TREE_INT_CST_LOW (t)) | (high ^ TREE_INT_CST_HIGH (t)))
270 /* Add two doubleword integers with doubleword result.
271 Each argument is given as two `HOST_WIDE_INT' pieces.
272 One argument is L1 and H1; the other, L2 and H2.
273 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
276 add_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
277 unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
278 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
280 unsigned HOST_WIDE_INT l;
284 h = h1 + h2 + (l < l1);
288 return OVERFLOW_SUM_SIGN (h1, h2, h);
291 /* Negate a doubleword integer with doubleword result.
292 Return nonzero if the operation overflows, assuming it's signed.
293 The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
294 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
297 neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
298 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
304 return (*hv & h1) < 0;
314 /* Multiply two doubleword integers with doubleword result.
315 Return nonzero if the operation overflows, assuming it's signed.
316 Each argument is given as two `HOST_WIDE_INT' pieces.
317 One argument is L1 and H1; the other, L2 and H2.
318 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
321 mul_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
322 unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
323 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
325 HOST_WIDE_INT arg1[4];
326 HOST_WIDE_INT arg2[4];
327 HOST_WIDE_INT prod[4 * 2];
328 unsigned HOST_WIDE_INT carry;
330 unsigned HOST_WIDE_INT toplow, neglow;
331 HOST_WIDE_INT tophigh, neghigh;
333 encode (arg1, l1, h1);
334 encode (arg2, l2, h2);
336 memset (prod, 0, sizeof prod);
338 for (i = 0; i < 4; i++)
341 for (j = 0; j < 4; j++)
344 /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000. */
345 carry += arg1[i] * arg2[j];
346 /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF. */
348 prod[k] = LOWPART (carry);
349 carry = HIGHPART (carry);
354 decode (prod, lv, hv); /* This ignores prod[4] through prod[4*2-1] */
356 /* Check for overflow by calculating the top half of the answer in full;
357 it should agree with the low half's sign bit. */
358 decode (prod + 4, &toplow, &tophigh);
361 neg_double (l2, h2, &neglow, &neghigh);
362 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
366 neg_double (l1, h1, &neglow, &neghigh);
367 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
369 return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
372 /* Shift the doubleword integer in L1, H1 left by COUNT places
373 keeping only PREC bits of result.
374 Shift right if COUNT is negative.
375 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
376 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
379 lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
380 HOST_WIDE_INT count, unsigned int prec,
381 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
383 unsigned HOST_WIDE_INT signmask;
387 rshift_double (l1, h1, -count, prec, lv, hv, arith);
391 if (SHIFT_COUNT_TRUNCATED)
394 if (count >= 2 * HOST_BITS_PER_WIDE_INT)
396 /* Shifting by the host word size is undefined according to the
397 ANSI standard, so we must handle this as a special case. */
401 else if (count >= HOST_BITS_PER_WIDE_INT)
403 *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
408 *hv = (((unsigned HOST_WIDE_INT) h1 << count)
409 | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
413 /* Sign extend all bits that are beyond the precision. */
415 signmask = -((prec > HOST_BITS_PER_WIDE_INT
416 ? ((unsigned HOST_WIDE_INT) *hv
417 >> (prec - HOST_BITS_PER_WIDE_INT - 1))
418 : (*lv >> (prec - 1))) & 1);
420 if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
422 else if (prec >= HOST_BITS_PER_WIDE_INT)
424 *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
425 *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
430 *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
431 *lv |= signmask << prec;
435 /* Shift the doubleword integer in L1, H1 right by COUNT places
436 keeping only PREC bits of result. COUNT must be positive.
437 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
438 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
441 rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
442 HOST_WIDE_INT count, unsigned int prec,
443 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
446 unsigned HOST_WIDE_INT signmask;
449 ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
452 if (SHIFT_COUNT_TRUNCATED)
455 if (count >= 2 * HOST_BITS_PER_WIDE_INT)
457 /* Shifting by the host word size is undefined according to the
458 ANSI standard, so we must handle this as a special case. */
462 else if (count >= HOST_BITS_PER_WIDE_INT)
465 *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
469 *hv = (unsigned HOST_WIDE_INT) h1 >> count;
471 | ((unsigned HOST_WIDE_INT) h1 << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
474 /* Zero / sign extend all bits that are beyond the precision. */
476 if (count >= (HOST_WIDE_INT)prec)
481 else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
483 else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
485 *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
486 *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
491 *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
492 *lv |= signmask << (prec - count);
496 /* Rotate the doubleword integer in L1, H1 left by COUNT places
497 keeping only PREC bits of result.
498 Rotate right if COUNT is negative.
499 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
502 lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
503 HOST_WIDE_INT count, unsigned int prec,
504 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
506 unsigned HOST_WIDE_INT s1l, s2l;
507 HOST_WIDE_INT s1h, s2h;
513 lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
514 rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
519 /* Rotate the doubleword integer in L1, H1 left by COUNT places
520 keeping only PREC bits of result. COUNT must be positive.
521 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
524 rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
525 HOST_WIDE_INT count, unsigned int prec,
526 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
528 unsigned HOST_WIDE_INT s1l, s2l;
529 HOST_WIDE_INT s1h, s2h;
535 rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
536 lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
541 /* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
542 for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
543 CODE is a tree code for a kind of division, one of
544 TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
546 It controls how the quotient is rounded to an integer.
547 Return nonzero if the operation overflows.
548 UNS nonzero says do unsigned division. */
551 div_and_round_double (enum tree_code code, int uns,
552 unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
553 HOST_WIDE_INT hnum_orig,
554 unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
555 HOST_WIDE_INT hden_orig,
556 unsigned HOST_WIDE_INT *lquo,
557 HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
561 HOST_WIDE_INT num[4 + 1]; /* extra element for scaling. */
562 HOST_WIDE_INT den[4], quo[4];
564 unsigned HOST_WIDE_INT work;
565 unsigned HOST_WIDE_INT carry = 0;
566 unsigned HOST_WIDE_INT lnum = lnum_orig;
567 HOST_WIDE_INT hnum = hnum_orig;
568 unsigned HOST_WIDE_INT lden = lden_orig;
569 HOST_WIDE_INT hden = hden_orig;
572 if (hden == 0 && lden == 0)
573 overflow = 1, lden = 1;
575 /* Calculate quotient sign and convert operands to unsigned. */
581 /* (minimum integer) / (-1) is the only overflow case. */
582 if (neg_double (lnum, hnum, &lnum, &hnum)
583 && ((HOST_WIDE_INT) lden & hden) == -1)
589 neg_double (lden, hden, &lden, &hden);
593 if (hnum == 0 && hden == 0)
594 { /* single precision */
596 /* This unsigned division rounds toward zero. */
602 { /* trivial case: dividend < divisor */
603 /* hden != 0 already checked. */
610 memset (quo, 0, sizeof quo);
612 memset (num, 0, sizeof num); /* to zero 9th element */
613 memset (den, 0, sizeof den);
615 encode (num, lnum, hnum);
616 encode (den, lden, hden);
618 /* Special code for when the divisor < BASE. */
619 if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
621 /* hnum != 0 already checked. */
622 for (i = 4 - 1; i >= 0; i--)
624 work = num[i] + carry * BASE;
625 quo[i] = work / lden;
631 /* Full double precision division,
632 with thanks to Don Knuth's "Seminumerical Algorithms". */
633 int num_hi_sig, den_hi_sig;
634 unsigned HOST_WIDE_INT quo_est, scale;
636 /* Find the highest nonzero divisor digit. */
637 for (i = 4 - 1;; i--)
644 /* Insure that the first digit of the divisor is at least BASE/2.
645 This is required by the quotient digit estimation algorithm. */
647 scale = BASE / (den[den_hi_sig] + 1);
649 { /* scale divisor and dividend */
651 for (i = 0; i <= 4 - 1; i++)
653 work = (num[i] * scale) + carry;
654 num[i] = LOWPART (work);
655 carry = HIGHPART (work);
660 for (i = 0; i <= 4 - 1; i++)
662 work = (den[i] * scale) + carry;
663 den[i] = LOWPART (work);
664 carry = HIGHPART (work);
665 if (den[i] != 0) den_hi_sig = i;
672 for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
674 /* Guess the next quotient digit, quo_est, by dividing the first
675 two remaining dividend digits by the high order quotient digit.
676 quo_est is never low and is at most 2 high. */
677 unsigned HOST_WIDE_INT tmp;
679 num_hi_sig = i + den_hi_sig + 1;
680 work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
681 if (num[num_hi_sig] != den[den_hi_sig])
682 quo_est = work / den[den_hi_sig];
686 /* Refine quo_est so it's usually correct, and at most one high. */
687 tmp = work - quo_est * den[den_hi_sig];
689 && (den[den_hi_sig - 1] * quo_est
690 > (tmp * BASE + num[num_hi_sig - 2])))
693 /* Try QUO_EST as the quotient digit, by multiplying the
694 divisor by QUO_EST and subtracting from the remaining dividend.
695 Keep in mind that QUO_EST is the I - 1st digit. */
698 for (j = 0; j <= den_hi_sig; j++)
700 work = quo_est * den[j] + carry;
701 carry = HIGHPART (work);
702 work = num[i + j] - LOWPART (work);
703 num[i + j] = LOWPART (work);
704 carry += HIGHPART (work) != 0;
707 /* If quo_est was high by one, then num[i] went negative and
708 we need to correct things. */
709 if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
712 carry = 0; /* add divisor back in */
713 for (j = 0; j <= den_hi_sig; j++)
715 work = num[i + j] + den[j] + carry;
716 carry = HIGHPART (work);
717 num[i + j] = LOWPART (work);
720 num [num_hi_sig] += carry;
723 /* Store the quotient digit. */
728 decode (quo, lquo, hquo);
731 /* If result is negative, make it so. */
733 neg_double (*lquo, *hquo, lquo, hquo);
735 /* Compute trial remainder: rem = num - (quo * den) */
736 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
737 neg_double (*lrem, *hrem, lrem, hrem);
738 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
743 case TRUNC_MOD_EXPR: /* round toward zero */
744 case EXACT_DIV_EXPR: /* for this one, it shouldn't matter */
748 case FLOOR_MOD_EXPR: /* round toward negative infinity */
749 if (quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio < 0 && rem != 0 */
752 add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
760 case CEIL_MOD_EXPR: /* round toward positive infinity */
761 if (!quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio > 0 && rem != 0 */
763 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
771 case ROUND_MOD_EXPR: /* round to closest integer */
773 unsigned HOST_WIDE_INT labs_rem = *lrem;
774 HOST_WIDE_INT habs_rem = *hrem;
775 unsigned HOST_WIDE_INT labs_den = lden, ltwice;
776 HOST_WIDE_INT habs_den = hden, htwice;
778 /* Get absolute values. */
780 neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
782 neg_double (lden, hden, &labs_den, &habs_den);
784 /* If (2 * abs (lrem) >= abs (lden)) */
785 mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
786 labs_rem, habs_rem, <wice, &htwice);
788 if (((unsigned HOST_WIDE_INT) habs_den
789 < (unsigned HOST_WIDE_INT) htwice)
790 || (((unsigned HOST_WIDE_INT) habs_den
791 == (unsigned HOST_WIDE_INT) htwice)
792 && (labs_den < ltwice)))
796 add_double (*lquo, *hquo,
797 (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
800 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
812 /* Compute true remainder: rem = num - (quo * den) */
813 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
814 neg_double (*lrem, *hrem, lrem, hrem);
815 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
819 /* Return true if built-in mathematical function specified by CODE
820 preserves the sign of its argument, i.e. -f(x) == f(-x). */
/* NOTE(review): the return type line and the entire switch body of this
   function are missing from this dump (the embedded numbering jumps from
   823 to 847); restore them from the upstream file before compiling.  */
823 negate_mathfn_p (enum built_in_function code)
847 /* Determine whether an expression T can be cheaply negated using
848 the function negate_expr. */
/* NOTE(review): this dump omits the return type, braces and the switch
   case labels of this function (the embedded numbering is discontinuous);
   the surviving statements below are kept byte-for-byte.  The inserted
   case markers are inferred -- verify against the upstream file.  */
851 negate_expr_p (tree t)
853 unsigned HOST_WIDE_INT val;
860 type = TREE_TYPE (t);
863 switch (TREE_CODE (t))
/* Presumably the INTEGER_CST case: negation is always cheap unless the
   type is signed and -ftrapv is on, in which case -CST must not
   overflow.  */
866 if (TYPE_UNSIGNED (type) || ! flag_trapv)
869 /* Check that -CST will not overflow type. */
870 prec = TYPE_PRECISION (type);
871 if (prec > HOST_BITS_PER_WIDE_INT)
873 if (TREE_INT_CST_LOW (t) != 0)
875 prec -= HOST_BITS_PER_WIDE_INT;
876 val = TREE_INT_CST_HIGH (t);
879 val = TREE_INT_CST_LOW (t);
880 if (prec < HOST_BITS_PER_WIDE_INT)
881 val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
/* Negation overflows only for the most negative value, i.e. when the
   truncated constant equals 1 << (prec - 1).  */
882 return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
/* Presumably the COMPLEX_CST case: negatable iff both parts are.  */
889 return negate_expr_p (TREE_REALPART (t))
890 && negate_expr_p (TREE_IMAGPART (t));
/* Presumably the PLUS_EXPR case.  */
893 if (FLOAT_TYPE_P (type) && !flag_unsafe_math_optimizations)
895 /* -(A + B) -> (-B) - A. */
896 if (negate_expr_p (TREE_OPERAND (t, 1))
897 && reorder_operands_p (TREE_OPERAND (t, 0),
898 TREE_OPERAND (t, 1)))
900 /* -(A + B) -> (-A) - B. */
901 return negate_expr_p (TREE_OPERAND (t, 0));
/* Presumably the MINUS_EXPR case.  */
904 /* We can't turn -(A-B) into B-A when we honor signed zeros. */
905 return (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
906 && reorder_operands_p (TREE_OPERAND (t, 0),
907 TREE_OPERAND (t, 1));
/* Presumably MULT_EXPR falling through to a division case: either
   operand may be negated when sign-dependent rounding is not honored.  */
910 if (TYPE_UNSIGNED (TREE_TYPE (t)))
916 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
917 return negate_expr_p (TREE_OPERAND (t, 1))
918 || negate_expr_p (TREE_OPERAND (t, 0));
/* Presumably the NOP_EXPR (conversion) case.  */
922 /* Negate -((double)float) as (double)(-float). */
923 if (TREE_CODE (type) == REAL_TYPE)
925 tree tem = strip_float_extensions (t);
927 return negate_expr_p (tem);
/* Presumably the CALL_EXPR case.  */
932 /* Negate -f(x) as f(-x). */
933 if (negate_mathfn_p (builtin_mathfn_code (t)))
934 return negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1)));
/* Presumably the RSHIFT_EXPR case.  */
938 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
939 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
941 tree op1 = TREE_OPERAND (t, 1);
942 if (TREE_INT_CST_HIGH (op1) == 0
943 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
944 == TREE_INT_CST_LOW (op1))
955 /* Given T, an expression, return the negation of T. Allow for T to be
956 null, in which case return null. */
/* NOTE(review): the function signature (presumably `static tree
   negate_expr (tree t)'), local declarations, braces and switch case
   labels are missing from this dump; the surviving statements below are
   kept byte-for-byte.  The inserted case markers are inferred -- verify
   against the upstream file.  */
967 type = TREE_TYPE (t);
970 switch (TREE_CODE (t))
/* Presumably the INTEGER_CST case: fold the negated constant unless a
   trapping signed overflow must be preserved.  */
973 tem = fold_negate_const (t, type);
974 if (! TREE_OVERFLOW (tem)
975 || TYPE_UNSIGNED (type)
/* Presumably the REAL_CST case.  */
981 tem = fold_negate_const (t, type);
982 /* Two's complement FP formats, such as c4x, may overflow. */
983 if (! TREE_OVERFLOW (tem) || ! flag_trapping_math)
984 return fold_convert (type, tem);
/* Presumably the COMPLEX_CST case: negate both parts.  */
989 tree rpart = negate_expr (TREE_REALPART (t));
990 tree ipart = negate_expr (TREE_IMAGPART (t));
992 if ((TREE_CODE (rpart) == REAL_CST
993 && TREE_CODE (ipart) == REAL_CST)
994 || (TREE_CODE (rpart) == INTEGER_CST
995 && TREE_CODE (ipart) == INTEGER_CST))
996 return build_complex (type, rpart, ipart);
/* Presumably the NEGATE_EXPR case: -(-A) -> A.  */
1001 return fold_convert (type, TREE_OPERAND (t, 0));
/* Presumably the PLUS_EXPR case.  */
1004 if (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
1006 /* -(A + B) -> (-B) - A. */
1007 if (negate_expr_p (TREE_OPERAND (t, 1))
1008 && reorder_operands_p (TREE_OPERAND (t, 0),
1009 TREE_OPERAND (t, 1)))
1011 tem = negate_expr (TREE_OPERAND (t, 1));
1012 tem = fold (build2 (MINUS_EXPR, TREE_TYPE (t),
1013 tem, TREE_OPERAND (t, 0)));
1014 return fold_convert (type, tem);
1017 /* -(A + B) -> (-A) - B. */
1018 if (negate_expr_p (TREE_OPERAND (t, 0)))
1020 tem = negate_expr (TREE_OPERAND (t, 0));
1021 tem = fold (build2 (MINUS_EXPR, TREE_TYPE (t),
1022 tem, TREE_OPERAND (t, 1)));
1023 return fold_convert (type, tem);
/* Presumably the MINUS_EXPR case.  */
1029 /* - (A - B) -> B - A */
1030 if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
1031 && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
1032 return fold_convert (type,
1033 fold (build2 (MINUS_EXPR, TREE_TYPE (t),
1034 TREE_OPERAND (t, 1),
1035 TREE_OPERAND (t, 0))));
/* Presumably MULT_EXPR falling through to a division case: push the
   negation into whichever operand is cheaply negatable.  */
1039 if (TYPE_UNSIGNED (TREE_TYPE (t)))
1045 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
1047 tem = TREE_OPERAND (t, 1);
1048 if (negate_expr_p (tem))
1049 return fold_convert (type,
1050 fold (build2 (TREE_CODE (t), TREE_TYPE (t),
1051 TREE_OPERAND (t, 0),
1052 negate_expr (tem))));
1053 tem = TREE_OPERAND (t, 0);
1054 if (negate_expr_p (tem))
1055 return fold_convert (type,
1056 fold (build2 (TREE_CODE (t), TREE_TYPE (t),
1058 TREE_OPERAND (t, 1))));
/* Presumably the NOP_EXPR (conversion) case.  */
1063 /* Convert -((double)float) into (double)(-float). */
1064 if (TREE_CODE (type) == REAL_TYPE)
1066 tem = strip_float_extensions (t);
1067 if (tem != t && negate_expr_p (tem))
1068 return fold_convert (type, negate_expr (tem));
/* Presumably the CALL_EXPR case: rebuild the call with a negated
   argument.  */
1073 /* Negate -f(x) as f(-x). */
1074 if (negate_mathfn_p (builtin_mathfn_code (t))
1075 && negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1))))
1077 tree fndecl, arg, arglist;
1079 fndecl = get_callee_fndecl (t);
1080 arg = negate_expr (TREE_VALUE (TREE_OPERAND (t, 1)));
1081 arglist = build_tree_list (NULL_TREE, arg);
1082 return build_function_call_expr (fndecl, arglist);
/* Presumably the RSHIFT_EXPR case.  */
1087 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
1088 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
1090 tree op1 = TREE_OPERAND (t, 1);
1091 if (TREE_INT_CST_HIGH (op1) == 0
1092 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
1093 == TREE_INT_CST_LOW (op1))
1095 tree ntype = TYPE_UNSIGNED (type)
1096 ? lang_hooks.types.signed_type (type)
1097 : lang_hooks.types.unsigned_type (type);
1098 tree temp = fold_convert (ntype, TREE_OPERAND (t, 0));
1099 temp = fold (build2 (RSHIFT_EXPR, ntype, temp, op1));
1100 return fold_convert (type, temp);
/* Fallback after the switch: build an explicit NEGATE_EXPR.  */
1109 tem = fold (build1 (NEGATE_EXPR, TREE_TYPE (t), t));
1110 return fold_convert (type, tem);
1113 /* Split a tree IN into a constant, literal and variable parts that could be
1114 combined with CODE to make IN. "constant" means an expression with
1115 TREE_CONSTANT but that isn't an actual constant. CODE must be a
1116 commutative arithmetic operation. Store the constant part into *CONP,
1117 the literal in *LITP and return the variable part. If a part isn't
1118 present, set it to null. If the tree does not decompose in this way,
1119 return the entire tree as the variable part and the other parts as null.
1121 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR. In that
1122 case, we negate an operand that was subtracted. Except if it is a
1123 literal for which we use *MINUS_LITP instead.
1125 If NEGATE_P is true, we are negating all of IN, again except a literal
1126 for which we use *MINUS_LITP instead.
1128 If IN is itself a literal or constant, return it as appropriate.
1130 Note that we do not guarantee that any of the three values will be the
1131 same type as IN, but they will have the same signedness and mode. */
1134 split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
1135 tree *minus_litp, int negate_p)
1143 /* Strip any conversions that don't change the machine mode or signedness. */
1144 STRIP_SIGN_NOPS (in);
1146 if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST)
1148 else if (TREE_CODE (in) == code
1149 || (! FLOAT_TYPE_P (TREE_TYPE (in))
1150 /* We can associate addition and subtraction together (even
1151 though the C standard doesn't say so) for integers because
1152 the value is not affected. For reals, the value might be
1153 affected, so we can't. */
1154 && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
1155 || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
1157 tree op0 = TREE_OPERAND (in, 0);
1158 tree op1 = TREE_OPERAND (in, 1);
1159 int neg1_p = TREE_CODE (in) == MINUS_EXPR;
1160 int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;
1162 /* First see if either of the operands is a literal, then a constant. */
1163 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
1164 *litp = op0, op0 = 0;
1165 else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST)
1166 *litp = op1, neg_litp_p = neg1_p, op1 = 0;
1168 if (op0 != 0 && TREE_CONSTANT (op0))
1169 *conp = op0, op0 = 0;
1170 else if (op1 != 0 && TREE_CONSTANT (op1))
1171 *conp = op1, neg_conp_p = neg1_p, op1 = 0;
1173 /* If we haven't dealt with either operand, this is not a case we can
1174 decompose. Otherwise, VAR is either of the ones remaining, if any. */
1175 if (op0 != 0 && op1 != 0)
1180 var = op1, neg_var_p = neg1_p;
1182 /* Now do any needed negations. */
1184 *minus_litp = *litp, *litp = 0;
1186 *conp = negate_expr (*conp);
1188 var = negate_expr (var);
1190 else if (TREE_CONSTANT (in))
1198 *minus_litp = *litp, *litp = 0;
1199 else if (*minus_litp)
1200 *litp = *minus_litp, *minus_litp = 0;
1201 *conp = negate_expr (*conp);
1202 var = negate_expr (var);
1208 /* Re-associate trees split by the above function. T1 and T2 are either
1209 expressions to associate or null. Return the new expression, if any. If
1210 we build an operation, do it in TYPE and with CODE. */
1213 associate_trees (tree t1, tree t2, enum tree_code code, tree type)
1220 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
1221 try to fold this since we will have infinite recursion. But do
1222 deal with any NEGATE_EXPRs. */
1223 if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
1224 || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
1226 if (code == PLUS_EXPR)
1228 if (TREE_CODE (t1) == NEGATE_EXPR)
1229 return build2 (MINUS_EXPR, type, fold_convert (type, t2),
1230 fold_convert (type, TREE_OPERAND (t1, 0)));
1231 else if (TREE_CODE (t2) == NEGATE_EXPR)
1232 return build2 (MINUS_EXPR, type, fold_convert (type, t1),
1233 fold_convert (type, TREE_OPERAND (t2, 0)));
1235 return build2 (code, type, fold_convert (type, t1),
1236 fold_convert (type, t2));
1239 return fold (build2 (code, type, fold_convert (type, t1),
1240 fold_convert (type, t2)));
1243 /* Combine two integer constants ARG1 and ARG2 under operation CODE
1244 to produce a new constant.
1246 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
1249 int_const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
/* Each operand is a double-word integer held as a low/high pair of
   HOST_WIDE_INTs; the result accumulates into LOW/HI below.  */
1251 unsigned HOST_WIDE_INT int1l, int2l;
1252 HOST_WIDE_INT int1h, int2h;
1253 unsigned HOST_WIDE_INT low;
/* Scratch outputs for div_and_round_double when only one of
   quotient/remainder is wanted.  */
1255 unsigned HOST_WIDE_INT garbagel;
1256 HOST_WIDE_INT garbageh;
1258 tree type = TREE_TYPE (arg1);
1259 int uns = TYPE_UNSIGNED (type);
1261 = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
1263 int no_overflow = 0;
1265 int1l = TREE_INT_CST_LOW (arg1);
1266 int1h = TREE_INT_CST_HIGH (arg1);
1267 int2l = TREE_INT_CST_LOW (arg2);
1268 int2h = TREE_INT_CST_HIGH (arg2);
/* Bitwise operations work independently on the two halves.  */
1273 low = int1l | int2l, hi = int1h | int2h;
1277 low = int1l ^ int2l, hi = int1h ^ int2h;
1281 low = int1l & int2l, hi = int1h & int2h;
1287 /* It's unclear from the C standard whether shifts can overflow.
1288 The following code ignores overflow; perhaps a C standard
1289 interpretation ruling is needed. */
1290 lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
1298 lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
1303 overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
/* MINUS is implemented as negate-then-add; OVERFLOW_SUM_SIGN then
   derives the signed-overflow indication for the subtraction.  */
1307 neg_double (int2l, int2h, &low, &hi);
1308 add_double (int1l, int1h, low, hi, &low, &hi);
1309 overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
1313 overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
1316 case TRUNC_DIV_EXPR:
1317 case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
1318 case EXACT_DIV_EXPR:
1319 /* This is a shortcut for a common special case. */
/* Shortcut applies only when both operands are small (single-word),
   nonnegative, and free of prior overflow.  */
1320 if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1321 && ! TREE_CONSTANT_OVERFLOW (arg1)
1322 && ! TREE_CONSTANT_OVERFLOW (arg2)
1323 && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1325 if (code == CEIL_DIV_EXPR)
1328 low = int1l / int2l, hi = 0;
1332 /* ... fall through ... */
1334 case ROUND_DIV_EXPR:
/* Division by one returns the dividend unchanged.  */
1335 if (int2h == 0 && int2l == 1)
1337 low = int1l, hi = int1h;
/* Nonzero operand divided by itself (result handling elided in this
   excerpt — presumably yields 1).  */
1340 if (int1l == int2l && int1h == int2h
1341 && ! (int1l == 0 && int1h == 0))
1346 overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
1347 &low, &hi, &garbagel, &garbageh);
1350 case TRUNC_MOD_EXPR:
1351 case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
1352 /* This is a shortcut for a common special case. */
1353 if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1354 && ! TREE_CONSTANT_OVERFLOW (arg1)
1355 && ! TREE_CONSTANT_OVERFLOW (arg2)
1356 && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1358 if (code == CEIL_MOD_EXPR)
1360 low = int1l % int2l, hi = 0;
1364 /* ... fall through ... */
1366 case ROUND_MOD_EXPR:
/* For MOD the remainder outputs are kept and the quotient discarded.  */
1367 overflow = div_and_round_double (code, uns,
1368 int1l, int1h, int2l, int2h,
1369 &garbagel, &garbageh, &low, &hi);
/* MIN/MAX: compare double-word values, unsigned or signed per UNS.  */
1375 low = (((unsigned HOST_WIDE_INT) int1h
1376 < (unsigned HOST_WIDE_INT) int2h)
1377 || (((unsigned HOST_WIDE_INT) int1h
1378 == (unsigned HOST_WIDE_INT) int2h)
1381 low = (int1h < int2h
1382 || (int1h == int2h && int1l < int2l));
/* LOW now holds the boolean "arg1 < arg2"; pick the operand that the
   MIN/MAX semantics require.  */
1384 if (low == (code == MIN_EXPR))
1385 low = int1l, hi = int1h;
1387 low = int2l, hi = int2h;
1394 /* If this is for a sizetype, can be represented as one (signed)
1395 HOST_WIDE_INT word, and doesn't overflow, use size_int since it caches
1398 && ((hi == 0 && (HOST_WIDE_INT) low >= 0)
1399 || (hi == -1 && (HOST_WIDE_INT) low < 0))
1400 && overflow == 0 && ! TREE_OVERFLOW (arg1) && ! TREE_OVERFLOW (arg2))
1401 return size_int_type (low, type);
1404 t = build_int_2 (low, hi);
1405 TREE_TYPE (t) = TREE_TYPE (arg1);
/* Set TREE_OVERFLOW: when NOTRUNC, use the raw overflow flag (elided
   condition — presumably "notrunc ?"); otherwise let force_fit_type
   truncate and report.  OR in overflow from the inputs.  */
1410 ? (!uns || is_sizetype) && overflow
1411 : (force_fit_type (t, (!uns || is_sizetype) && overflow)
1413 | TREE_OVERFLOW (arg1)
1414 | TREE_OVERFLOW (arg2));
1416 /* If we're doing a size calculation, unsigned arithmetic does overflow.
1417 So check if force_fit_type truncated the value. */
1419 && ! TREE_OVERFLOW (t)
1420 && (TREE_INT_CST_HIGH (t) != hi
1421 || TREE_INT_CST_LOW (t) != low))
1422 TREE_OVERFLOW (t) = 1;
1424 TREE_CONSTANT_OVERFLOW (t) = (TREE_OVERFLOW (t)
1425 | TREE_CONSTANT_OVERFLOW (arg1)
1426 | TREE_CONSTANT_OVERFLOW (arg2));
1430 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1431 constant. We assume ARG1 and ARG2 have the same data type, or at least
1432 are the same kind of constant and the same machine mode.
1434 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
1437 const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
/* Dispatch on the kind of constant: integer, real, or complex.  */
1442 if (TREE_CODE (arg1) == INTEGER_CST)
1443 return int_const_binop (code, arg1, arg2, notrunc);
1445 if (TREE_CODE (arg1) == REAL_CST)
1447 enum machine_mode mode;
1450 REAL_VALUE_TYPE value;
1453 d1 = TREE_REAL_CST (arg1);
1454 d2 = TREE_REAL_CST (arg2);
1456 type = TREE_TYPE (arg1);
1457 mode = TYPE_MODE (type);
1459 /* Don't perform operation if we honor signaling NaNs and
1460 either operand is a NaN. */
1461 if (HONOR_SNANS (mode)
1462 && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
1465 /* Don't perform operation if it would raise a division
1466 by zero exception. */
1467 if (code == RDIV_EXPR
1468 && REAL_VALUES_EQUAL (d2, dconst0)
1469 && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
1472 /* If either operand is a NaN, just return it. Otherwise, set up
1473 for floating-point trap; we return an overflow. */
1474 if (REAL_VALUE_ISNAN (d1))
1476 else if (REAL_VALUE_ISNAN (d2))
/* Perform the operation in the target's real-arithmetic emulation,
   then truncate to the target mode.  */
1479 REAL_ARITHMETIC (value, code, d1, d2);
1481 t = build_real (type, real_value_truncate (mode, value));
1484 = (force_fit_type (t, 0)
1485 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
1486 TREE_CONSTANT_OVERFLOW (t)
1488 | TREE_CONSTANT_OVERFLOW (arg1)
1489 | TREE_CONSTANT_OVERFLOW (arg2);
1492 if (TREE_CODE (arg1) == COMPLEX_CST)
1494 tree type = TREE_TYPE (arg1);
1495 tree r1 = TREE_REALPART (arg1);
1496 tree i1 = TREE_IMAGPART (arg1);
1497 tree r2 = TREE_REALPART (arg2);
1498 tree i2 = TREE_IMAGPART (arg2);
/* Complex addition: component-wise.  */
1504 t = build_complex (type,
1505 const_binop (PLUS_EXPR, r1, r2, notrunc),
1506 const_binop (PLUS_EXPR, i1, i2, notrunc));
/* Complex subtraction: component-wise.  */
1510 t = build_complex (type,
1511 const_binop (MINUS_EXPR, r1, r2, notrunc),
1512 const_binop (MINUS_EXPR, i1, i2, notrunc));
/* Complex multiplication: (r1*r2 - i1*i2) + (r1*i2 + i1*r2)i.
   (Some operand lines are elided in this excerpt.)  */
1516 t = build_complex (type,
1517 const_binop (MINUS_EXPR,
1518 const_binop (MULT_EXPR,
1520 const_binop (MULT_EXPR,
1523 const_binop (PLUS_EXPR,
1524 const_binop (MULT_EXPR,
1526 const_binop (MULT_EXPR,
/* Complex division: divide by the squared magnitude of ARG2,
   using TRUNC_DIV for integral component types, RDIV otherwise.  */
1534 = const_binop (PLUS_EXPR,
1535 const_binop (MULT_EXPR, r2, r2, notrunc),
1536 const_binop (MULT_EXPR, i2, i2, notrunc),
1539 t = build_complex (type,
1541 (INTEGRAL_TYPE_P (TREE_TYPE (r1))
1542 ? TRUNC_DIV_EXPR : RDIV_EXPR,
1543 const_binop (PLUS_EXPR,
1544 const_binop (MULT_EXPR, r1, r2,
1546 const_binop (MULT_EXPR, i1, i2,
1549 magsquared, notrunc),
1551 (INTEGRAL_TYPE_P (TREE_TYPE (r1))
1552 ? TRUNC_DIV_EXPR : RDIV_EXPR,
1553 const_binop (MINUS_EXPR,
1554 const_binop (MULT_EXPR, i1, r2,
1556 const_binop (MULT_EXPR, r1, i2,
1559 magsquared, notrunc));
1571 /* These are the hash table functions for the hash table of INTEGER_CST
1572 nodes of a sizetype. */
1574 /* Return the hash code code X, an INTEGER_CST. */
1577 size_htab_hash (const void *x)
/* Combine the constant's two value words, the identity of its type node,
   and the overflow bit, so entries differing in any of those hash apart.  */
1581 return (TREE_INT_CST_HIGH (t) ^ TREE_INT_CST_LOW (t)
1582 ^ htab_hash_pointer (TREE_TYPE (t))
1583 ^ (TREE_OVERFLOW (t) << 20));
1586 /* Return nonzero if the value represented by *X (an INTEGER_CST tree node)
1587 is the same as that given by *Y, which is the same. */
1590 size_htab_eq (const void *x, const void *y)
/* Equality requires identical value words, the same type node (pointer
   identity), and matching overflow status — mirroring size_htab_hash.  */
1595 return (TREE_INT_CST_HIGH (xt) == TREE_INT_CST_HIGH (yt)
1596 && TREE_INT_CST_LOW (xt) == TREE_INT_CST_LOW (yt)
1597 && TREE_TYPE (xt) == TREE_TYPE (yt)
1598 && TREE_OVERFLOW (xt) == TREE_OVERFLOW (yt));
1601 /* Return an INTEGER_CST with value whose low-order HOST_BITS_PER_WIDE_INT
1602 bits are given by NUMBER and of the sizetype represented by KIND. */
1605 size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
/* Thin wrapper: map KIND to the concrete sizetype and delegate.  */
1607 return size_int_type (number, sizetype_tab[(int) kind]);
1610 /* Likewise, but the desired type is specified explicitly. */
/* NEW_CONST is a reusable GC-rooted scratch INTEGER_CST; SIZE_HTAB caches
   previously built sizetype constants (entries kept only while marked).  */
1612 static GTY (()) tree new_const;
1613 static GTY ((if_marked ("ggc_marked_p"), param_is (union tree_node)))
1617 size_int_type (HOST_WIDE_INT number, tree type)
/* Lazily create the hash table and the first scratch node.  */
1623 size_htab = htab_create_ggc (1024, size_htab_hash, size_htab_eq, NULL);
1624 new_const = make_node (INTEGER_CST);
1627 /* Adjust NEW_CONST to be the constant we want. If it's already in the
1628 hash table, we return the value from the hash table. Otherwise, we
1629 place that in the hash table and make a new node for the next time. */
1630 TREE_INT_CST_LOW (new_const) = number;
/* Sign-extend NUMBER into the high word.  */
1631 TREE_INT_CST_HIGH (new_const) = number < 0 ? -1 : 0;
1632 TREE_TYPE (new_const) = type;
1633 TREE_OVERFLOW (new_const) = TREE_CONSTANT_OVERFLOW (new_const)
1634 = force_fit_type (new_const, 0);
1636 slot = htab_find_slot (size_htab, new_const, INSERT);
/* On a miss (handling elided in this excerpt) the scratch node is
   installed in the table and a fresh scratch node is made for next time.  */
1642 new_const = make_node (INTEGER_CST);
1646 return (tree) *slot;
1649 /* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE
1650 is a tree code. The type of the result is taken from the operands.
1651 Both must be the same type integer type and it must be a size type.
1652 If the operands are constant, so is the result. */
1655 size_binop (enum tree_code code, tree arg0, tree arg1)
1657 tree type = TREE_TYPE (arg0);
/* Sanity check: both operands must share a single sizetype.  (The action
   on failure is elided in this excerpt — presumably an abort.)  */
1659 if (TREE_CODE (type) != INTEGER_TYPE || ! TYPE_IS_SIZETYPE (type)
1660 || type != TREE_TYPE (arg1))
1663 /* Handle the special case of two integer constants faster. */
1664 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
1666 /* And some specific cases even faster than that. */
/* 0 + x, x +/- 0, 1 * x: identity operations, no arithmetic needed.  */
1667 if (code == PLUS_EXPR && integer_zerop (arg0))
1669 else if ((code == MINUS_EXPR || code == PLUS_EXPR)
1670 && integer_zerop (arg1))
1672 else if (code == MULT_EXPR && integer_onep (arg0))
1675 /* Handle general case of two integer constants. */
1676 return int_const_binop (code, arg0, arg1, 0);
1679 if (arg0 == error_mark_node || arg1 == error_mark_node)
1680 return error_mark_node;
/* Non-constant operands: build the expression and let fold simplify.  */
1682 return fold (build2 (code, type, arg0, arg1));
1685 /* Given two values, either both of sizetype or both of bitsizetype,
1686 compute the difference between the two values. Return the value
1687 in signed type corresponding to the type of the operands. */
1690 size_diffop (tree arg0, tree arg1)
1692 tree type = TREE_TYPE (arg0);
/* Both operands must share one sizetype (failure handling elided).  */
1695 if (TREE_CODE (type) != INTEGER_TYPE || ! TYPE_IS_SIZETYPE (type)
1696 || type != TREE_TYPE (arg1))
1699 /* If the type is already signed, just do the simple thing. */
1700 if (!TYPE_UNSIGNED (type))
1701 return size_binop (MINUS_EXPR, arg0, arg1);
/* Select the signed counterpart of the operands' sizetype.  */
1703 ctype = (type == bitsizetype || type == ubitsizetype
1704 ? sbitsizetype : ssizetype);
1706 /* If either operand is not a constant, do the conversions to the signed
1707 type and subtract. The hardware will do the right thing with any
1708 overflow in the subtraction. */
1709 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
1710 return size_binop (MINUS_EXPR, fold_convert (ctype, arg0),
1711 fold_convert (ctype, arg1));
1713 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
1714 Otherwise, subtract the other way, convert to CTYPE (we know that can't
1715 overflow) and negate (which can't either). Special-case a result
1716 of zero while we're here. */
1717 if (tree_int_cst_equal (arg0, arg1))
1718 return fold_convert (ctype, integer_zero_node);
1719 else if (tree_int_cst_lt (arg1, arg0))
1720 return fold_convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
/* arg0 < arg1: compute 0 - (arg1 - arg0) in the signed type.  */
1722 return size_binop (MINUS_EXPR, fold_convert (ctype, integer_zero_node),
1723 fold_convert (ctype, size_binop (MINUS_EXPR,
1728 /* Attempt to fold type conversion operation CODE of expression ARG1 to
1729 type TYPE. If no simplification can be done return NULL_TREE. */
1732 fold_convert_const (enum tree_code code, tree type, tree arg1)
/* Identity conversion: nothing to do.  */
1737 if (TREE_TYPE (arg1) == type)
1740 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
1742 if (TREE_CODE (arg1) == INTEGER_CST)
1744 /* If we would build a constant wider than GCC supports,
1745 leave the conversion unfolded. */
1746 if (TYPE_PRECISION (type) > 2 * HOST_BITS_PER_WIDE_INT)
1749 /* If we are trying to make a sizetype for a small integer, use
1750 size_int to pick up cached types to reduce duplicate nodes. */
1751 if (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
1752 && !TREE_CONSTANT_OVERFLOW (arg1)
1753 && compare_tree_int (arg1, 10000) < 0)
1754 return size_int_type (TREE_INT_CST_LOW (arg1), type);
1756 /* Given an integer constant, make new constant with new type,
1757 appropriately sign-extended or truncated. */
1758 t = build_int_2 (TREE_INT_CST_LOW (arg1),
1759 TREE_INT_CST_HIGH (arg1));
1760 TREE_TYPE (t) = type;
1761 /* Indicate an overflow if (1) ARG1 already overflowed,
1762 or (2) force_fit_type indicates an overflow.
1763 Tell force_fit_type that an overflow has already occurred
1764 if ARG1 is a too-large unsigned value and T is signed.
1765 But don't indicate an overflow if converting a pointer. */
1767 = ((force_fit_type (t,
1768 (TREE_INT_CST_HIGH (arg1) < 0
1769 && (TYPE_UNSIGNED (type)
1770 < TYPE_UNSIGNED (TREE_TYPE (arg1)))))
1771 && ! POINTER_TYPE_P (TREE_TYPE (arg1)))
1772 || TREE_OVERFLOW (arg1));
1773 TREE_CONSTANT_OVERFLOW (t)
1774 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
1777 else if (TREE_CODE (arg1) == REAL_CST)
1779 /* The following code implements the floating point to integer
1780 conversion rules required by the Java Language Specification,
1781 that IEEE NaNs are mapped to zero and values that overflow
1782 the target precision saturate, i.e. values greater than
1783 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
1784 are mapped to INT_MIN. These semantics are allowed by the
1785 C and C++ standards that simply state that the behavior of
1786 FP-to-integer conversion is unspecified upon overflow. */
1788 HOST_WIDE_INT high, low;
1791 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
/* Apply the rounding direction implied by the FIX_* tree code.  */
1795 case FIX_TRUNC_EXPR:
1796 real_trunc (&r, VOIDmode, &x);
1800 real_ceil (&r, VOIDmode, &x);
1803 case FIX_FLOOR_EXPR:
1804 real_floor (&r, VOIDmode, &x);
1807 case FIX_ROUND_EXPR:
1808 real_round (&r, VOIDmode, &x);
1815 /* If R is NaN, return zero and show we have an overflow. */
1816 if (REAL_VALUE_ISNAN (r))
1823 /* See if R is less than the lower bound or greater than the
/* Below the minimum: saturate to TYPE_MIN_VALUE.  */
1828 tree lt = TYPE_MIN_VALUE (type);
1829 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
1830 if (REAL_VALUES_LESS (r, l))
1833 high = TREE_INT_CST_HIGH (lt);
1834 low = TREE_INT_CST_LOW (lt);
/* Above the maximum: saturate to TYPE_MAX_VALUE.  */
1840 tree ut = TYPE_MAX_VALUE (type);
1843 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
1844 if (REAL_VALUES_LESS (u, r))
1847 high = TREE_INT_CST_HIGH (ut);
1848 low = TREE_INT_CST_LOW (ut);
/* In range: do the actual real-to-integer conversion.  */
1854 REAL_VALUE_TO_INT (&low, &high, r);
1856 t = build_int_2 (low, high);
1857 TREE_TYPE (t) = type;
1859 = TREE_OVERFLOW (arg1) | force_fit_type (t, overflow);
1860 TREE_CONSTANT_OVERFLOW (t)
1861 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
1865 else if (TREE_CODE (type) == REAL_TYPE)
1867 if (TREE_CODE (arg1) == INTEGER_CST)
1868 return build_real_from_int_cst (type, arg1);
1869 if (TREE_CODE (arg1) == REAL_CST)
1871 if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1)))
1873 /* We make a copy of ARG1 so that we don't modify an
1874 existing constant tree. */
1875 t = copy_node (arg1);
1876 TREE_TYPE (t) = type;
/* Non-NaN real-to-real: truncate the value to the target mode.  */
1880 t = build_real (type,
1881 real_value_truncate (TYPE_MODE (type),
1882 TREE_REAL_CST (arg1)));
1885 = TREE_OVERFLOW (arg1) | force_fit_type (t, 0);
1886 TREE_CONSTANT_OVERFLOW (t)
1887 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
1894 /* Convert expression ARG to type TYPE. Used by the middle-end for
1895 simple conversions in preference to calling the front-end's convert. */
1898 fold_convert (tree type, tree arg)
1900 tree orig = TREE_TYPE (arg);
/* Propagate errors rather than trying to convert them.  */
1906 if (TREE_CODE (arg) == ERROR_MARK
1907 || TREE_CODE (type) == ERROR_MARK
1908 || TREE_CODE (orig) == ERROR_MARK)
1909 return error_mark_node;
/* Same (or compatible) main variant: a plain NOP conversion suffices.  */
1911 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig)
1912 || lang_hooks.types_compatible_p (TYPE_MAIN_VARIANT (type),
1913 TYPE_MAIN_VARIANT (orig)))
1914 return fold (build1 (NOP_EXPR, type, arg));
/* Target is integral, pointer, or offset type.  */
1916 if (INTEGRAL_TYPE_P (type) || POINTER_TYPE_P (type)
1917 || TREE_CODE (type) == OFFSET_TYPE)
1919 if (TREE_CODE (arg) == INTEGER_CST)
1921 tem = fold_convert_const (NOP_EXPR, type, arg);
1922 if (tem != NULL_TREE)
1925 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
1926 || TREE_CODE (orig) == OFFSET_TYPE)
1927 return fold (build1 (NOP_EXPR, type, arg));
/* Complex source: convert its real part only.  */
1928 if (TREE_CODE (orig) == COMPLEX_TYPE)
1930 tem = fold (build1 (REALPART_EXPR, TREE_TYPE (orig), arg));
1931 return fold_convert (type, tem);
/* Same-size vector source: bit-reinterpreting NOP.  */
1933 if (TREE_CODE (orig) == VECTOR_TYPE
1934 && GET_MODE_SIZE (TYPE_MODE (type))
1935 == GET_MODE_SIZE (TYPE_MODE (orig)))
1936 return fold (build1 (NOP_EXPR, type, arg));
1938 else if (TREE_CODE (type) == REAL_TYPE)
1940 if (TREE_CODE (arg) == INTEGER_CST)
1942 tem = fold_convert_const (FLOAT_EXPR, type, arg);
1943 if (tem != NULL_TREE)
1946 else if (TREE_CODE (arg) == REAL_CST)
1948 tem = fold_convert_const (NOP_EXPR, type, arg);
1949 if (tem != NULL_TREE)
1953 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig))
1954 return fold (build1 (FLOAT_EXPR, type, arg));
/* Real-to-real: under -ffloat-store use CONVERT_EXPR so the conversion
   is not discarded as a no-op NOP.  */
1955 if (TREE_CODE (orig) == REAL_TYPE)
1956 return fold (build1 (flag_float_store ? CONVERT_EXPR : NOP_EXPR,
1958 if (TREE_CODE (orig) == COMPLEX_TYPE)
1960 tem = fold (build1 (REALPART_EXPR, TREE_TYPE (orig), arg));
1961 return fold_convert (type, tem);
1964 else if (TREE_CODE (type) == COMPLEX_TYPE)
/* Scalar-to-complex: converted value plus a zero imaginary part.  */
1966 if (INTEGRAL_TYPE_P (orig)
1967 || POINTER_TYPE_P (orig)
1968 || TREE_CODE (orig) == REAL_TYPE)
1969 return build2 (COMPLEX_EXPR, type,
1970 fold_convert (TREE_TYPE (type), arg),
1971 fold_convert (TREE_TYPE (type), integer_zero_node));
1972 if (TREE_CODE (orig) == COMPLEX_TYPE)
1976 if (TREE_CODE (arg) == COMPLEX_EXPR)
1978 rpart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 0));
1979 ipart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 1));
1980 return fold (build2 (COMPLEX_EXPR, type, rpart, ipart));
/* ARG is used twice (real and imaginary part), so protect it with
   save_expr to avoid double evaluation.  */
1983 arg = save_expr (arg);
1984 rpart = fold (build1 (REALPART_EXPR, TREE_TYPE (orig), arg));
1985 ipart = fold (build1 (IMAGPART_EXPR, TREE_TYPE (orig), arg));
1986 rpart = fold_convert (TREE_TYPE (type), rpart);
1987 ipart = fold_convert (TREE_TYPE (type), ipart);
1988 return fold (build2 (COMPLEX_EXPR, type, rpart, ipart));
1991 else if (TREE_CODE (type) == VECTOR_TYPE)
/* Vector conversions are same-size bit reinterpretations only.  */
1993 if ((INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig))
1994 && GET_MODE_SIZE (TYPE_MODE (type))
1995 == GET_MODE_SIZE (TYPE_MODE (orig)))
1996 return fold (build1 (NOP_EXPR, type, arg));
1997 if (TREE_CODE (orig) == VECTOR_TYPE
1998 && GET_MODE_SIZE (TYPE_MODE (type))
1999 == GET_MODE_SIZE (TYPE_MODE (orig)))
2000 return fold (build1 (NOP_EXPR, type, arg));
/* Conversion to void: keep ARG only for its side effects.  */
2002 else if (VOID_TYPE_P (type))
2003 return fold (build1 (CONVERT_EXPR, type, fold_ignored_result (arg)));
2007 /* Return an expr equal to X but certainly not valid as an lvalue. */
/* NOTE(review): the function signature line is elided in this excerpt
   (presumably "tree non_lvalue (tree x)") — confirm against full source.  */
2012 /* We only need to wrap lvalue tree codes. */
2013 switch (TREE_CODE (x))
2025 case ARRAY_RANGE_REF:
2031 case PREINCREMENT_EXPR:
2032 case PREDECREMENT_EXPR:
2035 case TRY_CATCH_EXPR:
2036 case WITH_CLEANUP_EXPR:
2047 /* Assume the worst for front-end tree codes. */
2048 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
/* Listed lvalue-capable codes fall through to here and get wrapped.  */
2052 return build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
2055 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2056 Zero means allow extended lvalues. */
2058 int pedantic_lvalues;
2060 /* When pedantic, return an expr equal to X but certainly not valid as a
2061 pedantic lvalue. Otherwise, return X. */
2064 pedantic_non_lvalue (tree x)
/* Only wrap when pedantic lvalue rules are in force; the plain return
   of X is elided in this excerpt.  */
2066 if (pedantic_lvalues)
2067 return non_lvalue (x);
2072 /* Given a tree comparison code, return the code that is the logical inverse
2073 of the given code. It is not safe to do this for floating-point
2074 comparisons, except for NE_EXPR and EQ_EXPR, so we receive a machine mode
2075 as well: if reversing the comparison is unsafe, return ERROR_MARK. */
2077 static enum tree_code
2078 invert_tree_comparison (enum tree_code code, bool honor_nans)
/* With trapping math and NaNs, inversion changes trap behavior; the
   bail-out return for this case is elided in this excerpt.  */
2080 if (honor_nans && flag_trapping_math)
/* Ordered comparisons invert to their unordered counterparts when NaNs
   must be honored (GT -> UNLE, etc.); case labels elided here.  */
2090 return honor_nans ? UNLE_EXPR : LE_EXPR;
2092 return honor_nans ? UNLT_EXPR : LT_EXPR;
2094 return honor_nans ? UNGE_EXPR : GE_EXPR;
2096 return honor_nans ? UNGT_EXPR : GT_EXPR;
/* ORDERED and UNORDERED are exact logical inverses of each other.  */
2110 return UNORDERED_EXPR;
2111 case UNORDERED_EXPR:
2112 return ORDERED_EXPR;
2118 /* Similar, but return the comparison that results if the operands are
2119 swapped. This is safe for floating-point. */
/* NOTE(review): the body of this function is entirely elided in this
   excerpt (original lines 2123-2141) — confirm against full source.  */
2122 swap_tree_comparison (enum tree_code code)
2143 /* Convert a comparison tree code from an enum tree_code representation
2144 into a compcode bit-based encoding. This function is the inverse of
2145 compcode_to_comparison. */
2147 static enum comparison_code
2148 comparison_to_compcode (enum tree_code code)
/* Switch mapping each comparison code to its COMPCODE_* bit pattern;
   the ordered-comparison cases are elided in this excerpt, only the
   unordered/NaN-aware cases are visible below.  */
2165 return COMPCODE_ORD;
2166 case UNORDERED_EXPR:
2167 return COMPCODE_UNORD;
2169 return COMPCODE_UNLT;
2171 return COMPCODE_UNEQ;
2173 return COMPCODE_UNLE;
2175 return COMPCODE_UNGT;
2177 return COMPCODE_LTGT;
2179 return COMPCODE_UNGE;
2185 /* Convert a compcode bit-based encoding of a comparison operator back
2186 to GCC's enum tree_code representation. This function is the
2187 inverse of comparison_to_compcode. */
2189 static enum tree_code
2190 compcode_to_comparison (enum comparison_code code)
/* Reverse mapping of comparison_to_compcode; most cases are elided in
   this excerpt — only the ORD/UNORD pair is visible.  */
2207 return ORDERED_EXPR;
2208 case COMPCODE_UNORD:
2209 return UNORDERED_EXPR;
2227 /* Return a tree for the comparison which is the combination of
2228 doing the AND or OR (depending on CODE) of the two operations LCODE
2229 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2230 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2231 if this makes the transformation invalid. */
2234 combine_comparisons (enum tree_code code, enum tree_code lcode,
2235 enum tree_code rcode, tree truth_type,
2236 tree ll_arg, tree lr_arg)
2238 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2239 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2240 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2241 enum comparison_code compcode;
/* In the bit encoding, AND/OR of the predicates is literally the
   bitwise AND/OR of their compcodes.  */
2245 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2246 compcode = lcompcode & rcompcode;
2249 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2250 compcode = lcompcode | rcompcode;
/* The branch below runs when NaNs need not be honored (condition line
   elided in this excerpt).  */
2259 /* Eliminate unordered comparisons, as well as LTGT and ORD
2260 which are not used unless the mode has NaNs. */
2261 compcode &= ~COMPCODE_UNORD;
2262 if (compcode == COMPCODE_LTGT)
2263 compcode = COMPCODE_NE;
2264 else if (compcode == COMPCODE_ORD)
2265 compcode = COMPCODE_TRUE;
2267 else if (flag_trapping_math)
2269 /* Check that the original operation and the optimized ones will trap
2270 under the same condition. */
2271 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2272 && (lcompcode != COMPCODE_EQ)
2273 && (lcompcode != COMPCODE_ORD)
2274 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2275 && (rcompcode != COMPCODE_EQ)
2276 && (rcompcode != COMPCODE_ORD);
2277 bool trap = (compcode & COMPCODE_UNORD) == 0
2278 && (compcode != COMPCODE_EQ)
2279 && (compcode != COMPCODE_ORD);
2281 /* In a short-circuited boolean expression the LHS might be
2282 such that the RHS, if evaluated, will never trap. For
2283 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2284 if neither x nor y is NaN. (This is a mixed blessing: for
2285 example, the expression above will never trap, hence
2286 optimizing it to x < y would be invalid). */
2287 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2288 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2291 /* If the comparison was short-circuited, and only the RHS
2292 trapped, we may now generate a spurious trap. */
2294 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2297 /* If we changed the conditions that cause a trap, we lose. */
2298 if ((ltrap || rtrap) != trap)
/* Degenerate combined predicates fold to a constant boolean.  */
2302 if (compcode == COMPCODE_TRUE)
2303 return constant_boolean_node (true, truth_type);
2304 else if (compcode == COMPCODE_FALSE)
2305 return constant_boolean_node (false, truth_type);
2307 return fold (build2 (compcode_to_comparison (compcode),
2308 truth_type, ll_arg, lr_arg));
2311 /* Return nonzero if CODE is a tree code that represents a truth value. */
2314 truth_value_p (enum tree_code code)
/* True for any comparison ('<' class) or logical truth operator.  */
2316 return (TREE_CODE_CLASS (code) == '<'
2317 || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
2318 || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
2319 || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
2322 /* Return nonzero if two operands (typically of the same tree node)
2323 are necessarily equal. If either argument has side-effects this
2324 function returns zero. FLAGS modifies behavior as follows:
2326 If OEP_ONLY_CONST is set, only return nonzero for constants.
2327 This function tests whether the operands are indistinguishable;
2328 it does not test whether they are equal using C's == operation.
2329 The distinction is important for IEEE floating point, because
2330 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2331 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2333 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2334 even though it may hold multiple values during a function.
2335 This is because a GCC tree node guarantees that nothing else is
2336 executed between the evaluation of its "operands" (which may often
2337 be evaluated in arbitrary order). Hence if the operands themselves
2338 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2339 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2340 unset means assuming isochronic (or instantaneous) tree equivalence.
2341 Unless comparing arbitrary expression trees, such as from different
2342 statements, this flag can usually be left unset.
2344 If OEP_PURE_SAME is set, then pure functions with identical arguments
2345 are considered the same. It is used when the caller has other ways
2346 to ensure that global memory is unchanged in between. */
2349 operand_equal_p (tree arg0, tree arg1, unsigned int flags)
2351 /* If one is specified and the other isn't, they aren't equal and if
2352 neither is specified, they are.
2354 ??? This is temporary and is meant only to handle the cases of the
2355 optional operands for COMPONENT_REF and ARRAY_REF. */
2356 if ((arg0 && !arg1) || (!arg0 && arg1))
2358 else if (!arg0 && !arg1)
2360 /* If either is ERROR_MARK, they aren't equal. */
2361 else if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK)
2364 /* If both types don't have the same signedness, then we can't consider
2365 them equal. We must check this before the STRIP_NOPS calls
2366 because they may change the signedness of the arguments. */
2367 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2373 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2374 /* This is needed for conversions and for COMPONENT_REF.
2375 Might as well play it safe and always test this. */
2376 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2377 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2378 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2381 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2382 We don't care about side effects in that case because the SAVE_EXPR
2383 takes care of that for us. In all other cases, two expressions are
2384 equal if they have no side effects. If we have two identical
2385 expressions with side effects that should be treated the same due
2386 to the only side effects being identical SAVE_EXPR's, that will
2387 be detected in the recursive calls below. */
2388 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2389 && (TREE_CODE (arg0) == SAVE_EXPR
2390 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2393 /* Next handle constant cases, those for which we can return 1 even
2394 if ONLY_CONST is set. */
2395 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2396 switch (TREE_CODE (arg0))
/* Constants that overflowed are never considered equal.  */
2399 return (! TREE_CONSTANT_OVERFLOW (arg0)
2400 && ! TREE_CONSTANT_OVERFLOW (arg1)
2401 && tree_int_cst_equal (arg0, arg1));
/* REAL_VALUES_IDENTICAL is bit-identity, which distinguishes -0.0
   from 0.0 as required by the header comment.  */
2404 return (! TREE_CONSTANT_OVERFLOW (arg0)
2405 && ! TREE_CONSTANT_OVERFLOW (arg1)
2406 && REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2407 TREE_REAL_CST (arg1)));
/* Vector constants: walk both element lists in lockstep.  */
2413 if (TREE_CONSTANT_OVERFLOW (arg0)
2414 || TREE_CONSTANT_OVERFLOW (arg1))
2417 v1 = TREE_VECTOR_CST_ELTS (arg0);
2418 v2 = TREE_VECTOR_CST_ELTS (arg1);
2421 if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
2424 v1 = TREE_CHAIN (v1);
2425 v2 = TREE_CHAIN (v2);
2432 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2434 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2438 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2439 && ! memcmp (TREE_STRING_POINTER (arg0),
2440 TREE_STRING_POINTER (arg1),
2441 TREE_STRING_LENGTH (arg0)));
2444 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
/* Past the constant cases: with OEP_ONLY_CONST set, give up here.  */
2450 if (flags & OEP_ONLY_CONST)
2453 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2456 /* Two conversions are equal only if signedness and modes match. */
2457 if ((TREE_CODE (arg0) == NOP_EXPR || TREE_CODE (arg0) == CONVERT_EXPR)
2458 && (TYPE_UNSIGNED (TREE_TYPE (arg0))
2459 != TYPE_UNSIGNED (TREE_TYPE (arg1))))
2462 return operand_equal_p (TREE_OPERAND (arg0, 0),
2463 TREE_OPERAND (arg1, 0), flags);
/* Binary expressions: compare both operand pairs.  */
2467 if (operand_equal_p (TREE_OPERAND (arg0, 0),
2468 TREE_OPERAND (arg1, 0), flags)
2469 && operand_equal_p (TREE_OPERAND (arg0, 1),
2470 TREE_OPERAND (arg1, 1), flags))
2473 /* For commutative ops, allow the other order. */
2474 return (commutative_tree_code (TREE_CODE (arg0))
2475 && operand_equal_p (TREE_OPERAND (arg0, 0),
2476 TREE_OPERAND (arg1, 1), flags)
2477 && operand_equal_p (TREE_OPERAND (arg0, 1),
2478 TREE_OPERAND (arg1, 0), flags));
2481 /* If either of the pointer (or reference) expressions we are
2482 dereferencing contain a side effect, these cannot be equal. */
2483 if (TREE_SIDE_EFFECTS (arg0)
2484 || TREE_SIDE_EFFECTS (arg1))
2487 switch (TREE_CODE (arg0))
2492 return operand_equal_p (TREE_OPERAND (arg0, 0),
2493 TREE_OPERAND (arg1, 0), flags);
/* ARRAY_REF/ARRAY_RANGE_REF carry four operands (base, index, and
   the optional lower-bound/element-size operands handled by the
   null checks at the top of this function).  */
2496 case ARRAY_RANGE_REF:
2497 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2498 TREE_OPERAND (arg1, 0), flags)
2499 && operand_equal_p (TREE_OPERAND (arg0, 1),
2500 TREE_OPERAND (arg1, 1), flags)
2501 && operand_equal_p (TREE_OPERAND (arg0, 2),
2502 TREE_OPERAND (arg1, 2), flags)
2503 && operand_equal_p (TREE_OPERAND (arg0, 3),
2504 TREE_OPERAND (arg1, 3), flags));
2508 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2509 TREE_OPERAND (arg1, 0), flags)
2510 && operand_equal_p (TREE_OPERAND (arg0, 1),
2511 TREE_OPERAND (arg1, 1), flags)
2512 && operand_equal_p (TREE_OPERAND (arg0, 2),
2513 TREE_OPERAND (arg1, 2), flags));
2517 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2518 TREE_OPERAND (arg1, 0), flags)
2519 && operand_equal_p (TREE_OPERAND (arg0, 1),
2520 TREE_OPERAND (arg1, 1), flags)
2521 && operand_equal_p (TREE_OPERAND (arg0, 2),
2522 TREE_OPERAND (arg1, 2), flags));
2528 switch (TREE_CODE (arg0))
2531 case TRUTH_NOT_EXPR:
2532 return operand_equal_p (TREE_OPERAND (arg0, 0),
2533 TREE_OPERAND (arg1, 0), flags);
/* Short-circuit operators are order-sensitive: operands must match
   in the same positions.  */
2535 case TRUTH_ANDIF_EXPR:
2536 case TRUTH_ORIF_EXPR:
2537 return operand_equal_p (TREE_OPERAND (arg0, 0),
2538 TREE_OPERAND (arg1, 0), flags)
2539 && operand_equal_p (TREE_OPERAND (arg0, 1),
2540 TREE_OPERAND (arg1, 1), flags);
/* Non-short-circuit truth ops are commutative: try both orders.  */
2542 case TRUTH_AND_EXPR:
2544 case TRUTH_XOR_EXPR:
2545 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2546 TREE_OPERAND (arg1, 0), flags)
2547 && operand_equal_p (TREE_OPERAND (arg0, 1),
2548 TREE_OPERAND (arg1, 1), flags))
2549 || (operand_equal_p (TREE_OPERAND (arg0, 0),
2550 TREE_OPERAND (arg1, 1), flags)
2551 && operand_equal_p (TREE_OPERAND (arg0, 1),
2552 TREE_OPERAND (arg1, 0), flags));
2555 /* If the CALL_EXPRs call different functions, then they
2556 clearly can not be equal. */
2557 if (! operand_equal_p (TREE_OPERAND (arg0, 0),
2558 TREE_OPERAND (arg1, 0), flags))
/* With OEP_PURE_SAME, calls are comparable if const or pure
   (remaining flag test elided in this excerpt).  */
2562 unsigned int cef = call_expr_flags (arg0);
2563 if (flags & OEP_PURE_SAME)
2564 cef &= ECF_CONST | ECF_PURE;
2571 /* Now see if all the arguments are the same. operand_equal_p
2572 does not handle TREE_LIST, so we walk the operands here
2573 feeding them to operand_equal_p. */
2574 arg0 = TREE_OPERAND (arg0, 1);
2575 arg1 = TREE_OPERAND (arg1, 1);
2576 while (arg0 && arg1)
2578 if (! operand_equal_p (TREE_VALUE (arg0), TREE_VALUE (arg1),
2582 arg0 = TREE_CHAIN (arg0);
2583 arg1 = TREE_CHAIN (arg1);
2586 /* If we get here and both argument lists are exhausted
2587 then the CALL_EXPRs are equal. */
2588 return ! (arg0 || arg1);
2595 /* Consider __builtin_sqrt equal to sqrt. */
2596 return (TREE_CODE (arg0) == FUNCTION_DECL
2597 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2598 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2599 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
2606 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2607 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2609 When in doubt, return 0. */
2612 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2614 int unsignedp1, unsignedpo;
2615 tree primarg0, primarg1, primother;
2616 unsigned int correct_width;
2618 if (operand_equal_p (arg0, arg1, 0))
2621 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2622 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2625 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2626 and see if the inner values are the same. This removes any
2627 signedness comparison, which doesn't matter here. */
2628 primarg0 = arg0, primarg1 = arg1;
2629 STRIP_NOPS (primarg0);
2630 STRIP_NOPS (primarg1);
2631 if (operand_equal_p (primarg0, primarg1, 0))
2634 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2635 actual comparison operand, ARG0.
2637 First throw away any conversions to wider types
2638 already present in the operands. */
2640 primarg1 = get_narrower (arg1, &unsignedp1);
2641 primother = get_narrower (other, &unsignedpo);
2643 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2644 if (unsignedp1 == unsignedpo
2645 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2646 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2648 tree type = TREE_TYPE (arg0);
2650 /* Make sure shorter operand is extended the right way
2651 to match the longer operand. */
2652 primarg1 = fold_convert (lang_hooks.types.signed_or_unsigned_type
2653 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2655 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
2662 /* See if ARG is an expression that is either a comparison or is performing
2663 arithmetic on comparisons. The comparisons must only be comparing
2664 two different values, which will be stored in *CVAL1 and *CVAL2; if
2665 they are nonzero it means that some operands have already been found.
2666 No variables may be used anywhere else in the expression except in the
2667 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2668 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2670 If this is true, return 1. Otherwise, return zero. */
2673 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2675 enum tree_code code = TREE_CODE (arg);
2676 char class = TREE_CODE_CLASS (code);
2678 /* We can handle some of the 'e' cases here. */
2679 if (class == 'e' && code == TRUTH_NOT_EXPR)
2681 else if (class == 'e'
2682 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2683 || code == COMPOUND_EXPR))
2686 else if (class == 'e' && code == SAVE_EXPR
2687 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2689 /* If we've already found a CVAL1 or CVAL2, this expression is
2690 two complex to handle. */
2691 if (*cval1 || *cval2)
2701 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2704 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2705 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2706 cval1, cval2, save_p));
2712 if (code == COND_EXPR)
2713 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2714 cval1, cval2, save_p)
2715 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2716 cval1, cval2, save_p)
2717 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2718 cval1, cval2, save_p));
2722 /* First see if we can handle the first operand, then the second. For
2723 the second operand, we know *CVAL1 can't be zero. It must be that
2724 one side of the comparison is each of the values; test for the
2725 case where this isn't true by failing if the two operands
2728 if (operand_equal_p (TREE_OPERAND (arg, 0),
2729 TREE_OPERAND (arg, 1), 0))
2733 *cval1 = TREE_OPERAND (arg, 0);
2734 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2736 else if (*cval2 == 0)
2737 *cval2 = TREE_OPERAND (arg, 0);
2738 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2743 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2745 else if (*cval2 == 0)
2746 *cval2 = TREE_OPERAND (arg, 1);
2747 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2759 /* ARG is a tree that is known to contain just arithmetic operations and
2760 comparisons. Evaluate the operations in the tree substituting NEW0 for
2761 any occurrence of OLD0 as an operand of a comparison and likewise for
2765 eval_subst (tree arg, tree old0, tree new0, tree old1, tree new1)
2767 tree type = TREE_TYPE (arg);
2768 enum tree_code code = TREE_CODE (arg);
2769 char class = TREE_CODE_CLASS (code);
2771 /* We can handle some of the 'e' cases here. */
2772 if (class == 'e' && code == TRUTH_NOT_EXPR)
2774 else if (class == 'e'
2775 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2781 return fold (build1 (code, type,
2782 eval_subst (TREE_OPERAND (arg, 0),
2783 old0, new0, old1, new1)));
2786 return fold (build2 (code, type,
2787 eval_subst (TREE_OPERAND (arg, 0),
2788 old0, new0, old1, new1),
2789 eval_subst (TREE_OPERAND (arg, 1),
2790 old0, new0, old1, new1)));
2796 return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);
2799 return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);
2802 return fold (build3 (code, type,
2803 eval_subst (TREE_OPERAND (arg, 0),
2804 old0, new0, old1, new1),
2805 eval_subst (TREE_OPERAND (arg, 1),
2806 old0, new0, old1, new1),
2807 eval_subst (TREE_OPERAND (arg, 2),
2808 old0, new0, old1, new1)));
2812 /* Fall through - ??? */
2816 tree arg0 = TREE_OPERAND (arg, 0);
2817 tree arg1 = TREE_OPERAND (arg, 1);
2819 /* We need to check both for exact equality and tree equality. The
2820 former will be true if the operand has a side-effect. In that
2821 case, we know the operand occurred exactly once. */
2823 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
2825 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
2828 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
2830 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
2833 return fold (build2 (code, type, arg0, arg1));
2841 /* Return a tree for the case when the result of an expression is RESULT
2842 converted to TYPE and OMITTED was previously an operand of the expression
2843 but is now not needed (e.g., we folded OMITTED * 0).
2845 If OMITTED has side effects, we must evaluate it. Otherwise, just do
2846 the conversion of RESULT to TYPE. */
2849 omit_one_operand (tree type, tree result, tree omitted)
2851 tree t = fold_convert (type, result);
2853 if (TREE_SIDE_EFFECTS (omitted))
2854 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
2856 return non_lvalue (t);
2859 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
2862 pedantic_omit_one_operand (tree type, tree result, tree omitted)
2864 tree t = fold_convert (type, result);
2866 if (TREE_SIDE_EFFECTS (omitted))
2867 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
2869 return pedantic_non_lvalue (t);
2872 /* Return a tree for the case when the result of an expression is RESULT
2873 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
2874 of the expression but are now not needed.
2876 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
2877 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
2878 evaluated before OMITTED2. Otherwise, if neither has side effects,
2879 just do the conversion of RESULT to TYPE. */
2882 omit_two_operands (tree type, tree result, tree omitted1, tree omitted2)
2884 tree t = fold_convert (type, result);
2886 if (TREE_SIDE_EFFECTS (omitted2))
2887 t = build2 (COMPOUND_EXPR, type, omitted2, t);
2888 if (TREE_SIDE_EFFECTS (omitted1))
2889 t = build2 (COMPOUND_EXPR, type, omitted1, t);
2891 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue (t) : t;
2895 /* Return a simplified tree node for the truth-negation of ARG. This
2896 never alters ARG itself. We assume that ARG is an operation that
2897 returns a truth value (0 or 1).
2899 FIXME: one would think we would fold the result, but it causes
2900 problems with the dominator optimizer. */
2902 invert_truthvalue (tree arg)
2904 tree type = TREE_TYPE (arg);
2905 enum tree_code code = TREE_CODE (arg);
2907 if (code == ERROR_MARK)
2910 /* If this is a comparison, we can simply invert it, except for
2911 floating-point non-equality comparisons, in which case we just
2912 enclose a TRUTH_NOT_EXPR around what we have. */
2914 if (TREE_CODE_CLASS (code) == '<')
2916 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
2917 if (FLOAT_TYPE_P (op_type)
2918 && flag_trapping_math
2919 && code != ORDERED_EXPR && code != UNORDERED_EXPR
2920 && code != NE_EXPR && code != EQ_EXPR)
2921 return build1 (TRUTH_NOT_EXPR, type, arg);
2924 code = invert_tree_comparison (code,
2925 HONOR_NANS (TYPE_MODE (op_type)));
2926 if (code == ERROR_MARK)
2927 return build1 (TRUTH_NOT_EXPR, type, arg);
2929 return build2 (code, type,
2930 TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
2937 return fold_convert (type, build_int_2 (integer_zerop (arg), 0));
2939 case TRUTH_AND_EXPR:
2940 return build2 (TRUTH_OR_EXPR, type,
2941 invert_truthvalue (TREE_OPERAND (arg, 0)),
2942 invert_truthvalue (TREE_OPERAND (arg, 1)));
2945 return build2 (TRUTH_AND_EXPR, type,
2946 invert_truthvalue (TREE_OPERAND (arg, 0)),
2947 invert_truthvalue (TREE_OPERAND (arg, 1)));
2949 case TRUTH_XOR_EXPR:
2950 /* Here we can invert either operand. We invert the first operand
2951 unless the second operand is a TRUTH_NOT_EXPR in which case our
2952 result is the XOR of the first operand with the inside of the
2953 negation of the second operand. */
2955 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
2956 return build2 (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
2957 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
2959 return build2 (TRUTH_XOR_EXPR, type,
2960 invert_truthvalue (TREE_OPERAND (arg, 0)),
2961 TREE_OPERAND (arg, 1));
2963 case TRUTH_ANDIF_EXPR:
2964 return build2 (TRUTH_ORIF_EXPR, type,
2965 invert_truthvalue (TREE_OPERAND (arg, 0)),
2966 invert_truthvalue (TREE_OPERAND (arg, 1)));
2968 case TRUTH_ORIF_EXPR:
2969 return build2 (TRUTH_ANDIF_EXPR, type,
2970 invert_truthvalue (TREE_OPERAND (arg, 0)),
2971 invert_truthvalue (TREE_OPERAND (arg, 1)));
2973 case TRUTH_NOT_EXPR:
2974 return TREE_OPERAND (arg, 0);
2977 return build3 (COND_EXPR, type, TREE_OPERAND (arg, 0),
2978 invert_truthvalue (TREE_OPERAND (arg, 1)),
2979 invert_truthvalue (TREE_OPERAND (arg, 2)));
2982 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
2983 invert_truthvalue (TREE_OPERAND (arg, 1)));
2985 case NON_LVALUE_EXPR:
2986 return invert_truthvalue (TREE_OPERAND (arg, 0));
2989 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
2994 return build1 (TREE_CODE (arg), type,
2995 invert_truthvalue (TREE_OPERAND (arg, 0)));
2998 if (!integer_onep (TREE_OPERAND (arg, 1)))
3000 return build2 (EQ_EXPR, type, arg,
3001 fold_convert (type, integer_zero_node));
3004 return build1 (TRUTH_NOT_EXPR, type, arg);
3006 case CLEANUP_POINT_EXPR:
3007 return build1 (CLEANUP_POINT_EXPR, type,
3008 invert_truthvalue (TREE_OPERAND (arg, 0)));
3013 if (TREE_CODE (TREE_TYPE (arg)) != BOOLEAN_TYPE)
3015 return build1 (TRUTH_NOT_EXPR, type, arg);
3018 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3019 operands are another bit-wise operation with a common input. If so,
3020 distribute the bit operations to save an operation and possibly two if
3021 constants are involved. For example, convert
3022 (A | B) & (A | C) into A | (B & C)
3023 Further simplification will occur if B and C are constants.
3025 If this optimization cannot be done, 0 will be returned. */
3028 distribute_bit_expr (enum tree_code code, tree type, tree arg0, tree arg1)
3033 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3034 || TREE_CODE (arg0) == code
3035 || (TREE_CODE (arg0) != BIT_AND_EXPR
3036 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3039 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3041 common = TREE_OPERAND (arg0, 0);
3042 left = TREE_OPERAND (arg0, 1);
3043 right = TREE_OPERAND (arg1, 1);
3045 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3047 common = TREE_OPERAND (arg0, 0);
3048 left = TREE_OPERAND (arg0, 1);
3049 right = TREE_OPERAND (arg1, 0);
3051 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3053 common = TREE_OPERAND (arg0, 1);
3054 left = TREE_OPERAND (arg0, 0);
3055 right = TREE_OPERAND (arg1, 1);
3057 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3059 common = TREE_OPERAND (arg0, 1);
3060 left = TREE_OPERAND (arg0, 0);
3061 right = TREE_OPERAND (arg1, 0);
3066 return fold (build2 (TREE_CODE (arg0), type, common,
3067 fold (build2 (code, type, left, right))));
3070 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3071 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3074 make_bit_field_ref (tree inner, tree type, int bitsize, int bitpos,
3077 tree result = build3 (BIT_FIELD_REF, type, inner,
3078 size_int (bitsize), bitsize_int (bitpos));
3080 BIT_FIELD_REF_UNSIGNED (result) = unsignedp;
3085 /* Optimize a bit-field compare.
3087 There are two cases: First is a compare against a constant and the
3088 second is a comparison of two items where the fields are at the same
3089 bit position relative to the start of a chunk (byte, halfword, word)
3090 large enough to contain it. In these cases we can avoid the shift
3091 implicit in bitfield extractions.
3093 For constants, we emit a compare of the shifted constant with the
3094 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3095 compared. For two fields at the same position, we do the ANDs with the
3096 similar mask and compare the result of the ANDs.
3098 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3099 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3100 are the left and right operands of the comparison, respectively.
3102 If the optimization described above can be done, we return the resulting
3103 tree. Otherwise we return zero. */
3106 optimize_bit_field_compare (enum tree_code code, tree compare_type,
3109 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3110 tree type = TREE_TYPE (lhs);
3111 tree signed_type, unsigned_type;
3112 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3113 enum machine_mode lmode, rmode, nmode;
3114 int lunsignedp, runsignedp;
3115 int lvolatilep = 0, rvolatilep = 0;
3116 tree linner, rinner = NULL_TREE;
3120 /* Get all the information about the extractions being done. If the bit size
3121 if the same as the size of the underlying object, we aren't doing an
3122 extraction at all and so can do nothing. We also don't want to
3123 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3124 then will no longer be able to replace it. */
3125 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3126 &lunsignedp, &lvolatilep);
3127 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3128 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
3133 /* If this is not a constant, we can only do something if bit positions,
3134 sizes, and signedness are the same. */
3135 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3136 &runsignedp, &rvolatilep);
3138 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3139 || lunsignedp != runsignedp || offset != 0
3140 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
3144 /* See if we can find a mode to refer to this field. We should be able to,
3145 but fail if we can't. */
3146 nmode = get_best_mode (lbitsize, lbitpos,
3147 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3148 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3149 TYPE_ALIGN (TREE_TYPE (rinner))),
3150 word_mode, lvolatilep || rvolatilep);
3151 if (nmode == VOIDmode)
3154 /* Set signed and unsigned types of the precision of this mode for the
3156 signed_type = lang_hooks.types.type_for_mode (nmode, 0);
3157 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3159 /* Compute the bit position and size for the new reference and our offset
3160 within it. If the new reference is the same size as the original, we
3161 won't optimize anything, so return zero. */
3162 nbitsize = GET_MODE_BITSIZE (nmode);
3163 nbitpos = lbitpos & ~ (nbitsize - 1);
3165 if (nbitsize == lbitsize)
3168 if (BYTES_BIG_ENDIAN)
3169 lbitpos = nbitsize - lbitsize - lbitpos;
3171 /* Make the mask to be used against the extracted field. */
3172 mask = build_int_2 (~0, ~0);
3173 TREE_TYPE (mask) = unsigned_type;
3174 force_fit_type (mask, 0);
3175 mask = fold_convert (unsigned_type, mask);
3176 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
3177 mask = const_binop (RSHIFT_EXPR, mask,
3178 size_int (nbitsize - lbitsize - lbitpos), 0);
3181 /* If not comparing with constant, just rework the comparison
3183 return build2 (code, compare_type,
3184 build2 (BIT_AND_EXPR, unsigned_type,
3185 make_bit_field_ref (linner, unsigned_type,
3186 nbitsize, nbitpos, 1),
3188 build2 (BIT_AND_EXPR, unsigned_type,
3189 make_bit_field_ref (rinner, unsigned_type,
3190 nbitsize, nbitpos, 1),
3193 /* Otherwise, we are handling the constant case. See if the constant is too
3194 big for the field. Warn and return a tree of for 0 (false) if so. We do
3195 this not only for its own sake, but to avoid having to test for this
3196 error case below. If we didn't, we might generate wrong code.
3198 For unsigned fields, the constant shifted right by the field length should
3199 be all zero. For signed fields, the high-order bits should agree with
3204 if (! integer_zerop (const_binop (RSHIFT_EXPR,
3205 fold_convert (unsigned_type, rhs),
3206 size_int (lbitsize), 0)))
3208 warning ("comparison is always %d due to width of bit-field",
3210 return constant_boolean_node (code == NE_EXPR, compare_type);
3215 tree tem = const_binop (RSHIFT_EXPR, fold_convert (signed_type, rhs),
3216 size_int (lbitsize - 1), 0);
3217 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
3219 warning ("comparison is always %d due to width of bit-field",
3221 return constant_boolean_node (code == NE_EXPR, compare_type);
3225 /* Single-bit compares should always be against zero. */
3226 if (lbitsize == 1 && ! integer_zerop (rhs))
3228 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3229 rhs = fold_convert (type, integer_zero_node);
3232 /* Make a new bitfield reference, shift the constant over the
3233 appropriate number of bits and mask it with the computed mask
3234 (in case this was a signed field). If we changed it, make a new one. */
3235 lhs = make_bit_field_ref (linner, unsigned_type, nbitsize, nbitpos, 1);
3238 TREE_SIDE_EFFECTS (lhs) = 1;
3239 TREE_THIS_VOLATILE (lhs) = 1;
3242 rhs = fold (const_binop (BIT_AND_EXPR,
3243 const_binop (LSHIFT_EXPR,
3244 fold_convert (unsigned_type, rhs),
3245 size_int (lbitpos), 0),
3248 return build2 (code, compare_type,
3249 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask),
3253 /* Subroutine for fold_truthop: decode a field reference.
3255 If EXP is a comparison reference, we return the innermost reference.
3257 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3258 set to the starting bit number.
3260 If the innermost field can be completely contained in a mode-sized
3261 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3263 *PVOLATILEP is set to 1 if the any expression encountered is volatile;
3264 otherwise it is not changed.
3266 *PUNSIGNEDP is set to the signedness of the field.
3268 *PMASK is set to the mask used. This is either contained in a
3269 BIT_AND_EXPR or derived from the width of the field.
3271 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3273 Return 0 if this is not a component reference or is one that we can't
3274 do anything with. */
3277 decode_field_reference (tree exp, HOST_WIDE_INT *pbitsize,
3278 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3279 int *punsignedp, int *pvolatilep,
3280 tree *pmask, tree *pand_mask)
3282 tree outer_type = 0;
3284 tree mask, inner, offset;
3286 unsigned int precision;
3288 /* All the optimizations using this function assume integer fields.
3289 There are problems with FP fields since the type_for_size call
3290 below can fail for, e.g., XFmode. */
3291 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3294 /* We are interested in the bare arrangement of bits, so strip everything
3295 that doesn't affect the machine mode. However, record the type of the
3296 outermost expression if it may matter below. */
3297 if (TREE_CODE (exp) == NOP_EXPR
3298 || TREE_CODE (exp) == CONVERT_EXPR
3299 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3300 outer_type = TREE_TYPE (exp);
3303 if (TREE_CODE (exp) == BIT_AND_EXPR)
3305 and_mask = TREE_OPERAND (exp, 1);
3306 exp = TREE_OPERAND (exp, 0);
3307 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3308 if (TREE_CODE (and_mask) != INTEGER_CST)
3312 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3313 punsignedp, pvolatilep);
3314 if ((inner == exp && and_mask == 0)
3315 || *pbitsize < 0 || offset != 0
3316 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3319 /* If the number of bits in the reference is the same as the bitsize of
3320 the outer type, then the outer type gives the signedness. Otherwise
3321 (in case of a small bitfield) the signedness is unchanged. */
3322 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3323 *punsignedp = TYPE_UNSIGNED (outer_type);
3325 /* Compute the mask to access the bitfield. */
3326 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3327 precision = TYPE_PRECISION (unsigned_type);
3329 mask = build_int_2 (~0, ~0);
3330 TREE_TYPE (mask) = unsigned_type;
3331 force_fit_type (mask, 0);
3332 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3333 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3335 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3337 mask = fold (build2 (BIT_AND_EXPR, unsigned_type,
3338 fold_convert (unsigned_type, and_mask), mask));
3341 *pand_mask = and_mask;
3345 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3349 all_ones_mask_p (tree mask, int size)
3351 tree type = TREE_TYPE (mask);
3352 unsigned int precision = TYPE_PRECISION (type);
3355 tmask = build_int_2 (~0, ~0);
3356 TREE_TYPE (tmask) = lang_hooks.types.signed_type (type);
3357 force_fit_type (tmask, 0);
3359 tree_int_cst_equal (mask,
3360 const_binop (RSHIFT_EXPR,
3361 const_binop (LSHIFT_EXPR, tmask,
3362 size_int (precision - size),
3364 size_int (precision - size), 0));
3367 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
3368 represents the sign bit of EXP's type. If EXP represents a sign
3369 or zero extension, also test VAL against the unextended type.
3370 The return value is the (sub)expression whose sign bit is VAL,
3371 or NULL_TREE otherwise. */
3374 sign_bit_p (tree exp, tree val)
3376 unsigned HOST_WIDE_INT mask_lo, lo;
3377 HOST_WIDE_INT mask_hi, hi;
3381 /* Tree EXP must have an integral type. */
3382 t = TREE_TYPE (exp);
3383 if (! INTEGRAL_TYPE_P (t))
3386 /* Tree VAL must be an integer constant. */
3387 if (TREE_CODE (val) != INTEGER_CST
3388 || TREE_CONSTANT_OVERFLOW (val))
3391 width = TYPE_PRECISION (t);
3392 if (width > HOST_BITS_PER_WIDE_INT)
3394 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3397 mask_hi = ((unsigned HOST_WIDE_INT) -1
3398 >> (2 * HOST_BITS_PER_WIDE_INT - width));
3404 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3407 mask_lo = ((unsigned HOST_WIDE_INT) -1
3408 >> (HOST_BITS_PER_WIDE_INT - width));
3411 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3412 treat VAL as if it were unsigned. */
3413 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
3414 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
3417 /* Handle extension from a narrower type. */
3418 if (TREE_CODE (exp) == NOP_EXPR
3419 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3420 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3425 /* Subroutine for fold_truthop: determine if an operand is simple enough
3426 to be evaluated unconditionally. */
3429 simple_operand_p (tree exp)
3431 /* Strip any conversions that don't change the machine mode. */
3432 while ((TREE_CODE (exp) == NOP_EXPR
3433 || TREE_CODE (exp) == CONVERT_EXPR)
3434 && (TYPE_MODE (TREE_TYPE (exp))
3435 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
3436 exp = TREE_OPERAND (exp, 0);
3438 return (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c'
3440 && ! TREE_ADDRESSABLE (exp)
3441 && ! TREE_THIS_VOLATILE (exp)
3442 && ! DECL_NONLOCAL (exp)
3443 /* Don't regard global variables as simple. They may be
3444 allocated in ways unknown to the compiler (shared memory,
3445 #pragma weak, etc). */
3446 && ! TREE_PUBLIC (exp)
3447 && ! DECL_EXTERNAL (exp)
3448 /* Loading a static variable is unduly expensive, but global
3449 registers aren't expensive. */
3450 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3453 /* The following functions are subroutines to fold_range_test and allow it to
3454 try to change a logical combination of comparisons into a range test.
3457 X == 2 || X == 3 || X == 4 || X == 5
3461 (unsigned) (X - 2) <= 3
3463 We describe each set of comparisons as being either inside or outside
3464 a range, using a variable named like IN_P, and then describe the
3465 range with a lower and upper bound. If one of the bounds is omitted,
3466 it represents either the highest or lowest value of the type.
3468 In the comments below, we represent a range by two numbers in brackets
3469 preceded by a "+" to designate being inside that range, or a "-" to
3470 designate being outside that range, so the condition can be inverted by
3471 flipping the prefix. An omitted bound is represented by a "-". For
3472 example, "- [-, 10]" means being outside the range starting at the lowest
3473 possible value and ending at 10, in other words, being greater than 10.
   The range "+ [-, -]" is always true and hence the range "- [-, -]" is
   always false.
3477 We set up things so that the missing bounds are handled in a consistent
3478 manner so neither a missing bound nor "true" and "false" need to be
3479 handled using a special case. */
3481 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3482 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3483 and UPPER1_P are nonzero if the respective argument is an upper bound
3484 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3485 must be specified for a comparison. ARG1 will be converted to ARG0's
3486 type if both are specified. */
3489 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3490 tree arg1, int upper1_p)
3496 /* If neither arg represents infinity, do the normal operation.
3497 Else, if not a comparison, return infinity. Else handle the special
3498 comparison rules. Note that most of the cases below won't occur, but
3499 are handled for consistency. */
3501 if (arg0 != 0 && arg1 != 0)
3503 tem = fold (build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3504 arg0, fold_convert (TREE_TYPE (arg0), arg1)));
3506 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3509 if (TREE_CODE_CLASS (code) != '<')
3512 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3513 for neither. In real maths, we cannot assume open ended ranges are
3514 the same. But, this is computer arithmetic, where numbers are finite.
3515 We can therefore make the transformation of any unbounded range with
3516 the value Z, Z being greater than any representable number. This permits
3517 us to treat unbounded ranges as equal. */
3518 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3519 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3523 result = sgn0 == sgn1;
3526 result = sgn0 != sgn1;
3529 result = sgn0 < sgn1;
3532 result = sgn0 <= sgn1;
3535 result = sgn0 > sgn1;
3538 result = sgn0 >= sgn1;
3544 return constant_boolean_node (result, type);
3547 /* Given EXP, a logical expression, set the range it is testing into
3548 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
3549 actually being tested. *PLOW and *PHIGH will be made of the same type
3550 as the returned expression. If EXP is not a comparison, we will most
3551 likely not be returning a useful value and range. */
3554 make_range (tree exp, int *pin_p, tree *plow, tree *phigh)
3556 enum tree_code code;
3557 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
3558 tree exp_type = NULL_TREE, arg0_type = NULL_TREE;
3560 tree low, high, n_low, n_high;
3562 /* Start with simply saying "EXP != 0" and then look at the code of EXP
3563 and see if we can refine the range. Some of the cases below may not
3564 happen, but it doesn't seem worth worrying about this. We "continue"
3565 the outer loop when we've changed something; otherwise we "break"
3566 the switch, which will "break" the while. */
3569 low = high = fold_convert (TREE_TYPE (exp), integer_zero_node);
3573 code = TREE_CODE (exp);
3574 exp_type = TREE_TYPE (exp);
3576 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
3578 if (first_rtl_op (code) > 0)
3579 arg0 = TREE_OPERAND (exp, 0);
3580 if (TREE_CODE_CLASS (code) == '<'
3581 || TREE_CODE_CLASS (code) == '1'
3582 || TREE_CODE_CLASS (code) == '2')
3583 arg0_type = TREE_TYPE (arg0);
3584 if (TREE_CODE_CLASS (code) == '2'
3585 || TREE_CODE_CLASS (code) == '<'
3586 || (TREE_CODE_CLASS (code) == 'e'
3587 && TREE_CODE_LENGTH (code) > 1))
3588 arg1 = TREE_OPERAND (exp, 1);
3593 case TRUTH_NOT_EXPR:
3594 in_p = ! in_p, exp = arg0;
3597 case EQ_EXPR: case NE_EXPR:
3598 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3599 /* We can only do something if the range is testing for zero
3600 and if the second operand is an integer constant. Note that
3601 saying something is "in" the range we make is done by
3602 complementing IN_P since it will set in the initial case of
3603 being not equal to zero; "out" is leaving it alone. */
3604 if (low == 0 || high == 0
3605 || ! integer_zerop (low) || ! integer_zerop (high)
3606 || TREE_CODE (arg1) != INTEGER_CST)
3611 case NE_EXPR: /* - [c, c] */
3614 case EQ_EXPR: /* + [c, c] */
3615 in_p = ! in_p, low = high = arg1;
3617 case GT_EXPR: /* - [-, c] */
3618 low = 0, high = arg1;
3620 case GE_EXPR: /* + [c, -] */
3621 in_p = ! in_p, low = arg1, high = 0;
3623 case LT_EXPR: /* - [c, -] */
3624 low = arg1, high = 0;
3626 case LE_EXPR: /* + [-, c] */
3627 in_p = ! in_p, low = 0, high = arg1;
3633 /* If this is an unsigned comparison, we also know that EXP is
3634 greater than or equal to zero. We base the range tests we make
3635 on that fact, so we record it here so we can parse existing
3636 range tests. We test arg0_type since often the return type
3637 of, e.g. EQ_EXPR, is boolean. */
3638 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
3640 if (! merge_ranges (&n_in_p, &n_low, &n_high, in_p, low, high,
3641 1, fold_convert (arg0_type, integer_zero_node),
3645 in_p = n_in_p, low = n_low, high = n_high;
3647 /* If the high bound is missing, but we have a nonzero low
3648 bound, reverse the range so it goes from zero to the low bound
3650 if (high == 0 && low && ! integer_zerop (low))
3653 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
3654 integer_one_node, 0);
3655 low = fold_convert (arg0_type, integer_zero_node);
3663 /* (-x) IN [a,b] -> x in [-b, -a] */
3664 n_low = range_binop (MINUS_EXPR, exp_type,
3665 fold_convert (exp_type, integer_zero_node),
3667 n_high = range_binop (MINUS_EXPR, exp_type,
3668 fold_convert (exp_type, integer_zero_node),
3670 low = n_low, high = n_high;
3676 exp = build2 (MINUS_EXPR, exp_type, negate_expr (arg0),
3677 fold_convert (exp_type, integer_one_node));
3680 case PLUS_EXPR: case MINUS_EXPR:
3681 if (TREE_CODE (arg1) != INTEGER_CST)
3684 /* If EXP is signed, any overflow in the computation is undefined,
3685 so we don't worry about it so long as our computations on
3686 the bounds don't overflow. For unsigned, overflow is defined
3687 and this is exactly the right thing. */
3688 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3689 arg0_type, low, 0, arg1, 0);
3690 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3691 arg0_type, high, 1, arg1, 0);
3692 if ((n_low != 0 && TREE_OVERFLOW (n_low))
3693 || (n_high != 0 && TREE_OVERFLOW (n_high)))
3696 /* Check for an unsigned range which has wrapped around the maximum
3697 value thus making n_high < n_low, and normalize it. */
3698 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
3700 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
3701 integer_one_node, 0);
3702 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
3703 integer_one_node, 0);
3705 /* If the range is of the form +/- [ x+1, x ], we won't
3706 be able to normalize it. But then, it represents the
3707 whole range or the empty set, so make it
3709 if (tree_int_cst_equal (n_low, low)
3710 && tree_int_cst_equal (n_high, high))
3716 low = n_low, high = n_high;
3721 case NOP_EXPR: case NON_LVALUE_EXPR: case CONVERT_EXPR:
3722 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
3725 if (! INTEGRAL_TYPE_P (arg0_type)
3726 || (low != 0 && ! int_fits_type_p (low, arg0_type))
3727 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
3730 n_low = low, n_high = high;
3733 n_low = fold_convert (arg0_type, n_low);
3736 n_high = fold_convert (arg0_type, n_high);
3739 /* If we're converting arg0 from an unsigned type, to exp,
3740 a signed type, we will be doing the comparison as unsigned.
3741 The tests above have already verified that LOW and HIGH
3744 So we have to ensure that we will handle large unsigned
3745 values the same way that the current signed bounds treat
3748 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
3751 tree equiv_type = lang_hooks.types.type_for_mode
3752 (TYPE_MODE (arg0_type), 1);
3754 /* A range without an upper bound is, naturally, unbounded.
3755 Since convert would have cropped a very large value, use
3756 the max value for the destination type. */
3758 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
3759 : TYPE_MAX_VALUE (arg0_type);
3761 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
3762 high_positive = fold (build2 (RSHIFT_EXPR, arg0_type,
3763 fold_convert (arg0_type,
3765 fold_convert (arg0_type,
3766 integer_one_node)));
3768 /* If the low bound is specified, "and" the range with the
3769 range for which the original unsigned value will be
3773 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3774 1, n_low, n_high, 1,
3775 fold_convert (arg0_type, integer_zero_node),
3779 in_p = (n_in_p == in_p);
3783 /* Otherwise, "or" the range with the range of the input
3784 that will be interpreted as negative. */
3785 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3786 0, n_low, n_high, 1,
3787 fold_convert (arg0_type, integer_zero_node),
3791 in_p = (in_p != n_in_p);
3796 low = n_low, high = n_high;
3806 /* If EXP is a constant, we can evaluate whether this is true or false. */
3807 if (TREE_CODE (exp) == INTEGER_CST)
3809 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
3811 && integer_onep (range_binop (LE_EXPR, integer_type_node,
3817 *pin_p = in_p, *plow = low, *phigh = high;
3821 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
3822 type, TYPE, return an expression to test if EXP is in (or out of, depending
3823 on IN_P) the range. Return 0 if the test couldn't be created. */
/* NOTE(review): this listing is sampled -- lines are elided between the
   numbered statements, so some guards, braces and returns are not visible.
   The comments below describe only what the visible code establishes.  */
3826 build_range_check (tree type, tree exp, int in_p, tree low, tree high)
3828 tree etype = TREE_TYPE (exp);
/* An "out of range" test is built as the inverted "in range" test.  */
3833 value = build_range_check (type, exp, 1, low, high);
3835 return invert_truthvalue (value);
/* A range unbounded on both sides is always true.  */
3840 if (low == 0 && high == 0)
3841 return fold_convert (type, integer_one_node);
/* Single comparisons for one-sided ranges; the LOW == 0 / HIGH == 0
   guards are presumably in the elided lines -- TODO confirm.  */
3844 return fold (build2 (LE_EXPR, type, exp, high));
3847 return fold (build2 (GE_EXPR, type, exp, low));
/* A degenerate range [C, C] is an equality test.  */
3849 if (operand_equal_p (low, high, 0))
3850 return fold (build2 (EQ_EXPR, type, exp, low));
/* [0, HIGH]: redo the check with EXP viewed as unsigned, which makes
   the lower bound implicit.  */
3852 if (integer_zerop (low))
3854 if (! TYPE_UNSIGNED (etype))
3856 etype = lang_hooks.types.unsigned_type (etype);
3857 high = fold_convert (etype, high);
3858 exp = fold_convert (etype, exp);
3860 return build_range_check (type, exp, 1, 0, high);
3863 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
3864 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
3866 unsigned HOST_WIDE_INT lo;
/* Build the signed maximum of ETYPE's precision as a HI/LO pair, the
   split depending on whether PREC fits in one HOST_WIDE_INT.  */
3870 prec = TYPE_PRECISION (etype);
3871 if (prec <= HOST_BITS_PER_WIDE_INT)
3874 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
3878 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
3879 lo = (unsigned HOST_WIDE_INT) -1;
/* HIGH is exactly the signed maximum: test (signed) EXP > 0 instead.  */
3882 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
3884 if (TYPE_UNSIGNED (etype))
3886 etype = lang_hooks.types.signed_type (etype);
3887 exp = fold_convert (etype, exp);
3889 return fold (build2 (GT_EXPR, type, exp,
3890 fold_convert (etype, integer_zero_node)));
/* General case: compute HIGH - LOW so the range can be rechecked as
   EXP - LOW in [0, HIGH - LOW] (see the recursive call below).  */
3894 value = const_binop (MINUS_EXPR, high, low, 0);
3895 if (value != 0 && TREE_OVERFLOW (value) && ! TYPE_UNSIGNED (etype))
3897 tree utype, minv, maxv;
3899 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
3900 for the type in question, as we rely on this here. */
3901 switch (TREE_CODE (etype))
3906 utype = lang_hooks.types.unsigned_type (etype);
3907 maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
3908 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
3909 integer_one_node, 1);
3910 minv = fold_convert (utype, TYPE_MIN_VALUE (etype));
/* If MAX + 1 == MIN in the unsigned type, redo the subtraction there;
   the ETYPE used below presumably became UTYPE in an elided line.  */
3911 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
3915 high = fold_convert (etype, high);
3916 low = fold_convert (etype, low);
3917 exp = fold_convert (etype, exp);
3918 value = const_binop (MINUS_EXPR, high, low, 0);
/* HIGH - LOW folded with no overflow: recurse on the shifted range.  */
3926 if (value != 0 && ! TREE_OVERFLOW (value))
3927 return build_range_check (type,
3928 fold (build2 (MINUS_EXPR, etype, exp, low)),
3929 1, fold_convert (etype, integer_zero_node),
3935 /* Given two ranges, see if we can merge them into one. Return 1 if we
3936 can, 0 if we can't. Set the output range into the specified parameters. */
/* NOTE(review): sampled listing -- braces, some conditions and returns are
   elided between the numbered lines.  A NULL LOW/HIGH bound means the range
   is unbounded on that side, as the (low0 == 0 && low1 == 0) tests show.  */
3939 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
3940 tree high0, int in1_p, tree low1, tree high1)
/* Precompute whether the two lower and the two upper bounds are equal;
   two missing bounds on the same side also count as equal.  */
3948 int lowequal = ((low0 == 0 && low1 == 0)
3949 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
3950 low0, 0, low1, 0)));
3951 int highequal = ((high0 == 0 && high1 == 0)
3952 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
3953 high0, 1, high1, 1)));
3955 /* Make range 0 be the range that starts first, or ends last if they
3956 start at the same value. Swap them if it isn't. */
3957 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
3960 && integer_onep (range_binop (GT_EXPR, integer_type_node,
3961 high1, 1, high0, 1))))
3963 temp = in0_p, in0_p = in1_p, in1_p = temp;
3964 tem = low0, low0 = low1, low1 = tem;
3965 tem = high0, high0 = high1, high1 = tem;
3968 /* Now flag two cases, whether the ranges are disjoint or whether the
3969 second range is totally subsumed in the first. Note that the tests
3970 below are simplified by the ones above. */
3971 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
3972 high0, 1, low1, 0));
3973 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
3974 high1, 1, high0, 1));
3976 /* We now have four cases, depending on whether we are including or
3977 excluding the two ranges. */
/* Case 1: both ranges included (the in0_p && in1_p guard is elided).  */
3980 /* If they don't overlap, the result is false. If the second range
3981 is a subset it is the result. Otherwise, the range is from the start
3982 of the second to the end of the first. */
3984 in_p = 0, low = high = 0;
3986 in_p = 1, low = low1, high = high1;
3988 in_p = 1, low = low1, high = high0;
/* Case 2: first range included, second excluded.  */
3991 else if (in0_p && ! in1_p)
3993 /* If they don't overlap, the result is the first range. If they are
3994 equal, the result is false. If the second range is a subset of the
3995 first, and the ranges begin at the same place, we go from just after
3996 the end of the first range to the end of the second. If the second
3997 range is not a subset of the first, or if it is a subset and both
3998 ranges end at the same place, the range starts at the start of the
3999 first range and ends just before the second range.
4000 Otherwise, we can't describe this as a single range. */
4002 in_p = 1, low = low0, high = high0;
4003 else if (lowequal && highequal)
4004 in_p = 0, low = high = 0;
4005 else if (subset && lowequal)
4007 in_p = 1, high = high0;
4008 low = range_binop (PLUS_EXPR, NULL_TREE, high1, 0,
4009 integer_one_node, 0);
4011 else if (! subset || highequal)
4013 in_p = 1, low = low0;
4014 high = range_binop (MINUS_EXPR, NULL_TREE, low1, 0,
4015 integer_one_node, 0);
/* Case 3: first range excluded, second included.  */
4021 else if (! in0_p && in1_p)
4023 /* If they don't overlap, the result is the second range. If the second
4024 is a subset of the first, the result is false. Otherwise,
4025 the range starts just after the first range and ends at the
4026 end of the second. */
4028 in_p = 1, low = low1, high = high1;
4029 else if (subset || highequal)
4030 in_p = 0, low = high = 0;
4033 in_p = 1, high = high1;
4034 low = range_binop (PLUS_EXPR, NULL_TREE, high0, 1,
4035 integer_one_node, 0);
/* Case 4: both ranges excluded.  */
4041 /* The case where we are excluding both ranges. Here the complex case
4042 is if they don't overlap. In that case, the only time we have a
4043 range is if they are adjacent. If the second is a subset of the
4044 first, the result is the first. Otherwise, the range to exclude
4045 starts at the beginning of the first range and ends at the end of the
/* Adjacency test: HIGH0 + 1 == LOW1 (second operand elided here).  */
4049 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4050 range_binop (PLUS_EXPR, NULL_TREE,
4052 integer_one_node, 1),
4054 in_p = 0, low = low0, high = high1;
4057 /* Canonicalize - [min, x] into - [-, x]. */
4058 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4059 switch (TREE_CODE (TREE_TYPE (low0)))
/* Only meaningful when the type uses its full mode precision;
   otherwise presumably break out of the switch (elided).  */
4062 if (TYPE_PRECISION (TREE_TYPE (low0))
4063 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4068 if (tree_int_cst_equal (low0,
4069 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4073 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4074 && integer_zerop (low0))
4081 /* Canonicalize - [x, max] into - [x, -]. */
4082 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4083 switch (TREE_CODE (TREE_TYPE (high1)))
4086 if (TYPE_PRECISION (TREE_TYPE (high1))
4087 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4092 if (tree_int_cst_equal (high1,
4093 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4097 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4098 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4100 integer_one_node, 1)))
4107 /* The ranges might be also adjacent between the maximum and
4108 minimum values of the given type. For
4109 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4110 return + [x + 1, y - 1]. */
4111 if (low0 == 0 && high1 == 0)
4113 low = range_binop (PLUS_EXPR, NULL_TREE, high0, 1,
4114 integer_one_node, 1);
4115 high = range_binop (MINUS_EXPR, NULL_TREE, low1, 0,
4116 integer_one_node, 0);
4117 if (low == 0 || high == 0)
4127 in_p = 0, low = low0, high = high0;
4129 in_p = 0, low = low0, high = high1;
/* Store the merged range through the output parameters.  */
4132 *pin_p = in_p, *plow = low, *phigh = high;
4137 /* Subroutine of fold, looking inside expressions of the form
4138 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4139 of the COND_EXPR. This function is being used also to optimize
4140 A op B ? C : A, by reversing the comparison first.
4142 Return a folded expression whose code is not a COND_EXPR
4143 anymore, or NULL_TREE if no folding opportunity is found. */
/* NOTE(review): sampled listing -- switch labels, braces and some returns
   are elided between the numbered lines, so the comp_code dispatch around
   several returns below is not fully visible.  */
4146 fold_cond_expr_with_comparison (tree type, tree arg0, tree arg1, tree arg2)
/* ARG0 is the comparison; split it into its code and two operands.  */
4148 enum tree_code comp_code = TREE_CODE (arg0);
4149 tree arg00 = TREE_OPERAND (arg0, 0);
4150 tree arg01 = TREE_OPERAND (arg0, 1);
4151 tree arg1_type = TREE_TYPE (arg1);
4157 /* If we have A op 0 ? A : -A, consider applying the following
4160 A == 0? A : -A same as -A
4161 A != 0? A : -A same as A
4162 A >= 0? A : -A same as abs (A)
4163 A > 0? A : -A same as abs (A)
4164 A <= 0? A : -A same as -abs (A)
4165 A < 0? A : -A same as -abs (A)
4167 None of these transformations work for modes with signed
4168 zeros. If A is +/-0, the first two transformations will
4169 change the sign of the result (from +0 to -0, or vice
4170 versa). The last four will fix the sign of the result,
4171 even though the original expressions could be positive or
4172 negative, depending on the sign of A.
4174 Note that all these transformations are correct if A is
4175 NaN, since the two alternatives (A and -A) are also NaNs. */
4176 if ((FLOAT_TYPE_P (TREE_TYPE (arg01))
4177 ? real_zerop (arg01)
4178 : integer_zerop (arg01))
4179 && TREE_CODE (arg2) == NEGATE_EXPR
4180 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
/* The comp_code switch selecting among the returns below is elided.  */
4184 tem = fold_convert (arg1_type, arg1);
4185 return pedantic_non_lvalue (fold_convert (type, negate_expr (tem)));
4187 return pedantic_non_lvalue (fold_convert (type, arg1));
/* abs (A): make ARG1 signed first, since ABS_EXPR of an unsigned
   value would be the wrong operation.  */
4190 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4191 arg1 = fold_convert (lang_hooks.types.signed_type
4192 (TREE_TYPE (arg1)), arg1);
4193 tem = fold (build1 (ABS_EXPR, TREE_TYPE (arg1), arg1));
4194 return pedantic_non_lvalue (fold_convert (type, tem));
/* -abs (A), same signedness adjustment as above.  */
4197 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4198 arg1 = fold_convert (lang_hooks.types.signed_type
4199 (TREE_TYPE (arg1)), arg1);
4200 tem = fold (build1 (ABS_EXPR, TREE_TYPE (arg1), arg1));
4201 return negate_expr (fold_convert (type, tem));
4206 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4207 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4208 both transformations are correct when A is NaN: A != 0
4209 is then true, and A == 0 is false. */
4211 if (integer_zerop (arg01) && integer_zerop (arg2))
4213 if (comp_code == NE_EXPR)
4214 return pedantic_non_lvalue (fold_convert (type, arg1));
4215 else if (comp_code == EQ_EXPR)
4216 return pedantic_non_lvalue (fold_convert (type, integer_zero_node));
4219 /* Try some transformations of A op B ? A : B.
4221 A == B? A : B same as B
4222 A != B? A : B same as A
4223 A >= B? A : B same as max (A, B)
4224 A > B? A : B same as max (B, A)
4225 A <= B? A : B same as min (A, B)
4226 A < B? A : B same as min (B, A)
4228 As above, these transformations don't work in the presence
4229 of signed zeros. For example, if A and B are zeros of
4230 opposite sign, the first two transformations will change
4231 the sign of the result. In the last four, the original
4232 expressions give different results for (A=+0, B=-0) and
4233 (A=-0, B=+0), but the transformed expressions do not.
4235 The first two transformations are correct if either A or B
4236 is a NaN. In the first transformation, the condition will
4237 be false, and B will indeed be chosen. In the case of the
4238 second transformation, the condition A != B will be true,
4239 and A will be chosen.
4241 The conversions to max() and min() are not correct if B is
4242 a number and A is not. The conditions in the original
4243 expressions will be false, so all four give B. The min()
4244 and max() versions would give a NaN instead. */
4245 if (operand_equal_for_comparison_p (arg01, arg2, arg00))
4247 tree comp_op0 = arg00;
4248 tree comp_op1 = arg01;
4249 tree comp_type = TREE_TYPE (comp_op0);
4251 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4252 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
/* EQ_EXPR picks B (arg2); NE_EXPR picks A (arg1); the case labels
   themselves are elided here.  */
4262 return pedantic_non_lvalue (fold_convert (type, arg2));
4264 return pedantic_non_lvalue (fold_convert (type, arg1));
4267 /* In C++ a ?: expression can be an lvalue, so put the
4268 operand which will be used if they are equal first
4269 so that we can convert this back to the
4270 corresponding COND_EXPR. */
/* MIN_EXPR rewrite; only valid when NaNs cannot appear.  */
4271 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4272 return pedantic_non_lvalue (
4273 fold_convert (type, fold (build2 (MIN_EXPR, comp_type,
4274 (comp_code == LE_EXPR
4275 ? comp_op0 : comp_op1),
4276 (comp_code == LE_EXPR
4277 ? comp_op1 : comp_op0)))));
/* MAX_EXPR rewrite, same NaN restriction.  */
4281 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4282 return pedantic_non_lvalue (
4283 fold_convert (type, fold (build2 (MAX_EXPR, comp_type,
4284 (comp_code == GE_EXPR
4285 ? comp_op0 : comp_op1),
4286 (comp_code == GE_EXPR
4287 ? comp_op1 : comp_op0)))));
4294 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4295 we might still be able to simplify this. For example,
4296 if C1 is one less or one more than C2, this might have started
4297 out as a MIN or MAX and been transformed by this function.
4298 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
4300 if (INTEGRAL_TYPE_P (type)
4301 && TREE_CODE (arg01) == INTEGER_CST
4302 && TREE_CODE (arg2) == INTEGER_CST)
/* The comp_code switch labels (EQ/LT/LE/GT/GE) around the cases
   below are elided in this excerpt.  */
4306 /* We can replace A with C1 in this case. */
4307 arg1 = fold_convert (type, arg01);
4308 return fold (build3 (COND_EXPR, type, arg0, arg1, arg2));
4311 /* If C1 is C2 + 1, this is min(A, C2). */
4312 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4314 && operand_equal_p (arg01,
4315 const_binop (PLUS_EXPR, arg2,
4316 integer_one_node, 0),
4318 return pedantic_non_lvalue (fold (build2 (MIN_EXPR,
4319 type, arg1, arg2)));
4323 /* If C1 is C2 - 1, this is min(A, C2). */
4324 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4326 && operand_equal_p (arg01,
4327 const_binop (MINUS_EXPR, arg2,
4328 integer_one_node, 0),
4330 return pedantic_non_lvalue (fold (build2 (MIN_EXPR,
4331 type, arg1, arg2)));
4335 /* If C1 is C2 - 1, this is max(A, C2). */
4336 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4338 && operand_equal_p (arg01,
4339 const_binop (MINUS_EXPR, arg2,
4340 integer_one_node, 0),
4342 return pedantic_non_lvalue (fold (build2 (MAX_EXPR,
4343 type, arg1, arg2)));
4347 /* If C1 is C2 + 1, this is max(A, C2). */
4348 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4350 && operand_equal_p (arg01,
4351 const_binop (PLUS_EXPR, arg2,
4352 integer_one_node, 0),
4354 return pedantic_non_lvalue (fold (build2 (MAX_EXPR,
4355 type, arg1, arg2)));
/* Default heuristic: turn short-circuit range tests into straight-line
   code only when branches are costly on the target.  */
4368 #ifndef RANGE_TEST_NON_SHORT_CIRCUIT
4369 #define RANGE_TEST_NON_SHORT_CIRCUIT (BRANCH_COST >= 2)
4372 /* EXP is some logical combination of boolean tests. See if we can
4373 merge it into some range test. Return the new tree if so. */
/* NOTE(review): sampled listing -- lines are elided between the numbered
   statements (e.g. some call arguments below are incomplete here).  */
4376 fold_range_test (tree exp)
4378 int or_op = (TREE_CODE (exp) == TRUTH_ORIF_EXPR
4379 || TREE_CODE (exp) == TRUTH_OR_EXPR);
4380 int in0_p, in1_p, in_p;
4381 tree low0, low1, low, high0, high1, high;
/* Decompose each operand into (expression, in_p, [low, high]) form.  */
4382 tree lhs = make_range (TREE_OPERAND (exp, 0), &in0_p, &low0, &high0);
4383 tree rhs = make_range (TREE_OPERAND (exp, 1), &in1_p, &low1, &high1);
4386 /* If this is an OR operation, invert both sides; we will invert
4387 again at the end. */
4389 in0_p = ! in0_p, in1_p = ! in1_p;
4391 /* If both expressions are the same, if we can merge the ranges, and we
4392 can build the range test, return it or it inverted. If one of the
4393 ranges is always true or always false, consider it to be the same
4394 expression as the other. */
4395 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
4396 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
4398 && 0 != (tem = (build_range_check (TREE_TYPE (exp),
4400 : rhs != 0 ? rhs : integer_zero_node,
4402 return or_op ? invert_truthvalue (tem) : tem;
4404 /* On machines where the branch cost is expensive, if this is a
4405 short-circuited branch and the underlying object on both sides
4406 is the same, make a non-short-circuit operation. */
4407 else if (RANGE_TEST_NON_SHORT_CIRCUIT
4408 && lhs != 0 && rhs != 0
4409 && (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
4410 || TREE_CODE (exp) == TRUTH_ORIF_EXPR)
4411 && operand_equal_p (lhs, rhs, 0))
4413 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
4414 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
4415 which cases we can't do this. */
4416 if (simple_operand_p (lhs))
4417 return build2 (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
4418 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4419 TREE_TYPE (exp), TREE_OPERAND (exp, 0),
4420 TREE_OPERAND (exp, 1));
4422 else if (lang_hooks.decls.global_bindings_p () == 0
4423 && ! CONTAINS_PLACEHOLDER_P (lhs))
/* Evaluate the shared subexpression once via SAVE_EXPR, then build a
   range check for each side against that common value.  */
4425 tree common = save_expr (lhs);
4427 if (0 != (lhs = build_range_check (TREE_TYPE (exp), common,
4428 or_op ? ! in0_p : in0_p,
4430 && (0 != (rhs = build_range_check (TREE_TYPE (exp), common,
4431 or_op ? ! in1_p : in1_p,
4433 return build2 (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
4434 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4435 TREE_TYPE (exp), lhs, rhs);
4442 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
4443 bit value. Arrange things so the extra bits will be set to zero if and
4444 only if C is signed-extended to its full width. If MASK is nonzero,
4445 it is an INTEGER_CST that should be AND'ed with the extra bits. */
/* NOTE(review): sampled listing -- the early return after the first test
   and the MASK != 0 guard around the AND step are elided here.  */
4448 unextend (tree c, int p, int unsignedp, tree mask)
4450 tree type = TREE_TYPE (c);
4451 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
/* Full-width or unsigned values need no adjustment; presumably returns
   C unchanged here (return elided) -- TODO confirm.  */
4454 if (p == modesize || unsignedp)
4457 /* We work by getting just the sign bit into the low-order bit, then
4458 into the high-order bit, then sign-extend. We then XOR that value
/* Isolate bit P-1 (the sign bit of the P-bit value) as 0 or 1.  */
4460 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
4461 temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
4463 /* We must use a signed type in order to get an arithmetic right shift.
4464 However, we must also avoid introducing accidental overflows, so that
4465 a subsequent call to integer_zerop will work. Hence we must
4466 do the type conversion here. At this point, the constant is either
4467 zero or one, and the conversion to a signed type can never overflow.
4468 We could get an overflow if this conversion is done anywhere else. */
4469 if (TYPE_UNSIGNED (type))
4470 temp = fold_convert (lang_hooks.types.signed_type (type), temp);
/* Move the bit to the mode's top position, then arithmetically shift
   it back down so it smears across bits P .. MODESIZE-1.  */
4472 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
4473 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
/* Apply the caller-supplied mask to the extension bits.  */
4475 temp = const_binop (BIT_AND_EXPR, temp,
4476 fold_convert (TREE_TYPE (c), mask), 0);
4477 /* If necessary, convert the type back to match the type of C. */
4478 if (TYPE_UNSIGNED (type))
4479 temp = fold_convert (type, temp);
/* XOR clears the extra bits exactly when C was sign-extended.  */
4481 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
4484 /* Find ways of folding logical expressions of LHS and RHS:
4485 Try to merge two comparisons to the same innermost item.
4486 Look for range tests like "ch >= '0' && ch <= '9'".
4487 Look for combinations of simple terms on machines with expensive branches
4488 and evaluate the RHS unconditionally.
4490 For example, if we have p->a == 2 && p->b == 4 and we can make an
4491 object large enough to span both A and B, we can do this with a comparison
4492 against the object ANDed with the a mask.
4494 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
4495 operations to do this with one comparison.
4497 We check for both normal comparisons and the BIT_AND_EXPRs made this by
4498 function and the one above.
4500 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
4501 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
4503 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
4506 We return the simplified tree or 0 if no optimization is possible. */
4509 fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs)
4511 /* If this is the "or" of two comparisons, we can do something if
4512 the comparisons are NE_EXPR. If this is the "and", we can do something
4513 if the comparisons are EQ_EXPR. I.e.,
4514 (a->b == 2 && a->c == 4) can become (a->new == NEW).
4516 WANTED_CODE is this operation code. For single bit fields, we can
4517 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
4518 comparison for one-bit fields. */
4520 enum tree_code wanted_code;
4521 enum tree_code lcode, rcode;
4522 tree ll_arg, lr_arg, rl_arg, rr_arg;
4523 tree ll_inner, lr_inner, rl_inner, rr_inner;
4524 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
4525 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
4526 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
4527 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
4528 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
4529 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
4530 enum machine_mode lnmode, rnmode;
4531 tree ll_mask, lr_mask, rl_mask, rr_mask;
4532 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
4533 tree l_const, r_const;
4534 tree lntype, rntype, result;
4535 int first_bit, end_bit;
4538 /* Start by getting the comparison codes. Fail if anything is volatile.
4539 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
4540 it were surrounded with a NE_EXPR. */
4542 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
4545 lcode = TREE_CODE (lhs);
4546 rcode = TREE_CODE (rhs);
4548 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
4550 lhs = build2 (NE_EXPR, truth_type, lhs, integer_zero_node);
4554 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
4556 rhs = build2 (NE_EXPR, truth_type, rhs, integer_zero_node);
4560 if (TREE_CODE_CLASS (lcode) != '<' || TREE_CODE_CLASS (rcode) != '<')
4563 ll_arg = TREE_OPERAND (lhs, 0);
4564 lr_arg = TREE_OPERAND (lhs, 1);
4565 rl_arg = TREE_OPERAND (rhs, 0);
4566 rr_arg = TREE_OPERAND (rhs, 1);
4568 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
4569 if (simple_operand_p (ll_arg)
4570 && simple_operand_p (lr_arg))
4573 if (operand_equal_p (ll_arg, rl_arg, 0)
4574 && operand_equal_p (lr_arg, rr_arg, 0))
4576 result = combine_comparisons (code, lcode, rcode,
4577 truth_type, ll_arg, lr_arg);
4581 else if (operand_equal_p (ll_arg, rr_arg, 0)
4582 && operand_equal_p (lr_arg, rl_arg, 0))
4584 result = combine_comparisons (code, lcode,
4585 swap_tree_comparison (rcode),
4586 truth_type, ll_arg, lr_arg);
4592 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
4593 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
4595 /* If the RHS can be evaluated unconditionally and its operands are
4596 simple, it wins to evaluate the RHS unconditionally on machines
4597 with expensive branches. In this case, this isn't a comparison
4598 that can be merged. Avoid doing this if the RHS is a floating-point
4599 comparison since those can trap. */
4601 if (BRANCH_COST >= 2
4602 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
4603 && simple_operand_p (rl_arg)
4604 && simple_operand_p (rr_arg))
4606 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
4607 if (code == TRUTH_OR_EXPR
4608 && lcode == NE_EXPR && integer_zerop (lr_arg)
4609 && rcode == NE_EXPR && integer_zerop (rr_arg)
4610 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
4611 return build2 (NE_EXPR, truth_type,
4612 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
4614 fold_convert (TREE_TYPE (ll_arg), integer_zero_node));
4616 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
4617 if (code == TRUTH_AND_EXPR
4618 && lcode == EQ_EXPR && integer_zerop (lr_arg)
4619 && rcode == EQ_EXPR && integer_zerop (rr_arg)
4620 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
4621 return build2 (EQ_EXPR, truth_type,
4622 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
4624 fold_convert (TREE_TYPE (ll_arg), integer_zero_node));
4626 return build2 (code, truth_type, lhs, rhs);
4629 /* See if the comparisons can be merged. Then get all the parameters for
4632 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
4633 || (rcode != EQ_EXPR && rcode != NE_EXPR))
4637 ll_inner = decode_field_reference (ll_arg,
4638 &ll_bitsize, &ll_bitpos, &ll_mode,
4639 &ll_unsignedp, &volatilep, &ll_mask,
4641 lr_inner = decode_field_reference (lr_arg,
4642 &lr_bitsize, &lr_bitpos, &lr_mode,
4643 &lr_unsignedp, &volatilep, &lr_mask,
4645 rl_inner = decode_field_reference (rl_arg,
4646 &rl_bitsize, &rl_bitpos, &rl_mode,
4647 &rl_unsignedp, &volatilep, &rl_mask,
4649 rr_inner = decode_field_reference (rr_arg,
4650 &rr_bitsize, &rr_bitpos, &rr_mode,
4651 &rr_unsignedp, &volatilep, &rr_mask,
4654 /* It must be true that the inner operation on the lhs of each
4655 comparison must be the same if we are to be able to do anything.
4656 Then see if we have constants. If not, the same must be true for
4658 if (volatilep || ll_inner == 0 || rl_inner == 0
4659 || ! operand_equal_p (ll_inner, rl_inner, 0))
4662 if (TREE_CODE (lr_arg) == INTEGER_CST
4663 && TREE_CODE (rr_arg) == INTEGER_CST)
4664 l_const = lr_arg, r_const = rr_arg;
4665 else if (lr_inner == 0 || rr_inner == 0
4666 || ! operand_equal_p (lr_inner, rr_inner, 0))
4669 l_const = r_const = 0;
4671 /* If either comparison code is not correct for our logical operation,
4672 fail. However, we can convert a one-bit comparison against zero into
4673 the opposite comparison against that bit being set in the field. */
4675 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
4676 if (lcode != wanted_code)
4678 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
4680 /* Make the left operand unsigned, since we are only interested
4681 in the value of one bit. Otherwise we are doing the wrong
4690 /* This is analogous to the code for l_const above. */
4691 if (rcode != wanted_code)
4693 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
4702 /* After this point all optimizations will generate bit-field
4703 references, which we might not want. */
4704 if (! lang_hooks.can_use_bit_fields_p ())
4707 /* See if we can find a mode that contains both fields being compared on
4708 the left. If we can't, fail. Otherwise, update all constants and masks
4709 to be relative to a field of that size. */
4710 first_bit = MIN (ll_bitpos, rl_bitpos);
4711 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
4712 lnmode = get_best_mode (end_bit - first_bit, first_bit,
4713 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
4715 if (lnmode == VOIDmode)
4718 lnbitsize = GET_MODE_BITSIZE (lnmode);
4719 lnbitpos = first_bit & ~ (lnbitsize - 1);
4720 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
4721 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
4723 if (BYTES_BIG_ENDIAN)
4725 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
4726 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
4729 ll_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, ll_mask),
4730 size_int (xll_bitpos), 0);
4731 rl_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, rl_mask),
4732 size_int (xrl_bitpos), 0);
4736 l_const = fold_convert (lntype, l_const);
4737 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
4738 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
4739 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
4740 fold (build1 (BIT_NOT_EXPR,
4744 warning ("comparison is always %d", wanted_code == NE_EXPR);
4746 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
4751 r_const = fold_convert (lntype, r_const);
4752 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
4753 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
4754 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
4755 fold (build1 (BIT_NOT_EXPR,
4759 warning ("comparison is always %d", wanted_code == NE_EXPR);
4761 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
4765 /* If the right sides are not constant, do the same for it. Also,
4766 disallow this optimization if a size or signedness mismatch occurs
4767 between the left and right sides. */
4770 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
4771 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
4772 /* Make sure the two fields on the right
4773 correspond to the left without being swapped. */
4774 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
4777 first_bit = MIN (lr_bitpos, rr_bitpos);
4778 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
4779 rnmode = get_best_mode (end_bit - first_bit, first_bit,
4780 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
4782 if (rnmode == VOIDmode)
4785 rnbitsize = GET_MODE_BITSIZE (rnmode);
4786 rnbitpos = first_bit & ~ (rnbitsize - 1);
4787 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
4788 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
4790 if (BYTES_BIG_ENDIAN)
4792 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
4793 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
4796 lr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, lr_mask),
4797 size_int (xlr_bitpos), 0);
4798 rr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, rr_mask),
4799 size_int (xrr_bitpos), 0);
4801 /* Make a mask that corresponds to both fields being compared.
4802 Do this for both items being compared. If the operands are the
4803 same size and the bits being compared are in the same position
4804 then we can do this by masking both and comparing the masked
4806 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
4807 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
4808 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
4810 lhs = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
4811 ll_unsignedp || rl_unsignedp);
4812 if (! all_ones_mask_p (ll_mask, lnbitsize))
4813 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
4815 rhs = make_bit_field_ref (lr_inner, rntype, rnbitsize, rnbitpos,
4816 lr_unsignedp || rr_unsignedp);
4817 if (! all_ones_mask_p (lr_mask, rnbitsize))
4818 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
4820 return build2 (wanted_code, truth_type, lhs, rhs);
4823 /* There is still another way we can do something: If both pairs of
4824 fields being compared are adjacent, we may be able to make a wider
4825 field containing them both.
4827 Note that we still must mask the lhs/rhs expressions. Furthermore,
4828 the mask must be shifted to account for the shift done by
4829 make_bit_field_ref. */
4830 if ((ll_bitsize + ll_bitpos == rl_bitpos
4831 && lr_bitsize + lr_bitpos == rr_bitpos)
4832 || (ll_bitpos == rl_bitpos + rl_bitsize
4833 && lr_bitpos == rr_bitpos + rr_bitsize))
4837 lhs = make_bit_field_ref (ll_inner, lntype, ll_bitsize + rl_bitsize,
4838 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
4839 rhs = make_bit_field_ref (lr_inner, rntype, lr_bitsize + rr_bitsize,
4840 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
4842 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
4843 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
4844 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
4845 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);
4847 /* Convert to the smaller type before masking out unwanted bits. */
4849 if (lntype != rntype)
4851 if (lnbitsize > rnbitsize)
4853 lhs = fold_convert (rntype, lhs);
4854 ll_mask = fold_convert (rntype, ll_mask);
4857 else if (lnbitsize < rnbitsize)
4859 rhs = fold_convert (lntype, rhs);
4860 lr_mask = fold_convert (lntype, lr_mask);
4865 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
4866 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
4868 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
4869 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
4871 return build2 (wanted_code, truth_type, lhs, rhs);
4877 /* Handle the case of comparisons with constants. If there is something in
4878 common between the masks, those bits of the constants must be the same.
4879 If not, the condition is always false. Test for this to avoid generating
4880 incorrect code below. */
4881 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
4882 if (! integer_zerop (result)
4883 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
4884 const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
4886 if (wanted_code == NE_EXPR)
4888 warning ("`or' of unmatched not-equal tests is always 1");
4889 return constant_boolean_node (true, truth_type);
4893 warning ("`and' of mutually exclusive equal-tests is always 0");
4894 return constant_boolean_node (false, truth_type);
4898 /* Construct the expression we will return. First get the component
4899 reference we will make. Unless the mask is all ones the width of
4900 that field, perform the mask operation. Then compare with the
4902 result = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
4903 ll_unsignedp || rl_unsignedp);
4905 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
4906 if (! all_ones_mask_p (ll_mask, lnbitsize))
4907 result = build2 (BIT_AND_EXPR, lntype, result, ll_mask);
4909 return build2 (wanted_code, truth_type, result,
4910 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
4913 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
   constant.  Only EQ_EXPR and GT_EXPR are handled directly; the other
   comparison codes are reduced to those through logical identities
   (NE/LT/LE by double inversion, GE as EQ || GT).
   NOTE(review): this chunk elides interior lines of the original file;
   the fall-back "return t" path for the no-optimization case sits on an
   elided line -- confirm against the full source.  */
4917 optimize_minmax_comparison (tree t)
4919 tree type = TREE_TYPE (t);
4920 tree arg0 = TREE_OPERAND (t, 0);
4921 enum tree_code op_code;
4922 tree comp_const = TREE_OPERAND (t, 1);
4924 int consts_equal, consts_lt;
4927 STRIP_SIGN_NOPS (arg0);
4929 op_code = TREE_CODE (arg0);
4930 minmax_const = TREE_OPERAND (arg0, 1);
/* Precompute the ordering of the MIN/MAX constant vs. the comparison
   constant; every case below branches on these two flags.  */
4931 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
4932 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
4933 inner = TREE_OPERAND (arg0, 0);
4935 /* If something does not permit us to optimize, return the original tree. */
4936 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
4937 || TREE_CODE (comp_const) != INTEGER_CST
4938 || TREE_CONSTANT_OVERFLOW (comp_const)
4939 || TREE_CODE (minmax_const) != INTEGER_CST
4940 || TREE_CONSTANT_OVERFLOW (minmax_const))
4943 /* Now handle all the various comparison codes. We only handle EQ_EXPR
4944 and GT_EXPR, doing the rest with recursive calls using logical
   identities.  */
4946 switch (TREE_CODE (t))
4948 case NE_EXPR: case LT_EXPR: case LE_EXPR:
/* Invert the comparison, optimize the inverted form (EQ/GE/GT),
   then invert the result back.  */
4950 invert_truthvalue (optimize_minmax_comparison (invert_truthvalue (t)));
/* GE is decomposed as (EQ || GT), each half simplified recursively.  */
4954 fold (build2 (TRUTH_ORIF_EXPR, type,
4955 optimize_minmax_comparison
4956 (build2 (EQ_EXPR, type, arg0, comp_const)),
4957 optimize_minmax_comparison
4958 (build2 (GT_EXPR, type, arg0, comp_const))));
4961 if (op_code == MAX_EXPR && consts_equal)
4962 /* MAX (X, 0) == 0 -> X <= 0 */
4963 return fold (build2 (LE_EXPR, type, inner, comp_const));
4965 else if (op_code == MAX_EXPR && consts_lt)
4966 /* MAX (X, 0) == 5 -> X == 5 */
4967 return fold (build2 (EQ_EXPR, type, inner, comp_const));
4969 else if (op_code == MAX_EXPR)
4970 /* MAX (X, 0) == -1 -> false */
4971 return omit_one_operand (type, integer_zero_node, inner);
4973 else if (consts_equal)
4974 /* MIN (X, 0) == 0 -> X >= 0 */
4975 return fold (build2 (GE_EXPR, type, inner, comp_const));
4978 /* MIN (X, 0) == 5 -> false */
4979 return omit_one_operand (type, integer_zero_node, inner);
4982 /* MIN (X, 0) == -1 -> X == -1 */
4983 return fold (build2 (EQ_EXPR, type, inner, comp_const));
4986 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
4987 /* MAX (X, 0) > 0 -> X > 0
4988 MAX (X, 0) > 5 -> X > 5 */
4989 return fold (build2 (GT_EXPR, type, inner, comp_const));
4991 else if (op_code == MAX_EXPR)
4992 /* MAX (X, 0) > -1 -> true */
4993 return omit_one_operand (type, integer_one_node, inner);
4995 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
4996 /* MIN (X, 0) > 0 -> false
4997 MIN (X, 0) > 5 -> false */
4998 return omit_one_operand (type, integer_zero_node, inner);
5001 /* MIN (X, 0) > -1 -> X > -1 */
5002 return fold (build2 (GT_EXPR, type, inner, comp_const));
5009 /* T is an integer expression that is being multiplied, divided, or taken a
5010 modulus (CODE says which and what kind of divide or modulus) by a
5011 constant C. See if we can eliminate that operation by folding it with
5012 other operations already in T. WIDE_TYPE, if non-null, is a type that
5013 should be used for the computation if wider than our type.
5015 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5016 (X * 2) + (Y * 4). We must, however, be assured that either the original
5017 expression would not overflow or that overflow is undefined for the type
5018 in the language in question.
5020 We also canonicalize (X + 7) * 4 into X * 4 + 28 in the hope that either
5021 the machine has a multiply-accumulate insn or that this is part of an
5022 addressing calculation.
5024 If we return a non-null expression, it is an equivalent form of the
5025 original computation, but need not be in the original type. */
/* Depth-limited wrapper: the real work happens in extract_muldiv_1.
   NOTE(review): the depth counter and its bookkeeping are on lines
   elided from this chunk.  */
5028 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type)
5030 /* To avoid exponential search depth, refuse to allow recursion past
5031 three levels. Beyond that (1) it's highly unlikely that we'll find
5032 something interesting and (2) we've probably processed it before
5033 when we built the inner expression. */
5042 ret = extract_muldiv_1 (t, c, code, wide_type);
/* Worker for extract_muldiv; see the comment above extract_muldiv for
   the full contract.  Dispatches on TREE_CODE (t) and tries to fold
   the MULT/DIV/MOD by C into T's own operation.  Returns an equivalent
   expression (possibly in CTYPE, the wider of TYPE and WIDE_TYPE) or,
   on elided lines, presumably NULL/0 when no simplification applies --
   TODO confirm against the full source.  */
5049 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type)
5051 tree type = TREE_TYPE (t);
5052 enum tree_code tcode = TREE_CODE (t);
/* Compute in the wider of TYPE and WIDE_TYPE, when WIDE_TYPE is given.  */
5053 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5054 > GET_MODE_SIZE (TYPE_MODE (type)))
5055 ? wide_type : type);
5057 int same_p = tcode == code;
5058 tree op0 = NULL_TREE, op1 = NULL_TREE;
5060 /* Don't deal with constants of zero here; they confuse the code below. */
5061 if (integer_zerop (c))
/* Pull out the operands for unary ('1') and binary ('2') codes.  */
5064 if (TREE_CODE_CLASS (tcode) == '1')
5065 op0 = TREE_OPERAND (t, 0);
5067 if (TREE_CODE_CLASS (tcode) == '2')
5068 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5070 /* Note that we need not handle conditional operations here since fold
5071 already handles those cases. So just do arithmetic here. */
5075 /* For a constant, we can always simplify if we are a multiply
5076 or (for divide and modulus) if it is a multiple of our constant. */
5077 if (code == MULT_EXPR
5078 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
5079 return const_binop (code, fold_convert (ctype, t),
5080 fold_convert (ctype, c), 0);
5083 case CONVERT_EXPR: case NON_LVALUE_EXPR: case NOP_EXPR:
5084 /* If op0 is an expression ... */
5085 if ((TREE_CODE_CLASS (TREE_CODE (op0)) == '<'
5086 || TREE_CODE_CLASS (TREE_CODE (op0)) == '1'
5087 || TREE_CODE_CLASS (TREE_CODE (op0)) == '2'
5088 || TREE_CODE_CLASS (TREE_CODE (op0)) == 'e')
5089 /* ... and is unsigned, and its type is smaller than ctype,
5090 then we cannot pass through as widening. */
5091 && ((TYPE_UNSIGNED (TREE_TYPE (op0))
5092 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
5093 && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
5094 && (GET_MODE_SIZE (TYPE_MODE (ctype))
5095 > GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0)))))
5096 /* ... or its type is larger than ctype,
5097 then we cannot pass through this truncation. */
5098 || (GET_MODE_SIZE (TYPE_MODE (ctype))
5099 < GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0))))
5100 /* ... or signedness changes for division or modulus,
5101 then we cannot pass through this conversion. */
5102 || (code != MULT_EXPR
5103 && (TYPE_UNSIGNED (ctype)
5104 != TYPE_UNSIGNED (TREE_TYPE (op0))))))
5107 /* Pass the constant down and see if we can make a simplification. If
5108 we can, replace this expression with the inner simplification for
5109 possible later conversion to our or some other type. */
5110 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5111 && TREE_CODE (t2) == INTEGER_CST
5112 && ! TREE_CONSTANT_OVERFLOW (t2)
5113 && (0 != (t1 = extract_muldiv (op0, t2, code,
5115 ? ctype : NULL_TREE))))
5119 case NEGATE_EXPR: case ABS_EXPR:
/* The operation distributes over negation/absolute value.  */
5120 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
5121 return fold (build1 (tcode, ctype, fold_convert (ctype, t1)));
5124 case MIN_EXPR: case MAX_EXPR:
5125 /* If widening the type changes the signedness, then we can't perform
5126 this optimization as that changes the result. */
5127 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5130 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5131 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0
5132 && (t2 = extract_muldiv (op1, c, code, wide_type)) != 0)
/* Multiplying/dividing by a negative constant flips the order,
   so MIN becomes MAX and vice versa.  */
5134 if (tree_int_cst_sgn (c) < 0)
5135 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5137 return fold (build2 (tcode, ctype, fold_convert (ctype, t1),
5138 fold_convert (ctype, t2)));
5142 case LSHIFT_EXPR: case RSHIFT_EXPR:
5143 /* If the second operand is constant, this is a multiplication
5144 or floor division, by a power of two, so we can treat it that
5145 way unless the multiplier or divisor overflows. */
5146 if (TREE_CODE (op1) == INTEGER_CST
5147 /* const_binop may not detect overflow correctly,
5148 so check for it explicitly here. */
5149 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
5150 && TREE_INT_CST_HIGH (op1) == 0
5151 && 0 != (t1 = fold_convert (ctype,
5152 const_binop (LSHIFT_EXPR,
5155 && ! TREE_OVERFLOW (t1))
5156 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5157 ? MULT_EXPR : FLOOR_DIV_EXPR,
5158 ctype, fold_convert (ctype, op0), t1),
5159 c, code, wide_type);
5162 case PLUS_EXPR: case MINUS_EXPR:
5163 /* See if we can eliminate the operation on both sides. If we can, we
5164 can return a new PLUS or MINUS. If we can't, the only remaining
5165 cases where we can do anything are if the second operand is a
   constant.  */
5167 t1 = extract_muldiv (op0, c, code, wide_type);
5168 t2 = extract_muldiv (op1, c, code, wide_type);
5169 if (t1 != 0 && t2 != 0
5170 && (code == MULT_EXPR
5171 /* If not multiplication, we can only do this if both operands
5172 are divisible by c. */
5173 || (multiple_of_p (ctype, op0, c)
5174 && multiple_of_p (ctype, op1, c))))
5175 return fold (build2 (tcode, ctype, fold_convert (ctype, t1),
5176 fold_convert (ctype, t2)));
5178 /* If this was a subtraction, negate OP1 and set it to be an addition.
5179 This simplifies the logic below. */
5180 if (tcode == MINUS_EXPR)
5181 tcode = PLUS_EXPR, op1 = negate_expr (op1);
5183 if (TREE_CODE (op1) != INTEGER_CST)
5186 /* If either OP1 or C are negative, this optimization is not safe for
5187 some of the division and remainder types while for others we need
5188 to change the code. */
5189 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5191 if (code == CEIL_DIV_EXPR)
5192 code = FLOOR_DIV_EXPR;
5193 else if (code == FLOOR_DIV_EXPR)
5194 code = CEIL_DIV_EXPR;
5195 else if (code != MULT_EXPR
5196 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5200 /* If it's a multiply or a division/modulus operation of a multiple
5201 of our constant, do the operation and verify it doesn't overflow. */
5202 if (code == MULT_EXPR
5203 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5205 op1 = const_binop (code, fold_convert (ctype, op1),
5206 fold_convert (ctype, c), 0);
5207 /* We allow the constant to overflow with wrapping semantics. */
5209 || (TREE_OVERFLOW (op1) && ! flag_wrapv))
5215 /* If we have an unsigned type that is not a sizetype, we cannot widen
5216 the operation since it will change the result if the original
5217 computation overflowed. */
5218 if (TYPE_UNSIGNED (ctype)
5219 && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
5223 /* If we were able to eliminate our operation from the first side,
5224 apply our operation to the second side and reform the PLUS. */
5225 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5226 return fold (build2 (tcode, ctype, fold_convert (ctype, t1), op1));
5228 /* The last case is if we are a multiply. In that case, we can
5229 apply the distributive law to commute the multiply and addition
5230 if the multiplication of the constants doesn't overflow. */
5231 if (code == MULT_EXPR)
5232 return fold (build2 (tcode, ctype,
5233 fold (build2 (code, ctype,
5234 fold_convert (ctype, op0),
5235 fold_convert (ctype, c))),
5241 /* We have a special case here if we are doing something like
5242 (C * 8) % 4 since we know that's zero. */
5243 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
5244 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
5245 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
5246 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5247 return omit_one_operand (type, integer_zero_node, op0);
5249 /* ... fall through ... */
5251 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
5252 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
5253 /* If we can extract our operation from the LHS, do so and return a
5254 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
5255 do something only if the second operand is a constant. */
5257 && (t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
5258 return fold (build2 (tcode, ctype, fold_convert (ctype, t1),
5259 fold_convert (ctype, op1)));
5260 else if (tcode == MULT_EXPR && code == MULT_EXPR
5261 && (t1 = extract_muldiv (op1, c, code, wide_type)) != 0)
5262 return fold (build2 (tcode, ctype, fold_convert (ctype, op0),
5263 fold_convert (ctype, t1)));
5264 else if (TREE_CODE (op1) != INTEGER_CST)
5267 /* If these are the same operation types, we can associate them
5268 assuming no overflow. */
5270 && 0 != (t1 = const_binop (MULT_EXPR, fold_convert (ctype, op1),
5271 fold_convert (ctype, c), 0))
5272 && ! TREE_OVERFLOW (t1))
5273 return fold (build2 (tcode, ctype, fold_convert (ctype, op0), t1));
5275 /* If these operations "cancel" each other, we have the main
5276 optimizations of this pass, which occur when either constant is a
5277 multiple of the other, in which case we replace this with either an
5278 operation or CODE or TCODE.
5280 If we have an unsigned type that is not a sizetype, we cannot do
5281 this since it will change the result if the original computation
   overflowed.  */
5283 if ((! TYPE_UNSIGNED (ctype)
5284 || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
5286 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
5287 || (tcode == MULT_EXPR
5288 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
5289 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR)))
/* OP1 a multiple of C: keep TCODE, scaling its constant down.  */
5291 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5292 return fold (build2 (tcode, ctype, fold_convert (ctype, op0),
5293 fold_convert (ctype,
5294 const_binop (TRUNC_DIV_EXPR,
/* C a multiple of OP1: the operations cancel down to CODE.  */
5296 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
5297 return fold (build2 (code, ctype, fold_convert (ctype, op0),
5298 fold_convert (ctype,
5299 const_binop (TRUNC_DIV_EXPR,
5311 /* Return a node which has the indicated constant VALUE (either 0 or
5312 1), and is of the indicated TYPE. */
5315 constant_boolean_node (int value, tree type)
/* Reuse the shared nodes for the common integer/boolean types.  */
5317 if (type == integer_type_node)
5318 return value ? integer_one_node : integer_zero_node;
5319 else if (type == boolean_type_node)
5320 return value ? boolean_true_node : boolean_false_node;
5321 else if (TREE_CODE (type) == BOOLEAN_TYPE)
/* Other BOOLEAN_TYPEs go through the front end's truth-value hook.  */
5322 return lang_hooks.truthvalue_conversion (value ? integer_one_node
5323 : integer_zero_node);
/* Otherwise build a fresh INTEGER_CST and give it the requested type.  */
5326 tree t = build_int_2 (value, 0);
5328 TREE_TYPE (t) = type;
5333 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
5334 Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
5335 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
5336 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
5337 COND is the first argument to CODE; otherwise (as in the example
5338 given here), it is the second argument. TYPE is the type of the
5339 original expression. Return NULL_TREE if no simplification is
   possible.  */
5343 fold_binary_op_with_conditional_arg (enum tree_code code, tree type,
5344 tree cond, tree arg, int cond_first_p)
5346 tree test, true_value, false_value;
5347 tree lhs = NULL_TREE;
5348 tree rhs = NULL_TREE;
5350 /* This transformation is only worthwhile if we don't have to wrap
5351 arg in a SAVE_EXPR, and the operation can be simplified on at least
5352 one of the branches once it's pushed inside the COND_EXPR. */
5353 if (!TREE_CONSTANT (arg))
5356 if (TREE_CODE (cond) == COND_EXPR)
5358 test = TREE_OPERAND (cond, 0);
5359 true_value = TREE_OPERAND (cond, 1);
5360 false_value = TREE_OPERAND (cond, 2);
5361 /* If this operand throws an expression, then it does not make
5362 sense to try to perform a logical or arithmetic operation
   involving it.  */
5364 if (VOID_TYPE_P (TREE_TYPE (true_value)))
5366 if (VOID_TYPE_P (TREE_TYPE (false_value)))
/* COND is a bare comparison: treat it as (cond ? true : false).  */
5371 tree testtype = TREE_TYPE (cond);
5373 true_value = constant_boolean_node (true, testtype);
5374 false_value = constant_boolean_node (false, testtype);
/* Push the operation into both arms, then fold the whole COND_EXPR.  */
5378 lhs = fold (cond_first_p ? build2 (code, type, true_value, arg)
5379 : build2 (code, type, arg, true_value));
5381 rhs = fold (cond_first_p ? build2 (code, type, false_value, arg)
5382 : build2 (code, type, arg, false_value));
5384 test = fold (build3 (COND_EXPR, type, test, lhs, rhs));
5385 return fold_convert (type, test);
5389 /* Subroutine of fold() that checks for the addition of +/- 0.0.
5391 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
5392 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
5393 ADDEND is the same as X.
5395 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
5396 and finite. The problematic cases are when X is zero, and its mode
5397 has signed zeros. In the case of rounding towards -infinity,
5398 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
5399 modes, X + 0 is not the same as X because -0 + 0 is 0. */
5402 fold_real_zero_addition_p (tree type, tree addend, int negate)
5404 if (!real_zerop (addend))
5407 /* Don't allow the fold with -fsignaling-nans. */
5408 if (HONOR_SNANS (TYPE_MODE (type)))
5411 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
5412 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
5415 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
5416 if (TREE_CODE (addend) == REAL_CST
5417 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
5420 /* The mode has signed zeros, and we have to honor their sign.
5421 In this situation, there is only one case we can return true for.
5422 X - 0 is the same as X unless rounding towards -infinity is
   in effect.  */
5424 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
5427 /* Subroutine of fold() that checks comparisons of built-in math
5428 functions against real constants.
5430 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
5431 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
5432 is the type of the result and ARG0 and ARG1 are the operands of the
5433 comparison. ARG1 must be a TREE_REAL_CST.
5435 The function returns the constant folded tree if a simplification
5436 can be made, and NULL_TREE otherwise. */
5439 fold_mathfn_compare (enum built_in_function fcode, enum tree_code code,
5440 tree type, tree arg0, tree arg1)
/* Only sqrt-family builtins are handled in this (visible) portion.  */
5444 if (BUILTIN_SQRT_P (fcode))
5446 tree arg = TREE_VALUE (TREE_OPERAND (arg0, 1));
5447 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
5449 c = TREE_REAL_CST (arg1);
5450 if (REAL_VALUE_NEGATIVE (c))
5452 /* sqrt(x) < y is always false, if y is negative. */
5453 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
5454 return omit_one_operand (type, integer_zero_node, arg);
5456 /* sqrt(x) > y is always true, if y is negative and we
5457 don't care about NaNs, i.e. negative values of x. */
5458 if (code == NE_EXPR || !HONOR_NANS (mode))
5459 return omit_one_operand (type, integer_one_node, arg);
5461 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
5462 return fold (build2 (GE_EXPR, type, arg,
5463 build_real (TREE_TYPE (arg), dconst0)));
5465 else if (code == GT_EXPR || code == GE_EXPR)
/* Compare against c*c, computed in the argument's mode.  */
5469 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
5470 real_convert (&c2, mode, &c2);
5472 if (REAL_VALUE_ISINF (c2))
5474 /* sqrt(x) > y is x == +Inf, when y is very large. */
5475 if (HONOR_INFINITIES (mode))
5476 return fold (build2 (EQ_EXPR, type, arg,
5477 build_real (TREE_TYPE (arg), c2)));
5479 /* sqrt(x) > y is always false, when y is very large
5480 and we don't care about infinities. */
5481 return omit_one_operand (type, integer_zero_node, arg);
5484 /* sqrt(x) > c is the same as x > c*c. */
5485 return fold (build2 (code, type, arg,
5486 build_real (TREE_TYPE (arg), c2)));
5488 else if (code == LT_EXPR || code == LE_EXPR)
/* Same c*c trick for the less-than direction.  */
5492 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
5493 real_convert (&c2, mode, &c2);
5495 if (REAL_VALUE_ISINF (c2))
5497 /* sqrt(x) < y is always true, when y is a very large
5498 value and we don't care about NaNs or Infinities. */
5499 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
5500 return omit_one_operand (type, integer_one_node, arg);
5502 /* sqrt(x) < y is x != +Inf when y is very large and we
5503 don't care about NaNs. */
5504 if (! HONOR_NANS (mode))
5505 return fold (build2 (NE_EXPR, type, arg,
5506 build_real (TREE_TYPE (arg), c2)));
5508 /* sqrt(x) < y is x >= 0 when y is very large and we
5509 don't care about Infinities. */
5510 if (! HONOR_INFINITIES (mode))
5511 return fold (build2 (GE_EXPR, type, arg,
5512 build_real (TREE_TYPE (arg), dconst0)));
5514 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
5515 if (lang_hooks.decls.global_bindings_p () != 0
5516 || CONTAINS_PLACEHOLDER_P (arg))
/* ARG is used twice below, so protect it with a SAVE_EXPR.  */
5519 arg = save_expr (arg);
5520 return fold (build2 (TRUTH_ANDIF_EXPR, type,
5521 fold (build2 (GE_EXPR, type, arg,
5522 build_real (TREE_TYPE (arg),
5524 fold (build2 (NE_EXPR, type, arg,
5525 build_real (TREE_TYPE (arg),
5529 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
5530 if (! HONOR_NANS (mode))
5531 return fold (build2 (code, type, arg,
5532 build_real (TREE_TYPE (arg), c2)));
5534 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
5535 if (lang_hooks.decls.global_bindings_p () == 0
5536 && ! CONTAINS_PLACEHOLDER_P (arg))
5538 arg = save_expr (arg);
5539 return fold (build2 (TRUTH_ANDIF_EXPR, type,
5540 fold (build2 (GE_EXPR, type, arg,
5541 build_real (TREE_TYPE (arg),
5543 fold (build2 (code, type, arg,
5544 build_real (TREE_TYPE (arg),
5553 /* Subroutine of fold() that optimizes comparisons against Infinities,
5554 either +Inf or -Inf.
5556 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
5557 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
5558 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
5560 The function returns the constant folded tree if a simplification
5561 can be made, and NULL_TREE otherwise. */
5564 fold_inf_compare (enum tree_code code, tree type, tree arg0, tree arg1)
5566 enum machine_mode mode;
5567 REAL_VALUE_TYPE max;
5571 mode = TYPE_MODE (TREE_TYPE (arg0));
5573 /* For negative infinity swap the sense of the comparison. */
5574 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
5576 code = swap_tree_comparison (code);
5581 /* x > +Inf is always false, if we ignore sNaNs. */
5582 if (HONOR_SNANS (mode))
5584 return omit_one_operand (type, integer_zero_node, arg0);
5587 /* x <= +Inf is always true, if we don't care about NaNs. */
5588 if (! HONOR_NANS (mode))
5589 return omit_one_operand (type, integer_one_node, arg0);
5591 /* x <= +Inf is the same as x == x, i.e. isfinite(x). */
5592 if (lang_hooks.decls.global_bindings_p () == 0
5593 && ! CONTAINS_PLACEHOLDER_P (arg0))
5595 arg0 = save_expr (arg0);
5596 return fold (build2 (EQ_EXPR, type, arg0, arg0));
5602 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
5603 real_maxval (&max, neg, mode);
5604 return fold (build2 (neg ? LT_EXPR : GT_EXPR, type,
5605 arg0, build_real (TREE_TYPE (arg0), max)));
5608 /* x < +Inf is always equal to x <= DBL_MAX. */
5609 real_maxval (&max, neg, mode);
5610 return fold (build2 (neg ? GE_EXPR : LE_EXPR, type,
5611 arg0, build_real (TREE_TYPE (arg0), max)));
5614 /* x != +Inf is always equal to !(x > DBL_MAX). */
5615 real_maxval (&max, neg, mode);
5616 if (! HONOR_NANS (mode))
5617 return fold (build2 (neg ? GE_EXPR : LE_EXPR, type,
5618 arg0, build_real (TREE_TYPE (arg0), max)));
5620 /* The transformation below creates non-gimple code and thus is
5621 not appropriate if we are in gimple form. */
/* With NaNs, x != +Inf must stay a negation: !(x > DBL_MAX).  */
5625 temp = fold (build2 (neg ? LT_EXPR : GT_EXPR, type,
5626 arg0, build_real (TREE_TYPE (arg0), max)));
5627 return fold (build1 (TRUTH_NOT_EXPR, type, temp));
5636 /* Subroutine of fold() that optimizes comparisons of a division by
5637 a nonzero integer constant against an integer constant, i.e.
5640 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
5641 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
5642 are the operands of the comparison. ARG1 must be an INTEGER_CST
   (NOTE(review): the original comment said TREE_REAL_CST, which looks
   like a copy-paste from fold_mathfn_compare -- the code below applies
   TREE_INT_CST_LOW/HIGH to ARG1).
5644 The function returns the constant folded tree if a simplification
5645 can be made, and NULL_TREE otherwise. */
5648 fold_div_compare (enum tree_code code, tree type, tree arg0, tree arg1)
5650 tree prod, tmp, hi, lo;
5651 tree arg00 = TREE_OPERAND (arg0, 0);
5652 tree arg01 = TREE_OPERAND (arg0, 1);
5653 unsigned HOST_WIDE_INT lpart;
5654 HOST_WIDE_INT hpart;
5657 /* We have to do this the hard way to detect unsigned overflow.
5658 prod = int_const_binop (MULT_EXPR, arg01, arg1, 0); */
5659 overflow = mul_double (TREE_INT_CST_LOW (arg01),
5660 TREE_INT_CST_HIGH (arg01),
5661 TREE_INT_CST_LOW (arg1),
5662 TREE_INT_CST_HIGH (arg1), &lpart, &hpart);
5663 prod = build_int_2 (lpart, hpart);
5664 TREE_TYPE (prod) = TREE_TYPE (arg00);
/* Record overflow both from the multiply and from forcing the result
   to fit the type.  */
5665 TREE_OVERFLOW (prod) = force_fit_type (prod, overflow)
5666 || TREE_INT_CST_HIGH (prod) != hpart
5667 || TREE_INT_CST_LOW (prod) != lpart;
5668 TREE_CONSTANT_OVERFLOW (prod) = TREE_OVERFLOW (prod);
/* Compute [lo, hi], the range of X for which X/C1 equals ARG1; the
   bounds depend on the signs of C1 and ARG1.  */
5670 if (TYPE_UNSIGNED (TREE_TYPE (arg0)))
5672 tmp = int_const_binop (MINUS_EXPR, arg01, integer_one_node, 0);
5675 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0). */
5676 overflow = add_double (TREE_INT_CST_LOW (prod),
5677 TREE_INT_CST_HIGH (prod),
5678 TREE_INT_CST_LOW (tmp),
5679 TREE_INT_CST_HIGH (tmp),
5681 hi = build_int_2 (lpart, hpart);
5682 TREE_TYPE (hi) = TREE_TYPE (arg00);
5683 TREE_OVERFLOW (hi) = force_fit_type (hi, overflow)
5684 || TREE_INT_CST_HIGH (hi) != hpart
5685 || TREE_INT_CST_LOW (hi) != lpart
5686 || TREE_OVERFLOW (prod);
5687 TREE_CONSTANT_OVERFLOW (hi) = TREE_OVERFLOW (hi);
5689 else if (tree_int_cst_sgn (arg01) >= 0)
5691 tmp = int_const_binop (MINUS_EXPR, arg01, integer_one_node, 0);
5692 switch (tree_int_cst_sgn (arg1))
5695 lo = int_const_binop (MINUS_EXPR, prod, tmp, 0);
5700 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
5705 hi = int_const_binop (PLUS_EXPR, prod, tmp, 0);
/* Negative divisor: mirror-image of the case above.  */
5715 tmp = int_const_binop (PLUS_EXPR, arg01, integer_one_node, 0);
5716 switch (tree_int_cst_sgn (arg1))
5719 hi = int_const_binop (MINUS_EXPR, prod, tmp, 0);
5724 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
5729 lo = int_const_binop (PLUS_EXPR, prod, tmp, 0);
/* Rewrite the comparison as a range check against [lo, hi],
   collapsing to a constant when a bound overflowed.  */
5741 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
5742 return omit_one_operand (type, integer_zero_node, arg00);
5743 if (TREE_OVERFLOW (hi))
5744 return fold (build2 (GE_EXPR, type, arg00, lo));
5745 if (TREE_OVERFLOW (lo))
5746 return fold (build2 (LE_EXPR, type, arg00, hi));
5747 return build_range_check (type, arg00, 1, lo, hi);
5750 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
5751 return omit_one_operand (type, integer_one_node, arg00);
5752 if (TREE_OVERFLOW (hi))
5753 return fold (build2 (LT_EXPR, type, arg00, lo));
5754 if (TREE_OVERFLOW (lo))
5755 return fold (build2 (GT_EXPR, type, arg00, hi));
5756 return build_range_check (type, arg00, 0, lo, hi);
5759 if (TREE_OVERFLOW (lo))
5760 return omit_one_operand (type, integer_zero_node, arg00);
5761 return fold (build2 (LT_EXPR, type, arg00, lo));
5764 if (TREE_OVERFLOW (hi))
5765 return omit_one_operand (type, integer_one_node, arg00);
5766 return fold (build2 (LE_EXPR, type, arg00, hi));
5769 if (TREE_OVERFLOW (hi))
5770 return omit_one_operand (type, integer_zero_node, arg00);
5771 return fold (build2 (GT_EXPR, type, arg00, hi));
5774 if (TREE_OVERFLOW (lo))
5775 return omit_one_operand (type, integer_one_node, arg00);
5776 return fold (build2 (GE_EXPR, type, arg00, lo));
5786 /* If CODE with arguments ARG0 and ARG1 represents a single bit
5787 equality/inequality test, then return a simplified form of
5788 the test using shifts and logical operations. Otherwise return
5789 NULL. TYPE is the desired result type. */
/* NOTE(review): this listing is a lossy extraction -- gaps in the
 embedded line numbering (e.g. 5790-5791, 5793-5794, 5796, 5884-5889)
 mean that braces, the declaration lines for some locals (arg00,
 ops_unsigned), the #else arm of the LOAD_EXTEND_OP conditional, and
 the function's final return are NOT visible here.  The comments below
 describe only what the visible lines establish; confirm the rest
 against the full source before editing.  */
5792 fold_single_bit_test (enum tree_code code, tree arg0, tree arg1,
5795 /* If this is a TRUTH_NOT_EXPR, it may have a single bit test inside
5797 if (code == TRUTH_NOT_EXPR)
5799 code = TREE_CODE (arg0);
5800 if (code != NE_EXPR && code != EQ_EXPR)
5803 /* Extract the arguments of the EQ/NE. */
5804 arg1 = TREE_OPERAND (arg0, 1);
5805 arg0 = TREE_OPERAND (arg0, 0);
5807 /* This requires us to invert the code. */
5808 code = (code == EQ_EXPR ? NE_EXPR : EQ_EXPR);
5811 /* If this is testing a single bit, we can optimize the test. */
5812 if ((code == NE_EXPR || code == EQ_EXPR)
5813 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
5814 && integer_pow2p (TREE_OPERAND (arg0, 1)))
5816 tree inner = TREE_OPERAND (arg0, 0);
5817 tree type = TREE_TYPE (arg0);
/* bitnum is the zero-based index of the single set bit in the AND mask,
 obtained via tree_log2 -- valid because integer_pow2p was checked
 above.  */
5818 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
5819 enum machine_mode operand_mode = TYPE_MODE (type);
5821 tree signed_type, unsigned_type, intermediate_type;
5824 /* If we have (A & C) != 0 where C is the sign bit of A, convert
5825 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
5826 arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
5827 if (arg00 != NULL_TREE
5828 /* This is only a win if casting to a signed type is cheap,
5829 i.e. when arg00's type is not a partial mode. */
5830 && TYPE_PRECISION (TREE_TYPE (arg00))
5831 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
5833 tree stype = lang_hooks.types.signed_type (TREE_TYPE (arg00));
5834 return fold (build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
5835 result_type, fold_convert (stype, arg00),
5836 fold_convert (stype, integer_zero_node)));
5839 /* Otherwise we have (A & C) != 0 where C is a single bit,
5840 convert that into ((A >> C2) & 1). Where C2 = log2(C).
5841 Similarly for (A & C) == 0. */
5843 /* If INNER is a right shift of a constant and it plus BITNUM does
5844 not overflow, adjust BITNUM and INNER. */
5845 if (TREE_CODE (inner) == RSHIFT_EXPR
5846 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
5847 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
5848 && bitnum < TYPE_PRECISION (type)
5849 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
5850 bitnum - TYPE_PRECISION (type)))
/* Fold (X >> c) & (1 << bitnum) into testing bit (bitnum + c) of X
 directly, dropping the inner shift.  */
5852 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
5853 inner = TREE_OPERAND (inner, 0);
5856 /* If we are going to be able to omit the AND below, we must do our
5857 operations as unsigned. If we must use the AND, we have a choice.
5858 Normally unsigned is faster, but for some machines signed is. */
5859 #ifdef LOAD_EXTEND_OP
5860 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1);
/* NOTE(review): the #else / #endif arm that presumably sets
 ops_unsigned when LOAD_EXTEND_OP is undefined (lines 5861-5864) is
 missing from this listing.  */
5865 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
5866 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
5867 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
5868 inner = fold_convert (intermediate_type, inner);
5871 inner = build2 (RSHIFT_EXPR, intermediate_type,
5872 inner, size_int (bitnum));
/* For EQ we want the complement of the tested bit: shift it to bit 0,
 then flip it with XOR 1 before the final mask.  */
5874 if (code == EQ_EXPR)
5875 inner = fold (build2 (BIT_XOR_EXPR, intermediate_type,
5876 inner, integer_one_node));
5878 /* Put the AND last so it can combine with more things. */
5879 inner = build2 (BIT_AND_EXPR, intermediate_type,
5880 inner, integer_one_node);
5882 /* Make sure to return the proper type. */
5883 inner = fold_convert (result_type, inner);
5890 /* Check whether we are allowed to reorder operands arg0 and arg1,
5891 such that the evaluation of arg1 occurs before arg0. */
/* NOTE(review): lines 5892-5893, 5895, 5897, 5899 and 5902-5903 are
 missing from this listing -- they presumably hold the return type,
 braces, and the early `return` statements paired with the two guards
 below (likely returning nonzero when reordering is trivially safe).
 Confirm against the full source.  Visible logic: when neither early
 guard fires, reordering is permitted only if neither operand has side
 effects.  */
5894 reorder_operands_p (tree arg0, tree arg1)
5896 if (! flag_evaluation_order)
5898 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
5900 return ! TREE_SIDE_EFFECTS (arg0)
5901 && ! TREE_SIDE_EFFECTS (arg1);
5904 /* Test whether it is preferable two swap two operands, ARG0 and
5905 ARG1, for example because ARG0 is an integer constant and ARG1
5906 isn't. If REORDER is true, only recommend swapping if we can
5907 evaluate the operands in reverse order. */
/* NOTE(review): this listing omits the return type line, braces, and
 the `return 0;` / `return 1;` statements that presumably follow each
 `if` below (gaps at 5908-5909, 5911, 5914, 5916, 5918-5919, etc.).
 The visible pattern is a canonicalization order: constants (INTEGER,
 REAL, COMPLEX, then general TREE_CONSTANT) are preferred on the
 right-hand side; each pair of tests appears to say "if arg1 is the
 constant, don't swap; if arg0 is, swap" -- confirm the hidden return
 values against the full source.  */
5910 tree_swap_operands_p (tree arg0, tree arg1, bool reorder)
/* Look through sign-preserving no-op conversions so the classification
 below sees the underlying operands.  */
5912 STRIP_SIGN_NOPS (arg0);
5913 STRIP_SIGN_NOPS (arg1);
5915 if (TREE_CODE (arg1) == INTEGER_CST)
5917 if (TREE_CODE (arg0) == INTEGER_CST)
5920 if (TREE_CODE (arg1) == REAL_CST)
5922 if (TREE_CODE (arg0) == REAL_CST)
5925 if (TREE_CODE (arg1) == COMPLEX_CST)
5927 if (TREE_CODE (arg0) == COMPLEX_CST)
5930 if (TREE_CONSTANT (arg1))
5932 if (TREE_CONSTANT (arg0))
/* When REORDER is set and -ffloat-store-style strict evaluation order
 is in effect, a swap that would move a side-effecting operand is
 rejected; the two identical guards below presumably protect the two
 swap directions (their return lines are among the missing lines).  */
5938 if (reorder && flag_evaluation_order
5939 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
5947 if (reorder && flag_evaluation_order
5948 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
5956 /* It is preferable to swap two SSA_NAME to ensure a canonical form
5957 for commutative and comparison operators. Ensuring a canonical
5958 form allows the optimizers to find additional redundancies without
5959 having to explicitly check for both orderings. */
5960 if (TREE_CODE (arg0) == SSA_NAME
5961 && TREE_CODE (arg1) == SSA_NAME
5962 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
5968 /* Perform constant folding and related simplification of EXPR.
5969 The related simplifications include x*1 => x, x*0 => 0, etc.,
5970 and application of the associative law.
5971 NOP_EXPR conversions may be removed freely (as long as we
5972 are careful not to change the type of the overall expression).
5973 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
5974 but we can constant-fold them if they have constant operands. */
5976 #ifdef ENABLE_FOLD_CHECKING
5977 # define fold(x) fold_1 (x)
5978 static tree fold_1 (tree);
5984 const tree t = expr;
5985 const tree type = TREE_TYPE (expr);
5986 tree t1 = NULL_TREE;
5988 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
5989 enum tree_code code = TREE_CODE (t);
5990 int kind = TREE_CODE_CLASS (code);
5992 /* WINS will be nonzero when the switch is done
5993 if all operands are constant. */
5996 /* Return right away if a constant. */
6000 if (code == NOP_EXPR || code == FLOAT_EXPR || code == CONVERT_EXPR)
6004 /* Special case for conversion ops that can have fixed point args. */
6005 arg0 = TREE_OPERAND (t, 0);
6007 /* Don't use STRIP_NOPS, because signedness of argument type matters. */
6009 STRIP_SIGN_NOPS (arg0);
6011 if (arg0 != 0 && TREE_CODE (arg0) == COMPLEX_CST)
6012 subop = TREE_REALPART (arg0);
6016 if (subop != 0 && TREE_CODE (subop) != INTEGER_CST
6017 && TREE_CODE (subop) != REAL_CST)
6018 /* Note that TREE_CONSTANT isn't enough:
6019 static var addresses are constant but we can't
6020 do arithmetic on them. */
6023 else if (IS_EXPR_CODE_CLASS (kind))
6025 int len = first_rtl_op (code);
6027 for (i = 0; i < len; i++)
6029 tree op = TREE_OPERAND (t, i);
6033 continue; /* Valid for CALL_EXPR, at least. */
6035 /* Strip any conversions that don't change the mode. This is
6036 safe for every expression, except for a comparison expression
6037 because its signedness is derived from its operands. So, in
6038 the latter case, only strip conversions that don't change the
6041 Note that this is done as an internal manipulation within the
6042 constant folder, in order to find the simplest representation
6043 of the arguments so that their form can be studied. In any
6044 cases, the appropriate type conversions should be put back in
6045 the tree that will get out of the constant folder. */
6047 STRIP_SIGN_NOPS (op);
6051 if (TREE_CODE (op) == COMPLEX_CST)
6052 subop = TREE_REALPART (op);
6056 if (TREE_CODE (subop) != INTEGER_CST
6057 && TREE_CODE (subop) != REAL_CST)
6058 /* Note that TREE_CONSTANT isn't enough:
6059 static var addresses are constant but we can't
6060 do arithmetic on them. */
6070 /* If this is a commutative operation, and ARG0 is a constant, move it
6071 to ARG1 to reduce the number of tests below. */
6072 if (commutative_tree_code (code)
6073 && tree_swap_operands_p (arg0, arg1, true))
6074 return fold (build2 (code, type, TREE_OPERAND (t, 1),
6075 TREE_OPERAND (t, 0)));
6077 /* Now WINS is set as described above,
6078 ARG0 is the first operand of EXPR,
6079 and ARG1 is the second operand (if it has more than one operand).
6081 First check for cases where an arithmetic operation is applied to a
6082 compound, conditional, or comparison operation. Push the arithmetic
6083 operation inside the compound or conditional to see if any folding
6084 can then be done. Convert comparison to conditional for this purpose.
6085 The also optimizes non-constant cases that used to be done in
6088 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
6089 one of the operands is a comparison and the other is a comparison, a
6090 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
6091 code below would make the expression more complex. Change it to a
6092 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
6093 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
6095 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
6096 || code == EQ_EXPR || code == NE_EXPR)
6097 && ((truth_value_p (TREE_CODE (arg0))
6098 && (truth_value_p (TREE_CODE (arg1))
6099 || (TREE_CODE (arg1) == BIT_AND_EXPR
6100 && integer_onep (TREE_OPERAND (arg1, 1)))))
6101 || (truth_value_p (TREE_CODE (arg1))
6102 && (truth_value_p (TREE_CODE (arg0))
6103 || (TREE_CODE (arg0) == BIT_AND_EXPR
6104 && integer_onep (TREE_OPERAND (arg0, 1)))))))
6106 tem = fold (build2 (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
6107 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
6109 type, fold_convert (boolean_type_node, arg0),
6110 fold_convert (boolean_type_node, arg1)));
6112 if (code == EQ_EXPR)
6113 tem = invert_truthvalue (tem);
6118 if (TREE_CODE_CLASS (code) == '1')
6120 if (TREE_CODE (arg0) == COMPOUND_EXPR)
6121 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
6122 fold (build1 (code, type, TREE_OPERAND (arg0, 1))));
6123 else if (TREE_CODE (arg0) == COND_EXPR)
6125 tree arg01 = TREE_OPERAND (arg0, 1);
6126 tree arg02 = TREE_OPERAND (arg0, 2);
6127 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
6128 arg01 = fold (build1 (code, type, arg01));
6129 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
6130 arg02 = fold (build1 (code, type, arg02));
6131 tem = fold (build3 (COND_EXPR, type, TREE_OPERAND (arg0, 0),
6134 /* If this was a conversion, and all we did was to move into
6135 inside the COND_EXPR, bring it back out. But leave it if
6136 it is a conversion from integer to integer and the
6137 result precision is no wider than a word since such a
6138 conversion is cheap and may be optimized away by combine,
6139 while it couldn't if it were outside the COND_EXPR. Then return
6140 so we don't get into an infinite recursion loop taking the
6141 conversion out and then back in. */
6143 if ((code == NOP_EXPR || code == CONVERT_EXPR
6144 || code == NON_LVALUE_EXPR)
6145 && TREE_CODE (tem) == COND_EXPR
6146 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
6147 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
6148 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
6149 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
6150 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
6151 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
6152 && ! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
6154 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
6155 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD))
6156 tem = build1 (code, type,
6158 TREE_TYPE (TREE_OPERAND
6159 (TREE_OPERAND (tem, 1), 0)),
6160 TREE_OPERAND (tem, 0),
6161 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
6162 TREE_OPERAND (TREE_OPERAND (tem, 2), 0)));
6165 else if (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<')
6167 if (TREE_CODE (type) == BOOLEAN_TYPE)
6169 arg0 = copy_node (arg0);
6170 TREE_TYPE (arg0) = type;
6173 else if (TREE_CODE (type) != INTEGER_TYPE)
6174 return fold (build3 (COND_EXPR, type, arg0,
6175 fold (build1 (code, type,
6177 fold (build1 (code, type,
6178 integer_zero_node))));
6181 else if (TREE_CODE_CLASS (code) == '<'
6182 && TREE_CODE (arg0) == COMPOUND_EXPR)
6183 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
6184 fold (build2 (code, type, TREE_OPERAND (arg0, 1), arg1)));
6185 else if (TREE_CODE_CLASS (code) == '<'
6186 && TREE_CODE (arg1) == COMPOUND_EXPR)
6187 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
6188 fold (build2 (code, type, arg0, TREE_OPERAND (arg1, 1))));
6189 else if (TREE_CODE_CLASS (code) == '2'
6190 || TREE_CODE_CLASS (code) == '<')
6192 if (TREE_CODE (arg0) == COMPOUND_EXPR)
6193 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
6194 fold (build2 (code, type, TREE_OPERAND (arg0, 1),
6196 if (TREE_CODE (arg1) == COMPOUND_EXPR
6197 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
6198 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
6199 fold (build2 (code, type,
6200 arg0, TREE_OPERAND (arg1, 1))));
6202 if (TREE_CODE (arg0) == COND_EXPR
6203 || TREE_CODE_CLASS (TREE_CODE (arg0)) == '<')
6205 tem = fold_binary_op_with_conditional_arg (code, type, arg0, arg1,
6206 /*cond_first_p=*/1);
6207 if (tem != NULL_TREE)
6211 if (TREE_CODE (arg1) == COND_EXPR
6212 || TREE_CODE_CLASS (TREE_CODE (arg1)) == '<')
6214 tem = fold_binary_op_with_conditional_arg (code, type, arg1, arg0,
6215 /*cond_first_p=*/0);
6216 if (tem != NULL_TREE)
6224 return fold (DECL_INITIAL (t));
6229 case FIX_TRUNC_EXPR:
6231 case FIX_FLOOR_EXPR:
6232 case FIX_ROUND_EXPR:
6233 if (TREE_TYPE (TREE_OPERAND (t, 0)) == type)
6234 return TREE_OPERAND (t, 0);
6236 /* Handle cases of two conversions in a row. */
6237 if (TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
6238 || TREE_CODE (TREE_OPERAND (t, 0)) == CONVERT_EXPR)
6240 tree inside_type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
6241 tree inter_type = TREE_TYPE (TREE_OPERAND (t, 0));
6242 int inside_int = INTEGRAL_TYPE_P (inside_type);
6243 int inside_ptr = POINTER_TYPE_P (inside_type);
6244 int inside_float = FLOAT_TYPE_P (inside_type);
6245 unsigned int inside_prec = TYPE_PRECISION (inside_type);
6246 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
6247 int inter_int = INTEGRAL_TYPE_P (inter_type);
6248 int inter_ptr = POINTER_TYPE_P (inter_type);
6249 int inter_float = FLOAT_TYPE_P (inter_type);
6250 unsigned int inter_prec = TYPE_PRECISION (inter_type);
6251 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
6252 int final_int = INTEGRAL_TYPE_P (type);
6253 int final_ptr = POINTER_TYPE_P (type);
6254 int final_float = FLOAT_TYPE_P (type);
6255 unsigned int final_prec = TYPE_PRECISION (type);
6256 int final_unsignedp = TYPE_UNSIGNED (type);
6258 /* In addition to the cases of two conversions in a row
6259 handled below, if we are converting something to its own
6260 type via an object of identical or wider precision, neither
6261 conversion is needed. */
6262 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
6263 && ((inter_int && final_int) || (inter_float && final_float))
6264 && inter_prec >= final_prec)
6265 return fold (build1 (code, type,
6266 TREE_OPERAND (TREE_OPERAND (t, 0), 0)));
6268 /* Likewise, if the intermediate and final types are either both
6269 float or both integer, we don't need the middle conversion if
6270 it is wider than the final type and doesn't change the signedness
6271 (for integers). Avoid this if the final type is a pointer
6272 since then we sometimes need the inner conversion. Likewise if
6273 the outer has a precision not equal to the size of its mode. */
6274 if ((((inter_int || inter_ptr) && (inside_int || inside_ptr))
6275 || (inter_float && inside_float))
6276 && inter_prec >= inside_prec
6277 && (inter_float || inter_unsignedp == inside_unsignedp)
6278 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
6279 && TYPE_MODE (type) == TYPE_MODE (inter_type))
6281 return fold (build1 (code, type,
6282 TREE_OPERAND (TREE_OPERAND (t, 0), 0)));
6284 /* If we have a sign-extension of a zero-extended value, we can
6285 replace that by a single zero-extension. */
6286 if (inside_int && inter_int && final_int
6287 && inside_prec < inter_prec && inter_prec < final_prec
6288 && inside_unsignedp && !inter_unsignedp)
6289 return fold (build1 (code, type,
6290 TREE_OPERAND (TREE_OPERAND (t, 0), 0)));
6292 /* Two conversions in a row are not needed unless:
6293 - some conversion is floating-point (overstrict for now), or
6294 - the intermediate type is narrower than both initial and
6296 - the intermediate type and innermost type differ in signedness,
6297 and the outermost type is wider than the intermediate, or
6298 - the initial type is a pointer type and the precisions of the
6299 intermediate and final types differ, or
6300 - the final type is a pointer type and the precisions of the
6301 initial and intermediate types differ. */
6302 if (! inside_float && ! inter_float && ! final_float
6303 && (inter_prec > inside_prec || inter_prec > final_prec)
6304 && ! (inside_int && inter_int
6305 && inter_unsignedp != inside_unsignedp
6306 && inter_prec < final_prec)
6307 && ((inter_unsignedp && inter_prec > inside_prec)
6308 == (final_unsignedp && final_prec > inter_prec))
6309 && ! (inside_ptr && inter_prec != final_prec)
6310 && ! (final_ptr && inside_prec != inter_prec)
6311 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
6312 && TYPE_MODE (type) == TYPE_MODE (inter_type))
6314 return fold (build1 (code, type,
6315 TREE_OPERAND (TREE_OPERAND (t, 0), 0)));
6318 if (TREE_CODE (TREE_OPERAND (t, 0)) == MODIFY_EXPR
6319 && TREE_CONSTANT (TREE_OPERAND (TREE_OPERAND (t, 0), 1))
6320 /* Detect assigning a bitfield. */
6321 && !(TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 0)) == COMPONENT_REF
6322 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t, 0), 0), 1))))
6324 /* Don't leave an assignment inside a conversion
6325 unless assigning a bitfield. */
6326 tree prev = TREE_OPERAND (t, 0);
6327 tem = copy_node (t);
6328 TREE_OPERAND (tem, 0) = TREE_OPERAND (prev, 1);
6329 /* First do the assignment, then return converted constant. */
6330 tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem), prev, fold (tem));
6331 TREE_NO_WARNING (tem) = 1;
6332 TREE_USED (tem) = 1;
6336 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
6337 constants (if x has signed type, the sign bit cannot be set
6338 in c). This folds extension into the BIT_AND_EXPR. */
6339 if (INTEGRAL_TYPE_P (type)
6340 && TREE_CODE (type) != BOOLEAN_TYPE
6341 && TREE_CODE (TREE_OPERAND (t, 0)) == BIT_AND_EXPR
6342 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 1)) == INTEGER_CST)
6344 tree and = TREE_OPERAND (t, 0);
6345 tree and0 = TREE_OPERAND (and, 0), and1 = TREE_OPERAND (and, 1);
6348 if (TYPE_UNSIGNED (TREE_TYPE (and))
6349 || (TYPE_PRECISION (type)
6350 <= TYPE_PRECISION (TREE_TYPE (and))))
6352 else if (TYPE_PRECISION (TREE_TYPE (and1))
6353 <= HOST_BITS_PER_WIDE_INT
6354 && host_integerp (and1, 1))
6356 unsigned HOST_WIDE_INT cst;
6358 cst = tree_low_cst (and1, 1);
6359 cst &= (HOST_WIDE_INT) -1
6360 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
6361 change = (cst == 0);
6362 #ifdef LOAD_EXTEND_OP
6364 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
6367 tree uns = lang_hooks.types.unsigned_type (TREE_TYPE (and0));
6368 and0 = fold_convert (uns, and0);
6369 and1 = fold_convert (uns, and1);
6374 return fold (build2 (BIT_AND_EXPR, type,
6375 fold_convert (type, and0),
6376 fold_convert (type, and1)));
6379 /* Convert (T1)((T2)X op Y) into (T1)X op Y, for pointer types T1 and
6380 T2 being pointers to types of the same size. */
6381 if (POINTER_TYPE_P (TREE_TYPE (t))
6382 && TREE_CODE_CLASS (TREE_CODE (arg0)) == '2'
6383 && TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
6384 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 0))))
6386 tree arg00 = TREE_OPERAND (arg0, 0);
6387 tree t0 = TREE_TYPE (t);
6388 tree t1 = TREE_TYPE (arg00);
6389 tree tt0 = TREE_TYPE (t0);
6390 tree tt1 = TREE_TYPE (t1);
6391 tree s0 = TYPE_SIZE (tt0);
6392 tree s1 = TYPE_SIZE (tt1);
6394 if (s0 && s1 && operand_equal_p (s0, s1, OEP_ONLY_CONST))
6395 return build2 (TREE_CODE (arg0), t0, fold_convert (t0, arg00),
6396 TREE_OPERAND (arg0, 1));
6399 tem = fold_convert_const (code, type, arg0);
6400 return tem ? tem : t;
6402 case VIEW_CONVERT_EXPR:
6403 if (TREE_CODE (TREE_OPERAND (t, 0)) == VIEW_CONVERT_EXPR)
6404 return build1 (VIEW_CONVERT_EXPR, type,
6405 TREE_OPERAND (TREE_OPERAND (t, 0), 0));
6409 if (TREE_CODE (arg0) == CONSTRUCTOR
6410 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
6412 tree m = purpose_member (arg1, CONSTRUCTOR_ELTS (arg0));
6414 return TREE_VALUE (m);
6419 if (TREE_CONSTANT (t) != wins)
6421 tem = copy_node (t);
6422 TREE_CONSTANT (tem) = wins;
6423 TREE_INVARIANT (tem) = wins;
6429 if (negate_expr_p (arg0))
6430 return fold_convert (type, negate_expr (arg0));
6434 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
6435 return fold_abs_const (arg0, type);
6436 else if (TREE_CODE (arg0) == NEGATE_EXPR)
6437 return fold (build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0)));
6438 /* Convert fabs((double)float) into (double)fabsf(float). */
6439 else if (TREE_CODE (arg0) == NOP_EXPR
6440 && TREE_CODE (type) == REAL_TYPE)
6442 tree targ0 = strip_float_extensions (arg0);
6444 return fold_convert (type, fold (build1 (ABS_EXPR,
6448 else if (tree_expr_nonnegative_p (arg0))
6453 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
6454 return fold_convert (type, arg0);
6455 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
6456 return build2 (COMPLEX_EXPR, type,
6457 TREE_OPERAND (arg0, 0),
6458 negate_expr (TREE_OPERAND (arg0, 1)));
6459 else if (TREE_CODE (arg0) == COMPLEX_CST)
6460 return build_complex (type, TREE_REALPART (arg0),
6461 negate_expr (TREE_IMAGPART (arg0)));
6462 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
6463 return fold (build2 (TREE_CODE (arg0), type,
6464 fold (build1 (CONJ_EXPR, type,
6465 TREE_OPERAND (arg0, 0))),
6466 fold (build1 (CONJ_EXPR, type,
6467 TREE_OPERAND (arg0, 1)))));
6468 else if (TREE_CODE (arg0) == CONJ_EXPR)
6469 return TREE_OPERAND (arg0, 0);
6473 if (TREE_CODE (arg0) == INTEGER_CST)
6474 return fold_not_const (arg0, type);
6475 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
6476 return TREE_OPERAND (arg0, 0);
6480 /* A + (-B) -> A - B */
6481 if (TREE_CODE (arg1) == NEGATE_EXPR)
6482 return fold (build2 (MINUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
6483 /* (-A) + B -> B - A */
6484 if (TREE_CODE (arg0) == NEGATE_EXPR
6485 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
6486 return fold (build2 (MINUS_EXPR, type, arg1, TREE_OPERAND (arg0, 0)));
6487 if (! FLOAT_TYPE_P (type))
6489 if (integer_zerop (arg1))
6490 return non_lvalue (fold_convert (type, arg0));
6492 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
6493 with a constant, and the two constants have no bits in common,
6494 we should treat this as a BIT_IOR_EXPR since this may produce more
6496 if (TREE_CODE (arg0) == BIT_AND_EXPR
6497 && TREE_CODE (arg1) == BIT_AND_EXPR
6498 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
6499 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
6500 && integer_zerop (const_binop (BIT_AND_EXPR,
6501 TREE_OPERAND (arg0, 1),
6502 TREE_OPERAND (arg1, 1), 0)))
6504 code = BIT_IOR_EXPR;
6508 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
6509 (plus (plus (mult) (mult)) (foo)) so that we can
6510 take advantage of the factoring cases below. */
6511 if ((TREE_CODE (arg0) == PLUS_EXPR
6512 && TREE_CODE (arg1) == MULT_EXPR)
6513 || (TREE_CODE (arg1) == PLUS_EXPR
6514 && TREE_CODE (arg0) == MULT_EXPR))
6516 tree parg0, parg1, parg, marg;
6518 if (TREE_CODE (arg0) == PLUS_EXPR)
6519 parg = arg0, marg = arg1;
6521 parg = arg1, marg = arg0;
6522 parg0 = TREE_OPERAND (parg, 0);
6523 parg1 = TREE_OPERAND (parg, 1);
6527 if (TREE_CODE (parg0) == MULT_EXPR
6528 && TREE_CODE (parg1) != MULT_EXPR)
6529 return fold (build2 (PLUS_EXPR, type,
6530 fold (build2 (PLUS_EXPR, type,
6531 fold_convert (type, parg0),
6532 fold_convert (type, marg))),
6533 fold_convert (type, parg1)));
6534 if (TREE_CODE (parg0) != MULT_EXPR
6535 && TREE_CODE (parg1) == MULT_EXPR)
6536 return fold (build2 (PLUS_EXPR, type,
6537 fold (build2 (PLUS_EXPR, type,
6538 fold_convert (type, parg1),
6539 fold_convert (type, marg))),
6540 fold_convert (type, parg0)));
6543 if (TREE_CODE (arg0) == MULT_EXPR && TREE_CODE (arg1) == MULT_EXPR)
6545 tree arg00, arg01, arg10, arg11;
6546 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
6548 /* (A * C) + (B * C) -> (A+B) * C.
6549 We are most concerned about the case where C is a constant,
6550 but other combinations show up during loop reduction. Since
6551 it is not difficult, try all four possibilities. */
6553 arg00 = TREE_OPERAND (arg0, 0);
6554 arg01 = TREE_OPERAND (arg0, 1);
6555 arg10 = TREE_OPERAND (arg1, 0);
6556 arg11 = TREE_OPERAND (arg1, 1);
6559 if (operand_equal_p (arg01, arg11, 0))
6560 same = arg01, alt0 = arg00, alt1 = arg10;
6561 else if (operand_equal_p (arg00, arg10, 0))
6562 same = arg00, alt0 = arg01, alt1 = arg11;
6563 else if (operand_equal_p (arg00, arg11, 0))
6564 same = arg00, alt0 = arg01, alt1 = arg10;
6565 else if (operand_equal_p (arg01, arg10, 0))
6566 same = arg01, alt0 = arg00, alt1 = arg11;
6568 /* No identical multiplicands; see if we can find a common
6569 power-of-two factor in non-power-of-two multiplies. This
6570 can help in multi-dimensional array access. */
6571 else if (TREE_CODE (arg01) == INTEGER_CST
6572 && TREE_CODE (arg11) == INTEGER_CST
6573 && TREE_INT_CST_HIGH (arg01) == 0
6574 && TREE_INT_CST_HIGH (arg11) == 0)
6576 HOST_WIDE_INT int01, int11, tmp;
6577 int01 = TREE_INT_CST_LOW (arg01);
6578 int11 = TREE_INT_CST_LOW (arg11);
6580 /* Move min of absolute values to int11. */
6581 if ((int01 >= 0 ? int01 : -int01)
6582 < (int11 >= 0 ? int11 : -int11))
6584 tmp = int01, int01 = int11, int11 = tmp;
6585 alt0 = arg00, arg00 = arg10, arg10 = alt0;
6586 alt0 = arg01, arg01 = arg11, arg11 = alt0;
6589 if (exact_log2 (int11) > 0 && int01 % int11 == 0)
6591 alt0 = fold (build2 (MULT_EXPR, type, arg00,
6592 build_int_2 (int01 / int11, 0)));
6599 return fold (build2 (MULT_EXPR, type,
6600 fold (build2 (PLUS_EXPR, type,
6607 /* See if ARG1 is zero and X + ARG1 reduces to X. */
6608 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
6609 return non_lvalue (fold_convert (type, arg0));
6611 /* Likewise if the operands are reversed. */
6612 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
6613 return non_lvalue (fold_convert (type, arg1));
6615 /* Convert X + -C into X - C. */
6616 if (TREE_CODE (arg1) == REAL_CST
6617 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
6619 tem = fold_negate_const (arg1, type);
6620 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
6621 return fold (build2 (MINUS_EXPR, type,
6622 fold_convert (type, arg0),
6623 fold_convert (type, tem)));
6626 /* Convert x+x into x*2.0. */
6627 if (operand_equal_p (arg0, arg1, 0)
6628 && SCALAR_FLOAT_TYPE_P (type))
6629 return fold (build2 (MULT_EXPR, type, arg0,
6630 build_real (type, dconst2)));
6632 /* Convert x*c+x into x*(c+1). */
6633 if (flag_unsafe_math_optimizations
6634 && TREE_CODE (arg0) == MULT_EXPR
6635 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
6636 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg0, 1))
6637 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
6641 c = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
6642 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
6643 return fold (build2 (MULT_EXPR, type, arg1,
6644 build_real (type, c)));
6647 /* Convert x+x*c into x*(c+1). */
6648 if (flag_unsafe_math_optimizations
6649 && TREE_CODE (arg1) == MULT_EXPR
6650 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST
6651 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg1, 1))
6652 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
6656 c = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
6657 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
6658 return fold (build2 (MULT_EXPR, type, arg0,
6659 build_real (type, c)));
6662 /* Convert x*c1+x*c2 into x*(c1+c2). */
6663 if (flag_unsafe_math_optimizations
6664 && TREE_CODE (arg0) == MULT_EXPR
6665 && TREE_CODE (arg1) == MULT_EXPR
6666 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
6667 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg0, 1))
6668 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST
6669 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg1, 1))
6670 && operand_equal_p (TREE_OPERAND (arg0, 0),
6671 TREE_OPERAND (arg1, 0), 0))
6673 REAL_VALUE_TYPE c1, c2;
6675 c1 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
6676 c2 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
6677 real_arithmetic (&c1, PLUS_EXPR, &c1, &c2);
6678 return fold (build2 (MULT_EXPR, type,
6679 TREE_OPERAND (arg0, 0),
6680 build_real (type, c1)));
6682 /* Convert a + (b*c + d*e) into (a + b*c) + d*e */
6683 if (flag_unsafe_math_optimizations
6684 && TREE_CODE (arg1) == PLUS_EXPR
6685 && TREE_CODE (arg0) != MULT_EXPR)
6687 tree tree10 = TREE_OPERAND (arg1, 0);
6688 tree tree11 = TREE_OPERAND (arg1, 1);
6689 if (TREE_CODE (tree11) == MULT_EXPR
6690 && TREE_CODE (tree10) == MULT_EXPR)
6693 tree0 = fold (build2 (PLUS_EXPR, type, arg0, tree10));
6694 return fold (build2 (PLUS_EXPR, type, tree0, tree11));
6697 /* Convert (b*c + d*e) + a into b*c + (d*e +a) */
6698 if (flag_unsafe_math_optimizations
6699 && TREE_CODE (arg0) == PLUS_EXPR
6700 && TREE_CODE (arg1) != MULT_EXPR)
6702 tree tree00 = TREE_OPERAND (arg0, 0);
6703 tree tree01 = TREE_OPERAND (arg0, 1);
6704 if (TREE_CODE (tree01) == MULT_EXPR
6705 && TREE_CODE (tree00) == MULT_EXPR)
6708 tree0 = fold (build2 (PLUS_EXPR, type, tree01, arg1));
6709 return fold (build2 (PLUS_EXPR, type, tree00, tree0));
6715 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
6716 is a rotate of A by C1 bits. */
6717 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
6718 is a rotate of A by B bits. */
6720 enum tree_code code0, code1;
6721 code0 = TREE_CODE (arg0);
6722 code1 = TREE_CODE (arg1);
6723 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
6724 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
6725 && operand_equal_p (TREE_OPERAND (arg0, 0),
6726 TREE_OPERAND (arg1, 0), 0)
6727 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
6729 tree tree01, tree11;
6730 enum tree_code code01, code11;
6732 tree01 = TREE_OPERAND (arg0, 1);
6733 tree11 = TREE_OPERAND (arg1, 1);
6734 STRIP_NOPS (tree01);
6735 STRIP_NOPS (tree11);
6736 code01 = TREE_CODE (tree01);
6737 code11 = TREE_CODE (tree11);
6738 if (code01 == INTEGER_CST
6739 && code11 == INTEGER_CST
6740 && TREE_INT_CST_HIGH (tree01) == 0
6741 && TREE_INT_CST_HIGH (tree11) == 0
6742 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
6743 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
6744 return build2 (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
6745 code0 == LSHIFT_EXPR ? tree01 : tree11);
6746 else if (code11 == MINUS_EXPR)
6748 tree tree110, tree111;
6749 tree110 = TREE_OPERAND (tree11, 0);
6750 tree111 = TREE_OPERAND (tree11, 1);
6751 STRIP_NOPS (tree110);
6752 STRIP_NOPS (tree111);
6753 if (TREE_CODE (tree110) == INTEGER_CST
6754 && 0 == compare_tree_int (tree110,
6756 (TREE_TYPE (TREE_OPERAND
6758 && operand_equal_p (tree01, tree111, 0))
6759 return build2 ((code0 == LSHIFT_EXPR
6762 type, TREE_OPERAND (arg0, 0), tree01);
6764 else if (code01 == MINUS_EXPR)
6766 tree tree010, tree011;
6767 tree010 = TREE_OPERAND (tree01, 0);
6768 tree011 = TREE_OPERAND (tree01, 1);
6769 STRIP_NOPS (tree010);
6770 STRIP_NOPS (tree011);
6771 if (TREE_CODE (tree010) == INTEGER_CST
6772 && 0 == compare_tree_int (tree010,
6774 (TREE_TYPE (TREE_OPERAND
6776 && operand_equal_p (tree11, tree011, 0))
6777 return build2 ((code0 != LSHIFT_EXPR
6780 type, TREE_OPERAND (arg0, 0), tree11);
6786 /* In most languages, can't associate operations on floats through
6787 parentheses. Rather than remember where the parentheses were, we
6788 don't associate floats at all, unless the user has specified
6789 -funsafe-math-optimizations. */
6792 && (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
6794 tree var0, con0, lit0, minus_lit0;
6795 tree var1, con1, lit1, minus_lit1;
6797 /* Split both trees into variables, constants, and literals. Then
6798 associate each group together, the constants with literals,
6799 then the result with variables. This increases the chances of
6800 literals being recombined later and of generating relocatable
6801 expressions for the sum of a constant and literal. */
6802 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
6803 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
6804 code == MINUS_EXPR);
6806 /* Only do something if we found more than two objects. Otherwise,
6807 nothing has changed and we risk infinite recursion. */
6808 if (2 < ((var0 != 0) + (var1 != 0)
6809 + (con0 != 0) + (con1 != 0)
6810 + (lit0 != 0) + (lit1 != 0)
6811 + (minus_lit0 != 0) + (minus_lit1 != 0)))
6813 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
6814 if (code == MINUS_EXPR)
6817 var0 = associate_trees (var0, var1, code, type);
6818 con0 = associate_trees (con0, con1, code, type);
6819 lit0 = associate_trees (lit0, lit1, code, type);
6820 minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type);
6822 /* Preserve the MINUS_EXPR if the negative part of the literal is
6823 greater than the positive part. Otherwise, the multiplicative
6824 folding code (i.e extract_muldiv) may be fooled in case
6825 unsigned constants are subtracted, like in the following
6826 example: ((X*2 + 4) - 8U)/2. */
6827 if (minus_lit0 && lit0)
6829 if (TREE_CODE (lit0) == INTEGER_CST
6830 && TREE_CODE (minus_lit0) == INTEGER_CST
6831 && tree_int_cst_lt (lit0, minus_lit0))
6833 minus_lit0 = associate_trees (minus_lit0, lit0,
6839 lit0 = associate_trees (lit0, minus_lit0,
6847 return fold_convert (type,
6848 associate_trees (var0, minus_lit0,
6852 con0 = associate_trees (con0, minus_lit0,
6854 return fold_convert (type,
6855 associate_trees (var0, con0,
6860 con0 = associate_trees (con0, lit0, code, type);
6861 return fold_convert (type, associate_trees (var0, con0,
6868 t1 = const_binop (code, arg0, arg1, 0);
6869 if (t1 != NULL_TREE)
6871 /* The return value should always have
6872 the same type as the original expression. */
6873 if (TREE_TYPE (t1) != type)
6874 t1 = fold_convert (type, t1);
6881 /* A - (-B) -> A + B */
6882 if (TREE_CODE (arg1) == NEGATE_EXPR)
6883 return fold (build2 (PLUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
6884 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
6885 if (TREE_CODE (arg0) == NEGATE_EXPR
6886 && (FLOAT_TYPE_P (type)
6887 || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv))
6888 && negate_expr_p (arg1)
6889 && reorder_operands_p (arg0, arg1))
6890 return fold (build2 (MINUS_EXPR, type, negate_expr (arg1),
6891 TREE_OPERAND (arg0, 0)));
6893 if (! FLOAT_TYPE_P (type))
6895 if (! wins && integer_zerop (arg0))
6896 return negate_expr (fold_convert (type, arg1));
6897 if (integer_zerop (arg1))
6898 return non_lvalue (fold_convert (type, arg0));
6900 /* Fold A - (A & B) into ~B & A. */
6901 if (!TREE_SIDE_EFFECTS (arg0)
6902 && TREE_CODE (arg1) == BIT_AND_EXPR)
6904 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
6905 return fold (build2 (BIT_AND_EXPR, type,
6906 fold (build1 (BIT_NOT_EXPR, type,
6907 TREE_OPERAND (arg1, 0))),
6909 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
6910 return fold (build2 (BIT_AND_EXPR, type,
6911 fold (build1 (BIT_NOT_EXPR, type,
6912 TREE_OPERAND (arg1, 1))),
6916 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
6917 any power of 2 minus 1. */
6918 if (TREE_CODE (arg0) == BIT_AND_EXPR
6919 && TREE_CODE (arg1) == BIT_AND_EXPR
6920 && operand_equal_p (TREE_OPERAND (arg0, 0),
6921 TREE_OPERAND (arg1, 0), 0))
6923 tree mask0 = TREE_OPERAND (arg0, 1);
6924 tree mask1 = TREE_OPERAND (arg1, 1);
6925 tree tem = fold (build1 (BIT_NOT_EXPR, type, mask0));
6927 if (operand_equal_p (tem, mask1, 0))
6929 tem = fold (build2 (BIT_XOR_EXPR, type,
6930 TREE_OPERAND (arg0, 0), mask1));
6931 return fold (build2 (MINUS_EXPR, type, tem, mask1));
6936 /* See if ARG1 is zero and X - ARG1 reduces to X. */
6937 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
6938 return non_lvalue (fold_convert (type, arg0));
6940 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
6941 ARG0 is zero and X + ARG0 reduces to X, since that would mean
6942 (-ARG1 + ARG0) reduces to -ARG1. */
6943 else if (!wins && fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
6944 return negate_expr (fold_convert (type, arg1));
6946 /* Fold &x - &x. This can happen from &x.foo - &x.
6947 This is unsafe for certain floats even in non-IEEE formats.
6948 In IEEE, it is unsafe because it does wrong for NaNs.
6949 Also note that operand_equal_p is always false if an operand
6952 if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
6953 && operand_equal_p (arg0, arg1, 0))
6954 return fold_convert (type, integer_zero_node);
6956 /* A - B -> A + (-B) if B is easily negatable. */
6957 if (!wins && negate_expr_p (arg1)
6958 && ((FLOAT_TYPE_P (type)
6959 /* Avoid this transformation if B is a positive REAL_CST. */
6960 && (TREE_CODE (arg1) != REAL_CST
6961 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
6962 || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv)))
6963 return fold (build2 (PLUS_EXPR, type, arg0, negate_expr (arg1)));
6965 if (TREE_CODE (arg0) == MULT_EXPR
6966 && TREE_CODE (arg1) == MULT_EXPR
6967 && (INTEGRAL_TYPE_P (type) || flag_unsafe_math_optimizations))
6969 /* (A * C) - (B * C) -> (A-B) * C. */
6970 if (operand_equal_p (TREE_OPERAND (arg0, 1),
6971 TREE_OPERAND (arg1, 1), 0))
6972 return fold (build2 (MULT_EXPR, type,
6973 fold (build2 (MINUS_EXPR, type,
6974 TREE_OPERAND (arg0, 0),
6975 TREE_OPERAND (arg1, 0))),
6976 TREE_OPERAND (arg0, 1)));
6977 /* (A * C1) - (A * C2) -> A * (C1-C2). */
6978 if (operand_equal_p (TREE_OPERAND (arg0, 0),
6979 TREE_OPERAND (arg1, 0), 0))
6980 return fold (build2 (MULT_EXPR, type,
6981 TREE_OPERAND (arg0, 0),
6982 fold (build2 (MINUS_EXPR, type,
6983 TREE_OPERAND (arg0, 1),
6984 TREE_OPERAND (arg1, 1)))));
6990 /* (-A) * (-B) -> A * B */
6991 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
6992 return fold (build2 (MULT_EXPR, type,
6993 TREE_OPERAND (arg0, 0),
6994 negate_expr (arg1)));
6995 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
6996 return fold (build2 (MULT_EXPR, type,
6998 TREE_OPERAND (arg1, 0)));
7000 if (! FLOAT_TYPE_P (type))
7002 if (integer_zerop (arg1))
7003 return omit_one_operand (type, arg1, arg0);
7004 if (integer_onep (arg1))
7005 return non_lvalue (fold_convert (type, arg0));
7007 /* (a * (1 << b)) is (a << b) */
7008 if (TREE_CODE (arg1) == LSHIFT_EXPR
7009 && integer_onep (TREE_OPERAND (arg1, 0)))
7010 return fold (build2 (LSHIFT_EXPR, type, arg0,
7011 TREE_OPERAND (arg1, 1)));
7012 if (TREE_CODE (arg0) == LSHIFT_EXPR
7013 && integer_onep (TREE_OPERAND (arg0, 0)))
7014 return fold (build2 (LSHIFT_EXPR, type, arg1,
7015 TREE_OPERAND (arg0, 1)));
7017 if (TREE_CODE (arg1) == INTEGER_CST
7018 && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0),
7019 fold_convert (type, arg1),
7021 return fold_convert (type, tem);
7026 /* Maybe fold x * 0 to 0. The expressions aren't the same
7027 when x is NaN, since x * 0 is also NaN. Nor are they the
7028 same in modes with signed zeros, since multiplying a
7029 negative value by 0 gives -0, not +0. */
7030 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
7031 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
7032 && real_zerop (arg1))
7033 return omit_one_operand (type, arg1, arg0);
7034 /* In IEEE floating point, x*1 is not equivalent to x for snans. */
7035 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
7036 && real_onep (arg1))
7037 return non_lvalue (fold_convert (type, arg0));
7039 /* Transform x * -1.0 into -x. */
7040 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
7041 && real_minus_onep (arg1))
7042 return fold_convert (type, negate_expr (arg0));
7044 /* Convert (C1/X)*C2 into (C1*C2)/X. */
7045 if (flag_unsafe_math_optimizations
7046 && TREE_CODE (arg0) == RDIV_EXPR
7047 && TREE_CODE (arg1) == REAL_CST
7048 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
7050 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
7053 return fold (build2 (RDIV_EXPR, type, tem,
7054 TREE_OPERAND (arg0, 1)));
7057 if (flag_unsafe_math_optimizations)
7059 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
7060 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
7062 /* Optimizations of root(...)*root(...). */
7063 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
7065 tree rootfn, arg, arglist;
7066 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
7067 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
7069 /* Optimize sqrt(x)*sqrt(x) as x. */
7070 if (BUILTIN_SQRT_P (fcode0)
7071 && operand_equal_p (arg00, arg10, 0)
7072 && ! HONOR_SNANS (TYPE_MODE (type)))
7075 /* Optimize root(x)*root(y) as root(x*y). */
7076 rootfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7077 arg = fold (build2 (MULT_EXPR, type, arg00, arg10));
7078 arglist = build_tree_list (NULL_TREE, arg);
7079 return build_function_call_expr (rootfn, arglist);
7082 /* Optimize expN(x)*expN(y) as expN(x+y). */
7083 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
7085 tree expfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7086 tree arg = build2 (PLUS_EXPR, type,
7087 TREE_VALUE (TREE_OPERAND (arg0, 1)),
7088 TREE_VALUE (TREE_OPERAND (arg1, 1)));
7089 tree arglist = build_tree_list (NULL_TREE, fold (arg));
7090 return build_function_call_expr (expfn, arglist);
7093 /* Optimizations of pow(...)*pow(...). */
7094 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
7095 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
7096 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
7098 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
7099 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
7101 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
7102 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
7105 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
7106 if (operand_equal_p (arg01, arg11, 0))
7108 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7109 tree arg = build2 (MULT_EXPR, type, arg00, arg10);
7110 tree arglist = tree_cons (NULL_TREE, fold (arg),
7111 build_tree_list (NULL_TREE,
7113 return build_function_call_expr (powfn, arglist);
7116 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
7117 if (operand_equal_p (arg00, arg10, 0))
7119 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7120 tree arg = fold (build2 (PLUS_EXPR, type, arg01, arg11));
7121 tree arglist = tree_cons (NULL_TREE, arg00,
7122 build_tree_list (NULL_TREE,
7124 return build_function_call_expr (powfn, arglist);
7128 /* Optimize tan(x)*cos(x) as sin(x). */
7129 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
7130 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
7131 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
7132 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
7133 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
7134 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
7135 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
7136 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
7138 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
7140 if (sinfn != NULL_TREE)
7141 return build_function_call_expr (sinfn,
7142 TREE_OPERAND (arg0, 1));
7145 /* Optimize x*pow(x,c) as pow(x,c+1). */
7146 if (fcode1 == BUILT_IN_POW
7147 || fcode1 == BUILT_IN_POWF
7148 || fcode1 == BUILT_IN_POWL)
7150 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
7151 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
7153 if (TREE_CODE (arg11) == REAL_CST
7154 && ! TREE_CONSTANT_OVERFLOW (arg11)
7155 && operand_equal_p (arg0, arg10, 0))
7157 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
7161 c = TREE_REAL_CST (arg11);
7162 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
7163 arg = build_real (type, c);
7164 arglist = build_tree_list (NULL_TREE, arg);
7165 arglist = tree_cons (NULL_TREE, arg0, arglist);
7166 return build_function_call_expr (powfn, arglist);
7170 /* Optimize pow(x,c)*x as pow(x,c+1). */
7171 if (fcode0 == BUILT_IN_POW
7172 || fcode0 == BUILT_IN_POWF
7173 || fcode0 == BUILT_IN_POWL)
7175 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
7176 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
7178 if (TREE_CODE (arg01) == REAL_CST
7179 && ! TREE_CONSTANT_OVERFLOW (arg01)
7180 && operand_equal_p (arg1, arg00, 0))
7182 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7186 c = TREE_REAL_CST (arg01);
7187 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
7188 arg = build_real (type, c);
7189 arglist = build_tree_list (NULL_TREE, arg);
7190 arglist = tree_cons (NULL_TREE, arg1, arglist);
7191 return build_function_call_expr (powfn, arglist);
7195 /* Optimize x*x as pow(x,2.0), which is expanded as x*x. */
7197 && operand_equal_p (arg0, arg1, 0))
7199 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7203 tree arg = build_real (type, dconst2);
7204 tree arglist = build_tree_list (NULL_TREE, arg);
7205 arglist = tree_cons (NULL_TREE, arg0, arglist);
7206 return build_function_call_expr (powfn, arglist);
7215 if (integer_all_onesp (arg1))
7216 return omit_one_operand (type, arg1, arg0);
7217 if (integer_zerop (arg1))
7218 return non_lvalue (fold_convert (type, arg0));
7219 if (operand_equal_p (arg0, arg1, 0))
7220 return non_lvalue (fold_convert (type, arg0));
7223 if (TREE_CODE (arg0) == BIT_NOT_EXPR
7224 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
7226 t1 = build_int_2 (-1, -1);
7227 TREE_TYPE (t1) = type;
7228 force_fit_type (t1, 0);
7229 return omit_one_operand (type, t1, arg1);
7233 if (TREE_CODE (arg1) == BIT_NOT_EXPR
7234 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
7236 t1 = build_int_2 (-1, -1);
7237 TREE_TYPE (t1) = type;
7238 force_fit_type (t1, 0);
7239 return omit_one_operand (type, t1, arg0);
7242 t1 = distribute_bit_expr (code, type, arg0, arg1);
7243 if (t1 != NULL_TREE)
7246 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
7248 This results in more efficient code for machines without a NAND
7249 instruction. Combine will canonicalize to the first form
7250 which will allow use of NAND instructions provided by the
7251 backend if they exist. */
7252 if (TREE_CODE (arg0) == BIT_NOT_EXPR
7253 && TREE_CODE (arg1) == BIT_NOT_EXPR)
7255 return fold (build1 (BIT_NOT_EXPR, type,
7256 build2 (BIT_AND_EXPR, type,
7257 TREE_OPERAND (arg0, 0),
7258 TREE_OPERAND (arg1, 0))));
7261 /* See if this can be simplified into a rotate first. If that
7262 is unsuccessful continue in the association code. */
7266 if (integer_zerop (arg1))
7267 return non_lvalue (fold_convert (type, arg0));
7268 if (integer_all_onesp (arg1))
7269 return fold (build1 (BIT_NOT_EXPR, type, arg0));
7270 if (operand_equal_p (arg0, arg1, 0))
7271 return omit_one_operand (type, integer_zero_node, arg0);
7274 if (TREE_CODE (arg0) == BIT_NOT_EXPR
7275 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
7277 t1 = build_int_2 (-1, -1);
7278 TREE_TYPE (t1) = type;
7279 force_fit_type (t1, 0);
7280 return omit_one_operand (type, t1, arg1);
7284 if (TREE_CODE (arg1) == BIT_NOT_EXPR
7285 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
7287 t1 = build_int_2 (-1, -1);
7288 TREE_TYPE (t1) = type;
7289 force_fit_type (t1, 0);
7290 return omit_one_operand (type, t1, arg0);
7293 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
7294 with a constant, and the two constants have no bits in common,
7295 we should treat this as a BIT_IOR_EXPR since this may produce more
7297 if (TREE_CODE (arg0) == BIT_AND_EXPR
7298 && TREE_CODE (arg1) == BIT_AND_EXPR
7299 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7300 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
7301 && integer_zerop (const_binop (BIT_AND_EXPR,
7302 TREE_OPERAND (arg0, 1),
7303 TREE_OPERAND (arg1, 1), 0)))
7305 code = BIT_IOR_EXPR;
7309 /* See if this can be simplified into a rotate first. If that
7310 is unsuccessful continue in the association code. */
7314 if (integer_all_onesp (arg1))
7315 return non_lvalue (fold_convert (type, arg0));
7316 if (integer_zerop (arg1))
7317 return omit_one_operand (type, arg1, arg0);
7318 if (operand_equal_p (arg0, arg1, 0))
7319 return non_lvalue (fold_convert (type, arg0));
7321 /* ~X & X is always zero. */
7322 if (TREE_CODE (arg0) == BIT_NOT_EXPR
7323 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
7324 return omit_one_operand (type, integer_zero_node, arg1);
7326 /* X & ~X is always zero. */
7327 if (TREE_CODE (arg1) == BIT_NOT_EXPR
7328 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
7329 return omit_one_operand (type, integer_zero_node, arg0);
7331 t1 = distribute_bit_expr (code, type, arg0, arg1);
7332 if (t1 != NULL_TREE)
7334 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
7335 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
7336 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
7339 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
7341 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
7342 && (~TREE_INT_CST_LOW (arg1)
7343 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
7344 return fold_convert (type, TREE_OPERAND (arg0, 0));
7347 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
7349 This results in more efficient code for machines without a NOR
7350 instruction. Combine will canonicalize to the first form
7351 which will allow use of NOR instructions provided by the
7352 backend if they exist. */
7353 if (TREE_CODE (arg0) == BIT_NOT_EXPR
7354 && TREE_CODE (arg1) == BIT_NOT_EXPR)
7356 return fold (build1 (BIT_NOT_EXPR, type,
7357 build2 (BIT_IOR_EXPR, type,
7358 TREE_OPERAND (arg0, 0),
7359 TREE_OPERAND (arg1, 0))));
7365 /* Don't touch a floating-point divide by zero unless the mode
7366 of the constant can represent infinity. */
7367 if (TREE_CODE (arg1) == REAL_CST
7368 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
7369 && real_zerop (arg1))
7372 /* (-A) / (-B) -> A / B */
7373 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
7374 return fold (build2 (RDIV_EXPR, type,
7375 TREE_OPERAND (arg0, 0),
7376 negate_expr (arg1)));
7377 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
7378 return fold (build2 (RDIV_EXPR, type,
7380 TREE_OPERAND (arg1, 0)));
7382 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
7383 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
7384 && real_onep (arg1))
7385 return non_lvalue (fold_convert (type, arg0));
7387 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
7388 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
7389 && real_minus_onep (arg1))
7390 return non_lvalue (fold_convert (type, negate_expr (arg0)));
7392 /* If ARG1 is a constant, we can convert this to a multiply by the
7393 reciprocal. This does not have the same rounding properties,
7394 so only do this if -funsafe-math-optimizations. We can actually
7395 always safely do it if ARG1 is a power of two, but it's hard to
7396 tell if it is or not in a portable manner. */
7397 if (TREE_CODE (arg1) == REAL_CST)
7399 if (flag_unsafe_math_optimizations
7400 && 0 != (tem = const_binop (code, build_real (type, dconst1),
7402 return fold (build2 (MULT_EXPR, type, arg0, tem));
7403 /* Find the reciprocal if optimizing and the result is exact. */
7407 r = TREE_REAL_CST (arg1);
7408 if (exact_real_inverse (TYPE_MODE(TREE_TYPE(arg0)), &r))
7410 tem = build_real (type, r);
7411 return fold (build2 (MULT_EXPR, type, arg0, tem));
7415 /* Convert A/B/C to A/(B*C). */
7416 if (flag_unsafe_math_optimizations
7417 && TREE_CODE (arg0) == RDIV_EXPR)
7418 return fold (build2 (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
7419 fold (build2 (MULT_EXPR, type,
7420 TREE_OPERAND (arg0, 1), arg1))));
7422 /* Convert A/(B/C) to (A/B)*C. */
7423 if (flag_unsafe_math_optimizations
7424 && TREE_CODE (arg1) == RDIV_EXPR)
7425 return fold (build2 (MULT_EXPR, type,
7426 fold (build2 (RDIV_EXPR, type, arg0,
7427 TREE_OPERAND (arg1, 0))),
7428 TREE_OPERAND (arg1, 1)));
7430 /* Convert C1/(X*C2) into (C1/C2)/X. */
7431 if (flag_unsafe_math_optimizations
7432 && TREE_CODE (arg1) == MULT_EXPR
7433 && TREE_CODE (arg0) == REAL_CST
7434 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
7436 tree tem = const_binop (RDIV_EXPR, arg0,
7437 TREE_OPERAND (arg1, 1), 0);
7439 return fold (build2 (RDIV_EXPR, type, tem,
7440 TREE_OPERAND (arg1, 0)));
7443 if (flag_unsafe_math_optimizations)
7445 enum built_in_function fcode = builtin_mathfn_code (arg1);
7446 /* Optimize x/expN(y) into x*expN(-y). */
7447 if (BUILTIN_EXPONENT_P (fcode))
7449 tree expfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
7450 tree arg = negate_expr (TREE_VALUE (TREE_OPERAND (arg1, 1)));
7451 tree arglist = build_tree_list (NULL_TREE,
7452 fold_convert (type, arg));
7453 arg1 = build_function_call_expr (expfn, arglist);
7454 return fold (build2 (MULT_EXPR, type, arg0, arg1));
7457 /* Optimize x/pow(y,z) into x*pow(y,-z). */
7458 if (fcode == BUILT_IN_POW
7459 || fcode == BUILT_IN_POWF
7460 || fcode == BUILT_IN_POWL)
7462 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
7463 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
7464 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1, 1)));
7465 tree neg11 = fold_convert (type, negate_expr (arg11));
7466 tree arglist = tree_cons(NULL_TREE, arg10,
7467 build_tree_list (NULL_TREE, neg11));
7468 arg1 = build_function_call_expr (powfn, arglist);
7469 return fold (build2 (MULT_EXPR, type, arg0, arg1));
7473 if (flag_unsafe_math_optimizations)
7475 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
7476 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
7478 /* Optimize sin(x)/cos(x) as tan(x). */
7479 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
7480 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
7481 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
7482 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
7483 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
7485 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
7487 if (tanfn != NULL_TREE)
7488 return build_function_call_expr (tanfn,
7489 TREE_OPERAND (arg0, 1));
7492 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
7493 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
7494 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
7495 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
7496 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
7497 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
7499 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
7501 if (tanfn != NULL_TREE)
7503 tree tmp = TREE_OPERAND (arg0, 1);
7504 tmp = build_function_call_expr (tanfn, tmp);
7505 return fold (build2 (RDIV_EXPR, type,
7506 build_real (type, dconst1), tmp));
7510 /* Optimize pow(x,c)/x as pow(x,c-1). */
7511 if (fcode0 == BUILT_IN_POW
7512 || fcode0 == BUILT_IN_POWF
7513 || fcode0 == BUILT_IN_POWL)
7515 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
7516 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0, 1)));
7517 if (TREE_CODE (arg01) == REAL_CST
7518 && ! TREE_CONSTANT_OVERFLOW (arg01)
7519 && operand_equal_p (arg1, arg00, 0))
7521 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7525 c = TREE_REAL_CST (arg01);
7526 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
7527 arg = build_real (type, c);
7528 arglist = build_tree_list (NULL_TREE, arg);
7529 arglist = tree_cons (NULL_TREE, arg1, arglist);
7530 return build_function_call_expr (powfn, arglist);
7536 case TRUNC_DIV_EXPR:
7537 case ROUND_DIV_EXPR:
7538 case FLOOR_DIV_EXPR:
7540 case EXACT_DIV_EXPR:
7541 if (integer_onep (arg1))
7542 return non_lvalue (fold_convert (type, arg0));
7543 if (integer_zerop (arg1))
7546 if (!TYPE_UNSIGNED (type)
7547 && TREE_CODE (arg1) == INTEGER_CST
7548 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
7549 && TREE_INT_CST_HIGH (arg1) == -1)
7550 return fold_convert (type, negate_expr (arg0));
7552 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
7553 operation, EXACT_DIV_EXPR.
7555 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
7556 At one time others generated faster code, it's not clear if they do
7557 after the last round to changes to the DIV code in expmed.c. */
7558 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
7559 && multiple_of_p (type, arg0, arg1))
7560 return fold (build2 (EXACT_DIV_EXPR, type, arg0, arg1));
7562 if (TREE_CODE (arg1) == INTEGER_CST
7563 && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0), arg1,
7565 return fold_convert (type, tem);
7570 case FLOOR_MOD_EXPR:
7571 case ROUND_MOD_EXPR:
7572 case TRUNC_MOD_EXPR:
7573 if (integer_onep (arg1))
7574 return omit_one_operand (type, integer_zero_node, arg0);
7575 if (integer_zerop (arg1))
7578 /* X % -1 is zero. */
7579 if (!TYPE_UNSIGNED (type)
7580 && TREE_CODE (arg1) == INTEGER_CST
7581 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
7582 && TREE_INT_CST_HIGH (arg1) == -1)
7583 return omit_one_operand (type, integer_zero_node, arg0);
7585 /* Optimize unsigned TRUNC_MOD_EXPR by a power of two into a
7586 BIT_AND_EXPR, i.e. "X % C" into "X & C2". */
7587 if (code == TRUNC_MOD_EXPR
7588 && TYPE_UNSIGNED (type)
7589 && integer_pow2p (arg1))
7591 unsigned HOST_WIDE_INT high, low;
7595 l = tree_log2 (arg1);
7596 if (l >= HOST_BITS_PER_WIDE_INT)
7598 high = ((unsigned HOST_WIDE_INT) 1
7599 << (l - HOST_BITS_PER_WIDE_INT)) - 1;
7605 low = ((unsigned HOST_WIDE_INT) 1 << l) - 1;
7608 mask = build_int_2 (low, high);
7609 TREE_TYPE (mask) = type;
7610 return fold (build2 (BIT_AND_EXPR, type,
7611 fold_convert (type, arg0), mask));
7614 /* X % -C is the same as X % C (for all rounding moduli). */
7615 if (!TYPE_UNSIGNED (type)
7616 && TREE_CODE (arg1) == INTEGER_CST
7617 && TREE_INT_CST_HIGH (arg1) < 0
7619 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
7620 && !sign_bit_p (arg1, arg1))
7621 return fold (build2 (code, type, fold_convert (type, arg0),
7622 fold_convert (type, negate_expr (arg1))));
7624 /* X % -Y is the same as X % Y (for all rounding moduli). */
7625 if (!TYPE_UNSIGNED (type)
7626 && TREE_CODE (arg1) == NEGATE_EXPR
7628 return fold (build2 (code, type, fold_convert (type, arg0),
7629 fold_convert (type, TREE_OPERAND (arg1, 0))));
7631 if (TREE_CODE (arg1) == INTEGER_CST
7632 && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0), arg1,
7634 return fold_convert (type, tem);
7640 if (integer_all_onesp (arg0))
7641 return omit_one_operand (type, arg0, arg1);
7645 /* Optimize -1 >> x for arithmetic right shifts. */
7646 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type))
7647 return omit_one_operand (type, arg0, arg1);
7648 /* ... fall through ... */
7652 if (integer_zerop (arg1))
7653 return non_lvalue (fold_convert (type, arg0));
7654 if (integer_zerop (arg0))
7655 return omit_one_operand (type, arg0, arg1);
7657 /* Since negative shift count is not well-defined,
7658 don't try to compute it in the compiler. */
7659 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
7661 /* Rewrite an LROTATE_EXPR by a constant into an
7662 RROTATE_EXPR by a new constant. */
7663 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
7665 tree tem = build_int_2 (GET_MODE_BITSIZE (TYPE_MODE (type)), 0);
7666 tem = fold_convert (TREE_TYPE (arg1), tem);
7667 tem = const_binop (MINUS_EXPR, tem, arg1, 0);
7668 return fold (build2 (RROTATE_EXPR, type, arg0, tem));
7671 /* If we have a rotate of a bit operation with the rotate count and
7672 the second operand of the bit operation both constant,
7673 permute the two operations. */
7674 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
7675 && (TREE_CODE (arg0) == BIT_AND_EXPR
7676 || TREE_CODE (arg0) == BIT_IOR_EXPR
7677 || TREE_CODE (arg0) == BIT_XOR_EXPR)
7678 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
7679 return fold (build2 (TREE_CODE (arg0), type,
7680 fold (build2 (code, type,
7681 TREE_OPERAND (arg0, 0), arg1)),
7682 fold (build2 (code, type,
7683 TREE_OPERAND (arg0, 1), arg1))));
7685 /* Two consecutive rotates adding up to the width of the mode can
7687 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
7688 && TREE_CODE (arg0) == RROTATE_EXPR
7689 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7690 && TREE_INT_CST_HIGH (arg1) == 0
7691 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
7692 && ((TREE_INT_CST_LOW (arg1)
7693 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
7694 == (unsigned int) GET_MODE_BITSIZE (TYPE_MODE (type))))
7695 return TREE_OPERAND (arg0, 0);
7700 if (operand_equal_p (arg0, arg1, 0))
7701 return omit_one_operand (type, arg0, arg1);
7702 if (INTEGRAL_TYPE_P (type)
7703 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
7704 return omit_one_operand (type, arg1, arg0);
7708 if (operand_equal_p (arg0, arg1, 0))
7709 return omit_one_operand (type, arg0, arg1);
7710 if (INTEGRAL_TYPE_P (type)
7711 && TYPE_MAX_VALUE (type)
7712 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
7713 return omit_one_operand (type, arg1, arg0);
7716 case TRUTH_NOT_EXPR:
7717 /* The argument to invert_truthvalue must have Boolean type. */
7718 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
7719 arg0 = fold_convert (boolean_type_node, arg0);
7721 /* Note that the operand of this must be an int
7722 and its values must be 0 or 1.
7723 ("true" is a fixed value perhaps depending on the language,
7724 but we don't handle values other than 1 correctly yet.) */
7725 tem = invert_truthvalue (arg0);
7726 /* Avoid infinite recursion. */
7727 if (TREE_CODE (tem) == TRUTH_NOT_EXPR)
7729 tem = fold_single_bit_test (code, arg0, arg1, type);
7734 return fold_convert (type, tem);
7736 case TRUTH_ANDIF_EXPR:
7737 /* Note that the operands of this must be ints
7738 and their values must be 0 or 1.
7739 ("true" is a fixed value perhaps depending on the language.) */
7740 /* If first arg is constant zero, return it. */
7741 if (integer_zerop (arg0))
7742 return fold_convert (type, arg0);
7743 case TRUTH_AND_EXPR:
7744 /* If either arg is constant true, drop it. */
7745 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
7746 return non_lvalue (fold_convert (type, arg1));
7747 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
7748 /* Preserve sequence points. */
7749 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
7750 return non_lvalue (fold_convert (type, arg0));
7751 /* If second arg is constant zero, result is zero, but first arg
7752 must be evaluated. */
7753 if (integer_zerop (arg1))
7754 return omit_one_operand (type, arg1, arg0);
7755 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
7756 case will be handled here. */
7757 if (integer_zerop (arg0))
7758 return omit_one_operand (type, arg0, arg1);
7760 /* !X && X is always false. */
7761 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
7762 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
7763 return omit_one_operand (type, integer_zero_node, arg1);
7764 /* X && !X is always false. */
7765 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
7766 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
7767 return omit_one_operand (type, integer_zero_node, arg0);
7770 /* We only do these simplifications if we are optimizing. */
7774 /* Check for things like (A || B) && (A || C). We can convert this
7775 to A || (B && C). Note that either operator can be any of the four
7776 truth and/or operations and the transformation will still be
7777 valid. Also note that we only care about order for the
7778 ANDIF and ORIF operators. If B contains side effects, this
7779 might change the truth-value of A. */
7780 if (TREE_CODE (arg0) == TREE_CODE (arg1)
7781 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
7782 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
7783 || TREE_CODE (arg0) == TRUTH_AND_EXPR
7784 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
7785 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
7787 tree a00 = TREE_OPERAND (arg0, 0);
7788 tree a01 = TREE_OPERAND (arg0, 1);
7789 tree a10 = TREE_OPERAND (arg1, 0);
7790 tree a11 = TREE_OPERAND (arg1, 1);
7791 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
7792 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
7793 && (code == TRUTH_AND_EXPR
7794 || code == TRUTH_OR_EXPR));
7796 if (operand_equal_p (a00, a10, 0))
7797 return fold (build2 (TREE_CODE (arg0), type, a00,
7798 fold (build2 (code, type, a01, a11))));
7799 else if (commutative && operand_equal_p (a00, a11, 0))
7800 return fold (build2 (TREE_CODE (arg0), type, a00,
7801 fold (build2 (code, type, a01, a10))));
7802 else if (commutative && operand_equal_p (a01, a10, 0))
7803 return fold (build2 (TREE_CODE (arg0), type, a01,
7804 fold (build2 (code, type, a00, a11))));
7806 /* This case is tricky because we must either have commutative
7807 operators or else A10 must not have side-effects. */
7809 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
7810 && operand_equal_p (a01, a11, 0))
7811 return fold (build2 (TREE_CODE (arg0), type,
7812 fold (build2 (code, type, a00, a10)),
7816 /* See if we can build a range comparison. */
7817 if (0 != (tem = fold_range_test (t)))
7820 /* Check for the possibility of merging component references. If our
7821 lhs is another similar operation, try to merge its rhs with our
7822 rhs. Then try to merge our lhs and rhs. */
7823 if (TREE_CODE (arg0) == code
7824 && 0 != (tem = fold_truthop (code, type,
7825 TREE_OPERAND (arg0, 1), arg1)))
7826 return fold (build2 (code, type, TREE_OPERAND (arg0, 0), tem));
7828 if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
7833 case TRUTH_ORIF_EXPR:
7834 /* Note that the operands of this must be ints
7835 and their values must be 0 or true.
7836 ("true" is a fixed value perhaps depending on the language.) */
7837 /* If first arg is constant true, return it. */
7838 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
7839 return fold_convert (type, arg0);
7841 /* If either arg is constant zero, drop it. */
7842 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
7843 return non_lvalue (fold_convert (type, arg1));
7844 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
7845 /* Preserve sequence points. */
7846 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
7847 return non_lvalue (fold_convert (type, arg0));
7848 /* If second arg is constant true, result is true, but we must
7849 evaluate first arg. */
7850 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
7851 return omit_one_operand (type, arg1, arg0);
7852 /* Likewise for first arg, but note this only occurs here for
7854 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
7855 return omit_one_operand (type, arg0, arg1);
7857 /* !X || X is always true. */
7858 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
7859 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
7860 return omit_one_operand (type, integer_one_node, arg1);
7861 /* X || !X is always true. */
7862 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
7863 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
7864 return omit_one_operand (type, integer_one_node, arg0);
7868 case TRUTH_XOR_EXPR:
7869 /* If the second arg is constant zero, drop it. */
7870 if (integer_zerop (arg1))
7871 return non_lvalue (fold_convert (type, arg0));
7872 /* If the second arg is constant true, this is a logical inversion. */
7873 if (integer_onep (arg1))
7874 return non_lvalue (fold_convert (type, invert_truthvalue (arg0)));
7875 /* Identical arguments cancel to zero. */
7876 if (operand_equal_p (arg0, arg1, 0))
7877 return omit_one_operand (type, integer_zero_node, arg0);
7879 /* !X ^ X is always true. */
7880 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
7881 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
7882 return omit_one_operand (type, integer_one_node, arg1);
7884 /* X ^ !X is always true. */
7885 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
7886 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
7887 return omit_one_operand (type, integer_one_node, arg0);
7897 /* If one arg is a real or integer constant, put it last. */
7898 if (tree_swap_operands_p (arg0, arg1, true))
7899 return fold (build2 (swap_tree_comparison (code), type, arg1, arg0));
7901 /* If this is an equality comparison of the address of a non-weak
7902 object against zero, then we know the result. */
7903 if ((code == EQ_EXPR || code == NE_EXPR)
7904 && TREE_CODE (arg0) == ADDR_EXPR
7905 && DECL_P (TREE_OPERAND (arg0, 0))
7906 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
7907 && integer_zerop (arg1))
7908 return constant_boolean_node (code != EQ_EXPR, type);
7910 /* If this is an equality comparison of the address of two non-weak,
7911 unaliased symbols neither of which are extern (since we do not
7912 have access to attributes for externs), then we know the result. */
7913 if ((code == EQ_EXPR || code == NE_EXPR)
7914 && TREE_CODE (arg0) == ADDR_EXPR
7915 && DECL_P (TREE_OPERAND (arg0, 0))
7916 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
7917 && ! lookup_attribute ("alias",
7918 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
7919 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
7920 && TREE_CODE (arg1) == ADDR_EXPR
7921 && DECL_P (TREE_OPERAND (arg1, 0))
7922 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
7923 && ! lookup_attribute ("alias",
7924 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
7925 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
7926 return constant_boolean_node (operand_equal_p (arg0, arg1, 0)
7927 ? code == EQ_EXPR : code != EQ_EXPR,
7930 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
7932 tree targ0 = strip_float_extensions (arg0);
7933 tree targ1 = strip_float_extensions (arg1);
7934 tree newtype = TREE_TYPE (targ0);
7936 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
7937 newtype = TREE_TYPE (targ1);
7939 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
7940 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
7941 return fold (build2 (code, type, fold_convert (newtype, targ0),
7942 fold_convert (newtype, targ1)));
7944 /* (-a) CMP (-b) -> b CMP a */
7945 if (TREE_CODE (arg0) == NEGATE_EXPR
7946 && TREE_CODE (arg1) == NEGATE_EXPR)
7947 return fold (build2 (code, type, TREE_OPERAND (arg1, 0),
7948 TREE_OPERAND (arg0, 0)));
7950 if (TREE_CODE (arg1) == REAL_CST)
7952 REAL_VALUE_TYPE cst;
7953 cst = TREE_REAL_CST (arg1);
7955 /* (-a) CMP CST -> a swap(CMP) (-CST) */
7956 if (TREE_CODE (arg0) == NEGATE_EXPR)
7958 fold (build2 (swap_tree_comparison (code), type,
7959 TREE_OPERAND (arg0, 0),
7960 build_real (TREE_TYPE (arg1),
7961 REAL_VALUE_NEGATE (cst))));
7963 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
7964 /* a CMP (-0) -> a CMP 0 */
7965 if (REAL_VALUE_MINUS_ZERO (cst))
7966 return fold (build2 (code, type, arg0,
7967 build_real (TREE_TYPE (arg1), dconst0)));
7969 /* x != NaN is always true, other ops are always false. */
7970 if (REAL_VALUE_ISNAN (cst)
7971 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
7973 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
7974 return omit_one_operand (type, tem, arg0);
7977 /* Fold comparisons against infinity. */
7978 if (REAL_VALUE_ISINF (cst))
7980 tem = fold_inf_compare (code, type, arg0, arg1);
7981 if (tem != NULL_TREE)
7986 /* If this is a comparison of a real constant with a PLUS_EXPR
7987 or a MINUS_EXPR of a real constant, we can convert it into a
7988 comparison with a revised real constant as long as no overflow
7989 occurs when unsafe_math_optimizations are enabled. */
7990 if (flag_unsafe_math_optimizations
7991 && TREE_CODE (arg1) == REAL_CST
7992 && (TREE_CODE (arg0) == PLUS_EXPR
7993 || TREE_CODE (arg0) == MINUS_EXPR)
7994 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
7995 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
7996 ? MINUS_EXPR : PLUS_EXPR,
7997 arg1, TREE_OPERAND (arg0, 1), 0))
7998 && ! TREE_CONSTANT_OVERFLOW (tem))
7999 return fold (build2 (code, type, TREE_OPERAND (arg0, 0), tem));
8001 /* Likewise, we can simplify a comparison of a real constant with
8002 a MINUS_EXPR whose first operand is also a real constant, i.e.
8003 (c1 - x) < c2 becomes x > c1-c2. */
8004 if (flag_unsafe_math_optimizations
8005 && TREE_CODE (arg1) == REAL_CST
8006 && TREE_CODE (arg0) == MINUS_EXPR
8007 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
8008 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
8010 && ! TREE_CONSTANT_OVERFLOW (tem))
8011 return fold (build2 (swap_tree_comparison (code), type,
8012 TREE_OPERAND (arg0, 1), tem));
8014 /* Fold comparisons against built-in math functions. */
8015 if (TREE_CODE (arg1) == REAL_CST
8016 && flag_unsafe_math_optimizations
8017 && ! flag_errno_math)
8019 enum built_in_function fcode = builtin_mathfn_code (arg0);
8021 if (fcode != END_BUILTINS)
8023 tem = fold_mathfn_compare (fcode, code, type, arg0, arg1);
8024 if (tem != NULL_TREE)
8030 /* Convert foo++ == CONST into ++foo == CONST + INCR. */
8031 if (TREE_CONSTANT (arg1)
8032 && (TREE_CODE (arg0) == POSTINCREMENT_EXPR
8033 || TREE_CODE (arg0) == POSTDECREMENT_EXPR)
8034 /* This optimization is invalid for ordered comparisons
8035 if CONST+INCR overflows or if foo+incr might overflow.
8036 This optimization is invalid for floating point due to rounding.
8037 For pointer types we assume overflow doesn't happen. */
8038 && (POINTER_TYPE_P (TREE_TYPE (arg0))
8039 || (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8040 && (code == EQ_EXPR || code == NE_EXPR))))
8042 tree varop, newconst;
8044 if (TREE_CODE (arg0) == POSTINCREMENT_EXPR)
8046 newconst = fold (build2 (PLUS_EXPR, TREE_TYPE (arg0),
8047 arg1, TREE_OPERAND (arg0, 1)));
8048 varop = build2 (PREINCREMENT_EXPR, TREE_TYPE (arg0),
8049 TREE_OPERAND (arg0, 0),
8050 TREE_OPERAND (arg0, 1));
8054 newconst = fold (build2 (MINUS_EXPR, TREE_TYPE (arg0),
8055 arg1, TREE_OPERAND (arg0, 1)));
8056 varop = build2 (PREDECREMENT_EXPR, TREE_TYPE (arg0),
8057 TREE_OPERAND (arg0, 0),
8058 TREE_OPERAND (arg0, 1));
8062 /* If VAROP is a reference to a bitfield, we must mask
8063 the constant by the width of the field. */
8064 if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
8065 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (varop, 0), 1))
8066 && host_integerp (DECL_SIZE (TREE_OPERAND
8067 (TREE_OPERAND (varop, 0), 1)), 1))
8069 tree fielddecl = TREE_OPERAND (TREE_OPERAND (varop, 0), 1);
8070 HOST_WIDE_INT size = tree_low_cst (DECL_SIZE (fielddecl), 1);
8071 tree folded_compare, shift;
8073 /* First check whether the comparison would come out
8074 always the same. If we don't do that we would
8075 change the meaning with the masking. */
8076 folded_compare = fold (build2 (code, type,
8077 TREE_OPERAND (varop, 0), arg1));
8078 if (integer_zerop (folded_compare)
8079 || integer_onep (folded_compare))
8080 return omit_one_operand (type, folded_compare, varop);
8082 shift = build_int_2 (TYPE_PRECISION (TREE_TYPE (varop)) - size,
8084 shift = fold_convert (TREE_TYPE (varop), shift);
8085 newconst = fold (build2 (LSHIFT_EXPR, TREE_TYPE (varop),
8087 newconst = fold (build2 (RSHIFT_EXPR, TREE_TYPE (varop),
8091 return fold (build2 (code, type, varop, newconst));
8094 /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
8095 This transformation affects the cases which are handled in later
8096 optimizations involving comparisons with non-negative constants. */
8097 if (TREE_CODE (arg1) == INTEGER_CST
8098 && TREE_CODE (arg0) != INTEGER_CST
8099 && tree_int_cst_sgn (arg1) > 0)
8104 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
8105 return fold (build2 (GT_EXPR, type, arg0, arg1));
8108 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
8109 return fold (build2 (LE_EXPR, type, arg0, arg1));
8116 /* Comparisons with the highest or lowest possible integer of
8117 the specified size will have known values.
8119 This is quite similar to fold_relational_hi_lo; however, my
8120 attempts to share the code have been nothing but trouble.
8121 I give up for now. */
8123 int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg1)));
8125 if (TREE_CODE (arg1) == INTEGER_CST
8126 && ! TREE_CONSTANT_OVERFLOW (arg1)
8127 && width <= HOST_BITS_PER_WIDE_INT
8128 && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
8129 || POINTER_TYPE_P (TREE_TYPE (arg1))))
8131 unsigned HOST_WIDE_INT signed_max;
8132 unsigned HOST_WIDE_INT max, min;
8134 signed_max = ((unsigned HOST_WIDE_INT) 1 << (width - 1)) - 1;
8136 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
8138 max = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
8144 min = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
8147 if (TREE_INT_CST_HIGH (arg1) == 0
8148 && TREE_INT_CST_LOW (arg1) == max)
8152 return omit_one_operand (type, integer_zero_node, arg0);
8155 return fold (build2 (EQ_EXPR, type, arg0, arg1));
8158 return omit_one_operand (type, integer_one_node, arg0);
8161 return fold (build2 (NE_EXPR, type, arg0, arg1));
8163 /* The GE_EXPR and LT_EXPR cases above are not normally
8164 reached because of previous transformations. */
8169 else if (TREE_INT_CST_HIGH (arg1) == 0
8170 && TREE_INT_CST_LOW (arg1) == max - 1)
8174 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
8175 return fold (build2 (EQ_EXPR, type, arg0, arg1));
8177 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
8178 return fold (build2 (NE_EXPR, type, arg0, arg1));
8182 else if (TREE_INT_CST_HIGH (arg1) == (min ? -1 : 0)
8183 && TREE_INT_CST_LOW (arg1) == min)
8187 return omit_one_operand (type, integer_zero_node, arg0);
8190 return fold (build2 (EQ_EXPR, type, arg0, arg1));
8193 return omit_one_operand (type, integer_one_node, arg0);
8196 return fold (build2 (NE_EXPR, type, arg0, arg1));
8201 else if (TREE_INT_CST_HIGH (arg1) == (min ? -1 : 0)
8202 && TREE_INT_CST_LOW (arg1) == min + 1)
8206 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
8207 return fold (build2 (NE_EXPR, type, arg0, arg1));
8209 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
8210 return fold (build2 (EQ_EXPR, type, arg0, arg1));
8215 else if (!in_gimple_form
8216 && TREE_INT_CST_HIGH (arg1) == 0
8217 && TREE_INT_CST_LOW (arg1) == signed_max
8218 && TYPE_UNSIGNED (TREE_TYPE (arg1))
8219 /* signed_type does not work on pointer types. */
8220 && INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
8222 /* The following case also applies to X < signed_max+1
8223 and X >= signed_max+1 because of previous transformations. */
8224 if (code == LE_EXPR || code == GT_EXPR)
8227 st0 = lang_hooks.types.signed_type (TREE_TYPE (arg0));
8228 st1 = lang_hooks.types.signed_type (TREE_TYPE (arg1));
8230 (build2 (code == LE_EXPR ? GE_EXPR: LT_EXPR,
8231 type, fold_convert (st0, arg0),
8232 fold_convert (st1, integer_zero_node)));
8238 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
8239 a MINUS_EXPR of a constant, we can convert it into a comparison with
8240 a revised constant as long as no overflow occurs. */
8241 if ((code == EQ_EXPR || code == NE_EXPR)
8242 && TREE_CODE (arg1) == INTEGER_CST
8243 && (TREE_CODE (arg0) == PLUS_EXPR
8244 || TREE_CODE (arg0) == MINUS_EXPR)
8245 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8246 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
8247 ? MINUS_EXPR : PLUS_EXPR,
8248 arg1, TREE_OPERAND (arg0, 1), 0))
8249 && ! TREE_CONSTANT_OVERFLOW (tem))
8250 return fold (build2 (code, type, TREE_OPERAND (arg0, 0), tem));
8252 /* Similarly for a NEGATE_EXPR. */
8253 else if ((code == EQ_EXPR || code == NE_EXPR)
8254 && TREE_CODE (arg0) == NEGATE_EXPR
8255 && TREE_CODE (arg1) == INTEGER_CST
8256 && 0 != (tem = negate_expr (arg1))
8257 && TREE_CODE (tem) == INTEGER_CST
8258 && ! TREE_CONSTANT_OVERFLOW (tem))
8259 return fold (build2 (code, type, TREE_OPERAND (arg0, 0), tem));
8261 /* If we have X - Y == 0, we can convert that to X == Y and similarly
8262 for !=. Don't do this for ordered comparisons due to overflow. */
8263 else if ((code == NE_EXPR || code == EQ_EXPR)
8264 && integer_zerop (arg1) && TREE_CODE (arg0) == MINUS_EXPR)
8265 return fold (build2 (code, type,
8266 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1)));
8268 /* If we are widening one operand of an integer comparison,
8269 see if the other operand is similarly being widened. Perhaps we
8270 can do the comparison in the narrower type. */
8271 else if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
8272 && TREE_CODE (arg0) == NOP_EXPR
8273 && (tem = get_unwidened (arg0, NULL_TREE)) != arg0
8274 && (code == EQ_EXPR || code == NE_EXPR
8275 || TYPE_UNSIGNED (TREE_TYPE (arg0))
8276 == TYPE_UNSIGNED (TREE_TYPE (tem)))
8277 && (t1 = get_unwidened (arg1, TREE_TYPE (tem))) != 0
8278 && (TREE_TYPE (t1) == TREE_TYPE (tem)
8279 || (TREE_CODE (t1) == INTEGER_CST
8280 && int_fits_type_p (t1, TREE_TYPE (tem)))))
8281 return fold (build2 (code, type, tem,
8282 fold_convert (TREE_TYPE (tem), t1)));
8284 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
8285 constant, we can simplify it. */
8286 else if (TREE_CODE (arg1) == INTEGER_CST
8287 && (TREE_CODE (arg0) == MIN_EXPR
8288 || TREE_CODE (arg0) == MAX_EXPR)
8289 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8290 return optimize_minmax_comparison (t);
8292 /* If we are comparing an ABS_EXPR with a constant, we can
8293 convert all the cases into explicit comparisons, but they may
8294 well not be faster than doing the ABS and one comparison.
8295 But ABS (X) <= C is a range comparison, which becomes a subtraction
8296 and a comparison, and is probably faster. */
8297 else if (code == LE_EXPR && TREE_CODE (arg1) == INTEGER_CST
8298 && TREE_CODE (arg0) == ABS_EXPR
8299 && ! TREE_SIDE_EFFECTS (arg0)
8300 && (0 != (tem = negate_expr (arg1)))
8301 && TREE_CODE (tem) == INTEGER_CST
8302 && ! TREE_CONSTANT_OVERFLOW (tem))
8303 return fold (build2 (TRUTH_ANDIF_EXPR, type,
8304 build2 (GE_EXPR, type,
8305 TREE_OPERAND (arg0, 0), tem),
8306 build2 (LE_EXPR, type,
8307 TREE_OPERAND (arg0, 0), arg1)));
8309 /* If this is an EQ or NE comparison with zero and ARG0 is
8310 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
8311 two operations, but the latter can be done in one less insn
8312 on machines that have only two-operand insns or on which a
8313 constant cannot be the first operand. */
8314 if (integer_zerop (arg1) && (code == EQ_EXPR || code == NE_EXPR)
8315 && TREE_CODE (arg0) == BIT_AND_EXPR)
8317 tree arg00 = TREE_OPERAND (arg0, 0);
8318 tree arg01 = TREE_OPERAND (arg0, 1);
8319 if (TREE_CODE (arg00) == LSHIFT_EXPR
8320 && integer_onep (TREE_OPERAND (arg00, 0)))
8322 fold (build2 (code, type,
8323 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
8324 build2 (RSHIFT_EXPR, TREE_TYPE (arg00),
8325 arg01, TREE_OPERAND (arg00, 1)),
8326 fold_convert (TREE_TYPE (arg0),
8329 else if (TREE_CODE (TREE_OPERAND (arg0, 1)) == LSHIFT_EXPR
8330 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 1), 0)))
8332 fold (build2 (code, type,
8333 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
8334 build2 (RSHIFT_EXPR, TREE_TYPE (arg01),
8335 arg00, TREE_OPERAND (arg01, 1)),
8336 fold_convert (TREE_TYPE (arg0),
8341 /* If this is an NE or EQ comparison of zero against the result of a
8342 signed MOD operation whose second operand is a power of 2, make
8343 the MOD operation unsigned since it is simpler and equivalent. */
8344 if ((code == NE_EXPR || code == EQ_EXPR)
8345 && integer_zerop (arg1)
8346 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
8347 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
8348 || TREE_CODE (arg0) == CEIL_MOD_EXPR
8349 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
8350 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
8351 && integer_pow2p (TREE_OPERAND (arg0, 1)))
8353 tree newtype = lang_hooks.types.unsigned_type (TREE_TYPE (arg0));
8354 tree newmod = fold (build2 (TREE_CODE (arg0), newtype,
8355 fold_convert (newtype,
8356 TREE_OPERAND (arg0, 0)),
8357 fold_convert (newtype,
8358 TREE_OPERAND (arg0, 1))));
8360 return fold (build2 (code, type, newmod,
8361 fold_convert (newtype, arg1)));
8364 /* If this is an NE comparison of zero with an AND of one, remove the
8365 comparison since the AND will give the correct value. */
8366 if (code == NE_EXPR && integer_zerop (arg1)
8367 && TREE_CODE (arg0) == BIT_AND_EXPR
8368 && integer_onep (TREE_OPERAND (arg0, 1)))
8369 return fold_convert (type, arg0);
8371 /* If we have (A & C) == C where C is a power of 2, convert this into
8372 (A & C) != 0. Similarly for NE_EXPR. */
8373 if ((code == EQ_EXPR || code == NE_EXPR)
8374 && TREE_CODE (arg0) == BIT_AND_EXPR
8375 && integer_pow2p (TREE_OPERAND (arg0, 1))
8376 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
8377 return fold (build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
8378 arg0, integer_zero_node));
8380 /* If we have (A & C) != 0 or (A & C) == 0 and C is a power of
8381 2, then fold the expression into shifts and logical operations. */
8382 tem = fold_single_bit_test (code, arg0, arg1, type);
8386 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
8387 Similarly for NE_EXPR. */
8388 if ((code == EQ_EXPR || code == NE_EXPR)
8389 && TREE_CODE (arg0) == BIT_AND_EXPR
8390 && TREE_CODE (arg1) == INTEGER_CST
8391 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8394 = fold (build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
8395 arg1, build1 (BIT_NOT_EXPR,
8396 TREE_TYPE (TREE_OPERAND (arg0, 1)),
8397 TREE_OPERAND (arg0, 1))));
8398 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
8399 if (integer_nonzerop (dandnotc))
8400 return omit_one_operand (type, rslt, arg0);
8403 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
8404 Similarly for NE_EXPR. */
8405 if ((code == EQ_EXPR || code == NE_EXPR)
8406 && TREE_CODE (arg0) == BIT_IOR_EXPR
8407 && TREE_CODE (arg1) == INTEGER_CST
8408 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8411 = fold (build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
8412 TREE_OPERAND (arg0, 1),
8413 build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1)));
8414 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
8415 if (integer_nonzerop (candnotd))
8416 return omit_one_operand (type, rslt, arg0);
8419 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
8420 and similarly for >= into !=. */
8421 if ((code == LT_EXPR || code == GE_EXPR)
8422 && TYPE_UNSIGNED (TREE_TYPE (arg0))
8423 && TREE_CODE (arg1) == LSHIFT_EXPR
8424 && integer_onep (TREE_OPERAND (arg1, 0)))
8425 return build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
8426 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
8427 TREE_OPERAND (arg1, 1)),
8428 fold_convert (TREE_TYPE (arg0), integer_zero_node));
8430 else if ((code == LT_EXPR || code == GE_EXPR)
8431 && TYPE_UNSIGNED (TREE_TYPE (arg0))
8432 && (TREE_CODE (arg1) == NOP_EXPR
8433 || TREE_CODE (arg1) == CONVERT_EXPR)
8434 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
8435 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
8437 build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
8438 fold_convert (TREE_TYPE (arg0),
8439 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
8440 TREE_OPERAND (TREE_OPERAND (arg1, 0),
8442 fold_convert (TREE_TYPE (arg0), integer_zero_node));
8444 /* Simplify comparison of something with itself. (For IEEE
8445 floating-point, we can only do some of these simplifications.) */
8446 if (operand_equal_p (arg0, arg1, 0))
8451 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
8452 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8453 return constant_boolean_node (1, type);
8458 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
8459 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8460 return constant_boolean_node (1, type);
8461 return fold (build2 (EQ_EXPR, type, arg0, arg1));
8464 /* For NE, we can only do this simplification if integer
8465 or we don't honor IEEE floating point NaNs. */
8466 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
8467 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8469 /* ... fall through ... */
8472 return constant_boolean_node (0, type);
8478 /* If we are comparing an expression that just has comparisons
8479 of two integer values, arithmetic expressions of those comparisons,
8480 and constants, we can simplify it. There are only three cases
8481 to check: the two values can either be equal, the first can be
8482 greater, or the second can be greater. Fold the expression for
8483 those three values. Since each value must be 0 or 1, we have
8484 eight possibilities, each of which corresponds to the constant 0
8485 or 1 or one of the six possible comparisons.
8487 This handles common cases like (a > b) == 0 but also handles
8488 expressions like ((x > y) - (y > x)) > 0, which supposedly
8489 occur in macroized code. */
8491 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
8493 tree cval1 = 0, cval2 = 0;
8496 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
8497 /* Don't handle degenerate cases here; they should already
8498 have been handled anyway. */
8499 && cval1 != 0 && cval2 != 0
8500 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
8501 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
8502 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
8503 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
8504 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
8505 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
8506 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
8508 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
8509 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
8511 /* We can't just pass T to eval_subst in case cval1 or cval2
8512 was the same as ARG1. */
8515 = fold (build2 (code, type,
8516 eval_subst (arg0, cval1, maxval,
8520 = fold (build2 (code, type,
8521 eval_subst (arg0, cval1, maxval,
8525 = fold (build2 (code, type,
8526 eval_subst (arg0, cval1, minval,
8530 /* All three of these results should be 0 or 1. Confirm they
8531 are. Then use those values to select the proper code
8534 if ((integer_zerop (high_result)
8535 || integer_onep (high_result))
8536 && (integer_zerop (equal_result)
8537 || integer_onep (equal_result))
8538 && (integer_zerop (low_result)
8539 || integer_onep (low_result)))
8541 /* Make a 3-bit mask with the high-order bit being the
8542 value for `>', the next for '=', and the low for '<'. */
8543 switch ((integer_onep (high_result) * 4)
8544 + (integer_onep (equal_result) * 2)
8545 + integer_onep (low_result))
8549 return omit_one_operand (type, integer_zero_node, arg0);
8570 return omit_one_operand (type, integer_one_node, arg0);
8573 tem = build2 (code, type, cval1, cval2);
8575 return save_expr (tem);
8582 /* If this is a comparison of a field, we may be able to simplify it. */
8583 if (((TREE_CODE (arg0) == COMPONENT_REF
8584 && lang_hooks.can_use_bit_fields_p ())
8585 || TREE_CODE (arg0) == BIT_FIELD_REF)
8586 && (code == EQ_EXPR || code == NE_EXPR)
8587 /* Handle the constant case even without -O
8588 to make sure the warnings are given. */
8589 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
8591 t1 = optimize_bit_field_compare (code, type, arg0, arg1);
8596 /* If this is a comparison of complex values and either or both sides
8597 are a COMPLEX_EXPR or COMPLEX_CST, it is best to split up the
8598 comparisons and join them with a TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR.
8599 This may prevent needless evaluations. */
8600 if ((code == EQ_EXPR || code == NE_EXPR)
8601 && TREE_CODE (TREE_TYPE (arg0)) == COMPLEX_TYPE
8602 && (TREE_CODE (arg0) == COMPLEX_EXPR
8603 || TREE_CODE (arg1) == COMPLEX_EXPR
8604 || TREE_CODE (arg0) == COMPLEX_CST
8605 || TREE_CODE (arg1) == COMPLEX_CST))
8607 tree subtype = TREE_TYPE (TREE_TYPE (arg0));
8608 tree real0, imag0, real1, imag1;
8610 arg0 = save_expr (arg0);
8611 arg1 = save_expr (arg1);
8612 real0 = fold (build1 (REALPART_EXPR, subtype, arg0));
8613 imag0 = fold (build1 (IMAGPART_EXPR, subtype, arg0));
8614 real1 = fold (build1 (REALPART_EXPR, subtype, arg1));
8615 imag1 = fold (build1 (IMAGPART_EXPR, subtype, arg1));
8617 return fold (build2 ((code == EQ_EXPR ? TRUTH_ANDIF_EXPR
8620 fold (build2 (code, type, real0, real1)),
8621 fold (build2 (code, type, imag0, imag1))));
8624 /* Optimize comparisons of strlen vs zero to a compare of the
8625 first character of the string vs zero. To wit,
8626 strlen(ptr) == 0 => *ptr == 0
8627 strlen(ptr) != 0 => *ptr != 0
8628 Other cases should reduce to one of these two (or a constant)
8629 due to the return value of strlen being unsigned. */
8630 if ((code == EQ_EXPR || code == NE_EXPR)
8631 && integer_zerop (arg1)
8632 && TREE_CODE (arg0) == CALL_EXPR)
8634 tree fndecl = get_callee_fndecl (arg0);
8638 && DECL_BUILT_IN (fndecl)
8639 && DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_MD
8640 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
8641 && (arglist = TREE_OPERAND (arg0, 1))
8642 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE
8643 && ! TREE_CHAIN (arglist))
8644 return fold (build2 (code, type,
8645 build1 (INDIRECT_REF, char_type_node,
8646 TREE_VALUE(arglist)),
8647 integer_zero_node));
8650 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
8651 into a single range test. */
8652 if (TREE_CODE (arg0) == TRUNC_DIV_EXPR
8653 && TREE_CODE (arg1) == INTEGER_CST
8654 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8655 && !integer_zerop (TREE_OPERAND (arg0, 1))
8656 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8657 && !TREE_OVERFLOW (arg1))
8659 t1 = fold_div_compare (code, type, arg0, arg1);
8660 if (t1 != NULL_TREE)
8664 if ((code == EQ_EXPR || code == NE_EXPR)
8665 && !TREE_SIDE_EFFECTS (arg0)
8666 && integer_zerop (arg1)
8667 && tree_expr_nonzero_p (arg0))
8668 return constant_boolean_node (code==NE_EXPR, type);
8670 t1 = fold_relational_const (code, type, arg0, arg1);
8671 return t1 == NULL_TREE ? t : t1;
8673 case UNORDERED_EXPR:
8681 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
8683 t1 = fold_relational_const (code, type, arg0, arg1);
8684 if (t1 != NULL_TREE)
8688 /* If the first operand is NaN, the result is constant. */
8689 if (TREE_CODE (arg0) == REAL_CST
8690 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
8691 && (code != LTGT_EXPR || ! flag_trapping_math))
8693 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
8696 return omit_one_operand (type, t1, arg1);
8699 /* If the second operand is NaN, the result is constant. */
8700 if (TREE_CODE (arg1) == REAL_CST
8701 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
8702 && (code != LTGT_EXPR || ! flag_trapping_math))
8704 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
8707 return omit_one_operand (type, t1, arg0);
8710 /* Simplify unordered comparison of something with itself. */
8711 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
8712 && operand_equal_p (arg0, arg1, 0))
8713 return constant_boolean_node (1, type);
8715 if (code == LTGT_EXPR
8716 && !flag_trapping_math
8717 && operand_equal_p (arg0, arg1, 0))
8718 return constant_boolean_node (0, type);
8720 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
8722 tree targ0 = strip_float_extensions (arg0);
8723 tree targ1 = strip_float_extensions (arg1);
8724 tree newtype = TREE_TYPE (targ0);
8726 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
8727 newtype = TREE_TYPE (targ1);
8729 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
8730 return fold (build2 (code, type, fold_convert (newtype, targ0),
8731 fold_convert (newtype, targ1)));
8737 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
8738 so all simple results must be passed through pedantic_non_lvalue. */
8739 if (TREE_CODE (arg0) == INTEGER_CST)
8741 tem = TREE_OPERAND (t, (integer_zerop (arg0) ? 2 : 1));
8742 /* Only optimize constant conditions when the selected branch
8743 has the same type as the COND_EXPR. This avoids optimizing
8744 away "c ? x : throw", where the throw has a void type. */
8745 if (! VOID_TYPE_P (TREE_TYPE (tem))
8746 || VOID_TYPE_P (type))
8747 return pedantic_non_lvalue (tem);
8750 if (operand_equal_p (arg1, TREE_OPERAND (t, 2), 0))
8751 return pedantic_omit_one_operand (type, arg1, arg0);
8753 /* If we have A op B ? A : C, we may be able to convert this to a
8754 simpler expression, depending on the operation and the values
8755 of B and C. Signed zeros prevent all of these transformations,
8756 for reasons given above each one.
8758 Also try swapping the arguments and inverting the conditional. */
8759 if (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
8760 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
8761 arg1, TREE_OPERAND (arg0, 1))
8762 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
8764 tem = fold_cond_expr_with_comparison (type, arg0,
8765 TREE_OPERAND (t, 1),
8766 TREE_OPERAND (t, 2));
8771 if (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
8772 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
8773 TREE_OPERAND (t, 2),
8774 TREE_OPERAND (arg0, 1))
8775 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (t, 2)))))
8777 tem = invert_truthvalue (arg0);
8778 if (TREE_CODE_CLASS (TREE_CODE (tem)) == '<')
8780 tem = fold_cond_expr_with_comparison (type, tem,
8781 TREE_OPERAND (t, 2),
8782 TREE_OPERAND (t, 1));
8788 /* If the second operand is simpler than the third, swap them
8789 since that produces better jump optimization results. */
8790 if (tree_swap_operands_p (TREE_OPERAND (t, 1),
8791 TREE_OPERAND (t, 2), false))
8793 /* See if this can be inverted. If it can't, possibly because
8794 it was a floating-point inequality comparison, don't do
8796 tem = invert_truthvalue (arg0);
8798 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
8799 return fold (build3 (code, type, tem,
8800 TREE_OPERAND (t, 2), TREE_OPERAND (t, 1)));
8803 /* Convert A ? 1 : 0 to simply A. */
8804 if (integer_onep (TREE_OPERAND (t, 1))
8805 && integer_zerop (TREE_OPERAND (t, 2))
8806 /* If we try to convert TREE_OPERAND (t, 0) to our type, the
8807 call to fold will try to move the conversion inside
8808 a COND, which will recurse. In that case, the COND_EXPR
8809 is probably the best choice, so leave it alone. */
8810 && type == TREE_TYPE (arg0))
8811 return pedantic_non_lvalue (arg0);
8813 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
8814 over COND_EXPR in cases such as floating point comparisons. */
8815 if (integer_zerop (TREE_OPERAND (t, 1))
8816 && integer_onep (TREE_OPERAND (t, 2))
8817 && truth_value_p (TREE_CODE (arg0)))
8818 return pedantic_non_lvalue (fold_convert (type,
8819 invert_truthvalue (arg0)));
8821 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
8822 if (TREE_CODE (arg0) == LT_EXPR
8823 && integer_zerop (TREE_OPERAND (arg0, 1))
8824 && integer_zerop (TREE_OPERAND (t, 2))
8825 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
8826 return fold_convert (type, fold (build2 (BIT_AND_EXPR,
8827 TREE_TYPE (tem), tem, arg1)));
8829 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
8830 already handled above. */
8831 if (TREE_CODE (arg0) == BIT_AND_EXPR
8832 && integer_onep (TREE_OPERAND (arg0, 1))
8833 && integer_zerop (TREE_OPERAND (t, 2))
8834 && integer_pow2p (arg1))
8836 tree tem = TREE_OPERAND (arg0, 0);
8838 if (TREE_CODE (tem) == RSHIFT_EXPR
8839 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
8840 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
8841 return fold (build2 (BIT_AND_EXPR, type,
8842 TREE_OPERAND (tem, 0), arg1));
8845 /* A & N ? N : 0 is simply A & N if N is a power of two. This
8846 is probably obsolete because the first operand should be a
8847 truth value (that's why we have the two cases above), but let's
8848 leave it in until we can confirm this for all front-ends. */
8849 if (integer_zerop (TREE_OPERAND (t, 2))
8850 && TREE_CODE (arg0) == NE_EXPR
8851 && integer_zerop (TREE_OPERAND (arg0, 1))
8852 && integer_pow2p (arg1)
8853 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
8854 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
8855 arg1, OEP_ONLY_CONST))
8856 return pedantic_non_lvalue (fold_convert (type,
8857 TREE_OPERAND (arg0, 0)));
8859 /* Convert A ? B : 0 into A && B if A and B are truth values. */
8860 if (integer_zerop (TREE_OPERAND (t, 2))
8861 && truth_value_p (TREE_CODE (arg0))
8862 && truth_value_p (TREE_CODE (arg1)))
8863 return fold (build2 (TRUTH_ANDIF_EXPR, type, arg0, arg1));
8865 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
8866 if (integer_onep (TREE_OPERAND (t, 2))
8867 && truth_value_p (TREE_CODE (arg0))
8868 && truth_value_p (TREE_CODE (arg1)))
8870 /* Only perform transformation if ARG0 is easily inverted. */
8871 tem = invert_truthvalue (arg0);
8872 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
8873 return fold (build2 (TRUTH_ORIF_EXPR, type, tem, arg1));
8876 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
8877 if (integer_zerop (arg1)
8878 && truth_value_p (TREE_CODE (arg0))
8879 && truth_value_p (TREE_CODE (TREE_OPERAND (t, 2))))
8881 /* Only perform transformation if ARG0 is easily inverted. */
8882 tem = invert_truthvalue (arg0);
8883 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
8884 return fold (build2 (TRUTH_ANDIF_EXPR, type, tem,
8885 TREE_OPERAND (t, 2)));
8888 /* Convert A ? 1 : B into A || B if A and B are truth values. */
8889 if (integer_onep (arg1)
8890 && truth_value_p (TREE_CODE (arg0))
8891 && truth_value_p (TREE_CODE (TREE_OPERAND (t, 2))))
8892 return fold (build2 (TRUTH_ORIF_EXPR, type, arg0,
8893 TREE_OPERAND (t, 2)));
8898 /* When pedantic, a compound expression can be neither an lvalue
8899 nor an integer constant expression. */
8900 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
8902 /* Don't let (0, 0) be null pointer constant. */
8903 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
8904 : fold_convert (type, arg1);
8905 return pedantic_non_lvalue (tem);
8909 return build_complex (type, arg0, arg1);
8913 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8915 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
8916 return omit_one_operand (type, TREE_OPERAND (arg0, 0),
8917 TREE_OPERAND (arg0, 1));
8918 else if (TREE_CODE (arg0) == COMPLEX_CST)
8919 return TREE_REALPART (arg0);
8920 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8921 return fold (build2 (TREE_CODE (arg0), type,
8922 fold (build1 (REALPART_EXPR, type,
8923 TREE_OPERAND (arg0, 0))),
8924 fold (build1 (REALPART_EXPR, type,
8925 TREE_OPERAND (arg0, 1)))));
8929 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8930 return fold_convert (type, integer_zero_node);
8931 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
8932 return omit_one_operand (type, TREE_OPERAND (arg0, 1),
8933 TREE_OPERAND (arg0, 0));
8934 else if (TREE_CODE (arg0) == COMPLEX_CST)
8935 return TREE_IMAGPART (arg0);
8936 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8937 return fold (build2 (TREE_CODE (arg0), type,
8938 fold (build1 (IMAGPART_EXPR, type,
8939 TREE_OPERAND (arg0, 0))),
8940 fold (build1 (IMAGPART_EXPR, type,
8941 TREE_OPERAND (arg0, 1)))));
8944 /* Pull arithmetic ops out of the CLEANUP_POINT_EXPR where
8946 case CLEANUP_POINT_EXPR:
8947 if (! has_cleanups (arg0))
8948 return TREE_OPERAND (t, 0);
8951 enum tree_code code0 = TREE_CODE (arg0);
8952 int kind0 = TREE_CODE_CLASS (code0);
8953 tree arg00 = TREE_OPERAND (arg0, 0);
8956 if (kind0 == '1' || code0 == TRUTH_NOT_EXPR)
8957 return fold (build1 (code0, type,
8958 fold (build1 (CLEANUP_POINT_EXPR,
8959 TREE_TYPE (arg00), arg00))));
8961 if (kind0 == '<' || kind0 == '2'
8962 || code0 == TRUTH_ANDIF_EXPR || code0 == TRUTH_ORIF_EXPR
8963 || code0 == TRUTH_AND_EXPR || code0 == TRUTH_OR_EXPR
8964 || code0 == TRUTH_XOR_EXPR)
8966 arg01 = TREE_OPERAND (arg0, 1);
8968 if (TREE_CONSTANT (arg00)
8969 || ((code0 == TRUTH_ANDIF_EXPR || code0 == TRUTH_ORIF_EXPR)
8970 && ! has_cleanups (arg00)))
8971 return fold (build2 (code0, type, arg00,
8972 fold (build1 (CLEANUP_POINT_EXPR,
8973 TREE_TYPE (arg01), arg01))));
8975 if (TREE_CONSTANT (arg01))
8976 return fold (build2 (code0, type,
8977 fold (build1 (CLEANUP_POINT_EXPR,
8978 TREE_TYPE (arg00), arg00)),
8986 /* Check for a built-in function. */
8987 if (TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR
8988 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 0))
8990 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (t, 0), 0)))
8992 tree tmp = fold_builtin (t, false);
9000 } /* switch (code) */
9003 #ifdef ENABLE_FOLD_CHECKING
9006 static void fold_checksum_tree (tree, struct md5_ctx *, htab_t);
9007 static void fold_check_failed (tree, tree);
9008 void print_fold_checksum (tree);
9010 /* When --enable-checking=fold, compute a digest of expr before
9011 and after the actual fold call to verify that fold did not
9012 accidentally change the original expr. */
/* ENABLE_FOLD_CHECKING body of the `fold' wrapper: MD5-hash EXPR both
   before and after the real fold and abort if the input tree changed.
   (The function signature falls in lines elided from this listing.)  */
9019 unsigned char checksum_before[16], checksum_after[16];
/* HT records nodes already hashed so shared subtrees count only once.  */
9022 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
9023 md5_init_ctx (&ctx);
9024 fold_checksum_tree (expr, &ctx, ht);
9025 md5_finish_ctx (&ctx, checksum_before);
/* Do the actual folding.  */
9028 ret = fold_1 (expr);
/* Re-hash the original EXPR after folding.  */
9030 md5_init_ctx (&ctx);
9031 fold_checksum_tree (expr, &ctx, ht);
9032 md5_finish_ctx (&ctx, checksum_after);
/* A digest mismatch means fold_1 modified EXPR in place, which is
   forbidden.  */
9035 if (memcmp (checksum_before, checksum_after, 16))
9036 fold_check_failed (expr, ret);
/* Debug helper: compute the MD5 digest of EXPR and print it to stderr
   as 32 hex digits followed by a newline.  */
9042 print_fold_checksum (tree expr)
9045 unsigned char checksum[16], cnt;
9048 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
9049 md5_init_ctx (&ctx);
9050 fold_checksum_tree (expr, &ctx, ht);
9051 md5_finish_ctx (&ctx, checksum);
/* Emit the 16 digest bytes in hexadecimal.  */
9053 for (cnt = 0; cnt < 16; ++cnt)
9054 fprintf (stderr, "%02x", checksum[cnt]);
9055 putc ('\n', stderr);
/* Report that the checksum of the original tree changed across a call
   to fold, i.e. fold mutated its argument; this is an internal
   compiler error.  EXPR and RET are currently unused.  */
9059 fold_check_failed (tree expr ATTRIBUTE_UNUSED, tree ret ATTRIBUTE_UNUSED)
9061 internal_error ("fold check: original tree changed by fold");
/* Accumulate into CTX the bytes of EXPR and of every tree node
   reachable from it.  HT holds the set of already-visited nodes so a
   shared subtree contributes to the digest only once.  Fields that
   fold is legitimately allowed to update are cleared before hashing
   (see below).  */
9065 fold_checksum_tree (tree expr, struct md5_ctx *ctx, htab_t ht)
9068 enum tree_code code;
9069 char buf[sizeof (struct tree_decl)];
/* BUF must be large enough to hold a copy of any node scrubbed below;
   these sizeof comparisons guard that assumption.  */
9072 if (sizeof (struct tree_exp) + 5 * sizeof (tree)
9073 > sizeof (struct tree_decl)
9074 || sizeof (struct tree_type) > sizeof (struct tree_decl))
/* Record EXPR in the visited table (the early-return on a duplicate
   entry is elided from this listing).  */
9078 slot = htab_find_slot (ht, expr, INSERT);
9082 code = TREE_CODE (expr);
/* Hash a scrubbed copy of nodes whose mutable fields fold may set.  */
9083 if (TREE_CODE_CLASS (code) == 'd' && DECL_ASSEMBLER_NAME_SET_P (expr))
9085 /* Allow DECL_ASSEMBLER_NAME to be modified. */
9086 memcpy (buf, expr, tree_size (expr))
9088 SET_DECL_ASSEMBLER_NAME (expr, NULL);
9090 else if (TREE_CODE_CLASS (code) == 't'
9091 && (TYPE_POINTER_TO (expr) || TYPE_REFERENCE_TO (expr)))
9093 /* Allow TYPE_POINTER_TO and TYPE_REFERENCE_TO to be modified. */
9094 memcpy (buf, expr, tree_size (expr));
9096 TYPE_POINTER_TO (expr) = NULL;
9097 TYPE_REFERENCE_TO (expr) = NULL;
/* Hash the node's own bytes, then recurse into fields common to all
   nodes.  */
9099 md5_process_bytes (expr, tree_size (expr), ctx);
9100 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
9101 if (TREE_CODE_CLASS (code) != 't' && TREE_CODE_CLASS (code) != 'd')
9102 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
/* Class-specific payload; the case labels are elided from this
   listing.  */
9103 switch (TREE_CODE_CLASS (code))
9109 md5_process_bytes (TREE_STRING_POINTER (expr),
9110 TREE_STRING_LENGTH (expr), ctx);
9113 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
9114 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
9117 fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
9127 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
9128 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
9131 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
9132 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
/* Expressions: hash each operand.  */
9144 len = first_rtl_op (code);
9145 for (i = 0; i < len; ++i)
9146 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
/* Declarations: hash every auxiliary field.  */
9149 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
9150 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
9151 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
9152 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
9153 fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
9154 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
9155 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
9156 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
9157 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
9158 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
9159 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
/* Types: hash size, name, bounds and related types.  */
9162 if (TREE_CODE (expr) == ENUMERAL_TYPE)
9163 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
9164 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
9165 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
9166 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
9167 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
9168 if (INTEGRAL_TYPE_P (expr)
9169 || SCALAR_FLOAT_TYPE_P (expr))
9171 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
9172 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
9174 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
9175 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
9176 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
9185 /* Perform constant folding and related simplification of initializer
9186 expression EXPR. This behaves identically to "fold" but ignores
9187 potential run-time traps and exceptions that fold must preserve. */
9190 fold_initializer (tree expr)
/* Save the trap-related flags so they can be restored afterwards.  */
9192 int saved_signaling_nans = flag_signaling_nans;
9193 int saved_trapping_math = flag_trapping_math;
9194 int saved_trapv = flag_trapv;
/* Initializers are evaluated at translation time, so run-time trap
   and signaling-NaN semantics need not be preserved while folding.  */
9197 flag_signaling_nans = 0;
9198 flag_trapping_math = 0;
9201 result = fold (expr);
/* Restore the user-visible flag settings.  */
9203 flag_signaling_nans = saved_signaling_nans;
9204 flag_trapping_math = saved_trapping_math;
9205 flag_trapv = saved_trapv;
9210 /* Determine if first argument is a multiple of second argument. Return 0 if
9211 it is not, or we cannot easily determine it to be.
9213 An example of the sort of thing we care about (at this point; this routine
9214 could surely be made more general, and expanded to do what the *_DIV_EXPR's
9215 fold cases do now) is discovering that
9217 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
9223 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
9225 This code also handles discovering that
9227 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
9229 is a multiple of 8 so we don't have to worry about dealing with a
9232 Note that we *look* inside a SAVE_EXPR only to determine how it was
9233 calculated; it is not safe for fold to do much of anything else with the
9234 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
9235 at run time. For example, the latter example above *cannot* be implemented
9236 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
9237 evaluation time of the original SAVE_EXPR is not necessarily the same at
9238 the time the new expression is evaluated. The only optimization of this
9239 sort that would be valid is changing
9241 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
9245 SAVE_EXPR (I) * SAVE_EXPR (J)
9247 (where the same SAVE_EXPR (J) is used in the original and the
9248 transformed version). */
9251 multiple_of_p (tree type, tree top, tree bottom)
/* Anything is trivially a multiple of itself.  */
9253 if (operand_equal_p (top, bottom, 0))
/* Only plain integer types are handled.  */
9256 if (TREE_CODE (type) != INTEGER_TYPE)
/* Dispatch on the structure of TOP; the case labels are elided from
   this listing.  */
9259 switch (TREE_CODE (top))
/* A product is a multiple of BOTTOM if either factor is.  */
9262 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
9263 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
/* A sum or difference requires both operands to be multiples.  */
9267 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
9268 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
/* A shift by a small constant is rewritten as an equivalent multiply
   (1 << N computed by const_binop) and retried.  */
9271 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST
9275 op1 = TREE_OPERAND (top, 1);
9276 /* const_binop may not detect overflow correctly,
9277 so check for it explicitly here. */
9278 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
9279 > TREE_INT_CST_LOW (op1)
9280 && TREE_INT_CST_HIGH (op1) == 0
9281 && 0 != (t1 = fold_convert (type,
9282 const_binop (LSHIFT_EXPR,
9285 && ! TREE_OVERFLOW (t1))
9286 return multiple_of_p (type, t1, bottom);
9291 /* Can't handle conversions from non-integral or wider integral type. */
9292 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
9293 || (TYPE_PRECISION (type)
9294 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
9297 /* .. fall through ... */
/* Conversions and similar wrappers: look through to the operand.  */
9300 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
/* Constant TOP: it is a multiple iff TOP % BOTTOM == 0, but refuse
   negative values when TYPE is unsigned, where the modulus would be
   computed on the wrapped representation.  */
9303 if (TREE_CODE (bottom) != INTEGER_CST
9304 || (TYPE_UNSIGNED (type)
9305 && (tree_int_cst_sgn (top) < 0
9306 || tree_int_cst_sgn (bottom) < 0)))
9308 return integer_zerop (const_binop (TRUNC_MOD_EXPR,
9316 /* Return true if `t' is known to be non-negative. */
9319 tree_expr_nonnegative_p (tree t)
/* Dispatch on the expression's code; many case labels are elided from
   this listing.  */
9321 switch (TREE_CODE (t))
/* Integer constants: test the sign directly.  */
9327 return tree_int_cst_sgn (t) >= 0;
/* Real constants: test the sign bit of the value.  */
9330 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
/* Addition: for floats, nonnegative + nonnegative stays nonnegative
   (no wraparound).  */
9333 if (FLOAT_TYPE_P (TREE_TYPE (t)))
9334 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
9335 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
9337 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
9338 both unsigned and at least 2 bits shorter than the result. */
9339 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
9340 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
9341 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
9343 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
9344 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
9345 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
9346 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
/* The extra bit guarantees the sum cannot reach the sign bit.  */
9348 unsigned int prec = MAX (TYPE_PRECISION (inner1),
9349 TYPE_PRECISION (inner2)) + 1;
9350 return prec < TYPE_PRECISION (TREE_TYPE (t));
/* Multiplication.  */
9356 if (FLOAT_TYPE_P (TREE_TYPE (t)))
9358 /* x * x for floating point x is always non-negative. */
9359 if (operand_equal_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1), 0))
9361 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
9362 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
9365 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
9366 both unsigned and their total bits is shorter than the result. */
9367 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
9368 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
9369 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
9371 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
9372 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
9373 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
9374 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
9375 return TYPE_PRECISION (inner1) + TYPE_PRECISION (inner2)
9376 < TYPE_PRECISION (TREE_TYPE (t));
/* Division: nonnegative when both operands are.  */
9380 case TRUNC_DIV_EXPR:
9382 case FLOOR_DIV_EXPR:
9383 case ROUND_DIV_EXPR:
9384 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
9385 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
/* Modulus: sign follows the first operand.  */
9387 case TRUNC_MOD_EXPR:
9389 case FLOOR_MOD_EXPR:
9390 case ROUND_MOD_EXPR:
9391 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
9394 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
9395 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
9398 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1))
9399 || tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
9402 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
9403 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
/* Conversions: nonnegativity is preserved across same-domain
   conversions and widenings from unsigned types.  */
9407 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
9408 tree outer_type = TREE_TYPE (t);
9410 if (TREE_CODE (outer_type) == REAL_TYPE)
9412 if (TREE_CODE (inner_type) == REAL_TYPE)
9413 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
9414 if (TREE_CODE (inner_type) == INTEGER_TYPE)
9416 if (TYPE_UNSIGNED (inner_type))
9418 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
9421 else if (TREE_CODE (outer_type) == INTEGER_TYPE)
9423 if (TREE_CODE (inner_type) == REAL_TYPE)
9424 return tree_expr_nonnegative_p (TREE_OPERAND (t,0));
9425 if (TREE_CODE (inner_type) == INTEGER_TYPE)
9426 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
9427 && TYPE_UNSIGNED (inner_type);
/* Conditional: both selectable arms must be nonnegative.  */
9433 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1))
9434 && tree_expr_nonnegative_p (TREE_OPERAND (t, 2));
9436 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
9438 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
9439 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
9441 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
9442 || tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
9444 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
9446 return tree_expr_nonnegative_p (expr_last (TREE_OPERAND (t, 1)));
9448 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
/* Wrappers: look through to the operand.  */
9449 case NON_LVALUE_EXPR:
9450 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
9452 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
/* TARGET_EXPR: inspect the value the initializer stores in the slot.  */
9456 tree temp = TARGET_EXPR_SLOT (t);
9457 t = TARGET_EXPR_INITIAL (t);
9459 /* If the initializer is non-void, then it's a normal expression
9460 that will be assigned to the slot. */
9461 if (!VOID_TYPE_P (t))
9462 return tree_expr_nonnegative_p (t);
9464 /* Otherwise, the initializer sets the slot in some way. One common
9465 way is an assignment statement at the end of the initializer. */
9468 if (TREE_CODE (t) == BIND_EXPR)
9469 t = expr_last (BIND_EXPR_BODY (t));
9470 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
9471 || TREE_CODE (t) == TRY_CATCH_EXPR)
9472 t = expr_last (TREE_OPERAND (t, 0));
9473 else if (TREE_CODE (t) == STATEMENT_LIST)
9478 if (TREE_CODE (t) == MODIFY_EXPR
9479 && TREE_OPERAND (t, 0) == temp)
9480 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
/* Calls: recognize built-in math functions with known result sign.  */
9487 tree fndecl = get_callee_fndecl (t);
9488 tree arglist = TREE_OPERAND (t, 1);
9490 && DECL_BUILT_IN (fndecl)
9491 && DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_MD)
9492 switch (DECL_FUNCTION_CODE (fndecl))
9494 #define CASE_BUILTIN_F(BUILT_IN_FN) \
9495 case BUILT_IN_FN: case BUILT_IN_FN##F: case BUILT_IN_FN##L:
9496 #define CASE_BUILTIN_I(BUILT_IN_FN) \
9497 case BUILT_IN_FN: case BUILT_IN_FN##L: case BUILT_IN_FN##LL:
/* These built-ins always return a nonnegative value.  */
9499 CASE_BUILTIN_F (BUILT_IN_ACOS)
9500 CASE_BUILTIN_F (BUILT_IN_ACOSH)
9501 CASE_BUILTIN_F (BUILT_IN_CABS)
9502 CASE_BUILTIN_F (BUILT_IN_COSH)
9503 CASE_BUILTIN_F (BUILT_IN_ERFC)
9504 CASE_BUILTIN_F (BUILT_IN_EXP)
9505 CASE_BUILTIN_F (BUILT_IN_EXP10)
9506 CASE_BUILTIN_F (BUILT_IN_EXP2)
9507 CASE_BUILTIN_F (BUILT_IN_FABS)
9508 CASE_BUILTIN_F (BUILT_IN_FDIM)
9509 CASE_BUILTIN_F (BUILT_IN_FREXP)
9510 CASE_BUILTIN_F (BUILT_IN_HYPOT)
9511 CASE_BUILTIN_F (BUILT_IN_POW10)
9512 CASE_BUILTIN_I (BUILT_IN_FFS)
9513 CASE_BUILTIN_I (BUILT_IN_PARITY)
9514 CASE_BUILTIN_I (BUILT_IN_POPCOUNT)
9518 CASE_BUILTIN_F (BUILT_IN_SQRT)
9519 /* sqrt(-0.0) is -0.0. */
9520 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (t))))
9522 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
9524 CASE_BUILTIN_F (BUILT_IN_ASINH)
9525 CASE_BUILTIN_F (BUILT_IN_ATAN)
9526 CASE_BUILTIN_F (BUILT_IN_ATANH)
9527 CASE_BUILTIN_F (BUILT_IN_CBRT)
9528 CASE_BUILTIN_F (BUILT_IN_CEIL)
9529 CASE_BUILTIN_F (BUILT_IN_ERF)
9530 CASE_BUILTIN_F (BUILT_IN_EXPM1)
9531 CASE_BUILTIN_F (BUILT_IN_FLOOR)
9532 CASE_BUILTIN_F (BUILT_IN_FMOD)
9533 CASE_BUILTIN_F (BUILT_IN_LDEXP)
9534 CASE_BUILTIN_F (BUILT_IN_LLRINT)
9535 CASE_BUILTIN_F (BUILT_IN_LLROUND)
9536 CASE_BUILTIN_F (BUILT_IN_LRINT)
9537 CASE_BUILTIN_F (BUILT_IN_LROUND)
9538 CASE_BUILTIN_F (BUILT_IN_MODF)
9539 CASE_BUILTIN_F (BUILT_IN_NEARBYINT)
9540 CASE_BUILTIN_F (BUILT_IN_POW)
9541 CASE_BUILTIN_F (BUILT_IN_RINT)
9542 CASE_BUILTIN_F (BUILT_IN_ROUND)
9543 CASE_BUILTIN_F (BUILT_IN_SIGNBIT)
9544 CASE_BUILTIN_F (BUILT_IN_SINH)
9545 CASE_BUILTIN_F (BUILT_IN_TANH)
9546 CASE_BUILTIN_F (BUILT_IN_TRUNC)
9547 /* True if the 1st argument is nonnegative. */
9548 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
9550 CASE_BUILTIN_F (BUILT_IN_FMAX)
9551 /* True if the 1st OR 2nd arguments are nonnegative. */
9552 return tree_expr_nonnegative_p (TREE_VALUE (arglist))
9553 || tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
9555 CASE_BUILTIN_F (BUILT_IN_FMIN)
9556 /* True if the 1st AND 2nd arguments are nonnegative. */
9557 return tree_expr_nonnegative_p (TREE_VALUE (arglist))
9558 && tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
9560 CASE_BUILTIN_F (BUILT_IN_COPYSIGN)
9561 /* True if the 2nd argument is nonnegative. */
9562 return tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
9566 #undef CASE_BUILTIN_F
9567 #undef CASE_BUILTIN_I
9571 /* ... fall through ... */
9574 if (truth_value_p (TREE_CODE (t)))
9575 /* Truth values evaluate to 0 or 1, which is nonnegative. */
9579 /* We don't know sign of `t', so be conservative and return false. */
9583 /* Return true when T is an address and is known to be nonzero.
9584 For floating point we further ensure that T is not denormal.
9585 Similar logic is present in nonzero_address in rtlanal.c */
9588 tree_expr_nonzero_p (tree t)
9590 tree type = TREE_TYPE (t);
9592 /* Doing something useful for floating point would need more work. */
9593 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
/* Dispatch on the expression's code; the case labels are elided from
   this listing.  Signed-overflow-based deductions are only valid when
   overflow is undefined (!TYPE_UNSIGNED && !flag_wrapv).  */
9596 switch (TREE_CODE (t))
9599 if (!TYPE_UNSIGNED (type) && !flag_wrapv)
9600 return tree_expr_nonzero_p (TREE_OPERAND (t, 0));
/* Constants: nonzero unless literally zero.  */
9603 return !integer_zerop (t);
/* Addition.  */
9606 if (!TYPE_UNSIGNED (type) && !flag_wrapv)
9608 /* With the presence of negative values it is hard
9609 to say something. */
9610 if (!tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
9611 || !tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
9613 /* One of operands must be positive and the other non-negative. */
9614 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
9615 || tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
/* Multiplication: nonzero * nonzero cannot be zero without overflow.  */
9620 if (!TYPE_UNSIGNED (type) && !flag_wrapv)
9622 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
9623 && tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
/* Conversions: safe when not narrowing (a wider nonzero value could
   truncate to zero).  */
9629 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
9630 tree outer_type = TREE_TYPE (t);
9632 return (TYPE_PRECISION (inner_type) >= TYPE_PRECISION (outer_type)
9633 && tree_expr_nonzero_p (TREE_OPERAND (t, 0)));
/* Addresses.  */
9638 /* Weak declarations may link to NULL. */
9639 if (DECL_P (TREE_OPERAND (t, 0)))
9640 return !DECL_WEAK (TREE_OPERAND (t, 0));
9641 /* Constants and all other cases are never weak. */
/* Conditional: both selectable arms must be nonzero.  */
9645 return (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
9646 && tree_expr_nonzero_p (TREE_OPERAND (t, 2)));
9649 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
9650 && tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
9653 if (tree_expr_nonzero_p (TREE_OPERAND (t, 0)))
9655 /* When both operands are nonzero, then MAX must be too. */
9656 if (tree_expr_nonzero_p (TREE_OPERAND (t, 1)))
9659 /* MAX where operand 0 is positive is positive. */
9660 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
9662 /* MAX where operand 1 is positive is positive. */
9663 else if (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
9664 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
9671 return tree_expr_nonzero_p (TREE_OPERAND (t, 1));
/* Wrappers: look through to the operand.  */
9674 case NON_LVALUE_EXPR:
9675 return tree_expr_nonzero_p (TREE_OPERAND (t, 0));
9678 return tree_expr_nonzero_p (TREE_OPERAND (t, 1))
9679 || tree_expr_nonzero_p (TREE_OPERAND (t, 0));
9687 /* Return true if `r' is known to be non-negative.
9688 Only handles constants at the moment. */
9691 rtl_expr_nonnegative_p (rtx r)
/* Dispatch on the RTL code; the case labels are elided from this
   listing (the codes handled are the constant forms below).  */
9693 switch (GET_CODE (r))
/* Integer constant: test its value directly.  */
9696 return INTVAL (r) >= 0;
/* CONST_DOUBLE holding an integer (VOIDmode): sign is in the high
   half.  */
9699 if (GET_MODE (r) == VOIDmode)
9700 return CONST_DOUBLE_HIGH (r) >= 0;
/* Constant vector: every element must be nonnegative.  */
9708 units = CONST_VECTOR_NUNITS (r);
9710 for (i = 0; i < units; ++i)
9712 elt = CONST_VECTOR_ELT (r, i);
9713 if (!rtl_expr_nonnegative_p (elt))
9722 /* These are always nonnegative. */
9731 /* See if we are applying CODE, a relational, to the highest or lowest
9732 possible integer of TYPE. If so, then the result is a compile
9736 fold_relational_hi_lo (enum tree_code *code_p, const tree type, tree *op0_p,
9741 enum tree_code code = *code_p;
9742 int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (op1)));
/* Only handle integral/pointer comparisons against a constant that
   fits in a single HOST_WIDE_INT.  */
9744 if (TREE_CODE (op1) == INTEGER_CST
9745 && ! TREE_CONSTANT_OVERFLOW (op1)
9746 && width <= HOST_BITS_PER_WIDE_INT
9747 && (INTEGRAL_TYPE_P (TREE_TYPE (op1))
9748 || POINTER_TYPE_P (TREE_TYPE (op1))))
9750 unsigned HOST_WIDE_INT signed_max;
9751 unsigned HOST_WIDE_INT max, min;
/* signed_max = 2**(width-1) - 1, the largest signed value of WIDTH
   bits.  */
9753 signed_max = ((unsigned HOST_WIDE_INT) 1 << (width - 1)) - 1;
/* MAX/MIN depend on the signedness of OP1's type.  */
9755 if (TYPE_UNSIGNED (TREE_TYPE (op1)))
9757 max = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
9763 min = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
/* OP1 is the type's maximum: some comparisons become constant and the
   rest degenerate (the switch on CODE is elided from this listing).  */
9766 if (TREE_INT_CST_HIGH (op1) == 0
9767 && TREE_INT_CST_LOW (op1) == max)
9771 return omit_one_operand (type, integer_zero_node, op0);
9777 return omit_one_operand (type, integer_one_node, op0);
9783 /* The GE_EXPR and LT_EXPR cases above are not normally
9784 reached because of previous transformations. */
/* OP1 is max - 1: shift the bound up by one and adjust CODE.  */
9789 else if (TREE_INT_CST_HIGH (op1) == 0
9790 && TREE_INT_CST_LOW (op1) == max - 1)
9795 *op1_p = const_binop (PLUS_EXPR, op1, integer_one_node, 0);
9799 *op1_p = const_binop (PLUS_EXPR, op1, integer_one_node, 0);
/* OP1 is the type's minimum.  */
9804 else if (TREE_INT_CST_HIGH (op1) == (min ? -1 : 0)
9805 && TREE_INT_CST_LOW (op1) == min)
9809 return omit_one_operand (type, integer_zero_node, op0);
9816 return omit_one_operand (type, integer_one_node, op0);
/* OP1 is min + 1: shift the bound down by one and adjust CODE.  */
9825 else if (TREE_INT_CST_HIGH (op1) == (min ? -1 : 0)
9826 && TREE_INT_CST_LOW (op1) == min + 1)
9831 *op1_p = const_binop (MINUS_EXPR, op1, integer_one_node, 0);
9835 *op1_p = const_binop (MINUS_EXPR, op1, integer_one_node, 0);
/* Unsigned OP1 equal to signed_max: rewrite as a sign test in the
   corresponding signed type.  */
9841 else if (TREE_INT_CST_HIGH (op1) == 0
9842 && TREE_INT_CST_LOW (op1) == signed_max
9843 && TYPE_UNSIGNED (TREE_TYPE (op1))
9844 /* signed_type does not work on pointer types. */
9845 && INTEGRAL_TYPE_P (TREE_TYPE (op1)))
9847 /* The following case also applies to X < signed_max+1
9848 and X >= signed_max+1 because of previous transformations. */
9849 if (code == LE_EXPR || code == GT_EXPR)
9851 tree st0, st1, exp, retval;
9852 st0 = lang_hooks.types.signed_type (TREE_TYPE (op0));
9853 st1 = lang_hooks.types.signed_type (TREE_TYPE (op1));
/* X <= signed_max  becomes  (signed) X >= 0, and
   X >  signed_max  becomes  (signed) X <  0.  */
9855 exp = build2 (code == LE_EXPR ? GE_EXPR: LT_EXPR,
9857 fold_convert (st0, op0),
9858 fold_convert (st1, integer_zero_node));
9861 = nondestructive_fold_binary_to_constant (TREE_CODE (exp),
9863 TREE_OPERAND (exp, 0),
9864 TREE_OPERAND (exp, 1));
9866 /* If we are in gimple form, then returning EXP would create
9867 non-gimple expressions. Clearing it is safe and ensures
9868 we do not allow a non-gimple expression to escape. */
9872 return (retval ? retval : exp);
9881 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
9882 attempt to fold the expression to a constant without modifying TYPE,
9885 If the expression could be simplified to a constant, then return
9886 the constant. If the expression would not be simplified to a
9887 constant, then return NULL_TREE.
9889 Note this is primarily designed to be called after gimplification
9890 of the tree structures and when at least one operand is a constant.
9891 As a result of those simplifying assumptions this routine is far
9892 simpler than the generic fold routine. */
/* Fold the binary expression (CODE TYPE OP0 OP1) into a constant tree
   without modifying OP0 or OP1; returns the folded tree or NULL_TREE.
   NOTE(review): this listing has lost many physical lines (the return
   type, local declarations, braces, the switch head and several case
   labels).  Code below is left byte-identical; only comments added.  */
9895 nondestructive_fold_binary_to_constant (enum tree_code code, tree type,
9903 /* If this is a commutative operation, and ARG0 is a constant, move it
9904 to ARG1 to reduce the number of tests below. */
9905 if (commutative_tree_code (code)
9906 && (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST))
9913 /* If either operand is a complex type, extract its real component. */
9914 if (TREE_CODE (op0) == COMPLEX_CST)
9915 subop0 = TREE_REALPART (op0);
9919 if (TREE_CODE (op1) == COMPLEX_CST)
9920 subop1 = TREE_REALPART (op1);
9924 /* Note if either argument is not a real or integer constant.
9925 With a few exceptions, simplification is limited to cases
9926 where both arguments are constants. */
9927 if ((TREE_CODE (subop0) != INTEGER_CST
9928 && TREE_CODE (subop0) != REAL_CST)
9929 || (TREE_CODE (subop1) != INTEGER_CST
9930 && TREE_CODE (subop1) != REAL_CST))
/* Presumably inside the PLUS_EXPR case of the switch (the case label
   is among the missing lines) -- confirm against the full source.  */
9936 /* (plus (address) (const_int)) is a constant. */
9937 if (TREE_CODE (op0) == PLUS_EXPR
9938 && TREE_CODE (op1) == INTEGER_CST
9939 && (TREE_CODE (TREE_OPERAND (op0, 0)) == ADDR_EXPR
9940 || (TREE_CODE (TREE_OPERAND (op0, 0)) == NOP_EXPR
9941 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (op0, 0), 0))
9943 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
9945 return build2 (PLUS_EXPR, type, TREE_OPERAND (op0, 0),
9946 const_binop (PLUS_EXPR, op1,
9947 TREE_OPERAND (op0, 1), 0));
9955 /* Both arguments are constants. Simplify. */
9956 tem = const_binop (code, op0, op1, 0);
9957 if (tem != NULL_TREE)
9959 /* The return value should always have the same type as
9960 the original expression. */
9961 if (TREE_TYPE (tem) != type)
9962 tem = fold_convert (type, tem);
/* Presumably the MINUS_EXPR case (label missing from listing).  */
9969 /* Fold &x - &x. This can happen from &x.foo - &x.
9970 This is unsafe for certain floats even in non-IEEE formats.
9971 In IEEE, it is unsafe because it does wrong for NaNs.
9972 Also note that operand_equal_p is always false if an
9973 operand is volatile. */
9974 if (! FLOAT_TYPE_P (type) && operand_equal_p (op0, op1, 0))
9975 return fold_convert (type, integer_zero_node);
9981 /* Special case multiplication or bitwise AND where one argument is zero.  */
9983 if (! FLOAT_TYPE_P (type) && integer_zerop (op1))
9984 return omit_one_operand (type, op1, op0);
/* x * 0.0 folds to 0.0 only when neither NaNs nor signed zeros need
   be honored for this mode.  */
9986 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (op0)))
9987 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op0)))
9988 && real_zerop (op1))
9989 return omit_one_operand (type, op1, op0);
/* Presumably the BIT_IOR_EXPR case: x | -1 is -1.  */
9994 /* Special case when we know the result will be all ones. */
9995 if (integer_all_onesp (op1))
9996 return omit_one_operand (type, op1, op0);
10000 case TRUNC_DIV_EXPR:
10001 case ROUND_DIV_EXPR:
10002 case FLOOR_DIV_EXPR:
10003 case CEIL_DIV_EXPR:
10004 case EXACT_DIV_EXPR:
10005 case TRUNC_MOD_EXPR:
10006 case ROUND_MOD_EXPR:
10007 case FLOOR_MOD_EXPR:
10008 case CEIL_MOD_EXPR:
10010 /* Division by zero is undefined. */
10011 if (integer_zerop (op1))
/* Floating divide by a literal zero folds only when the mode cannot
   represent infinities.  */
10014 if (TREE_CODE (op1) == REAL_CST
10015 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (op1)))
10016 && real_zerop (op1))
/* Presumably the MIN_EXPR case: MIN (x, TYPE_MIN) == TYPE_MIN
   (case label missing from listing).  */
10022 if (INTEGRAL_TYPE_P (type)
10023 && operand_equal_p (op1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
10024 return omit_one_operand (type, op1, op0);
/* Presumably the MAX_EXPR case: MAX (x, TYPE_MAX) == TYPE_MAX
   (case label missing from listing).  */
10029 if (INTEGRAL_TYPE_P (type)
10030 && TYPE_MAX_VALUE (type)
10031 && operand_equal_p (op1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
10032 return omit_one_operand (type, op1, op0);
/* Presumably RSHIFT_EXPR, falling through to the LSHIFT_EXPR case.  */
10037 /* Optimize -1 >> x for arithmetic right shifts. */
10038 if (integer_all_onesp (op0) && ! TYPE_UNSIGNED (type))
10039 return omit_one_operand (type, op0, op1);
10040 /* ... fall through ... */
10043 if (integer_zerop (op0))
10044 return omit_one_operand (type, op0, op1);
10046 /* Since negative shift count is not well-defined, don't
10047 try to compute it in the compiler. */
10048 if (TREE_CODE (op1) == INTEGER_CST && tree_int_cst_sgn (op1) < 0)
/* Presumably the LROTATE_EXPR/RROTATE_EXPR cases.  */
10055 /* -1 rotated either direction by any amount is still -1. */
10056 if (integer_all_onesp (op0))
10057 return omit_one_operand (type, op0, op1);
10059 /* 0 rotated either direction by any amount is still zero. */
10060 if (integer_zerop (op0))
10061 return omit_one_operand (type, op0, op1);
/* Presumably the COMPLEX_EXPR case: pack the two constants.  */
10067 return build_complex (type, op0, op1);
/* Comparison codes from here on.  */
10076 /* If one arg is a real or integer constant, put it last. */
10077 if ((TREE_CODE (op0) == INTEGER_CST
10078 && TREE_CODE (op1) != INTEGER_CST)
10079 || (TREE_CODE (op0) == REAL_CST
/* NOTE(review): `TREE_CODE (op0) != REAL_CST' on the next line is
   always false; by symmetry with the INTEGER_CST clause above it
   should surely test OP1 -- real constants are never moved last.  */
10080 && TREE_CODE (op0) != REAL_CST))
10087 code = swap_tree_comparison (code);
10090 /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
10091 This transformation affects the cases which are handled in later
10092 optimizations involving comparisons with non-negative constants. */
10093 if (TREE_CODE (op1) == INTEGER_CST
10094 && TREE_CODE (op0) != INTEGER_CST
10095 && tree_int_cst_sgn (op1) > 0)
/* Two branches: presumably one rewrites GE -> GT and the other
   LT -> LE; the surrounding case/assignment lines are missing.  */
10101 op1 = const_binop (MINUS_EXPR, op1, integer_one_node, 0);
10106 op1 = const_binop (MINUS_EXPR, op1, integer_one_node, 0);
10114 tem = fold_relational_hi_lo (&code, type, &op0, &op1);
10118 /* Fall through. */
10121 case UNORDERED_EXPR:
10131 return fold_relational_const (code, type, op0, op1);
10134 /* This could probably be handled. */
10137 case TRUTH_AND_EXPR:
10138 /* If second arg is constant zero, result is zero, but first arg
10139 must be evaluated. */
10140 if (integer_zerop (op1))
10141 return omit_one_operand (type, op1, op0);
10142 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
10143 case will be handled here. */
10144 if (integer_zerop (op0))
10145 return omit_one_operand (type, op0, op1);
/* Both operands constant and nonzero (zeros handled above), so the
   conjunction is true.  */
10146 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
10147 return constant_boolean_node (true, type);
10150 case TRUTH_OR_EXPR:
10151 /* If second arg is constant true, result is true, but we must
10152 evaluate first arg. */
10153 if (TREE_CODE (op1) == INTEGER_CST && ! integer_zerop (op1))
10154 return omit_one_operand (type, op1, op0);
10155 /* Likewise for first arg, but note this only occurs here for
   TRUTH_OR_EXPR.  */
10157 if (TREE_CODE (op0) == INTEGER_CST && ! integer_zerop (op0))
10158 return omit_one_operand (type, op0, op1);
/* Both operands constant and zero (nonzero handled above), so the
   disjunction is false.  */
10159 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
10160 return constant_boolean_node (false, type);
10163 case TRUTH_XOR_EXPR:
10164 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
/* XOR of the operands' truth values.  */
10166 int x = ! integer_zerop (op0) ^ ! integer_zerop (op1);
10167 return constant_boolean_node (x, type);
10176 /* Given the components of a unary expression CODE, TYPE and OP0,
10177 attempt to fold the expression to a constant without modifying
10180 If the expression could be simplified to a constant, then return
10181 the constant. If the expression would not be simplified to a
10182 constant, then return NULL_TREE.
10184 Note this is primarily designed to be called after gimplification
10185 of the tree structures and when op0 is a constant. As a result
10186 of those simplifying assumptions this routine is far simpler than
10187 the generic fold routine. */
/* Fold the unary expression (CODE TYPE OP0) to a constant without
   modifying OP0; returns the constant or NULL_TREE.  NOTE(review):
   this listing is missing the return type, local declarations,
   braces, the switch head and several case labels; code is left
   byte-identical.  */
10190 nondestructive_fold_unary_to_constant (enum tree_code code, tree type,
10193 /* Make sure we have a suitable constant argument. */
/* For conversions, validity is judged on the real component of a
   complex constant.  */
10194 if (code == NOP_EXPR || code == FLOAT_EXPR || code == CONVERT_EXPR)
10198 if (TREE_CODE (op0) == COMPLEX_CST)
10199 subop = TREE_REALPART (op0);
10203 if (TREE_CODE (subop) != INTEGER_CST && TREE_CODE (subop) != REAL_CST)
/* Conversion cases; the NOP/CONVERT/FLOAT labels presumably precede
   these FIX_* labels in the full source.  */
10212 case FIX_TRUNC_EXPR:
10213 case FIX_FLOOR_EXPR:
10214 case FIX_CEIL_EXPR:
10215 return fold_convert_const (code, type, op0);
/* Presumably the NEGATE_EXPR case (label missing from listing).  */
10218 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
10219 return fold_negate_const (op0, type);
/* Presumably the ABS_EXPR case (label missing from listing).  */
10224 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
10225 return fold_abs_const (op0, type);
/* Presumably the BIT_NOT_EXPR case (label missing from listing).  */
10230 if (TREE_CODE (op0) == INTEGER_CST)
10231 return fold_not_const (op0, type);
10235 case REALPART_EXPR:
10236 if (TREE_CODE (op0) == COMPLEX_CST)
10237 return TREE_REALPART (op0);
10241 case IMAGPART_EXPR:
10242 if (TREE_CODE (op0) == COMPLEX_CST)
10243 return TREE_IMAGPART (op0);
/* Presumably the CONJ_EXPR case: conjugate by negating the
   imaginary part.  */
10248 if (TREE_CODE (op0) == COMPLEX_CST
10249 && TREE_CODE (TREE_TYPE (op0)) == COMPLEX_TYPE)
10250 return build_complex (type, TREE_REALPART (op0),
10251 negate_expr (TREE_IMAGPART (op0)));
10259 /* If EXP represents referencing an element in a constant string
10260 (either via pointer arithmetic or array indexing), return the
10261 tree representing the value accessed, otherwise return NULL. */
/* If EXP references an element of a constant string (via pointer
   arithmetic or array indexing), return the element value as a tree;
   otherwise return NULL.  NOTE(review): this listing is missing local
   declarations, braces and the head of the final conditional; code is
   left byte-identical.  */
10264 fold_read_from_constant_string (tree exp)
10266 if (TREE_CODE (exp) == INDIRECT_REF || TREE_CODE (exp) == ARRAY_REF)
10268 tree exp1 = TREE_OPERAND (exp, 0);
/* INDIRECT_REF: string_constant decomposes pointer + offset into the
   STRING_CST and its byte index.  */
10272 if (TREE_CODE (exp) == INDIRECT_REF)
10273 string = string_constant (exp1, &index);
/* ARRAY_REF branch: normalize the index against the array's lower
   bound, working in sizetype.  */
10276 tree low_bound = array_ref_low_bound (exp);
10277 index = fold_convert (sizetype, TREE_OPERAND (exp, 1));
10279 /* Optimize the special-case of a zero lower bound.
10281 We convert the low_bound to sizetype to avoid some problems
10282 with constant folding. (E.g. suppose the lower bound is 1,
10283 and its mode is QI. Without the conversion, (ARRAY
10284 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
10285 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
10286 if (! integer_zerop (low_bound))
10287 index = size_diffop (index, fold_convert (sizetype, low_bound));
/* Fold only when the index is a known in-bounds constant and the
   element type is a one-byte integer mode (the leading `if (string'
   line is among those missing from this listing).  */
10293 && TREE_TYPE (exp) == TREE_TYPE (TREE_TYPE (string))
10294 && TREE_CODE (string) == STRING_CST
10295 && TREE_CODE (index) == INTEGER_CST
10296 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
10297 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
10299 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
10300 return fold_convert (TREE_TYPE (exp),
10301 build_int_2 ((TREE_STRING_POINTER (string)
10302 [TREE_INT_CST_LOW (index)]), 0));
10307 /* Return the tree for neg (ARG0) when ARG0 is known to be either
10308 an integer constant or real constant.
10310 TYPE is the type of the result. */
10313 fold_negate_const (tree arg0, tree type)
10315 tree t = NULL_TREE;
10317 if (TREE_CODE (arg0) == INTEGER_CST)
10319 unsigned HOST_WIDE_INT low;
10320 HOST_WIDE_INT high;
10321 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
10322 TREE_INT_CST_HIGH (arg0),
10324 t = build_int_2 (low, high);
10325 TREE_TYPE (t) = type;
10327 = (TREE_OVERFLOW (arg0)
10328 | force_fit_type (t, overflow && !TYPE_UNSIGNED (type)));
10329 TREE_CONSTANT_OVERFLOW (t)
10330 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg0);
10332 else if (TREE_CODE (arg0) == REAL_CST)
10333 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
10334 #ifdef ENABLE_CHECKING
10342 /* Return the tree for abs (ARG0) when ARG0 is known to be either
10343 an integer constant or real constant.
10345 TYPE is the type of the result. */
10348 fold_abs_const (tree arg0, tree type)
10350 tree t = NULL_TREE;
10352 if (TREE_CODE (arg0) == INTEGER_CST)
10354 /* If the value is unsigned, then the absolute value is
10355 the same as the ordinary value. */
10356 if (TYPE_UNSIGNED (type))
10358 /* Similarly, if the value is non-negative. */
10359 else if (INT_CST_LT (integer_minus_one_node, arg0))
10361 /* If the value is negative, then the absolute value is
10365 unsigned HOST_WIDE_INT low;
10366 HOST_WIDE_INT high;
10367 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
10368 TREE_INT_CST_HIGH (arg0),
10370 t = build_int_2 (low, high);
10371 TREE_TYPE (t) = type;
10373 = (TREE_OVERFLOW (arg0)
10374 | force_fit_type (t, overflow));
10375 TREE_CONSTANT_OVERFLOW (t)
10376 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg0);
10380 else if (TREE_CODE (arg0) == REAL_CST)
10382 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
10383 return build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
10387 #ifdef ENABLE_CHECKING
10395 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
10396 constant. TYPE is the type of the result. */
10399 fold_not_const (tree arg0, tree type)
10401 tree t = NULL_TREE;
10403 if (TREE_CODE (arg0) == INTEGER_CST)
10405 t = build_int_2 (~ TREE_INT_CST_LOW (arg0),
10406 ~ TREE_INT_CST_HIGH (arg0));
10407 TREE_TYPE (t) = type;
10408 force_fit_type (t, 0);
10409 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg0);
10410 TREE_CONSTANT_OVERFLOW (t) = TREE_CONSTANT_OVERFLOW (arg0);
10412 #ifdef ENABLE_CHECKING
10420 /* Given CODE, a relational operator, the target type, TYPE and two
10421 constant operands OP0 and OP1, return the result of the
10422 relational operation. If the result is not a compile time
10423 constant, then return NULL_TREE. */
/* Given relational CODE, target TYPE, and constant operands OP0 and
   OP1, return the constant result of the comparison, or NULL_TREE if
   it is not a compile-time constant.  NOTE(review): this listing is
   missing the return type, braces, the NaN-handling switch bodies,
   the lines that set and apply INVERT, and early returns; code is
   left byte-identical.  */
10426 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
10428 int result, invert;
10430 /* From here on, the only cases we handle are when the result is
10431 known to be a constant. */
10433 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
10435 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
10436 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
10438 /* Handle the cases where either operand is a NaN. */
10439 if (real_isnan (c0) || real_isnan (c1))
/* Switch over CODE selecting the NaN result (most case labels are
   missing from this listing); UNORDERED_EXPR compares true on NaN.  */
10449 case UNORDERED_EXPR:
10463 if (flag_trapping_math)
10472 return constant_boolean_node (result, type);
/* Neither operand is a NaN: an ordinary real comparison.  */
10475 return constant_boolean_node (real_compare (code, c0, c1), type);
10478 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
10480 To compute GT, swap the arguments and do LT.
10481 To compute GE, do LT and invert the result.
10482 To compute LE, swap the arguments, do LT and invert the result.
10483 To compute NE, do EQ and invert the result.
10485 Therefore, the code below must handle only EQ and LT. */
10487 if (code == LE_EXPR || code == GT_EXPR)
/* The accompanying operand swap of OP0/OP1 is among the lines
   missing from this listing.  */
10492 code = swap_tree_comparison (code);
10495 /* Note that it is safe to invert for real values here because we
10496 have already handled the one case that it matters. */
10499 if (code == NE_EXPR || code == GE_EXPR)
/* Presumably `invert = 1;' is set here (line missing).  */
10502 code = invert_tree_comparison (code, false);
10505 /* Compute a result for LT or EQ if args permit;
10506 Otherwise return T. */
10507 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
10509 if (code == EQ_EXPR)
10510 result = tree_int_cst_equal (op0, op1);
10511 else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
10512 result = INT_CST_LT_UNSIGNED (op0, op1);
/* Signed integer comparison (its `else' line is missing).  */
10514 result = INT_CST_LT (op0, op1);
/* Presumably `if (invert) result ^= 1;' precedes this return among
   the missing lines.  */
10521 return constant_boolean_node (result, type);
10524 /* Build an expression for the address of T. Folds away INDIRECT_REF to
10525 avoid confusing the gimplify process. */
10528 build_fold_addr_expr_with_type (tree t, tree ptrtype)
10530 if (TREE_CODE (t) == INDIRECT_REF)
10532 t = TREE_OPERAND (t, 0);
10533 if (TREE_TYPE (t) != ptrtype)
10534 t = build1 (NOP_EXPR, ptrtype, t);
10540 while (handled_component_p (base)
10541 || TREE_CODE (base) == REALPART_EXPR
10542 || TREE_CODE (base) == IMAGPART_EXPR)
10543 base = TREE_OPERAND (base, 0);
10545 TREE_ADDRESSABLE (base) = 1;
10547 t = build1 (ADDR_EXPR, ptrtype, t);
10554 build_fold_addr_expr (tree t)
10556 return build_fold_addr_expr_with_type (t, build_pointer_type (TREE_TYPE (t)));
10559 /* Builds an expression for an indirection through T, simplifying some
   cases.  */
10563 build_fold_indirect_ref (tree t)
10565 tree type = TREE_TYPE (TREE_TYPE (t));
10570 if (TREE_CODE (sub) == ADDR_EXPR)
10572 tree op = TREE_OPERAND (sub, 0);
10573 tree optype = TREE_TYPE (op);
10575 if (lang_hooks.types_compatible_p (type, optype))
10577 /* *(foo *)&fooarray => fooarray[0] */
10578 else if (TREE_CODE (optype) == ARRAY_TYPE
10579 && lang_hooks.types_compatible_p (type, TREE_TYPE (optype)))
10580 return build4 (ARRAY_REF, type, op, size_zero_node, NULL_TREE, NULL_TREE);
10583 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
10584 subtype = TREE_TYPE (sub);
10585 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
10586 && lang_hooks.types_compatible_p (type, TREE_TYPE (TREE_TYPE (subtype))))
10588 sub = build_fold_indirect_ref (sub);
10589 return build4 (ARRAY_REF, type, sub, size_zero_node, NULL_TREE, NULL_TREE);
10592 return build1 (INDIRECT_REF, type, t);
10595 /* Strip non-trapping, non-side-effecting tree nodes from an expression
10596 whose result is ignored. The type of the returned tree need not be
10597 the same as the original expression. */
/* Strip non-trapping, non-side-effecting wrapper nodes from an
   expression T whose result is ignored; the returned tree's type need
   not match T's.  NOTE(review): this listing is missing the return
   type, the loop construct enclosing the switch, the code-class case
   labels, and several break/return lines; code is left
   byte-identical.  */
10600 fold_ignored_result (tree t)
10602 if (!TREE_SIDE_EFFECTS (t))
10603 return integer_zero_node;
/* Peel one wrapper per iteration; the enclosing loop line is among
   those missing from this listing.  */
10606 switch (TREE_CODE_CLASS (TREE_CODE (t)))
/* Unary class: descend into the sole operand.  */
10609 t = TREE_OPERAND (t, 0);
/* Binary/comparison classes: keep whichever operand carries the side
   effects; presumably T is returned when both do (lines missing).  */
10614 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
10615 t = TREE_OPERAND (t, 0);
10616 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
10617 t = TREE_OPERAND (t, 1);
10623 switch (TREE_CODE (t))
10625 case COMPOUND_EXPR:
/* If the second operand has side effects the whole COMPOUND_EXPR
   must be kept (its `return t;' line is missing); otherwise only the
   first operand matters.  */
10626 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
10628 t = TREE_OPERAND (t, 0);
/* Presumably the COND_EXPR case: both arms must be free of side
   effects before stripping down to the condition.  */
10632 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
10633 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
10635 t = TREE_OPERAND (t, 0);
10648 #include "gt-fold-const.h"