1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
22 /*@@ This file should be rewritten to use an arbitrary precision
23 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
24 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
25 @@ The routines that translate from the ap rep should
26 @@ warn if precision et. al. is lost.
27 @@ This would also make life easier when this technology is used
28 @@ for cross-compilers. */
30 /* The entry points in this file are fold, size_int_wide, size_binop
33 fold takes a tree as argument and returns a simplified tree.
35 size_binop takes a tree code for an arithmetic operation
36 and two operands that are trees, and produces a tree for the
37 result, assuming the type comes from `sizetype'.
39 size_int takes an integer value, and creates a tree constant
40 with type from `sizetype'.
42 force_fit_type takes a constant and prior overflow indicator, and
43 forces the value to fit the type. It returns an overflow indicator. */
47 #include "coretypes.h"
58 #include "langhooks.h"
61 /* The following constants represent a bit based encoding of GCC's
62 comparison operators. This encoding simplifies transformations
63 on relational comparison operators, such as AND and OR. */
64 enum comparison_code {
83 static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
84 static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *);
85 static bool negate_mathfn_p (enum built_in_function);
86 static bool negate_expr_p (tree);
87 static tree negate_expr (tree);
88 static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
89 static tree associate_trees (tree, tree, enum tree_code, tree);
90 static tree const_binop (enum tree_code, tree, tree, int);
91 static hashval_t size_htab_hash (const void *);
92 static int size_htab_eq (const void *, const void *);
93 static tree fold_convert_const (enum tree_code, tree, tree);
94 static enum tree_code invert_tree_comparison (enum tree_code, bool);
95 static enum comparison_code comparison_to_compcode (enum tree_code);
96 static enum tree_code compcode_to_comparison (enum comparison_code);
97 static tree combine_comparisons (enum tree_code, enum tree_code,
98 enum tree_code, tree, tree, tree);
99 static int truth_value_p (enum tree_code);
100 static int operand_equal_for_comparison_p (tree, tree, tree);
101 static int twoval_comparison_p (tree, tree *, tree *, int *);
102 static tree eval_subst (tree, tree, tree, tree, tree);
103 static tree pedantic_omit_one_operand (tree, tree, tree);
104 static tree distribute_bit_expr (enum tree_code, tree, tree, tree);
105 static tree make_bit_field_ref (tree, tree, int, int, int);
106 static tree optimize_bit_field_compare (enum tree_code, tree, tree, tree);
107 static tree decode_field_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
108 enum machine_mode *, int *, int *,
110 static int all_ones_mask_p (tree, int);
111 static tree sign_bit_p (tree, tree);
112 static int simple_operand_p (tree);
113 static tree range_binop (enum tree_code, tree, tree, int, tree, int);
114 static tree make_range (tree, int *, tree *, tree *);
115 static tree build_range_check (tree, tree, int, tree, tree);
116 static int merge_ranges (int *, tree *, tree *, int, tree, tree, int, tree,
118 static tree fold_range_test (tree);
119 static tree fold_cond_expr_with_comparison (tree, tree, tree, tree);
120 static tree unextend (tree, int, int, tree);
121 static tree fold_truthop (enum tree_code, tree, tree, tree);
122 static tree optimize_minmax_comparison (tree);
123 static tree extract_muldiv (tree, tree, enum tree_code, tree);
124 static tree extract_muldiv_1 (tree, tree, enum tree_code, tree);
125 static int multiple_of_p (tree, tree, tree);
126 static tree constant_boolean_node (int, tree);
127 static tree fold_binary_op_with_conditional_arg (enum tree_code, tree, tree,
129 static bool fold_real_zero_addition_p (tree, tree, int);
130 static tree fold_mathfn_compare (enum built_in_function, enum tree_code,
132 static tree fold_inf_compare (enum tree_code, tree, tree, tree);
133 static tree fold_div_compare (enum tree_code, tree, tree, tree);
134 static bool reorder_operands_p (tree, tree);
135 static tree fold_negate_const (tree, tree);
136 static tree fold_not_const (tree, tree);
137 static tree fold_relational_const (enum tree_code, tree, tree, tree);
138 static tree fold_relational_hi_lo (enum tree_code *, const tree,
140 static bool tree_expr_nonzero_p (tree);
142 /* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
143 overflow. Suppose A, B and SUM have the same respective signs as A1, B1,
144 and SUM1. Then this yields nonzero if overflow occurred during the
147 Overflow occurs if A and B have the same sign, but A and SUM differ in
148 sign. Use `^' to test whether signs differ, and `< 0' to isolate the
150 #define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
152 /* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
153 We do that by representing the two-word integer in 4 words, with only
154 HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
155 number. The value of the word is LOWPART + HIGHPART * BASE. */
158 ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
159 #define HIGHPART(x) \
160 ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
161 #define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)
163 /* Unpack a two-word integer into 4 words.
164 LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
165 WORDS points to the array of HOST_WIDE_INTs. */
168 encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
170 words[0] = LOWPART (low);
171 words[1] = HIGHPART (low);
172 words[2] = LOWPART (hi);
173 words[3] = HIGHPART (hi);
176 /* Pack an array of 4 words into a two-word integer.
177 WORDS points to the array of words.
178 The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces. */
181 decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
184 *low = words[0] + words[1] * BASE;
185 *hi = words[2] + words[3] * BASE;
188 /* Make the integer constant T valid for its type by setting to 0 or 1 all
189 the bits in the constant that don't belong in the type.
191 Return 1 if a signed overflow occurs, 0 otherwise. If OVERFLOW is
192 nonzero, a signed overflow has already occurred in calculating T, so
196 force_fit_type (tree t, int overflow)
198 unsigned HOST_WIDE_INT low;
202 if (TREE_CODE (t) == REAL_CST)
204 /* ??? Used to check for overflow here via CHECK_FLOAT_TYPE.
205 Consider doing it via real_convert now. */
209 else if (TREE_CODE (t) != INTEGER_CST)
212 low = TREE_INT_CST_LOW (t);
213 high = TREE_INT_CST_HIGH (t);
215 if (POINTER_TYPE_P (TREE_TYPE (t))
216 || TREE_CODE (TREE_TYPE (t)) == OFFSET_TYPE)
219 prec = TYPE_PRECISION (TREE_TYPE (t));
221 /* First clear all bits that are beyond the type's precision. */
223 if (prec == 2 * HOST_BITS_PER_WIDE_INT)
225 else if (prec > HOST_BITS_PER_WIDE_INT)
226 TREE_INT_CST_HIGH (t)
227 &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
230 TREE_INT_CST_HIGH (t) = 0;
231 if (prec < HOST_BITS_PER_WIDE_INT)
232 TREE_INT_CST_LOW (t) &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
235 /* Unsigned types do not suffer sign extension or overflow unless they
237 if (TYPE_UNSIGNED (TREE_TYPE (t))
238 && ! (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
239 && TYPE_IS_SIZETYPE (TREE_TYPE (t))))
242 /* If the value's sign bit is set, extend the sign. */
243 if (prec != 2 * HOST_BITS_PER_WIDE_INT
244 && (prec > HOST_BITS_PER_WIDE_INT
245 ? 0 != (TREE_INT_CST_HIGH (t)
247 << (prec - HOST_BITS_PER_WIDE_INT - 1)))
248 : 0 != (TREE_INT_CST_LOW (t)
249 & ((unsigned HOST_WIDE_INT) 1 << (prec - 1)))))
251 /* Value is negative:
252 set to 1 all the bits that are outside this type's precision. */
253 if (prec > HOST_BITS_PER_WIDE_INT)
254 TREE_INT_CST_HIGH (t)
255 |= ((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
258 TREE_INT_CST_HIGH (t) = -1;
259 if (prec < HOST_BITS_PER_WIDE_INT)
260 TREE_INT_CST_LOW (t) |= ((unsigned HOST_WIDE_INT) (-1) << prec);
264 /* Return nonzero if signed overflow occurred. */
266 ((overflow | (low ^ TREE_INT_CST_LOW (t)) | (high ^ TREE_INT_CST_HIGH (t)))
270 /* Add two doubleword integers with doubleword result.
271 Each argument is given as two `HOST_WIDE_INT' pieces.
272 One argument is L1 and H1; the other, L2 and H2.
273 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
276 add_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
277 unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
278 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
280 unsigned HOST_WIDE_INT l;
284 h = h1 + h2 + (l < l1);
288 return OVERFLOW_SUM_SIGN (h1, h2, h);
291 /* Negate a doubleword integer with doubleword result.
292 Return nonzero if the operation overflows, assuming it's signed.
293 The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
294 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
297 neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
298 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
304 return (*hv & h1) < 0;
314 /* Multiply two doubleword integers with doubleword result.
315 Return nonzero if the operation overflows, assuming it's signed.
316 Each argument is given as two `HOST_WIDE_INT' pieces.
317 One argument is L1 and H1; the other, L2 and H2.
318 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
321 mul_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
322 unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
323 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
325 HOST_WIDE_INT arg1[4];
326 HOST_WIDE_INT arg2[4];
327 HOST_WIDE_INT prod[4 * 2];
328 unsigned HOST_WIDE_INT carry;
330 unsigned HOST_WIDE_INT toplow, neglow;
331 HOST_WIDE_INT tophigh, neghigh;
333 encode (arg1, l1, h1);
334 encode (arg2, l2, h2);
336 memset (prod, 0, sizeof prod);
338 for (i = 0; i < 4; i++)
341 for (j = 0; j < 4; j++)
344 /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000. */
345 carry += arg1[i] * arg2[j];
346 /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF. */
348 prod[k] = LOWPART (carry);
349 carry = HIGHPART (carry);
354 decode (prod, lv, hv); /* This ignores prod[4] through prod[4*2-1] */
356 /* Check for overflow by calculating the top half of the answer in full;
357 it should agree with the low half's sign bit. */
358 decode (prod + 4, &toplow, &tophigh);
361 neg_double (l2, h2, &neglow, &neghigh);
362 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
366 neg_double (l1, h1, &neglow, &neghigh);
367 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
369 return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
372 /* Shift the doubleword integer in L1, H1 left by COUNT places
373 keeping only PREC bits of result.
374 Shift right if COUNT is negative.
375 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
376 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
379 lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
380 HOST_WIDE_INT count, unsigned int prec,
381 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
383 unsigned HOST_WIDE_INT signmask;
387 rshift_double (l1, h1, -count, prec, lv, hv, arith);
391 if (SHIFT_COUNT_TRUNCATED)
394 if (count >= 2 * HOST_BITS_PER_WIDE_INT)
396 /* Shifting by the host word size is undefined according to the
397 ANSI standard, so we must handle this as a special case. */
401 else if (count >= HOST_BITS_PER_WIDE_INT)
403 *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
408 *hv = (((unsigned HOST_WIDE_INT) h1 << count)
409 | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
413 /* Sign extend all bits that are beyond the precision. */
415 signmask = -((prec > HOST_BITS_PER_WIDE_INT
416 ? ((unsigned HOST_WIDE_INT) *hv
417 >> (prec - HOST_BITS_PER_WIDE_INT - 1))
418 : (*lv >> (prec - 1))) & 1);
420 if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
422 else if (prec >= HOST_BITS_PER_WIDE_INT)
424 *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
425 *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
430 *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
431 *lv |= signmask << prec;
435 /* Shift the doubleword integer in L1, H1 right by COUNT places
436 keeping only PREC bits of result. COUNT must be positive.
437 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
438 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
441 rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
442 HOST_WIDE_INT count, unsigned int prec,
443 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
446 unsigned HOST_WIDE_INT signmask;
449 ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
452 if (SHIFT_COUNT_TRUNCATED)
455 if (count >= 2 * HOST_BITS_PER_WIDE_INT)
457 /* Shifting by the host word size is undefined according to the
458 ANSI standard, so we must handle this as a special case. */
462 else if (count >= HOST_BITS_PER_WIDE_INT)
465 *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
469 *hv = (unsigned HOST_WIDE_INT) h1 >> count;
471 | ((unsigned HOST_WIDE_INT) h1 << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
474 /* Zero / sign extend all bits that are beyond the precision. */
476 if (count >= (HOST_WIDE_INT)prec)
481 else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
483 else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
485 *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
486 *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
491 *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
492 *lv |= signmask << (prec - count);
496 /* Rotate the doubleword integer in L1, H1 left by COUNT places
497 keeping only PREC bits of result.
498 Rotate right if COUNT is negative.
499 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
502 lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
503 HOST_WIDE_INT count, unsigned int prec,
504 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
506 unsigned HOST_WIDE_INT s1l, s2l;
507 HOST_WIDE_INT s1h, s2h;
513 lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
514 rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
519 /* Rotate the doubleword integer in L1, H1 left by COUNT places
520 keeping only PREC bits of result. COUNT must be positive.
521 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
524 rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
525 HOST_WIDE_INT count, unsigned int prec,
526 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
528 unsigned HOST_WIDE_INT s1l, s2l;
529 HOST_WIDE_INT s1h, s2h;
535 rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
536 lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
541 /* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
542 for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
543 CODE is a tree code for a kind of division, one of
544 TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
546 It controls how the quotient is rounded to an integer.
547 Return nonzero if the operation overflows.
548 UNS nonzero says do unsigned division. */
551 div_and_round_double (enum tree_code code, int uns,
552 unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
553 HOST_WIDE_INT hnum_orig,
554 unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
555 HOST_WIDE_INT hden_orig,
556 unsigned HOST_WIDE_INT *lquo,
557 HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
561 HOST_WIDE_INT num[4 + 1]; /* extra element for scaling. */
562 HOST_WIDE_INT den[4], quo[4];
564 unsigned HOST_WIDE_INT work;
565 unsigned HOST_WIDE_INT carry = 0;
566 unsigned HOST_WIDE_INT lnum = lnum_orig;
567 HOST_WIDE_INT hnum = hnum_orig;
568 unsigned HOST_WIDE_INT lden = lden_orig;
569 HOST_WIDE_INT hden = hden_orig;
572 if (hden == 0 && lden == 0)
573 overflow = 1, lden = 1;
575 /* Calculate quotient sign and convert operands to unsigned. */
581 /* (minimum integer) / (-1) is the only overflow case. */
582 if (neg_double (lnum, hnum, &lnum, &hnum)
583 && ((HOST_WIDE_INT) lden & hden) == -1)
589 neg_double (lden, hden, &lden, &hden);
593 if (hnum == 0 && hden == 0)
594 { /* single precision */
596 /* This unsigned division rounds toward zero. */
602 { /* trivial case: dividend < divisor */
603 /* hden != 0 already checked. */
610 memset (quo, 0, sizeof quo);
612 memset (num, 0, sizeof num); /* to zero 9th element */
613 memset (den, 0, sizeof den);
615 encode (num, lnum, hnum);
616 encode (den, lden, hden);
618 /* Special code for when the divisor < BASE. */
619 if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
621 /* hnum != 0 already checked. */
622 for (i = 4 - 1; i >= 0; i--)
624 work = num[i] + carry * BASE;
625 quo[i] = work / lden;
631 /* Full double precision division,
632 with thanks to Don Knuth's "Seminumerical Algorithms". */
633 int num_hi_sig, den_hi_sig;
634 unsigned HOST_WIDE_INT quo_est, scale;
636 /* Find the highest nonzero divisor digit. */
637 for (i = 4 - 1;; i--)
644 /* Insure that the first digit of the divisor is at least BASE/2.
645 This is required by the quotient digit estimation algorithm. */
647 scale = BASE / (den[den_hi_sig] + 1);
649 { /* scale divisor and dividend */
651 for (i = 0; i <= 4 - 1; i++)
653 work = (num[i] * scale) + carry;
654 num[i] = LOWPART (work);
655 carry = HIGHPART (work);
660 for (i = 0; i <= 4 - 1; i++)
662 work = (den[i] * scale) + carry;
663 den[i] = LOWPART (work);
664 carry = HIGHPART (work);
665 if (den[i] != 0) den_hi_sig = i;
672 for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
674 /* Guess the next quotient digit, quo_est, by dividing the first
675 two remaining dividend digits by the high order quotient digit.
676 quo_est is never low and is at most 2 high. */
677 unsigned HOST_WIDE_INT tmp;
679 num_hi_sig = i + den_hi_sig + 1;
680 work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
681 if (num[num_hi_sig] != den[den_hi_sig])
682 quo_est = work / den[den_hi_sig];
686 /* Refine quo_est so it's usually correct, and at most one high. */
687 tmp = work - quo_est * den[den_hi_sig];
689 && (den[den_hi_sig - 1] * quo_est
690 > (tmp * BASE + num[num_hi_sig - 2])))
693 /* Try QUO_EST as the quotient digit, by multiplying the
694 divisor by QUO_EST and subtracting from the remaining dividend.
695 Keep in mind that QUO_EST is the I - 1st digit. */
698 for (j = 0; j <= den_hi_sig; j++)
700 work = quo_est * den[j] + carry;
701 carry = HIGHPART (work);
702 work = num[i + j] - LOWPART (work);
703 num[i + j] = LOWPART (work);
704 carry += HIGHPART (work) != 0;
707 /* If quo_est was high by one, then num[i] went negative and
708 we need to correct things. */
709 if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
712 carry = 0; /* add divisor back in */
713 for (j = 0; j <= den_hi_sig; j++)
715 work = num[i + j] + den[j] + carry;
716 carry = HIGHPART (work);
717 num[i + j] = LOWPART (work);
720 num [num_hi_sig] += carry;
723 /* Store the quotient digit. */
728 decode (quo, lquo, hquo);
731 /* If result is negative, make it so. */
733 neg_double (*lquo, *hquo, lquo, hquo);
735 /* Compute trial remainder: rem = num - (quo * den) */
736 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
737 neg_double (*lrem, *hrem, lrem, hrem);
738 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
743 case TRUNC_MOD_EXPR: /* round toward zero */
744 case EXACT_DIV_EXPR: /* for this one, it shouldn't matter */
748 case FLOOR_MOD_EXPR: /* round toward negative infinity */
749 if (quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio < 0 && rem != 0 */
752 add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
760 case CEIL_MOD_EXPR: /* round toward positive infinity */
761 if (!quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio > 0 && rem != 0 */
763 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
771 case ROUND_MOD_EXPR: /* round to closest integer */
773 unsigned HOST_WIDE_INT labs_rem = *lrem;
774 HOST_WIDE_INT habs_rem = *hrem;
775 unsigned HOST_WIDE_INT labs_den = lden, ltwice;
776 HOST_WIDE_INT habs_den = hden, htwice;
778 /* Get absolute values. */
780 neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
782 neg_double (lden, hden, &labs_den, &habs_den);
784 /* If (2 * abs (lrem) >= abs (lden)) */
785 mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
786 labs_rem, habs_rem, <wice, &htwice);
788 if (((unsigned HOST_WIDE_INT) habs_den
789 < (unsigned HOST_WIDE_INT) htwice)
790 || (((unsigned HOST_WIDE_INT) habs_den
791 == (unsigned HOST_WIDE_INT) htwice)
792 && (labs_den < ltwice)))
796 add_double (*lquo, *hquo,
797 (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
800 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
812 /* Compute true remainder: rem = num - (quo * den) */
813 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
814 neg_double (*lrem, *hrem, lrem, hrem);
815 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
819 /* Return true if built-in mathematical function specified by CODE
820 preserves the sign of it argument, i.e. -f(x) == f(-x). */
823 negate_mathfn_p (enum built_in_function code)
847 /* Determine whether an expression T can be cheaply negated using
848 the function negate_expr. */
851 negate_expr_p (tree t)
853 unsigned HOST_WIDE_INT val;
860 type = TREE_TYPE (t);
863 switch (TREE_CODE (t))
866 if (TYPE_UNSIGNED (type) || ! flag_trapv)
869 /* Check that -CST will not overflow type. */
870 prec = TYPE_PRECISION (type);
871 if (prec > HOST_BITS_PER_WIDE_INT)
873 if (TREE_INT_CST_LOW (t) != 0)
875 prec -= HOST_BITS_PER_WIDE_INT;
876 val = TREE_INT_CST_HIGH (t);
879 val = TREE_INT_CST_LOW (t);
880 if (prec < HOST_BITS_PER_WIDE_INT)
881 val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
882 return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
889 return negate_expr_p (TREE_REALPART (t))
890 && negate_expr_p (TREE_IMAGPART (t));
893 if (FLOAT_TYPE_P (type) && !flag_unsafe_math_optimizations)
895 /* -(A + B) -> (-B) - A. */
896 if (negate_expr_p (TREE_OPERAND (t, 1))
897 && reorder_operands_p (TREE_OPERAND (t, 0),
898 TREE_OPERAND (t, 1)))
900 /* -(A + B) -> (-A) - B. */
901 return negate_expr_p (TREE_OPERAND (t, 0));
904 /* We can't turn -(A-B) into B-A when we honor signed zeros. */
905 return (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
906 && reorder_operands_p (TREE_OPERAND (t, 0),
907 TREE_OPERAND (t, 1));
910 if (TYPE_UNSIGNED (TREE_TYPE (t)))
916 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
917 return negate_expr_p (TREE_OPERAND (t, 1))
918 || negate_expr_p (TREE_OPERAND (t, 0));
922 /* Negate -((double)float) as (double)(-float). */
923 if (TREE_CODE (type) == REAL_TYPE)
925 tree tem = strip_float_extensions (t);
927 return negate_expr_p (tem);
932 /* Negate -f(x) as f(-x). */
933 if (negate_mathfn_p (builtin_mathfn_code (t)))
934 return negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1)));
938 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
939 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
941 tree op1 = TREE_OPERAND (t, 1);
942 if (TREE_INT_CST_HIGH (op1) == 0
943 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
944 == TREE_INT_CST_LOW (op1))
955 /* Given T, an expression, return the negation of T. Allow for T to be
956 null, in which case return null. */
967 type = TREE_TYPE (t);
970 switch (TREE_CODE (t))
973 tem = fold_negate_const (t, type);
974 if (! TREE_OVERFLOW (tem)
975 || TYPE_UNSIGNED (type)
981 tem = fold_negate_const (t, type);
982 /* Two's complement FP formats, such as c4x, may overflow. */
983 if (! TREE_OVERFLOW (tem) || ! flag_trapping_math)
984 return fold_convert (type, tem);
989 tree rpart = negate_expr (TREE_REALPART (t));
990 tree ipart = negate_expr (TREE_IMAGPART (t));
992 if ((TREE_CODE (rpart) == REAL_CST
993 && TREE_CODE (ipart) == REAL_CST)
994 || (TREE_CODE (rpart) == INTEGER_CST
995 && TREE_CODE (ipart) == INTEGER_CST))
996 return build_complex (type, rpart, ipart);
1001 return fold_convert (type, TREE_OPERAND (t, 0));
1004 if (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
1006 /* -(A + B) -> (-B) - A. */
1007 if (negate_expr_p (TREE_OPERAND (t, 1))
1008 && reorder_operands_p (TREE_OPERAND (t, 0),
1009 TREE_OPERAND (t, 1)))
1011 tem = negate_expr (TREE_OPERAND (t, 1));
1012 tem = fold (build2 (MINUS_EXPR, TREE_TYPE (t),
1013 tem, TREE_OPERAND (t, 0)));
1014 return fold_convert (type, tem);
1017 /* -(A + B) -> (-A) - B. */
1018 if (negate_expr_p (TREE_OPERAND (t, 0)))
1020 tem = negate_expr (TREE_OPERAND (t, 0));
1021 tem = fold (build2 (MINUS_EXPR, TREE_TYPE (t),
1022 tem, TREE_OPERAND (t, 1)));
1023 return fold_convert (type, tem);
1029 /* - (A - B) -> B - A */
1030 if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
1031 && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
1032 return fold_convert (type,
1033 fold (build2 (MINUS_EXPR, TREE_TYPE (t),
1034 TREE_OPERAND (t, 1),
1035 TREE_OPERAND (t, 0))));
1039 if (TYPE_UNSIGNED (TREE_TYPE (t)))
1045 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
1047 tem = TREE_OPERAND (t, 1);
1048 if (negate_expr_p (tem))
1049 return fold_convert (type,
1050 fold (build2 (TREE_CODE (t), TREE_TYPE (t),
1051 TREE_OPERAND (t, 0),
1052 negate_expr (tem))));
1053 tem = TREE_OPERAND (t, 0);
1054 if (negate_expr_p (tem))
1055 return fold_convert (type,
1056 fold (build2 (TREE_CODE (t), TREE_TYPE (t),
1058 TREE_OPERAND (t, 1))));
1063 /* Convert -((double)float) into (double)(-float). */
1064 if (TREE_CODE (type) == REAL_TYPE)
1066 tem = strip_float_extensions (t);
1067 if (tem != t && negate_expr_p (tem))
1068 return fold_convert (type, negate_expr (tem));
1073 /* Negate -f(x) as f(-x). */
1074 if (negate_mathfn_p (builtin_mathfn_code (t))
1075 && negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1))))
1077 tree fndecl, arg, arglist;
1079 fndecl = get_callee_fndecl (t);
1080 arg = negate_expr (TREE_VALUE (TREE_OPERAND (t, 1)));
1081 arglist = build_tree_list (NULL_TREE, arg);
1082 return build_function_call_expr (fndecl, arglist);
1087 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
1088 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
1090 tree op1 = TREE_OPERAND (t, 1);
1091 if (TREE_INT_CST_HIGH (op1) == 0
1092 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
1093 == TREE_INT_CST_LOW (op1))
1095 tree ntype = TYPE_UNSIGNED (type)
1096 ? lang_hooks.types.signed_type (type)
1097 : lang_hooks.types.unsigned_type (type);
1098 tree temp = fold_convert (ntype, TREE_OPERAND (t, 0));
1099 temp = fold (build2 (RSHIFT_EXPR, ntype, temp, op1));
1100 return fold_convert (type, temp);
1109 tem = fold (build1 (NEGATE_EXPR, TREE_TYPE (t), t));
1110 return fold_convert (type, tem);
1113 /* Split a tree IN into a constant, literal and variable parts that could be
1114 combined with CODE to make IN. "constant" means an expression with
1115 TREE_CONSTANT but that isn't an actual constant. CODE must be a
1116 commutative arithmetic operation. Store the constant part into *CONP,
1117 the literal in *LITP and return the variable part. If a part isn't
1118 present, set it to null. If the tree does not decompose in this way,
1119 return the entire tree as the variable part and the other parts as null.
1121 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR. In that
1122 case, we negate an operand that was subtracted. Except if it is a
1123 literal for which we use *MINUS_LITP instead.
1125 If NEGATE_P is true, we are negating all of IN, again except a literal
1126 for which we use *MINUS_LITP instead.
1128 If IN is itself a literal or constant, return it as appropriate.
1130 Note that we do not guarantee that any of the three values will be the
1131 same type as IN, but they will have the same signedness and mode. */
1134 split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
1135 tree *minus_litp, int negate_p)
1143 /* Strip any conversions that don't change the machine mode or signedness. */
1144 STRIP_SIGN_NOPS (in);
1146 if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST)
1148 else if (TREE_CODE (in) == code
1149 || (! FLOAT_TYPE_P (TREE_TYPE (in))
1150 /* We can associate addition and subtraction together (even
1151 though the C standard doesn't say so) for integers because
1152 the value is not affected. For reals, the value might be
1153 affected, so we can't. */
1154 && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
1155 || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
1157 tree op0 = TREE_OPERAND (in, 0);
1158 tree op1 = TREE_OPERAND (in, 1);
1159 int neg1_p = TREE_CODE (in) == MINUS_EXPR;
1160 int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;
1162 /* First see if either of the operands is a literal, then a constant. */
1163 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
1164 *litp = op0, op0 = 0;
1165 else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST)
1166 *litp = op1, neg_litp_p = neg1_p, op1 = 0;
1168 if (op0 != 0 && TREE_CONSTANT (op0))
1169 *conp = op0, op0 = 0;
1170 else if (op1 != 0 && TREE_CONSTANT (op1))
1171 *conp = op1, neg_conp_p = neg1_p, op1 = 0;
1173 /* If we haven't dealt with either operand, this is not a case we can
1174 decompose. Otherwise, VAR is either of the ones remaining, if any. */
1175 if (op0 != 0 && op1 != 0)
1180 var = op1, neg_var_p = neg1_p;
1182 /* Now do any needed negations. */
1184 *minus_litp = *litp, *litp = 0;
1186 *conp = negate_expr (*conp);
1188 var = negate_expr (var);
1190 else if (TREE_CONSTANT (in))
1198 *minus_litp = *litp, *litp = 0;
1199 else if (*minus_litp)
1200 *litp = *minus_litp, *minus_litp = 0;
1201 *conp = negate_expr (*conp);
1202 var = negate_expr (var);
1208 /* Re-associate trees split by the above function. T1 and T2 are either
1209 expressions to associate or null. Return the new expression, if any. If
1210 we build an operation, do it in TYPE and with CODE. */
1213 associate_trees (tree t1, tree t2, enum tree_code code, tree type)
1220 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
1221 try to fold this since we will have infinite recursion. But do
1222 deal with any NEGATE_EXPRs. */
1223 if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
1224 || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
1226 if (code == PLUS_EXPR)
1228 if (TREE_CODE (t1) == NEGATE_EXPR)
1229 return build2 (MINUS_EXPR, type, fold_convert (type, t2),
1230 fold_convert (type, TREE_OPERAND (t1, 0)));
1231 else if (TREE_CODE (t2) == NEGATE_EXPR)
1232 return build2 (MINUS_EXPR, type, fold_convert (type, t1),
1233 fold_convert (type, TREE_OPERAND (t2, 0)));
1235 return build2 (code, type, fold_convert (type, t1),
1236 fold_convert (type, t2));
1239 return fold (build2 (code, type, fold_convert (type, t1),
1240 fold_convert (type, t2)));
1243 /* Combine two integer constants ARG1 and ARG2 under operation CODE
1244 to produce a new constant.
1246 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
1249 int_const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
1251 unsigned HOST_WIDE_INT int1l, int2l;
1252 HOST_WIDE_INT int1h, int2h;
1253 unsigned HOST_WIDE_INT low;
1255 unsigned HOST_WIDE_INT garbagel;
1256 HOST_WIDE_INT garbageh;
1258 tree type = TREE_TYPE (arg1);
1259 int uns = TYPE_UNSIGNED (type);
1261 = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
1263 int no_overflow = 0;
1265 int1l = TREE_INT_CST_LOW (arg1);
1266 int1h = TREE_INT_CST_HIGH (arg1);
1267 int2l = TREE_INT_CST_LOW (arg2);
1268 int2h = TREE_INT_CST_HIGH (arg2);
1273 low = int1l | int2l, hi = int1h | int2h;
1277 low = int1l ^ int2l, hi = int1h ^ int2h;
1281 low = int1l & int2l, hi = int1h & int2h;
1287 /* It's unclear from the C standard whether shifts can overflow.
1288 The following code ignores overflow; perhaps a C standard
1289 interpretation ruling is needed. */
1290 lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
1298 lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
1303 overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
1307 neg_double (int2l, int2h, &low, &hi);
1308 add_double (int1l, int1h, low, hi, &low, &hi);
1309 overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
1313 overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
1316 case TRUNC_DIV_EXPR:
1317 case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
1318 case EXACT_DIV_EXPR:
1319 /* This is a shortcut for a common special case. */
1320 if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1321 && ! TREE_CONSTANT_OVERFLOW (arg1)
1322 && ! TREE_CONSTANT_OVERFLOW (arg2)
1323 && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1325 if (code == CEIL_DIV_EXPR)
1328 low = int1l / int2l, hi = 0;
1332 /* ... fall through ... */
1334 case ROUND_DIV_EXPR:
1335 if (int2h == 0 && int2l == 1)
1337 low = int1l, hi = int1h;
1340 if (int1l == int2l && int1h == int2h
1341 && ! (int1l == 0 && int1h == 0))
1346 overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
1347 &low, &hi, &garbagel, &garbageh);
1350 case TRUNC_MOD_EXPR:
1351 case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
1352 /* This is a shortcut for a common special case. */
1353 if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1354 && ! TREE_CONSTANT_OVERFLOW (arg1)
1355 && ! TREE_CONSTANT_OVERFLOW (arg2)
1356 && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1358 if (code == CEIL_MOD_EXPR)
1360 low = int1l % int2l, hi = 0;
1364 /* ... fall through ... */
1366 case ROUND_MOD_EXPR:
1367 overflow = div_and_round_double (code, uns,
1368 int1l, int1h, int2l, int2h,
1369 &garbagel, &garbageh, &low, &hi);
1375 low = (((unsigned HOST_WIDE_INT) int1h
1376 < (unsigned HOST_WIDE_INT) int2h)
1377 || (((unsigned HOST_WIDE_INT) int1h
1378 == (unsigned HOST_WIDE_INT) int2h)
1381 low = (int1h < int2h
1382 || (int1h == int2h && int1l < int2l));
1384 if (low == (code == MIN_EXPR))
1385 low = int1l, hi = int1h;
1387 low = int2l, hi = int2h;
1394 /* If this is for a sizetype, can be represented as one (signed)
1395 HOST_WIDE_INT word, and doesn't overflow, use size_int since it caches
1398 && ((hi == 0 && (HOST_WIDE_INT) low >= 0)
1399 || (hi == -1 && (HOST_WIDE_INT) low < 0))
1400 && overflow == 0 && ! TREE_OVERFLOW (arg1) && ! TREE_OVERFLOW (arg2))
1401 return size_int_type_wide (low, type);
1404 t = build_int_2 (low, hi);
1405 TREE_TYPE (t) = TREE_TYPE (arg1);
1410 ? (!uns || is_sizetype) && overflow
1411 : (force_fit_type (t, (!uns || is_sizetype) && overflow)
1413 | TREE_OVERFLOW (arg1)
1414 | TREE_OVERFLOW (arg2));
1416 /* If we're doing a size calculation, unsigned arithmetic does overflow.
1417 So check if force_fit_type truncated the value. */
1419 && ! TREE_OVERFLOW (t)
1420 && (TREE_INT_CST_HIGH (t) != hi
1421 || TREE_INT_CST_LOW (t) != low))
1422 TREE_OVERFLOW (t) = 1;
1424 TREE_CONSTANT_OVERFLOW (t) = (TREE_OVERFLOW (t)
1425 | TREE_CONSTANT_OVERFLOW (arg1)
1426 | TREE_CONSTANT_OVERFLOW (arg2));
1430 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1431 constant. We assume ARG1 and ARG2 have the same data type, or at least
1432 are the same kind of constant and the same machine mode.
1434 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
1437 const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
1442 if (TREE_CODE (arg1) == INTEGER_CST)
1443 return int_const_binop (code, arg1, arg2, notrunc);
1445 if (TREE_CODE (arg1) == REAL_CST)
1447 enum machine_mode mode;
1450 REAL_VALUE_TYPE value;
1453 d1 = TREE_REAL_CST (arg1);
1454 d2 = TREE_REAL_CST (arg2);
1456 type = TREE_TYPE (arg1);
1457 mode = TYPE_MODE (type);
1459 /* Don't perform operation if we honor signaling NaNs and
1460 either operand is a NaN. */
1461 if (HONOR_SNANS (mode)
1462 && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
1465 /* Don't perform operation if it would raise a division
1466 by zero exception. */
1467 if (code == RDIV_EXPR
1468 && REAL_VALUES_EQUAL (d2, dconst0)
1469 && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
1472 /* If either operand is a NaN, just return it. Otherwise, set up
1473 for floating-point trap; we return an overflow. */
1474 if (REAL_VALUE_ISNAN (d1))
1476 else if (REAL_VALUE_ISNAN (d2))
1479 REAL_ARITHMETIC (value, code, d1, d2);
1481 t = build_real (type, real_value_truncate (mode, value));
1484 = (force_fit_type (t, 0)
1485 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
1486 TREE_CONSTANT_OVERFLOW (t)
1488 | TREE_CONSTANT_OVERFLOW (arg1)
1489 | TREE_CONSTANT_OVERFLOW (arg2);
1492 if (TREE_CODE (arg1) == COMPLEX_CST)
1494 tree type = TREE_TYPE (arg1);
1495 tree r1 = TREE_REALPART (arg1);
1496 tree i1 = TREE_IMAGPART (arg1);
1497 tree r2 = TREE_REALPART (arg2);
1498 tree i2 = TREE_IMAGPART (arg2);
1504 t = build_complex (type,
1505 const_binop (PLUS_EXPR, r1, r2, notrunc),
1506 const_binop (PLUS_EXPR, i1, i2, notrunc));
1510 t = build_complex (type,
1511 const_binop (MINUS_EXPR, r1, r2, notrunc),
1512 const_binop (MINUS_EXPR, i1, i2, notrunc));
1516 t = build_complex (type,
1517 const_binop (MINUS_EXPR,
1518 const_binop (MULT_EXPR,
1520 const_binop (MULT_EXPR,
1523 const_binop (PLUS_EXPR,
1524 const_binop (MULT_EXPR,
1526 const_binop (MULT_EXPR,
1534 = const_binop (PLUS_EXPR,
1535 const_binop (MULT_EXPR, r2, r2, notrunc),
1536 const_binop (MULT_EXPR, i2, i2, notrunc),
1539 t = build_complex (type,
1541 (INTEGRAL_TYPE_P (TREE_TYPE (r1))
1542 ? TRUNC_DIV_EXPR : RDIV_EXPR,
1543 const_binop (PLUS_EXPR,
1544 const_binop (MULT_EXPR, r1, r2,
1546 const_binop (MULT_EXPR, i1, i2,
1549 magsquared, notrunc),
1551 (INTEGRAL_TYPE_P (TREE_TYPE (r1))
1552 ? TRUNC_DIV_EXPR : RDIV_EXPR,
1553 const_binop (MINUS_EXPR,
1554 const_binop (MULT_EXPR, i1, r2,
1556 const_binop (MULT_EXPR, r1, i2,
1559 magsquared, notrunc));
1571 /* These are the hash table functions for the hash table of INTEGER_CST
1572 nodes of a sizetype. */
1574 /* Return the hash code code X, an INTEGER_CST. */
1577 size_htab_hash (const void *x)
1581 return (TREE_INT_CST_HIGH (t) ^ TREE_INT_CST_LOW (t)
1582 ^ htab_hash_pointer (TREE_TYPE (t))
1583 ^ (TREE_OVERFLOW (t) << 20));
1586 /* Return nonzero if the value represented by *X (an INTEGER_CST tree node)
1587 is the same as that given by *Y, which is the same. */
1590 size_htab_eq (const void *x, const void *y)
1595 return (TREE_INT_CST_HIGH (xt) == TREE_INT_CST_HIGH (yt)
1596 && TREE_INT_CST_LOW (xt) == TREE_INT_CST_LOW (yt)
1597 && TREE_TYPE (xt) == TREE_TYPE (yt)
1598 && TREE_OVERFLOW (xt) == TREE_OVERFLOW (yt));
1601 /* Return an INTEGER_CST with value whose low-order HOST_BITS_PER_WIDE_INT
1602 bits are given by NUMBER and of the sizetype represented by KIND. */
1605 size_int_wide (HOST_WIDE_INT number, enum size_type_kind kind)
1607 return size_int_type_wide (number, sizetype_tab[(int) kind]);
1610 /* Likewise, but the desired type is specified explicitly. */
1612 static GTY (()) tree new_const;
1613 static GTY ((if_marked ("ggc_marked_p"), param_is (union tree_node)))
1617 size_int_type_wide (HOST_WIDE_INT number, tree type)
1623 size_htab = htab_create_ggc (1024, size_htab_hash, size_htab_eq, NULL);
1624 new_const = make_node (INTEGER_CST);
1627 /* Adjust NEW_CONST to be the constant we want. If it's already in the
1628 hash table, we return the value from the hash table. Otherwise, we
1629 place that in the hash table and make a new node for the next time. */
1630 TREE_INT_CST_LOW (new_const) = number;
1631 TREE_INT_CST_HIGH (new_const) = number < 0 ? -1 : 0;
1632 TREE_TYPE (new_const) = type;
1633 TREE_OVERFLOW (new_const) = TREE_CONSTANT_OVERFLOW (new_const)
1634 = force_fit_type (new_const, 0);
1636 slot = htab_find_slot (size_htab, new_const, INSERT);
1642 new_const = make_node (INTEGER_CST);
1646 return (tree) *slot;
1649 /* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE
1650 is a tree code. The type of the result is taken from the operands.
1651 Both must be the same type integer type and it must be a size type.
1652 If the operands are constant, so is the result. */
1655 size_binop (enum tree_code code, tree arg0, tree arg1)
1657 tree type = TREE_TYPE (arg0);
1659 if (TREE_CODE (type) != INTEGER_TYPE || ! TYPE_IS_SIZETYPE (type)
1660 || type != TREE_TYPE (arg1))
1663 /* Handle the special case of two integer constants faster. */
1664 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
1666 /* And some specific cases even faster than that. */
1667 if (code == PLUS_EXPR && integer_zerop (arg0))
1669 else if ((code == MINUS_EXPR || code == PLUS_EXPR)
1670 && integer_zerop (arg1))
1672 else if (code == MULT_EXPR && integer_onep (arg0))
1675 /* Handle general case of two integer constants. */
1676 return int_const_binop (code, arg0, arg1, 0);
1679 if (arg0 == error_mark_node || arg1 == error_mark_node)
1680 return error_mark_node;
1682 return fold (build2 (code, type, arg0, arg1));
1685 /* Given two values, either both of sizetype or both of bitsizetype,
1686 compute the difference between the two values. Return the value
1687 in signed type corresponding to the type of the operands. */
1690 size_diffop (tree arg0, tree arg1)
1692 tree type = TREE_TYPE (arg0);
1695 if (TREE_CODE (type) != INTEGER_TYPE || ! TYPE_IS_SIZETYPE (type)
1696 || type != TREE_TYPE (arg1))
1699 /* If the type is already signed, just do the simple thing. */
1700 if (!TYPE_UNSIGNED (type))
1701 return size_binop (MINUS_EXPR, arg0, arg1);
1703 ctype = (type == bitsizetype || type == ubitsizetype
1704 ? sbitsizetype : ssizetype);
1706 /* If either operand is not a constant, do the conversions to the signed
1707 type and subtract. The hardware will do the right thing with any
1708 overflow in the subtraction. */
1709 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
1710 return size_binop (MINUS_EXPR, fold_convert (ctype, arg0),
1711 fold_convert (ctype, arg1));
1713 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
1714 Otherwise, subtract the other way, convert to CTYPE (we know that can't
1715 overflow) and negate (which can't either). Special-case a result
1716 of zero while we're here. */
1717 if (tree_int_cst_equal (arg0, arg1))
1718 return fold_convert (ctype, integer_zero_node);
1719 else if (tree_int_cst_lt (arg1, arg0))
1720 return fold_convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
1722 return size_binop (MINUS_EXPR, fold_convert (ctype, integer_zero_node),
1723 fold_convert (ctype, size_binop (MINUS_EXPR,
1728 /* Attempt to fold type conversion operation CODE of expression ARG1 to
1729 type TYPE. If no simplification can be done return NULL_TREE. */
1732 fold_convert_const (enum tree_code code, tree type, tree arg1)
1737 if (TREE_TYPE (arg1) == type)
1740 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
1742 if (TREE_CODE (arg1) == INTEGER_CST)
1744 /* If we would build a constant wider than GCC supports,
1745 leave the conversion unfolded. */
1746 if (TYPE_PRECISION (type) > 2 * HOST_BITS_PER_WIDE_INT)
1749 /* If we are trying to make a sizetype for a small integer, use
1750 size_int to pick up cached types to reduce duplicate nodes. */
1751 if (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
1752 && !TREE_CONSTANT_OVERFLOW (arg1)
1753 && compare_tree_int (arg1, 10000) < 0)
1754 return size_int_type_wide (TREE_INT_CST_LOW (arg1), type);
1756 /* Given an integer constant, make new constant with new type,
1757 appropriately sign-extended or truncated. */
1758 t = build_int_2 (TREE_INT_CST_LOW (arg1),
1759 TREE_INT_CST_HIGH (arg1));
1760 TREE_TYPE (t) = type;
1761 /* Indicate an overflow if (1) ARG1 already overflowed,
1762 or (2) force_fit_type indicates an overflow.
1763 Tell force_fit_type that an overflow has already occurred
1764 if ARG1 is a too-large unsigned value and T is signed.
1765 But don't indicate an overflow if converting a pointer. */
1767 = ((force_fit_type (t,
1768 (TREE_INT_CST_HIGH (arg1) < 0
1769 && (TYPE_UNSIGNED (type)
1770 < TYPE_UNSIGNED (TREE_TYPE (arg1)))))
1771 && ! POINTER_TYPE_P (TREE_TYPE (arg1)))
1772 || TREE_OVERFLOW (arg1));
1773 TREE_CONSTANT_OVERFLOW (t)
1774 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
1777 else if (TREE_CODE (arg1) == REAL_CST)
1779 /* The following code implements the floating point to integer
1780 conversion rules required by the Java Language Specification,
1781 that IEEE NaNs are mapped to zero and values that overflow
1782 the target precision saturate, i.e. values greater than
1783 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
1784 are mapped to INT_MIN. These semantics are allowed by the
1785 C and C++ standards that simply state that the behavior of
1786 FP-to-integer conversion is unspecified upon overflow. */
1788 HOST_WIDE_INT high, low;
1791 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
1795 case FIX_TRUNC_EXPR:
1796 real_trunc (&r, VOIDmode, &x);
1800 real_ceil (&r, VOIDmode, &x);
1803 case FIX_FLOOR_EXPR:
1804 real_floor (&r, VOIDmode, &x);
1807 case FIX_ROUND_EXPR:
1808 real_round (&r, VOIDmode, &x);
1815 /* If R is NaN, return zero and show we have an overflow. */
1816 if (REAL_VALUE_ISNAN (r))
1823 /* See if R is less than the lower bound or greater than the
1828 tree lt = TYPE_MIN_VALUE (type);
1829 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
1830 if (REAL_VALUES_LESS (r, l))
1833 high = TREE_INT_CST_HIGH (lt);
1834 low = TREE_INT_CST_LOW (lt);
1840 tree ut = TYPE_MAX_VALUE (type);
1843 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
1844 if (REAL_VALUES_LESS (u, r))
1847 high = TREE_INT_CST_HIGH (ut);
1848 low = TREE_INT_CST_LOW (ut);
1854 REAL_VALUE_TO_INT (&low, &high, r);
1856 t = build_int_2 (low, high);
1857 TREE_TYPE (t) = type;
1859 = TREE_OVERFLOW (arg1) | force_fit_type (t, overflow);
1860 TREE_CONSTANT_OVERFLOW (t)
1861 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
1865 else if (TREE_CODE (type) == REAL_TYPE)
1867 if (TREE_CODE (arg1) == INTEGER_CST)
1868 return build_real_from_int_cst (type, arg1);
1869 if (TREE_CODE (arg1) == REAL_CST)
1871 if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1)))
1873 /* We make a copy of ARG1 so that we don't modify an
1874 existing constant tree. */
1875 t = copy_node (arg1);
1876 TREE_TYPE (t) = type;
1880 t = build_real (type,
1881 real_value_truncate (TYPE_MODE (type),
1882 TREE_REAL_CST (arg1)));
1885 = TREE_OVERFLOW (arg1) | force_fit_type (t, 0);
1886 TREE_CONSTANT_OVERFLOW (t)
1887 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
1894 /* Convert expression ARG to type TYPE. Used by the middle-end for
1895 simple conversions in preference to calling the front-end's convert. */
1898 fold_convert (tree type, tree arg)
1900 tree orig = TREE_TYPE (arg);
1906 if (TREE_CODE (arg) == ERROR_MARK
1907 || TREE_CODE (type) == ERROR_MARK
1908 || TREE_CODE (orig) == ERROR_MARK)
1909 return error_mark_node;
1911 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig)
1912 || lang_hooks.types_compatible_p (TYPE_MAIN_VARIANT (type),
1913 TYPE_MAIN_VARIANT (orig)))
1914 return fold (build1 (NOP_EXPR, type, arg));
1916 if (INTEGRAL_TYPE_P (type) || POINTER_TYPE_P (type)
1917 || TREE_CODE (type) == OFFSET_TYPE)
1919 if (TREE_CODE (arg) == INTEGER_CST)
1921 tem = fold_convert_const (NOP_EXPR, type, arg);
1922 if (tem != NULL_TREE)
1925 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
1926 || TREE_CODE (orig) == OFFSET_TYPE)
1927 return fold (build1 (NOP_EXPR, type, arg));
1928 if (TREE_CODE (orig) == COMPLEX_TYPE)
1930 tem = fold (build1 (REALPART_EXPR, TREE_TYPE (orig), arg));
1931 return fold_convert (type, tem);
1933 if (TREE_CODE (orig) == VECTOR_TYPE
1934 && GET_MODE_SIZE (TYPE_MODE (type))
1935 == GET_MODE_SIZE (TYPE_MODE (orig)))
1936 return fold (build1 (NOP_EXPR, type, arg));
1938 else if (TREE_CODE (type) == REAL_TYPE)
1940 if (TREE_CODE (arg) == INTEGER_CST)
1942 tem = fold_convert_const (FLOAT_EXPR, type, arg);
1943 if (tem != NULL_TREE)
1946 else if (TREE_CODE (arg) == REAL_CST)
1948 tem = fold_convert_const (NOP_EXPR, type, arg);
1949 if (tem != NULL_TREE)
1953 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig))
1954 return fold (build1 (FLOAT_EXPR, type, arg));
1955 if (TREE_CODE (orig) == REAL_TYPE)
1956 return fold (build1 (flag_float_store ? CONVERT_EXPR : NOP_EXPR,
1958 if (TREE_CODE (orig) == COMPLEX_TYPE)
1960 tem = fold (build1 (REALPART_EXPR, TREE_TYPE (orig), arg));
1961 return fold_convert (type, tem);
1964 else if (TREE_CODE (type) == COMPLEX_TYPE)
1966 if (INTEGRAL_TYPE_P (orig)
1967 || POINTER_TYPE_P (orig)
1968 || TREE_CODE (orig) == REAL_TYPE)
1969 return build2 (COMPLEX_EXPR, type,
1970 fold_convert (TREE_TYPE (type), arg),
1971 fold_convert (TREE_TYPE (type), integer_zero_node));
1972 if (TREE_CODE (orig) == COMPLEX_TYPE)
1976 if (TREE_CODE (arg) == COMPLEX_EXPR)
1978 rpart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 0));
1979 ipart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 1));
1980 return fold (build2 (COMPLEX_EXPR, type, rpart, ipart));
1983 arg = save_expr (arg);
1984 rpart = fold (build1 (REALPART_EXPR, TREE_TYPE (orig), arg));
1985 ipart = fold (build1 (IMAGPART_EXPR, TREE_TYPE (orig), arg));
1986 rpart = fold_convert (TREE_TYPE (type), rpart);
1987 ipart = fold_convert (TREE_TYPE (type), ipart);
1988 return fold (build2 (COMPLEX_EXPR, type, rpart, ipart));
1991 else if (TREE_CODE (type) == VECTOR_TYPE)
1993 if ((INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig))
1994 && GET_MODE_SIZE (TYPE_MODE (type))
1995 == GET_MODE_SIZE (TYPE_MODE (orig)))
1996 return fold (build1 (NOP_EXPR, type, arg));
1997 if (TREE_CODE (orig) == VECTOR_TYPE
1998 && GET_MODE_SIZE (TYPE_MODE (type))
1999 == GET_MODE_SIZE (TYPE_MODE (orig)))
2000 return fold (build1 (NOP_EXPR, type, arg));
2002 else if (VOID_TYPE_P (type))
2003 return fold (build1 (CONVERT_EXPR, type, fold_ignored_result (arg)));
2007 /* Return an expr equal to X but certainly not valid as an lvalue. */
2012 /* We only need to wrap lvalue tree codes. */
2013 switch (TREE_CODE (x))
2025 case ARRAY_RANGE_REF:
2031 case PREINCREMENT_EXPR:
2032 case PREDECREMENT_EXPR:
2035 case TRY_CATCH_EXPR:
2036 case WITH_CLEANUP_EXPR:
2047 /* Assume the worst for front-end tree codes. */
2048 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2052 return build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
2055 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2056 Zero means allow extended lvalues. */
2058 int pedantic_lvalues;
2060 /* When pedantic, return an expr equal to X but certainly not valid as a
2061 pedantic lvalue. Otherwise, return X. */
2064 pedantic_non_lvalue (tree x)
2066 if (pedantic_lvalues)
2067 return non_lvalue (x);
2072 /* Given a tree comparison code, return the code that is the logical inverse
2073 of the given code. It is not safe to do this for floating-point
2074 comparisons, except for NE_EXPR and EQ_EXPR, so we receive a machine mode
2075 as well: if reversing the comparison is unsafe, return ERROR_MARK. */
2077 static enum tree_code
2078 invert_tree_comparison (enum tree_code code, bool honor_nans)
2080 if (honor_nans && flag_trapping_math)
2090 return honor_nans ? UNLE_EXPR : LE_EXPR;
2092 return honor_nans ? UNLT_EXPR : LT_EXPR;
2094 return honor_nans ? UNGE_EXPR : GE_EXPR;
2096 return honor_nans ? UNGT_EXPR : GT_EXPR;
2110 return UNORDERED_EXPR;
2111 case UNORDERED_EXPR:
2112 return ORDERED_EXPR;
2118 /* Similar, but return the comparison that results if the operands are
2119 swapped. This is safe for floating-point. */
2122 swap_tree_comparison (enum tree_code code)
2143 /* Convert a comparison tree code from an enum tree_code representation
2144 into a compcode bit-based encoding. This function is the inverse of
2145 compcode_to_comparison. */
2147 static enum comparison_code
2148 comparison_to_compcode (enum tree_code code)
2165 return COMPCODE_ORD;
2166 case UNORDERED_EXPR:
2167 return COMPCODE_UNORD;
2169 return COMPCODE_UNLT;
2171 return COMPCODE_UNEQ;
2173 return COMPCODE_UNLE;
2175 return COMPCODE_UNGT;
2177 return COMPCODE_LTGT;
2179 return COMPCODE_UNGE;
2185 /* Convert a compcode bit-based encoding of a comparison operator back
2186 to GCC's enum tree_code representation. This function is the
2187 inverse of comparison_to_compcode. */
2189 static enum tree_code
2190 compcode_to_comparison (enum comparison_code code)
2207 return ORDERED_EXPR;
2208 case COMPCODE_UNORD:
2209 return UNORDERED_EXPR;
2227 /* Return a tree for the comparison which is the combination of
2228 doing the AND or OR (depending on CODE) of the two operations LCODE
2229 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2230 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2231 if this makes the transformation invalid. */
2234 combine_comparisons (enum tree_code code, enum tree_code lcode,
2235 enum tree_code rcode, tree truth_type,
2236 tree ll_arg, tree lr_arg)
2238 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2239 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2240 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2241 enum comparison_code compcode;
2245 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2246 compcode = lcompcode & rcompcode;
2249 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2250 compcode = lcompcode | rcompcode;
2259 /* Eliminate unordered comparisons, as well as LTGT and ORD
2260 which are not used unless the mode has NaNs. */
2261 compcode &= ~COMPCODE_UNORD;
2262 if (compcode == COMPCODE_LTGT)
2263 compcode = COMPCODE_NE;
2264 else if (compcode == COMPCODE_ORD)
2265 compcode = COMPCODE_TRUE;
2267 else if (flag_trapping_math)
2269 /* Check that the original operation and the optimized ones will trap
2270 under the same condition. */
2271 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2272 && (lcompcode != COMPCODE_EQ)
2273 && (lcompcode != COMPCODE_ORD);
2274 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2275 && (rcompcode != COMPCODE_EQ)
2276 && (rcompcode != COMPCODE_ORD);
2277 bool trap = (compcode & COMPCODE_UNORD) == 0
2278 && (compcode != COMPCODE_EQ)
2279 && (compcode != COMPCODE_ORD);
2281 /* In a short-circuited boolean expression the LHS might be
2282 such that the RHS, if evaluated, will never trap. For
2283 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2284 if neither x nor y is NaN. (This is a mixed blessing: for
2285 example, the expression above will never trap, hence
2286 optimizing it to x < y would be invalid). */
2287 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2288 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2291 /* If the comparison was short-circuited, and only the RHS
2292 trapped, we may now generate a spurious trap. */
2294 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2297 /* If we changed the conditions that cause a trap, we lose. */
2298 if ((ltrap || rtrap) != trap)
2302 if (compcode == COMPCODE_TRUE)
2303 return constant_boolean_node (true, truth_type);
2304 else if (compcode == COMPCODE_FALSE)
2305 return constant_boolean_node (false, truth_type);
2307 return fold (build2 (compcode_to_comparison (compcode),
2308 truth_type, ll_arg, lr_arg));
2311 /* Return nonzero if CODE is a tree code that represents a truth value. */
2314 truth_value_p (enum tree_code code)
2316 return (TREE_CODE_CLASS (code) == '<'
2317 || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
2318 || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
2319 || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
2322 /* Return nonzero if two operands (typically of the same tree node)
2323 are necessarily equal. If either argument has side-effects this
2324 function returns zero. FLAGS modifies behavior as follows:
2326 If OEP_ONLY_CONST is set, only return nonzero for constants.
2327 This function tests whether the operands are indistinguishable;
2328 it does not test whether they are equal using C's == operation.
2329 The distinction is important for IEEE floating point, because
2330 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2331 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2333 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2334 even though it may hold multiple values during a function.
2335 This is because a GCC tree node guarantees that nothing else is
2336 executed between the evaluation of its "operands" (which may often
2337 be evaluated in arbitrary order). Hence if the operands themselves
2338 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2339 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2340 unset means assuming isochronic (or instantaneous) tree equivalence.
2341 Unless comparing arbitrary expression trees, such as from different
2342 statements, this flag can usually be left unset.
2344 If OEP_PURE_SAME is set, then pure functions with identical arguments
2345 are considered the same. It is used when the caller has other ways
2346 to ensure that global memory is unchanged in between. */
2349 operand_equal_p (tree arg0, tree arg1, unsigned int flags)
2351 /* If either is ERROR_MARK, they aren't equal. */
2352 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK)
2355 /* If both types don't have the same signedness, then we can't consider
2356 them equal. We must check this before the STRIP_NOPS calls
2357 because they may change the signedness of the arguments. */
2358 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2364 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2365 /* This is needed for conversions and for COMPONENT_REF.
2366 Might as well play it safe and always test this. */
2367 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2368 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2369 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2372 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2373 We don't care about side effects in that case because the SAVE_EXPR
2374 takes care of that for us. In all other cases, two expressions are
2375 equal if they have no side effects. If we have two identical
2376 expressions with side effects that should be treated the same due
2377 to the only side effects being identical SAVE_EXPR's, that will
2378 be detected in the recursive calls below. */
2379 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2380 && (TREE_CODE (arg0) == SAVE_EXPR
2381 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2384 /* Next handle constant cases, those for which we can return 1 even
2385 if ONLY_CONST is set. */
2386 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2387 switch (TREE_CODE (arg0))
2390 return (! TREE_CONSTANT_OVERFLOW (arg0)
2391 && ! TREE_CONSTANT_OVERFLOW (arg1)
2392 && tree_int_cst_equal (arg0, arg1));
2395 return (! TREE_CONSTANT_OVERFLOW (arg0)
2396 && ! TREE_CONSTANT_OVERFLOW (arg1)
2397 && REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2398 TREE_REAL_CST (arg1)));
2404 if (TREE_CONSTANT_OVERFLOW (arg0)
2405 || TREE_CONSTANT_OVERFLOW (arg1))
2408 v1 = TREE_VECTOR_CST_ELTS (arg0);
2409 v2 = TREE_VECTOR_CST_ELTS (arg1);
2412 if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
2415 v1 = TREE_CHAIN (v1);
2416 v2 = TREE_CHAIN (v2);
2423 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2425 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2429 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2430 && ! memcmp (TREE_STRING_POINTER (arg0),
2431 TREE_STRING_POINTER (arg1),
2432 TREE_STRING_LENGTH (arg0)));
2435 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2441 if (flags & OEP_ONLY_CONST)
2444 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2447 /* Two conversions are equal only if signedness and modes match. */
2448 if ((TREE_CODE (arg0) == NOP_EXPR || TREE_CODE (arg0) == CONVERT_EXPR)
2449 && (TYPE_UNSIGNED (TREE_TYPE (arg0))
2450 != TYPE_UNSIGNED (TREE_TYPE (arg1))))
2453 return operand_equal_p (TREE_OPERAND (arg0, 0),
2454 TREE_OPERAND (arg1, 0), flags);
2458 if (operand_equal_p (TREE_OPERAND (arg0, 0),
2459 TREE_OPERAND (arg1, 0), flags)
2460 && operand_equal_p (TREE_OPERAND (arg0, 1),
2461 TREE_OPERAND (arg1, 1), flags))
2464 /* For commutative ops, allow the other order. */
2465 return (commutative_tree_code (TREE_CODE (arg0))
2466 && operand_equal_p (TREE_OPERAND (arg0, 0),
2467 TREE_OPERAND (arg1, 1), flags)
2468 && operand_equal_p (TREE_OPERAND (arg0, 1),
2469 TREE_OPERAND (arg1, 0), flags));
2472 /* If either of the pointer (or reference) expressions we are
2473 dereferencing contain a side effect, these cannot be equal. */
2474 if (TREE_SIDE_EFFECTS (arg0)
2475 || TREE_SIDE_EFFECTS (arg1))
2478 switch (TREE_CODE (arg0))
2483 return operand_equal_p (TREE_OPERAND (arg0, 0),
2484 TREE_OPERAND (arg1, 0), flags);
2488 case ARRAY_RANGE_REF:
2489 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2490 TREE_OPERAND (arg1, 0), flags)
2491 && operand_equal_p (TREE_OPERAND (arg0, 1),
2492 TREE_OPERAND (arg1, 1), flags));
2495 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2496 TREE_OPERAND (arg1, 0), flags)
2497 && operand_equal_p (TREE_OPERAND (arg0, 1),
2498 TREE_OPERAND (arg1, 1), flags)
2499 && operand_equal_p (TREE_OPERAND (arg0, 2),
2500 TREE_OPERAND (arg1, 2), flags));
2506 switch (TREE_CODE (arg0))
2509 case TRUTH_NOT_EXPR:
2510 return operand_equal_p (TREE_OPERAND (arg0, 0),
2511 TREE_OPERAND (arg1, 0), flags);
2513 case TRUTH_ANDIF_EXPR:
2514 case TRUTH_ORIF_EXPR:
2515 return operand_equal_p (TREE_OPERAND (arg0, 0),
2516 TREE_OPERAND (arg1, 0), flags)
2517 && operand_equal_p (TREE_OPERAND (arg0, 1),
2518 TREE_OPERAND (arg1, 1), flags);
2520 case TRUTH_AND_EXPR:
2522 case TRUTH_XOR_EXPR:
2523 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2524 TREE_OPERAND (arg1, 0), flags)
2525 && operand_equal_p (TREE_OPERAND (arg0, 1),
2526 TREE_OPERAND (arg1, 1), flags))
2527 || (operand_equal_p (TREE_OPERAND (arg0, 0),
2528 TREE_OPERAND (arg1, 1), flags)
2529 && operand_equal_p (TREE_OPERAND (arg0, 1),
2530 TREE_OPERAND (arg1, 0), flags));
2533 /* If the CALL_EXPRs call different functions, then they
2534 clearly can not be equal. */
2535 if (! operand_equal_p (TREE_OPERAND (arg0, 0),
2536 TREE_OPERAND (arg1, 0), flags))
2540 unsigned int cef = call_expr_flags (arg0);
2541 if (flags & OEP_PURE_SAME)
2542 cef &= ECF_CONST | ECF_PURE;
2549 /* Now see if all the arguments are the same. operand_equal_p
2550 does not handle TREE_LIST, so we walk the operands here
2551 feeding them to operand_equal_p. */
2552 arg0 = TREE_OPERAND (arg0, 1);
2553 arg1 = TREE_OPERAND (arg1, 1);
2554 while (arg0 && arg1)
2556 if (! operand_equal_p (TREE_VALUE (arg0), TREE_VALUE (arg1),
2560 arg0 = TREE_CHAIN (arg0);
2561 arg1 = TREE_CHAIN (arg1);
2564 /* If we get here and both argument lists are exhausted
2565 then the CALL_EXPRs are equal. */
2566 return ! (arg0 || arg1);
2573 /* Consider __builtin_sqrt equal to sqrt. */
2574 return (TREE_CODE (arg0) == FUNCTION_DECL
2575 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2576 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2577 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
2584 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2585 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2587 When in doubt, return 0. */
2590 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2592 int unsignedp1, unsignedpo;
2593 tree primarg0, primarg1, primother;
2594 unsigned int correct_width;
2596 if (operand_equal_p (arg0, arg1, 0))
2599 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2600 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2603 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2604 and see if the inner values are the same. This removes any
2605 signedness comparison, which doesn't matter here. */
2606 primarg0 = arg0, primarg1 = arg1;
2607 STRIP_NOPS (primarg0);
2608 STRIP_NOPS (primarg1);
2609 if (operand_equal_p (primarg0, primarg1, 0))
2612 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2613 actual comparison operand, ARG0.
2615 First throw away any conversions to wider types
2616 already present in the operands. */
2618 primarg1 = get_narrower (arg1, &unsignedp1);
2619 primother = get_narrower (other, &unsignedpo);
2621 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2622 if (unsignedp1 == unsignedpo
2623 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2624 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2626 tree type = TREE_TYPE (arg0);
2628 /* Make sure shorter operand is extended the right way
2629 to match the longer operand. */
2630 primarg1 = fold_convert (lang_hooks.types.signed_or_unsigned_type
2631 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2633 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
2640 /* See if ARG is an expression that is either a comparison or is performing
2641 arithmetic on comparisons. The comparisons must only be comparing
2642 two different values, which will be stored in *CVAL1 and *CVAL2; if
2643 they are nonzero it means that some operands have already been found.
2644 No variables may be used anywhere else in the expression except in the
2645 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2646 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2648 If this is true, return 1. Otherwise, return zero. */
2651 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2653 enum tree_code code = TREE_CODE (arg);
2654 char class = TREE_CODE_CLASS (code);
2656 /* We can handle some of the 'e' cases here. */
2657 if (class == 'e' && code == TRUTH_NOT_EXPR)
2659 else if (class == 'e'
2660 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2661 || code == COMPOUND_EXPR))
2664 else if (class == 'e' && code == SAVE_EXPR
2665 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2667 /* If we've already found a CVAL1 or CVAL2, this expression is
2668 two complex to handle. */
2669 if (*cval1 || *cval2)
2679 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2682 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2683 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2684 cval1, cval2, save_p));
2690 if (code == COND_EXPR)
2691 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2692 cval1, cval2, save_p)
2693 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2694 cval1, cval2, save_p)
2695 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2696 cval1, cval2, save_p));
2700 /* First see if we can handle the first operand, then the second. For
2701 the second operand, we know *CVAL1 can't be zero. It must be that
2702 one side of the comparison is each of the values; test for the
2703 case where this isn't true by failing if the two operands
2706 if (operand_equal_p (TREE_OPERAND (arg, 0),
2707 TREE_OPERAND (arg, 1), 0))
2711 *cval1 = TREE_OPERAND (arg, 0);
2712 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2714 else if (*cval2 == 0)
2715 *cval2 = TREE_OPERAND (arg, 0);
2716 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2721 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2723 else if (*cval2 == 0)
2724 *cval2 = TREE_OPERAND (arg, 1);
2725 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2737 /* ARG is a tree that is known to contain just arithmetic operations and
2738 comparisons. Evaluate the operations in the tree substituting NEW0 for
2739 any occurrence of OLD0 as an operand of a comparison and likewise for
2743 eval_subst (tree arg, tree old0, tree new0, tree old1, tree new1)
2745 tree type = TREE_TYPE (arg);
2746 enum tree_code code = TREE_CODE (arg);
2747 char class = TREE_CODE_CLASS (code);
2749 /* We can handle some of the 'e' cases here. */
2750 if (class == 'e' && code == TRUTH_NOT_EXPR)
2752 else if (class == 'e'
2753 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2759 return fold (build1 (code, type,
2760 eval_subst (TREE_OPERAND (arg, 0),
2761 old0, new0, old1, new1)));
2764 return fold (build2 (code, type,
2765 eval_subst (TREE_OPERAND (arg, 0),
2766 old0, new0, old1, new1),
2767 eval_subst (TREE_OPERAND (arg, 1),
2768 old0, new0, old1, new1)));
2774 return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);
2777 return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);
2780 return fold (build3 (code, type,
2781 eval_subst (TREE_OPERAND (arg, 0),
2782 old0, new0, old1, new1),
2783 eval_subst (TREE_OPERAND (arg, 1),
2784 old0, new0, old1, new1),
2785 eval_subst (TREE_OPERAND (arg, 2),
2786 old0, new0, old1, new1)));
2790 /* Fall through - ??? */
2794 tree arg0 = TREE_OPERAND (arg, 0);
2795 tree arg1 = TREE_OPERAND (arg, 1);
2797 /* We need to check both for exact equality and tree equality. The
2798 former will be true if the operand has a side-effect. In that
2799 case, we know the operand occurred exactly once. */
2801 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
2803 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
2806 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
2808 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
2811 return fold (build2 (code, type, arg0, arg1));
2819 /* Return a tree for the case when the result of an expression is RESULT
2820 converted to TYPE and OMITTED was previously an operand of the expression
2821 but is now not needed (e.g., we folded OMITTED * 0).
2823 If OMITTED has side effects, we must evaluate it. Otherwise, just do
2824 the conversion of RESULT to TYPE. */
2827 omit_one_operand (tree type, tree result, tree omitted)
2829 tree t = fold_convert (type, result);
2831 if (TREE_SIDE_EFFECTS (omitted))
2832 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
2834 return non_lvalue (t);
2837 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
2840 pedantic_omit_one_operand (tree type, tree result, tree omitted)
2842 tree t = fold_convert (type, result);
2844 if (TREE_SIDE_EFFECTS (omitted))
2845 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
2847 return pedantic_non_lvalue (t);
2850 /* Return a tree for the case when the result of an expression is RESULT
2851 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
2852 of the expression but are now not needed.
2854 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
2855 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
2856 evaluated before OMITTED2. Otherwise, if neither has side effects,
2857 just do the conversion of RESULT to TYPE. */
2860 omit_two_operands (tree type, tree result, tree omitted1, tree omitted2)
2862 tree t = fold_convert (type, result);
2864 if (TREE_SIDE_EFFECTS (omitted2))
2865 t = build2 (COMPOUND_EXPR, type, omitted2, t);
2866 if (TREE_SIDE_EFFECTS (omitted1))
2867 t = build2 (COMPOUND_EXPR, type, omitted1, t);
2869 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue (t) : t;
2873 /* Return a simplified tree node for the truth-negation of ARG. This
2874 never alters ARG itself. We assume that ARG is an operation that
2875 returns a truth value (0 or 1).
2877 FIXME: one would think we would fold the result, but it causes
2878 problems with the dominator optimizer. */
2880 invert_truthvalue (tree arg)
2882 tree type = TREE_TYPE (arg);
2883 enum tree_code code = TREE_CODE (arg);
2885 if (code == ERROR_MARK)
2888 /* If this is a comparison, we can simply invert it, except for
2889 floating-point non-equality comparisons, in which case we just
2890 enclose a TRUTH_NOT_EXPR around what we have. */
2892 if (TREE_CODE_CLASS (code) == '<')
2894 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
2895 if (FLOAT_TYPE_P (op_type)
2896 && flag_trapping_math
2897 && code != ORDERED_EXPR && code != UNORDERED_EXPR
2898 && code != NE_EXPR && code != EQ_EXPR)
2899 return build1 (TRUTH_NOT_EXPR, type, arg);
2902 code = invert_tree_comparison (code,
2903 HONOR_NANS (TYPE_MODE (op_type)));
2904 if (code == ERROR_MARK)
2905 return build1 (TRUTH_NOT_EXPR, type, arg);
2907 return build2 (code, type,
2908 TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
2915 return fold_convert (type, build_int_2 (integer_zerop (arg), 0));
2917 case TRUTH_AND_EXPR:
2918 return build2 (TRUTH_OR_EXPR, type,
2919 invert_truthvalue (TREE_OPERAND (arg, 0)),
2920 invert_truthvalue (TREE_OPERAND (arg, 1)));
2923 return build2 (TRUTH_AND_EXPR, type,
2924 invert_truthvalue (TREE_OPERAND (arg, 0)),
2925 invert_truthvalue (TREE_OPERAND (arg, 1)));
2927 case TRUTH_XOR_EXPR:
2928 /* Here we can invert either operand. We invert the first operand
2929 unless the second operand is a TRUTH_NOT_EXPR in which case our
2930 result is the XOR of the first operand with the inside of the
2931 negation of the second operand. */
2933 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
2934 return build2 (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
2935 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
2937 return build2 (TRUTH_XOR_EXPR, type,
2938 invert_truthvalue (TREE_OPERAND (arg, 0)),
2939 TREE_OPERAND (arg, 1));
2941 case TRUTH_ANDIF_EXPR:
2942 return build2 (TRUTH_ORIF_EXPR, type,
2943 invert_truthvalue (TREE_OPERAND (arg, 0)),
2944 invert_truthvalue (TREE_OPERAND (arg, 1)));
2946 case TRUTH_ORIF_EXPR:
2947 return build2 (TRUTH_ANDIF_EXPR, type,
2948 invert_truthvalue (TREE_OPERAND (arg, 0)),
2949 invert_truthvalue (TREE_OPERAND (arg, 1)));
2951 case TRUTH_NOT_EXPR:
2952 return TREE_OPERAND (arg, 0);
2955 return build3 (COND_EXPR, type, TREE_OPERAND (arg, 0),
2956 invert_truthvalue (TREE_OPERAND (arg, 1)),
2957 invert_truthvalue (TREE_OPERAND (arg, 2)));
2960 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
2961 invert_truthvalue (TREE_OPERAND (arg, 1)));
2963 case NON_LVALUE_EXPR:
2964 return invert_truthvalue (TREE_OPERAND (arg, 0));
2967 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
2972 return build1 (TREE_CODE (arg), type,
2973 invert_truthvalue (TREE_OPERAND (arg, 0)));
2976 if (!integer_onep (TREE_OPERAND (arg, 1)))
2978 return build2 (EQ_EXPR, type, arg,
2979 fold_convert (type, integer_zero_node));
2982 return build1 (TRUTH_NOT_EXPR, type, arg);
2984 case CLEANUP_POINT_EXPR:
2985 return build1 (CLEANUP_POINT_EXPR, type,
2986 invert_truthvalue (TREE_OPERAND (arg, 0)));
2991 if (TREE_CODE (TREE_TYPE (arg)) != BOOLEAN_TYPE)
2993 return build1 (TRUTH_NOT_EXPR, type, arg);
2996 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
2997 operands are another bit-wise operation with a common input. If so,
2998 distribute the bit operations to save an operation and possibly two if
2999 constants are involved. For example, convert
3000 (A | B) & (A | C) into A | (B & C)
3001 Further simplification will occur if B and C are constants.
3003 If this optimization cannot be done, 0 will be returned. */
3006 distribute_bit_expr (enum tree_code code, tree type, tree arg0, tree arg1)
3011 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3012 || TREE_CODE (arg0) == code
3013 || (TREE_CODE (arg0) != BIT_AND_EXPR
3014 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3017 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3019 common = TREE_OPERAND (arg0, 0);
3020 left = TREE_OPERAND (arg0, 1);
3021 right = TREE_OPERAND (arg1, 1);
3023 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3025 common = TREE_OPERAND (arg0, 0);
3026 left = TREE_OPERAND (arg0, 1);
3027 right = TREE_OPERAND (arg1, 0);
3029 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3031 common = TREE_OPERAND (arg0, 1);
3032 left = TREE_OPERAND (arg0, 0);
3033 right = TREE_OPERAND (arg1, 1);
3035 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3037 common = TREE_OPERAND (arg0, 1);
3038 left = TREE_OPERAND (arg0, 0);
3039 right = TREE_OPERAND (arg1, 0);
3044 return fold (build2 (TREE_CODE (arg0), type, common,
3045 fold (build2 (code, type, left, right))));
3048 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3049 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3052 make_bit_field_ref (tree inner, tree type, int bitsize, int bitpos,
3055 tree result = build3 (BIT_FIELD_REF, type, inner,
3056 size_int (bitsize), bitsize_int (bitpos));
3058 BIT_FIELD_REF_UNSIGNED (result) = unsignedp;
3063 /* Optimize a bit-field compare.
3065 There are two cases: First is a compare against a constant and the
3066 second is a comparison of two items where the fields are at the same
3067 bit position relative to the start of a chunk (byte, halfword, word)
3068 large enough to contain it. In these cases we can avoid the shift
3069 implicit in bitfield extractions.
3071 For constants, we emit a compare of the shifted constant with the
3072 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3073 compared. For two fields at the same position, we do the ANDs with the
3074 similar mask and compare the result of the ANDs.
3076 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3077 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3078 are the left and right operands of the comparison, respectively.
3080 If the optimization described above can be done, we return the resulting
3081 tree. Otherwise we return zero. */
3084 optimize_bit_field_compare (enum tree_code code, tree compare_type,
3087 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3088 tree type = TREE_TYPE (lhs);
3089 tree signed_type, unsigned_type;
3090 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3091 enum machine_mode lmode, rmode, nmode;
3092 int lunsignedp, runsignedp;
3093 int lvolatilep = 0, rvolatilep = 0;
3094 tree linner, rinner = NULL_TREE;
3098 /* Get all the information about the extractions being done. If the bit size
3099 if the same as the size of the underlying object, we aren't doing an
3100 extraction at all and so can do nothing. We also don't want to
3101 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3102 then will no longer be able to replace it. */
3103 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3104 &lunsignedp, &lvolatilep);
3105 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3106 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
3111 /* If this is not a constant, we can only do something if bit positions,
3112 sizes, and signedness are the same. */
3113 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3114 &runsignedp, &rvolatilep);
3116 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3117 || lunsignedp != runsignedp || offset != 0
3118 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
3122 /* See if we can find a mode to refer to this field. We should be able to,
3123 but fail if we can't. */
3124 nmode = get_best_mode (lbitsize, lbitpos,
3125 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3126 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3127 TYPE_ALIGN (TREE_TYPE (rinner))),
3128 word_mode, lvolatilep || rvolatilep);
3129 if (nmode == VOIDmode)
3132 /* Set signed and unsigned types of the precision of this mode for the
3134 signed_type = lang_hooks.types.type_for_mode (nmode, 0);
3135 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3137 /* Compute the bit position and size for the new reference and our offset
3138 within it. If the new reference is the same size as the original, we
3139 won't optimize anything, so return zero. */
3140 nbitsize = GET_MODE_BITSIZE (nmode);
3141 nbitpos = lbitpos & ~ (nbitsize - 1);
3143 if (nbitsize == lbitsize)
3146 if (BYTES_BIG_ENDIAN)
3147 lbitpos = nbitsize - lbitsize - lbitpos;
3149 /* Make the mask to be used against the extracted field. */
3150 mask = build_int_2 (~0, ~0);
3151 TREE_TYPE (mask) = unsigned_type;
3152 force_fit_type (mask, 0);
3153 mask = fold_convert (unsigned_type, mask);
3154 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
3155 mask = const_binop (RSHIFT_EXPR, mask,
3156 size_int (nbitsize - lbitsize - lbitpos), 0);
3159 /* If not comparing with constant, just rework the comparison
3161 return build2 (code, compare_type,
3162 build2 (BIT_AND_EXPR, unsigned_type,
3163 make_bit_field_ref (linner, unsigned_type,
3164 nbitsize, nbitpos, 1),
3166 build2 (BIT_AND_EXPR, unsigned_type,
3167 make_bit_field_ref (rinner, unsigned_type,
3168 nbitsize, nbitpos, 1),
3171 /* Otherwise, we are handling the constant case. See if the constant is too
3172 big for the field. Warn and return a tree of for 0 (false) if so. We do
3173 this not only for its own sake, but to avoid having to test for this
3174 error case below. If we didn't, we might generate wrong code.
3176 For unsigned fields, the constant shifted right by the field length should
3177 be all zero. For signed fields, the high-order bits should agree with
3182 if (! integer_zerop (const_binop (RSHIFT_EXPR,
3183 fold_convert (unsigned_type, rhs),
3184 size_int (lbitsize), 0)))
3186 warning ("comparison is always %d due to width of bit-field",
3188 return constant_boolean_node (code == NE_EXPR, compare_type);
3193 tree tem = const_binop (RSHIFT_EXPR, fold_convert (signed_type, rhs),
3194 size_int (lbitsize - 1), 0);
3195 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
3197 warning ("comparison is always %d due to width of bit-field",
3199 return constant_boolean_node (code == NE_EXPR, compare_type);
3203 /* Single-bit compares should always be against zero. */
3204 if (lbitsize == 1 && ! integer_zerop (rhs))
3206 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3207 rhs = fold_convert (type, integer_zero_node);
3210 /* Make a new bitfield reference, shift the constant over the
3211 appropriate number of bits and mask it with the computed mask
3212 (in case this was a signed field). If we changed it, make a new one. */
3213 lhs = make_bit_field_ref (linner, unsigned_type, nbitsize, nbitpos, 1);
3216 TREE_SIDE_EFFECTS (lhs) = 1;
3217 TREE_THIS_VOLATILE (lhs) = 1;
3220 rhs = fold (const_binop (BIT_AND_EXPR,
3221 const_binop (LSHIFT_EXPR,
3222 fold_convert (unsigned_type, rhs),
3223 size_int (lbitpos), 0),
3226 return build2 (code, compare_type,
3227 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask),
3231 /* Subroutine for fold_truthop: decode a field reference.
3233 If EXP is a comparison reference, we return the innermost reference.
3235 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3236 set to the starting bit number.
3238 If the innermost field can be completely contained in a mode-sized
3239 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3241 *PVOLATILEP is set to 1 if the any expression encountered is volatile;
3242 otherwise it is not changed.
3244 *PUNSIGNEDP is set to the signedness of the field.
3246 *PMASK is set to the mask used. This is either contained in a
3247 BIT_AND_EXPR or derived from the width of the field.
3249 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3251 Return 0 if this is not a component reference or is one that we can't
3252 do anything with. */
3255 decode_field_reference (tree exp, HOST_WIDE_INT *pbitsize,
3256 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3257 int *punsignedp, int *pvolatilep,
3258 tree *pmask, tree *pand_mask)
3260 tree outer_type = 0;
3262 tree mask, inner, offset;
3264 unsigned int precision;
3266 /* All the optimizations using this function assume integer fields.
3267 There are problems with FP fields since the type_for_size call
3268 below can fail for, e.g., XFmode. */
3269 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3272 /* We are interested in the bare arrangement of bits, so strip everything
3273 that doesn't affect the machine mode. However, record the type of the
3274 outermost expression if it may matter below. */
3275 if (TREE_CODE (exp) == NOP_EXPR
3276 || TREE_CODE (exp) == CONVERT_EXPR
3277 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3278 outer_type = TREE_TYPE (exp);
3281 if (TREE_CODE (exp) == BIT_AND_EXPR)
3283 and_mask = TREE_OPERAND (exp, 1);
3284 exp = TREE_OPERAND (exp, 0);
3285 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3286 if (TREE_CODE (and_mask) != INTEGER_CST)
3290 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3291 punsignedp, pvolatilep);
3292 if ((inner == exp && and_mask == 0)
3293 || *pbitsize < 0 || offset != 0
3294 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3297 /* If the number of bits in the reference is the same as the bitsize of
3298 the outer type, then the outer type gives the signedness. Otherwise
3299 (in case of a small bitfield) the signedness is unchanged. */
3300 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3301 *punsignedp = TYPE_UNSIGNED (outer_type);
3303 /* Compute the mask to access the bitfield. */
3304 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3305 precision = TYPE_PRECISION (unsigned_type);
3307 mask = build_int_2 (~0, ~0);
3308 TREE_TYPE (mask) = unsigned_type;
3309 force_fit_type (mask, 0);
3310 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3311 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3313 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3315 mask = fold (build2 (BIT_AND_EXPR, unsigned_type,
3316 fold_convert (unsigned_type, and_mask), mask));
3319 *pand_mask = and_mask;
3323 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3327 all_ones_mask_p (tree mask, int size)
3329 tree type = TREE_TYPE (mask);
3330 unsigned int precision = TYPE_PRECISION (type);
3333 tmask = build_int_2 (~0, ~0);
3334 TREE_TYPE (tmask) = lang_hooks.types.signed_type (type);
3335 force_fit_type (tmask, 0);
3337 tree_int_cst_equal (mask,
3338 const_binop (RSHIFT_EXPR,
3339 const_binop (LSHIFT_EXPR, tmask,
3340 size_int (precision - size),
3342 size_int (precision - size), 0));
3345 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
3346 represents the sign bit of EXP's type. If EXP represents a sign
3347 or zero extension, also test VAL against the unextended type.
3348 The return value is the (sub)expression whose sign bit is VAL,
3349 or NULL_TREE otherwise. */
3352 sign_bit_p (tree exp, tree val)
3354 unsigned HOST_WIDE_INT mask_lo, lo;
3355 HOST_WIDE_INT mask_hi, hi;
3359 /* Tree EXP must have an integral type. */
3360 t = TREE_TYPE (exp);
3361 if (! INTEGRAL_TYPE_P (t))
3364 /* Tree VAL must be an integer constant. */
3365 if (TREE_CODE (val) != INTEGER_CST
3366 || TREE_CONSTANT_OVERFLOW (val))
3369 width = TYPE_PRECISION (t);
3370 if (width > HOST_BITS_PER_WIDE_INT)
3372 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3375 mask_hi = ((unsigned HOST_WIDE_INT) -1
3376 >> (2 * HOST_BITS_PER_WIDE_INT - width));
3382 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3385 mask_lo = ((unsigned HOST_WIDE_INT) -1
3386 >> (HOST_BITS_PER_WIDE_INT - width));
3389 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3390 treat VAL as if it were unsigned. */
3391 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
3392 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
3395 /* Handle extension from a narrower type. */
3396 if (TREE_CODE (exp) == NOP_EXPR
3397 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3398 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3403 /* Subroutine for fold_truthop: determine if an operand is simple enough
3404 to be evaluated unconditionally. */
3407 simple_operand_p (tree exp)
3409 /* Strip any conversions that don't change the machine mode. */
3410 while ((TREE_CODE (exp) == NOP_EXPR
3411 || TREE_CODE (exp) == CONVERT_EXPR)
3412 && (TYPE_MODE (TREE_TYPE (exp))
3413 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
3414 exp = TREE_OPERAND (exp, 0);
3416 return (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c'
3418 && ! TREE_ADDRESSABLE (exp)
3419 && ! TREE_THIS_VOLATILE (exp)
3420 && ! DECL_NONLOCAL (exp)
3421 /* Don't regard global variables as simple. They may be
3422 allocated in ways unknown to the compiler (shared memory,
3423 #pragma weak, etc). */
3424 && ! TREE_PUBLIC (exp)
3425 && ! DECL_EXTERNAL (exp)
3426 /* Loading a static variable is unduly expensive, but global
3427 registers aren't expensive. */
3428 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3431 /* The following functions are subroutines to fold_range_test and allow it to
3432 try to change a logical combination of comparisons into a range test.
3435 X == 2 || X == 3 || X == 4 || X == 5
3439 (unsigned) (X - 2) <= 3
3441 We describe each set of comparisons as being either inside or outside
3442 a range, using a variable named like IN_P, and then describe the
3443 range with a lower and upper bound. If one of the bounds is omitted,
3444 it represents either the highest or lowest value of the type.
3446 In the comments below, we represent a range by two numbers in brackets
3447 preceded by a "+" to designate being inside that range, or a "-" to
3448 designate being outside that range, so the condition can be inverted by
3449 flipping the prefix. An omitted bound is represented by a "-". For
3450 example, "- [-, 10]" means being outside the range starting at the lowest
3451 possible value and ending at 10, in other words, being greater than 10.
3452 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3455 We set up things so that the missing bounds are handled in a consistent
3456 manner so neither a missing bound nor "true" and "false" need to be
3457 handled using a special case. */
3459 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3460 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3461 and UPPER1_P are nonzero if the respective argument is an upper bound
3462 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3463 must be specified for a comparison. ARG1 will be converted to ARG0's
3464 type if both are specified. */
3467 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3468 tree arg1, int upper1_p)
3474 /* If neither arg represents infinity, do the normal operation.
3475 Else, if not a comparison, return infinity. Else handle the special
3476 comparison rules. Note that most of the cases below won't occur, but
3477 are handled for consistency. */
3479 if (arg0 != 0 && arg1 != 0)
3481 tem = fold (build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3482 arg0, fold_convert (TREE_TYPE (arg0), arg1)));
3484 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3487 if (TREE_CODE_CLASS (code) != '<')
3490 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3491 for neither. In real maths, we cannot assume open ended ranges are
3492 the same. But, this is computer arithmetic, where numbers are finite.
3493 We can therefore make the transformation of any unbounded range with
3494 the value Z, Z being greater than any representable number. This permits
3495 us to treat unbounded ranges as equal. */
3496 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3497 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3501 result = sgn0 == sgn1;
3504 result = sgn0 != sgn1;
3507 result = sgn0 < sgn1;
3510 result = sgn0 <= sgn1;
3513 result = sgn0 > sgn1;
3516 result = sgn0 >= sgn1;
3522 return constant_boolean_node (result, type);
3525 /* Given EXP, a logical expression, set the range it is testing into
3526 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
3527 actually being tested. *PLOW and *PHIGH will be made of the same type
3528 as the returned expression. If EXP is not a comparison, we will most
3529 likely not be returning a useful value and range. */
3532 make_range (tree exp, int *pin_p, tree *plow, tree *phigh)
3534 enum tree_code code;
3535 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
3536 tree exp_type = NULL_TREE, arg0_type = NULL_TREE;
3538 tree low, high, n_low, n_high;
3540 /* Start with simply saying "EXP != 0" and then look at the code of EXP
3541 and see if we can refine the range. Some of the cases below may not
3542 happen, but it doesn't seem worth worrying about this. We "continue"
3543 the outer loop when we've changed something; otherwise we "break"
3544 the switch, which will "break" the while. */
3547 low = high = fold_convert (TREE_TYPE (exp), integer_zero_node);
3551 code = TREE_CODE (exp);
3552 exp_type = TREE_TYPE (exp);
3554 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
3556 if (first_rtl_op (code) > 0)
3557 arg0 = TREE_OPERAND (exp, 0);
3558 if (TREE_CODE_CLASS (code) == '<'
3559 || TREE_CODE_CLASS (code) == '1'
3560 || TREE_CODE_CLASS (code) == '2')
3561 arg0_type = TREE_TYPE (arg0);
3562 if (TREE_CODE_CLASS (code) == '2'
3563 || TREE_CODE_CLASS (code) == '<'
3564 || (TREE_CODE_CLASS (code) == 'e'
3565 && TREE_CODE_LENGTH (code) > 1))
3566 arg1 = TREE_OPERAND (exp, 1);
3571 case TRUTH_NOT_EXPR:
3572 in_p = ! in_p, exp = arg0;
3575 case EQ_EXPR: case NE_EXPR:
3576 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3577 /* We can only do something if the range is testing for zero
3578 and if the second operand is an integer constant. Note that
3579 saying something is "in" the range we make is done by
3580 complementing IN_P since it will set in the initial case of
3581 being not equal to zero; "out" is leaving it alone. */
3582 if (low == 0 || high == 0
3583 || ! integer_zerop (low) || ! integer_zerop (high)
3584 || TREE_CODE (arg1) != INTEGER_CST)
3589 case NE_EXPR: /* - [c, c] */
3592 case EQ_EXPR: /* + [c, c] */
3593 in_p = ! in_p, low = high = arg1;
3595 case GT_EXPR: /* - [-, c] */
3596 low = 0, high = arg1;
3598 case GE_EXPR: /* + [c, -] */
3599 in_p = ! in_p, low = arg1, high = 0;
3601 case LT_EXPR: /* - [c, -] */
3602 low = arg1, high = 0;
3604 case LE_EXPR: /* + [-, c] */
3605 in_p = ! in_p, low = 0, high = arg1;
3611 /* If this is an unsigned comparison, we also know that EXP is
3612 greater than or equal to zero. We base the range tests we make
3613 on that fact, so we record it here so we can parse existing
3614 range tests. We test arg0_type since often the return type
3615 of, e.g. EQ_EXPR, is boolean. */
3616 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
3618 if (! merge_ranges (&n_in_p, &n_low, &n_high, in_p, low, high,
3619 1, fold_convert (arg0_type, integer_zero_node),
3623 in_p = n_in_p, low = n_low, high = n_high;
3625 /* If the high bound is missing, but we have a nonzero low
3626 bound, reverse the range so it goes from zero to the low bound
3628 if (high == 0 && low && ! integer_zerop (low))
3631 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
3632 integer_one_node, 0);
3633 low = fold_convert (arg0_type, integer_zero_node);
3641 /* (-x) IN [a,b] -> x in [-b, -a] */
3642 n_low = range_binop (MINUS_EXPR, exp_type,
3643 fold_convert (exp_type, integer_zero_node),
3645 n_high = range_binop (MINUS_EXPR, exp_type,
3646 fold_convert (exp_type, integer_zero_node),
3648 low = n_low, high = n_high;
3654 exp = build2 (MINUS_EXPR, exp_type, negate_expr (arg0),
3655 fold_convert (exp_type, integer_one_node));
3658 case PLUS_EXPR: case MINUS_EXPR:
3659 if (TREE_CODE (arg1) != INTEGER_CST)
3662 /* If EXP is signed, any overflow in the computation is undefined,
3663 so we don't worry about it so long as our computations on
3664 the bounds don't overflow. For unsigned, overflow is defined
3665 and this is exactly the right thing. */
3666 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3667 arg0_type, low, 0, arg1, 0);
3668 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3669 arg0_type, high, 1, arg1, 0);
3670 if ((n_low != 0 && TREE_OVERFLOW (n_low))
3671 || (n_high != 0 && TREE_OVERFLOW (n_high)))
3674 /* Check for an unsigned range which has wrapped around the maximum
3675 value thus making n_high < n_low, and normalize it. */
3676 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
3678 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
3679 integer_one_node, 0);
3680 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
3681 integer_one_node, 0);
3683 /* If the range is of the form +/- [ x+1, x ], we won't
3684 be able to normalize it. But then, it represents the
3685 whole range or the empty set, so make it
3687 if (tree_int_cst_equal (n_low, low)
3688 && tree_int_cst_equal (n_high, high))
3694 low = n_low, high = n_high;
3699 case NOP_EXPR: case NON_LVALUE_EXPR: case CONVERT_EXPR:
3700 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
3703 if (! INTEGRAL_TYPE_P (arg0_type)
3704 || (low != 0 && ! int_fits_type_p (low, arg0_type))
3705 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
3708 n_low = low, n_high = high;
3711 n_low = fold_convert (arg0_type, n_low);
3714 n_high = fold_convert (arg0_type, n_high);
3717 /* If we're converting arg0 from an unsigned type, to exp,
3718 a signed type, we will be doing the comparison as unsigned.
3719 The tests above have already verified that LOW and HIGH
3722 So we have to ensure that we will handle large unsigned
3723 values the same way that the current signed bounds treat
3726 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
3729 tree equiv_type = lang_hooks.types.type_for_mode
3730 (TYPE_MODE (arg0_type), 1);
3732 /* A range without an upper bound is, naturally, unbounded.
3733 Since convert would have cropped a very large value, use
3734 the max value for the destination type. */
3736 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
3737 : TYPE_MAX_VALUE (arg0_type);
3739 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
3740 high_positive = fold (build2 (RSHIFT_EXPR, arg0_type,
3741 fold_convert (arg0_type,
3743 fold_convert (arg0_type,
3744 integer_one_node)));
3746 /* If the low bound is specified, "and" the range with the
3747 range for which the original unsigned value will be
3751 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3752 1, n_low, n_high, 1,
3753 fold_convert (arg0_type, integer_zero_node),
3757 in_p = (n_in_p == in_p);
3761 /* Otherwise, "or" the range with the range of the input
3762 that will be interpreted as negative. */
3763 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3764 0, n_low, n_high, 1,
3765 fold_convert (arg0_type, integer_zero_node),
3769 in_p = (in_p != n_in_p);
3774 low = n_low, high = n_high;
3784 /* If EXP is a constant, we can evaluate whether this is true or false. */
3785 if (TREE_CODE (exp) == INTEGER_CST)
3787 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
3789 && integer_onep (range_binop (LE_EXPR, integer_type_node,
3795 *pin_p = in_p, *plow = low, *phigh = high;
3799 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
3800 type, TYPE, return an expression to test if EXP is in (or out of, depending
3801 on IN_P) the range. Return 0 if the test couldn't be created. */
3804 build_range_check (tree type, tree exp, int in_p, tree low, tree high)
3806 tree etype = TREE_TYPE (exp);
3811 value = build_range_check (type, exp, 1, low, high);
3813 return invert_truthvalue (value);
3818 if (low == 0 && high == 0)
3819 return fold_convert (type, integer_one_node);
3822 return fold (build2 (LE_EXPR, type, exp, high));
3825 return fold (build2 (GE_EXPR, type, exp, low));
3827 if (operand_equal_p (low, high, 0))
3828 return fold (build2 (EQ_EXPR, type, exp, low));
3830 if (integer_zerop (low))
3832 if (! TYPE_UNSIGNED (etype))
3834 etype = lang_hooks.types.unsigned_type (etype);
3835 high = fold_convert (etype, high);
3836 exp = fold_convert (etype, exp);
3838 return build_range_check (type, exp, 1, 0, high);
3841 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
3842 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
3844 unsigned HOST_WIDE_INT lo;
3848 prec = TYPE_PRECISION (etype);
3849 if (prec <= HOST_BITS_PER_WIDE_INT)
3852 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
3856 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
3857 lo = (unsigned HOST_WIDE_INT) -1;
3860 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
3862 if (TYPE_UNSIGNED (etype))
3864 etype = lang_hooks.types.signed_type (etype);
3865 exp = fold_convert (etype, exp);
3867 return fold (build2 (GT_EXPR, type, exp,
3868 fold_convert (etype, integer_zero_node)));
3872 value = const_binop (MINUS_EXPR, high, low, 0);
3873 if (value != 0 && TREE_OVERFLOW (value) && ! TYPE_UNSIGNED (etype))
3875 tree utype, minv, maxv;
3877 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
3878 for the type in question, as we rely on this here. */
3879 switch (TREE_CODE (etype))
3884 utype = lang_hooks.types.unsigned_type (etype);
3885 maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
3886 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
3887 integer_one_node, 1);
3888 minv = fold_convert (utype, TYPE_MIN_VALUE (etype));
3889 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
3893 high = fold_convert (etype, high);
3894 low = fold_convert (etype, low);
3895 exp = fold_convert (etype, exp);
3896 value = const_binop (MINUS_EXPR, high, low, 0);
3904 if (value != 0 && ! TREE_OVERFLOW (value))
3905 return build_range_check (type,
3906 fold (build2 (MINUS_EXPR, etype, exp, low)),
3907 1, fold_convert (etype, integer_zero_node),
3913 /* Given two ranges, see if we can merge them into one. Return 1 if we
3914 can, 0 if we can't. Set the output range into the specified parameters. */
3917 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
3918 tree high0, int in1_p, tree low1, tree high1)
3926 int lowequal = ((low0 == 0 && low1 == 0)
3927 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
3928 low0, 0, low1, 0)));
3929 int highequal = ((high0 == 0 && high1 == 0)
3930 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
3931 high0, 1, high1, 1)));
3933 /* Make range 0 be the range that starts first, or ends last if they
3934 start at the same value. Swap them if it isn't. */
3935 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
3938 && integer_onep (range_binop (GT_EXPR, integer_type_node,
3939 high1, 1, high0, 1))))
3941 temp = in0_p, in0_p = in1_p, in1_p = temp;
3942 tem = low0, low0 = low1, low1 = tem;
3943 tem = high0, high0 = high1, high1 = tem;
3946 /* Now flag two cases, whether the ranges are disjoint or whether the
3947 second range is totally subsumed in the first. Note that the tests
3948 below are simplified by the ones above. */
3949 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
3950 high0, 1, low1, 0));
3951 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
3952 high1, 1, high0, 1));
3954 /* We now have four cases, depending on whether we are including or
3955 excluding the two ranges. */
3958 /* If they don't overlap, the result is false. If the second range
3959 is a subset it is the result. Otherwise, the range is from the start
3960 of the second to the end of the first. */
3962 in_p = 0, low = high = 0;
3964 in_p = 1, low = low1, high = high1;
3966 in_p = 1, low = low1, high = high0;
3969 else if (in0_p && ! in1_p)
3971 /* If they don't overlap, the result is the first range. If they are
3972 equal, the result is false. If the second range is a subset of the
3973 first, and the ranges begin at the same place, we go from just after
3974 the end of the first range to the end of the second. If the second
3975 range is not a subset of the first, or if it is a subset and both
3976 ranges end at the same place, the range starts at the start of the
3977 first range and ends just before the second range.
3978 Otherwise, we can't describe this as a single range. */
3980 in_p = 1, low = low0, high = high0;
3981 else if (lowequal && highequal)
3982 in_p = 0, low = high = 0;
3983 else if (subset && lowequal)
3985 in_p = 1, high = high0;
3986 low = range_binop (PLUS_EXPR, NULL_TREE, high1, 0,
3987 integer_one_node, 0);
3989 else if (! subset || highequal)
3991 in_p = 1, low = low0;
3992 high = range_binop (MINUS_EXPR, NULL_TREE, low1, 0,
3993 integer_one_node, 0);
3999 else if (! in0_p && in1_p)
4001 /* If they don't overlap, the result is the second range. If the second
4002 is a subset of the first, the result is false. Otherwise,
4003 the range starts just after the first range and ends at the
4004 end of the second. */
4006 in_p = 1, low = low1, high = high1;
4007 else if (subset || highequal)
4008 in_p = 0, low = high = 0;
4011 in_p = 1, high = high1;
4012 low = range_binop (PLUS_EXPR, NULL_TREE, high0, 1,
4013 integer_one_node, 0);
4019 /* The case where we are excluding both ranges. Here the complex case
4020 is if they don't overlap. In that case, the only time we have a
4021 range is if they are adjacent. If the second is a subset of the
4022 first, the result is the first. Otherwise, the range to exclude
4023 starts at the beginning of the first range and ends at the end of the
4027 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4028 range_binop (PLUS_EXPR, NULL_TREE,
4030 integer_one_node, 1),
4032 in_p = 0, low = low0, high = high1;
4035 /* Canonicalize - [min, x] into - [-, x]. */
4036 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4037 switch (TREE_CODE (TREE_TYPE (low0)))
4040 if (TYPE_PRECISION (TREE_TYPE (low0))
4041 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4046 if (tree_int_cst_equal (low0,
4047 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4051 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4052 && integer_zerop (low0))
4059 /* Canonicalize - [x, max] into - [x, -]. */
4060 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4061 switch (TREE_CODE (TREE_TYPE (high1)))
4064 if (TYPE_PRECISION (TREE_TYPE (high1))
4065 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4070 if (tree_int_cst_equal (high1,
4071 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4075 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4076 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4078 integer_one_node, 1)))
4085 /* The ranges might be also adjacent between the maximum and
4086 minimum values of the given type. For
4087 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4088 return + [x + 1, y - 1]. */
4089 if (low0 == 0 && high1 == 0)
4091 low = range_binop (PLUS_EXPR, NULL_TREE, high0, 1,
4092 integer_one_node, 1);
4093 high = range_binop (MINUS_EXPR, NULL_TREE, low1, 0,
4094 integer_one_node, 0);
4095 if (low == 0 || high == 0)
4105 in_p = 0, low = low0, high = high0;
4107 in_p = 0, low = low0, high = high1;
4110 *pin_p = in_p, *plow = low, *phigh = high;
4115 /* Subroutine of fold, looking inside expressions of the form
4116 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4117 of the COND_EXPR. This function is being used also to optimize
4118 A op B ? C : A, by reversing the comparison first.
4120 Return a folded expression whose code is not a COND_EXPR
4121 anymore, or NULL_TREE if no folding opportunity is found. */
4124 fold_cond_expr_with_comparison (tree type, tree arg0, tree arg1, tree arg2)
4126 enum tree_code comp_code = TREE_CODE (arg0);
4127 tree arg00 = TREE_OPERAND (arg0, 0);
4128 tree arg01 = TREE_OPERAND (arg0, 1);
4129 tree arg1_type = TREE_TYPE (arg1);
4135 /* If we have A op 0 ? A : -A, consider applying the following
4138 A == 0? A : -A same as -A
4139 A != 0? A : -A same as A
4140 A >= 0? A : -A same as abs (A)
4141 A > 0? A : -A same as abs (A)
4142 A <= 0? A : -A same as -abs (A)
4143 A < 0? A : -A same as -abs (A)
4145 None of these transformations work for modes with signed
4146 zeros. If A is +/-0, the first two transformations will
4147 change the sign of the result (from +0 to -0, or vice
4148 versa). The last four will fix the sign of the result,
4149 even though the original expressions could be positive or
4150 negative, depending on the sign of A.
4152 Note that all these transformations are correct if A is
4153 NaN, since the two alternatives (A and -A) are also NaNs. */
4154 if ((FLOAT_TYPE_P (TREE_TYPE (arg01))
4155 ? real_zerop (arg01)
4156 : integer_zerop (arg01))
4157 && TREE_CODE (arg2) == NEGATE_EXPR
4158 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4162 tem = fold_convert (arg1_type, arg1);
4163 return pedantic_non_lvalue (fold_convert (type, negate_expr (tem)));
4165 return pedantic_non_lvalue (fold_convert (type, arg1));
4168 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4169 arg1 = fold_convert (lang_hooks.types.signed_type
4170 (TREE_TYPE (arg1)), arg1);
4171 tem = fold (build1 (ABS_EXPR, TREE_TYPE (arg1), arg1));
4172 return pedantic_non_lvalue (fold_convert (type, tem));
4175 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4176 arg1 = fold_convert (lang_hooks.types.signed_type
4177 (TREE_TYPE (arg1)), arg1);
4178 tem = fold (build1 (ABS_EXPR, TREE_TYPE (arg1), arg1));
4179 return negate_expr (fold_convert (type, tem));
4184 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4185 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4186 both transformations are correct when A is NaN: A != 0
4187 is then true, and A == 0 is false. */
4189 if (integer_zerop (arg01) && integer_zerop (arg2))
4191 if (comp_code == NE_EXPR)
4192 return pedantic_non_lvalue (fold_convert (type, arg1));
4193 else if (comp_code == EQ_EXPR)
4194 return pedantic_non_lvalue (fold_convert (type, integer_zero_node));
4197 /* Try some transformations of A op B ? A : B.
4199 A == B? A : B same as B
4200 A != B? A : B same as A
4201 A >= B? A : B same as max (A, B)
4202 A > B? A : B same as max (B, A)
4203 A <= B? A : B same as min (A, B)
4204 A < B? A : B same as min (B, A)
4206 As above, these transformations don't work in the presence
4207 of signed zeros. For example, if A and B are zeros of
4208 opposite sign, the first two transformations will change
4209 the sign of the result. In the last four, the original
4210 expressions give different results for (A=+0, B=-0) and
4211 (A=-0, B=+0), but the transformed expressions do not.
4213 The first two transformations are correct if either A or B
4214 is a NaN. In the first transformation, the condition will
4215 be false, and B will indeed be chosen. In the case of the
4216 second transformation, the condition A != B will be true,
4217 and A will be chosen.
4219 The conversions to max() and min() are not correct if B is
4220 a number and A is not. The conditions in the original
4221 expressions will be false, so all four give B. The min()
4222 and max() versions would give a NaN instead. */
4223 if (operand_equal_for_comparison_p (arg01, arg2, arg00))
4225 tree comp_op0 = arg00;
4226 tree comp_op1 = arg01;
4227 tree comp_type = TREE_TYPE (comp_op0);
4229 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4230 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4240 return pedantic_non_lvalue (fold_convert (type, arg2));
4242 return pedantic_non_lvalue (fold_convert (type, arg1));
4245 /* In C++ a ?: expression can be an lvalue, so put the
4246 operand which will be used if they are equal first
4247 so that we can convert this back to the
4248 corresponding COND_EXPR. */
4249 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4250 return pedantic_non_lvalue (
4251 fold_convert (type, fold (build2 (MIN_EXPR, comp_type,
4252 (comp_code == LE_EXPR
4253 ? comp_op0 : comp_op1),
4254 (comp_code == LE_EXPR
4255 ? comp_op1 : comp_op0)))));
4259 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4260 return pedantic_non_lvalue (
4261 fold_convert (type, fold (build2 (MAX_EXPR, comp_type,
4262 (comp_code == GE_EXPR
4263 ? comp_op0 : comp_op1),
4264 (comp_code == GE_EXPR
4265 ? comp_op1 : comp_op0)))));
4272 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4273 we might still be able to simplify this. For example,
4274 if C1 is one less or one more than C2, this might have started
4275 out as a MIN or MAX and been transformed by this function.
4276 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
4278 if (INTEGRAL_TYPE_P (type)
4279 && TREE_CODE (arg01) == INTEGER_CST
4280 && TREE_CODE (arg2) == INTEGER_CST)
4284 /* We can replace A with C1 in this case. */
4285 arg1 = fold_convert (type, arg01);
4286 return fold (build3 (COND_EXPR, type, arg0, arg1, arg2));
4289 /* If C1 is C2 + 1, this is min(A, C2). */
4290 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4292 && operand_equal_p (arg01,
4293 const_binop (PLUS_EXPR, arg2,
4294 integer_one_node, 0),
4296 return pedantic_non_lvalue (fold (build2 (MIN_EXPR,
4297 type, arg1, arg2)));
4301 /* If C1 is C2 - 1, this is min(A, C2). */
4302 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4304 && operand_equal_p (arg01,
4305 const_binop (MINUS_EXPR, arg2,
4306 integer_one_node, 0),
4308 return pedantic_non_lvalue (fold (build2 (MIN_EXPR,
4309 type, arg1, arg2)));
4313 /* If C1 is C2 - 1, this is max(A, C2). */
4314 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4316 && operand_equal_p (arg01,
4317 const_binop (MINUS_EXPR, arg2,
4318 integer_one_node, 0),
4320 return pedantic_non_lvalue (fold (build2 (MAX_EXPR,
4321 type, arg1, arg2)));
4325 /* If C1 is C2 + 1, this is max(A, C2). */
4326 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4328 && operand_equal_p (arg01,
4329 const_binop (PLUS_EXPR, arg2,
4330 integer_one_node, 0),
4332 return pedantic_non_lvalue (fold (build2 (MAX_EXPR,
4333 type, arg1, arg2)));
4346 #ifndef RANGE_TEST_NON_SHORT_CIRCUIT
4347 #define RANGE_TEST_NON_SHORT_CIRCUIT (BRANCH_COST >= 2)
4350 /* EXP is some logical combination of boolean tests. See if we can
4351 merge it into some range test. Return the new tree if so. */
4354 fold_range_test (tree exp)
4356 int or_op = (TREE_CODE (exp) == TRUTH_ORIF_EXPR
4357 || TREE_CODE (exp) == TRUTH_OR_EXPR);
4358 int in0_p, in1_p, in_p;
4359 tree low0, low1, low, high0, high1, high;
4360 tree lhs = make_range (TREE_OPERAND (exp, 0), &in0_p, &low0, &high0);
4361 tree rhs = make_range (TREE_OPERAND (exp, 1), &in1_p, &low1, &high1);
4364 /* If this is an OR operation, invert both sides; we will invert
4365 again at the end. */
4367 in0_p = ! in0_p, in1_p = ! in1_p;
4369 /* If both expressions are the same, if we can merge the ranges, and we
4370 can build the range test, return it or it inverted. If one of the
4371 ranges is always true or always false, consider it to be the same
4372 expression as the other. */
4373 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
4374 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
4376 && 0 != (tem = (build_range_check (TREE_TYPE (exp),
4378 : rhs != 0 ? rhs : integer_zero_node,
4380 return or_op ? invert_truthvalue (tem) : tem;
4382 /* On machines where the branch cost is expensive, if this is a
4383 short-circuited branch and the underlying object on both sides
4384 is the same, make a non-short-circuit operation. */
4385 else if (RANGE_TEST_NON_SHORT_CIRCUIT
4386 && lhs != 0 && rhs != 0
4387 && (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
4388 || TREE_CODE (exp) == TRUTH_ORIF_EXPR)
4389 && operand_equal_p (lhs, rhs, 0))
4391 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
4392 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
4393 which cases we can't do this. */
4394 if (simple_operand_p (lhs))
4395 return build2 (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
4396 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4397 TREE_TYPE (exp), TREE_OPERAND (exp, 0),
4398 TREE_OPERAND (exp, 1));
4400 else if (lang_hooks.decls.global_bindings_p () == 0
4401 && ! CONTAINS_PLACEHOLDER_P (lhs))
4403 tree common = save_expr (lhs);
4405 if (0 != (lhs = build_range_check (TREE_TYPE (exp), common,
4406 or_op ? ! in0_p : in0_p,
4408 && (0 != (rhs = build_range_check (TREE_TYPE (exp), common,
4409 or_op ? ! in1_p : in1_p,
4411 return build2 (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
4412 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4413 TREE_TYPE (exp), lhs, rhs);
4420 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
4421 bit value. Arrange things so the extra bits will be set to zero if and
4422 only if C is signed-extended to its full width. If MASK is nonzero,
4423 it is an INTEGER_CST that should be AND'ed with the extra bits. */
4426 unextend (tree c, int p, int unsignedp, tree mask)
4428 tree type = TREE_TYPE (c);
4429 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
4432 if (p == modesize || unsignedp)
4435 /* We work by getting just the sign bit into the low-order bit, then
4436 into the high-order bit, then sign-extend. We then XOR that value
4438 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
4439 temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
4441 /* We must use a signed type in order to get an arithmetic right shift.
4442 However, we must also avoid introducing accidental overflows, so that
4443 a subsequent call to integer_zerop will work. Hence we must
4444 do the type conversion here. At this point, the constant is either
4445 zero or one, and the conversion to a signed type can never overflow.
4446 We could get an overflow if this conversion is done anywhere else. */
4447 if (TYPE_UNSIGNED (type))
4448 temp = fold_convert (lang_hooks.types.signed_type (type), temp);
4450 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
4451 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
4453 temp = const_binop (BIT_AND_EXPR, temp,
4454 fold_convert (TREE_TYPE (c), mask), 0);
4455 /* If necessary, convert the type back to match the type of C. */
4456 if (TYPE_UNSIGNED (type))
4457 temp = fold_convert (type, temp);
4459 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
4462 /* Find ways of folding logical expressions of LHS and RHS:
4463 Try to merge two comparisons to the same innermost item.
4464 Look for range tests like "ch >= '0' && ch <= '9'".
4465 Look for combinations of simple terms on machines with expensive branches
4466 and evaluate the RHS unconditionally.
4468 For example, if we have p->a == 2 && p->b == 4 and we can make an
4469 object large enough to span both A and B, we can do this with a comparison
4470 against the object ANDed with the a mask.
4472 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
4473 operations to do this with one comparison.
4475 We check for both normal comparisons and the BIT_AND_EXPRs made this by
4476 function and the one above.
4478 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
4479 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
4481 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
4484 We return the simplified tree or 0 if no optimization is possible. */
4487 fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs)
4489 /* If this is the "or" of two comparisons, we can do something if
4490 the comparisons are NE_EXPR. If this is the "and", we can do something
4491 if the comparisons are EQ_EXPR. I.e.,
4492 (a->b == 2 && a->c == 4) can become (a->new == NEW).
4494 WANTED_CODE is this operation code. For single bit fields, we can
4495 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
4496 comparison for one-bit fields. */
4498 enum tree_code wanted_code;
4499 enum tree_code lcode, rcode;
4500 tree ll_arg, lr_arg, rl_arg, rr_arg;
4501 tree ll_inner, lr_inner, rl_inner, rr_inner;
4502 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
4503 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
4504 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
4505 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
4506 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
4507 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
4508 enum machine_mode lnmode, rnmode;
4509 tree ll_mask, lr_mask, rl_mask, rr_mask;
4510 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
4511 tree l_const, r_const;
4512 tree lntype, rntype, result;
4513 int first_bit, end_bit;
4516 /* Start by getting the comparison codes. Fail if anything is volatile.
4517 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
4518 it were surrounded with a NE_EXPR. */
4520 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
4523 lcode = TREE_CODE (lhs);
4524 rcode = TREE_CODE (rhs);
4526 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
4528 lhs = build2 (NE_EXPR, truth_type, lhs, integer_zero_node);
4532 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
4534 rhs = build2 (NE_EXPR, truth_type, rhs, integer_zero_node);
4538 if (TREE_CODE_CLASS (lcode) != '<' || TREE_CODE_CLASS (rcode) != '<')
4541 ll_arg = TREE_OPERAND (lhs, 0);
4542 lr_arg = TREE_OPERAND (lhs, 1);
4543 rl_arg = TREE_OPERAND (rhs, 0);
4544 rr_arg = TREE_OPERAND (rhs, 1);
4546 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
4547 if (simple_operand_p (ll_arg)
4548 && simple_operand_p (lr_arg))
4551 if (operand_equal_p (ll_arg, rl_arg, 0)
4552 && operand_equal_p (lr_arg, rr_arg, 0))
4554 result = combine_comparisons (code, lcode, rcode,
4555 truth_type, ll_arg, lr_arg);
4559 else if (operand_equal_p (ll_arg, rr_arg, 0)
4560 && operand_equal_p (lr_arg, rl_arg, 0))
4562 result = combine_comparisons (code, lcode,
4563 swap_tree_comparison (rcode),
4564 truth_type, ll_arg, lr_arg);
4570 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
4571 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
4573 /* If the RHS can be evaluated unconditionally and its operands are
4574 simple, it wins to evaluate the RHS unconditionally on machines
4575 with expensive branches. In this case, this isn't a comparison
4576 that can be merged. Avoid doing this if the RHS is a floating-point
4577 comparison since those can trap. */
4579 if (BRANCH_COST >= 2
4580 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
4581 && simple_operand_p (rl_arg)
4582 && simple_operand_p (rr_arg))
4584 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
4585 if (code == TRUTH_OR_EXPR
4586 && lcode == NE_EXPR && integer_zerop (lr_arg)
4587 && rcode == NE_EXPR && integer_zerop (rr_arg)
4588 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
4589 return build2 (NE_EXPR, truth_type,
4590 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
4592 fold_convert (TREE_TYPE (ll_arg), integer_zero_node));
4594 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
4595 if (code == TRUTH_AND_EXPR
4596 && lcode == EQ_EXPR && integer_zerop (lr_arg)
4597 && rcode == EQ_EXPR && integer_zerop (rr_arg)
4598 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
4599 return build2 (EQ_EXPR, truth_type,
4600 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
4602 fold_convert (TREE_TYPE (ll_arg), integer_zero_node));
4604 return build2 (code, truth_type, lhs, rhs);
4607 /* See if the comparisons can be merged. Then get all the parameters for
4610 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
4611 || (rcode != EQ_EXPR && rcode != NE_EXPR))
4615 ll_inner = decode_field_reference (ll_arg,
4616 &ll_bitsize, &ll_bitpos, &ll_mode,
4617 &ll_unsignedp, &volatilep, &ll_mask,
4619 lr_inner = decode_field_reference (lr_arg,
4620 &lr_bitsize, &lr_bitpos, &lr_mode,
4621 &lr_unsignedp, &volatilep, &lr_mask,
4623 rl_inner = decode_field_reference (rl_arg,
4624 &rl_bitsize, &rl_bitpos, &rl_mode,
4625 &rl_unsignedp, &volatilep, &rl_mask,
4627 rr_inner = decode_field_reference (rr_arg,
4628 &rr_bitsize, &rr_bitpos, &rr_mode,
4629 &rr_unsignedp, &volatilep, &rr_mask,
4632 /* It must be true that the inner operation on the lhs of each
4633 comparison must be the same if we are to be able to do anything.
4634 Then see if we have constants. If not, the same must be true for
4636 if (volatilep || ll_inner == 0 || rl_inner == 0
4637 || ! operand_equal_p (ll_inner, rl_inner, 0))
4640 if (TREE_CODE (lr_arg) == INTEGER_CST
4641 && TREE_CODE (rr_arg) == INTEGER_CST)
4642 l_const = lr_arg, r_const = rr_arg;
4643 else if (lr_inner == 0 || rr_inner == 0
4644 || ! operand_equal_p (lr_inner, rr_inner, 0))
4647 l_const = r_const = 0;
4649 /* If either comparison code is not correct for our logical operation,
4650 fail. However, we can convert a one-bit comparison against zero into
4651 the opposite comparison against that bit being set in the field. */
4653 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
4654 if (lcode != wanted_code)
4656 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
4658 /* Make the left operand unsigned, since we are only interested
4659 in the value of one bit. Otherwise we are doing the wrong
4668 /* This is analogous to the code for l_const above. */
4669 if (rcode != wanted_code)
4671 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
4680 /* After this point all optimizations will generate bit-field
4681 references, which we might not want. */
4682 if (! lang_hooks.can_use_bit_fields_p ())
4685 /* See if we can find a mode that contains both fields being compared on
4686 the left. If we can't, fail. Otherwise, update all constants and masks
4687 to be relative to a field of that size. */
4688 first_bit = MIN (ll_bitpos, rl_bitpos);
4689 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
4690 lnmode = get_best_mode (end_bit - first_bit, first_bit,
4691 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
4693 if (lnmode == VOIDmode)
4696 lnbitsize = GET_MODE_BITSIZE (lnmode);
4697 lnbitpos = first_bit & ~ (lnbitsize - 1);
4698 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
4699 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
4701 if (BYTES_BIG_ENDIAN)
4703 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
4704 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
4707 ll_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, ll_mask),
4708 size_int (xll_bitpos), 0);
4709 rl_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, rl_mask),
4710 size_int (xrl_bitpos), 0);
4714 l_const = fold_convert (lntype, l_const);
4715 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
4716 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
4717 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
4718 fold (build1 (BIT_NOT_EXPR,
4722 warning ("comparison is always %d", wanted_code == NE_EXPR);
4724 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
4729 r_const = fold_convert (lntype, r_const);
4730 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
4731 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
4732 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
4733 fold (build1 (BIT_NOT_EXPR,
4737 warning ("comparison is always %d", wanted_code == NE_EXPR);
4739 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
4743 /* If the right sides are not constant, do the same for it. Also,
4744 disallow this optimization if a size or signedness mismatch occurs
4745 between the left and right sides. */
4748 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
4749 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
4750 /* Make sure the two fields on the right
4751 correspond to the left without being swapped. */
4752 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
4755 first_bit = MIN (lr_bitpos, rr_bitpos);
4756 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
4757 rnmode = get_best_mode (end_bit - first_bit, first_bit,
4758 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
4760 if (rnmode == VOIDmode)
4763 rnbitsize = GET_MODE_BITSIZE (rnmode);
4764 rnbitpos = first_bit & ~ (rnbitsize - 1);
4765 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
4766 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
4768 if (BYTES_BIG_ENDIAN)
4770 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
4771 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
4774 lr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, lr_mask),
4775 size_int (xlr_bitpos), 0);
4776 rr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, rr_mask),
4777 size_int (xrr_bitpos), 0);
4779 /* Make a mask that corresponds to both fields being compared.
4780 Do this for both items being compared. If the operands are the
4781 same size and the bits being compared are in the same position
4782 then we can do this by masking both and comparing the masked
4784 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
4785 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
4786 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
4788 lhs = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
4789 ll_unsignedp || rl_unsignedp);
4790 if (! all_ones_mask_p (ll_mask, lnbitsize))
4791 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
4793 rhs = make_bit_field_ref (lr_inner, rntype, rnbitsize, rnbitpos,
4794 lr_unsignedp || rr_unsignedp);
4795 if (! all_ones_mask_p (lr_mask, rnbitsize))
4796 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
4798 return build2 (wanted_code, truth_type, lhs, rhs);
4801 /* There is still another way we can do something: If both pairs of
4802 fields being compared are adjacent, we may be able to make a wider
4803 field containing them both.
4805 Note that we still must mask the lhs/rhs expressions. Furthermore,
4806 the mask must be shifted to account for the shift done by
4807 make_bit_field_ref. */
4808 if ((ll_bitsize + ll_bitpos == rl_bitpos
4809 && lr_bitsize + lr_bitpos == rr_bitpos)
4810 || (ll_bitpos == rl_bitpos + rl_bitsize
4811 && lr_bitpos == rr_bitpos + rr_bitsize))
4815 lhs = make_bit_field_ref (ll_inner, lntype, ll_bitsize + rl_bitsize,
4816 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
4817 rhs = make_bit_field_ref (lr_inner, rntype, lr_bitsize + rr_bitsize,
4818 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
4820 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
4821 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
4822 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
4823 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);
4825 /* Convert to the smaller type before masking out unwanted bits. */
4827 if (lntype != rntype)
4829 if (lnbitsize > rnbitsize)
4831 lhs = fold_convert (rntype, lhs);
4832 ll_mask = fold_convert (rntype, ll_mask);
4835 else if (lnbitsize < rnbitsize)
4837 rhs = fold_convert (lntype, rhs);
4838 lr_mask = fold_convert (lntype, lr_mask);
4843 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
4844 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
4846 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
4847 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
4849 return build2 (wanted_code, truth_type, lhs, rhs);
4855 /* Handle the case of comparisons with constants. If there is something in
4856 common between the masks, those bits of the constants must be the same.
4857 If not, the condition is always false. Test for this to avoid generating
4858 incorrect code below. */
4859 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
4860 if (! integer_zerop (result)
4861 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
4862 const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
4864 if (wanted_code == NE_EXPR)
4866 warning ("`or' of unmatched not-equal tests is always 1");
4867 return constant_boolean_node (true, truth_type);
4871 warning ("`and' of mutually exclusive equal-tests is always 0");
4872 return constant_boolean_node (false, truth_type);
4876 /* Construct the expression we will return. First get the component
4877 reference we will make. Unless the mask is all ones the width of
4878 that field, perform the mask operation. Then compare with the
4880 result = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
4881 ll_unsignedp || rl_unsignedp);
4883 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
4884 if (! all_ones_mask_p (ll_mask, lnbitsize))
4885 result = build2 (BIT_AND_EXPR, lntype, result, ll_mask);
4887 return build2 (wanted_code, truth_type, result,
4888 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
4891 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
4895 optimize_minmax_comparison (tree t)
4897 tree type = TREE_TYPE (t);
4898 tree arg0 = TREE_OPERAND (t, 0);
4899 enum tree_code op_code;
4900 tree comp_const = TREE_OPERAND (t, 1);
4902 int consts_equal, consts_lt;
4905 STRIP_SIGN_NOPS (arg0);
4907 op_code = TREE_CODE (arg0);
4908 minmax_const = TREE_OPERAND (arg0, 1);
4909 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
4910 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
4911 inner = TREE_OPERAND (arg0, 0);
4913 /* If something does not permit us to optimize, return the original tree. */
4914 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
4915 || TREE_CODE (comp_const) != INTEGER_CST
4916 || TREE_CONSTANT_OVERFLOW (comp_const)
4917 || TREE_CODE (minmax_const) != INTEGER_CST
4918 || TREE_CONSTANT_OVERFLOW (minmax_const))
4921 /* Now handle all the various comparison codes. We only handle EQ_EXPR
4922 and GT_EXPR, doing the rest with recursive calls using logical
4924 switch (TREE_CODE (t))
4926 case NE_EXPR: case LT_EXPR: case LE_EXPR:
4928 invert_truthvalue (optimize_minmax_comparison (invert_truthvalue (t)));
4932 fold (build2 (TRUTH_ORIF_EXPR, type,
4933 optimize_minmax_comparison
4934 (build2 (EQ_EXPR, type, arg0, comp_const)),
4935 optimize_minmax_comparison
4936 (build2 (GT_EXPR, type, arg0, comp_const))));
4939 if (op_code == MAX_EXPR && consts_equal)
4940 /* MAX (X, 0) == 0 -> X <= 0 */
4941 return fold (build2 (LE_EXPR, type, inner, comp_const));
4943 else if (op_code == MAX_EXPR && consts_lt)
4944 /* MAX (X, 0) == 5 -> X == 5 */
4945 return fold (build2 (EQ_EXPR, type, inner, comp_const));
4947 else if (op_code == MAX_EXPR)
4948 /* MAX (X, 0) == -1 -> false */
4949 return omit_one_operand (type, integer_zero_node, inner);
4951 else if (consts_equal)
4952 /* MIN (X, 0) == 0 -> X >= 0 */
4953 return fold (build2 (GE_EXPR, type, inner, comp_const));
4956 /* MIN (X, 0) == 5 -> false */
4957 return omit_one_operand (type, integer_zero_node, inner);
4960 /* MIN (X, 0) == -1 -> X == -1 */
4961 return fold (build2 (EQ_EXPR, type, inner, comp_const));
4964 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
4965 /* MAX (X, 0) > 0 -> X > 0
4966 MAX (X, 0) > 5 -> X > 5 */
4967 return fold (build2 (GT_EXPR, type, inner, comp_const));
4969 else if (op_code == MAX_EXPR)
4970 /* MAX (X, 0) > -1 -> true */
4971 return omit_one_operand (type, integer_one_node, inner);
4973 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
4974 /* MIN (X, 0) > 0 -> false
4975 MIN (X, 0) > 5 -> false */
4976 return omit_one_operand (type, integer_zero_node, inner);
4979 /* MIN (X, 0) > -1 -> X > -1 */
4980 return fold (build2 (GT_EXPR, type, inner, comp_const));
4987 /* T is an integer expression that is being multiplied, divided, or taken a
4988 modulus (CODE says which and what kind of divide or modulus) by a
4989 constant C. See if we can eliminate that operation by folding it with
4990 other operations already in T. WIDE_TYPE, if non-null, is a type that
4991 should be used for the computation if wider than our type.
4993 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
4994 (X * 2) + (Y * 4). We must, however, be assured that either the original
4995 expression would not overflow or that overflow is undefined for the type
4996 in the language in question.
4998 We also canonicalize (X + 7) * 4 into X * 4 + 28 in the hope that either
4999 the machine has a multiply-accumulate insn or that this is part of an
5000 addressing calculation.
5002 If we return a non-null expression, it is an equivalent form of the
5003 original computation, but need not be in the original type. */
5006 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type)
5008 /* To avoid exponential search depth, refuse to allow recursion past
5009 three levels. Beyond that (1) it's highly unlikely that we'll find
5010 something interesting and (2) we've probably processed it before
5011 when we built the inner expression. */
5020 ret = extract_muldiv_1 (t, c, code, wide_type);
5027 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type)
5029 tree type = TREE_TYPE (t);
5030 enum tree_code tcode = TREE_CODE (t);
5031 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5032 > GET_MODE_SIZE (TYPE_MODE (type)))
5033 ? wide_type : type);
5035 int same_p = tcode == code;
5036 tree op0 = NULL_TREE, op1 = NULL_TREE;
5038 /* Don't deal with constants of zero here; they confuse the code below. */
5039 if (integer_zerop (c))
5042 if (TREE_CODE_CLASS (tcode) == '1')
5043 op0 = TREE_OPERAND (t, 0);
5045 if (TREE_CODE_CLASS (tcode) == '2')
5046 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5048 /* Note that we need not handle conditional operations here since fold
5049 already handles those cases. So just do arithmetic here. */
5053 /* For a constant, we can always simplify if we are a multiply
5054 or (for divide and modulus) if it is a multiple of our constant. */
5055 if (code == MULT_EXPR
5056 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
5057 return const_binop (code, fold_convert (ctype, t),
5058 fold_convert (ctype, c), 0);
5061 case CONVERT_EXPR: case NON_LVALUE_EXPR: case NOP_EXPR:
5062 /* If op0 is an expression ... */
5063 if ((TREE_CODE_CLASS (TREE_CODE (op0)) == '<'
5064 || TREE_CODE_CLASS (TREE_CODE (op0)) == '1'
5065 || TREE_CODE_CLASS (TREE_CODE (op0)) == '2'
5066 || TREE_CODE_CLASS (TREE_CODE (op0)) == 'e')
5067 /* ... and is unsigned, and its type is smaller than ctype,
5068 then we cannot pass through as widening. */
5069 && ((TYPE_UNSIGNED (TREE_TYPE (op0))
5070 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
5071 && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
5072 && (GET_MODE_SIZE (TYPE_MODE (ctype))
5073 > GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0)))))
5074 /* ... or its type is larger than ctype,
5075 then we cannot pass through this truncation. */
5076 || (GET_MODE_SIZE (TYPE_MODE (ctype))
5077 < GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0))))
5078 /* ... or signedness changes for division or modulus,
5079 then we cannot pass through this conversion. */
5080 || (code != MULT_EXPR
5081 && (TYPE_UNSIGNED (ctype)
5082 != TYPE_UNSIGNED (TREE_TYPE (op0))))))
5085 /* Pass the constant down and see if we can make a simplification. If
5086 we can, replace this expression with the inner simplification for
5087 possible later conversion to our or some other type. */
5088 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5089 && TREE_CODE (t2) == INTEGER_CST
5090 && ! TREE_CONSTANT_OVERFLOW (t2)
5091 && (0 != (t1 = extract_muldiv (op0, t2, code,
5093 ? ctype : NULL_TREE))))
5097 case NEGATE_EXPR: case ABS_EXPR:
5098 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
5099 return fold (build1 (tcode, ctype, fold_convert (ctype, t1)));
5102 case MIN_EXPR: case MAX_EXPR:
5103 /* If widening the type changes the signedness, then we can't perform
5104 this optimization as that changes the result. */
5105 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5108 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5109 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0
5110 && (t2 = extract_muldiv (op1, c, code, wide_type)) != 0)
5112 if (tree_int_cst_sgn (c) < 0)
5113 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5115 return fold (build2 (tcode, ctype, fold_convert (ctype, t1),
5116 fold_convert (ctype, t2)));
5120 case LSHIFT_EXPR: case RSHIFT_EXPR:
5121 /* If the second operand is constant, this is a multiplication
5122 or floor division, by a power of two, so we can treat it that
5123 way unless the multiplier or divisor overflows. */
5124 if (TREE_CODE (op1) == INTEGER_CST
5125 /* const_binop may not detect overflow correctly,
5126 so check for it explicitly here. */
5127 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
5128 && TREE_INT_CST_HIGH (op1) == 0
5129 && 0 != (t1 = fold_convert (ctype,
5130 const_binop (LSHIFT_EXPR,
5133 && ! TREE_OVERFLOW (t1))
5134 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5135 ? MULT_EXPR : FLOOR_DIV_EXPR,
5136 ctype, fold_convert (ctype, op0), t1),
5137 c, code, wide_type);
5140 case PLUS_EXPR: case MINUS_EXPR:
5141 /* See if we can eliminate the operation on both sides. If we can, we
5142 can return a new PLUS or MINUS. If we can't, the only remaining
5143 cases where we can do anything are if the second operand is a
5145 t1 = extract_muldiv (op0, c, code, wide_type);
5146 t2 = extract_muldiv (op1, c, code, wide_type);
5147 if (t1 != 0 && t2 != 0
5148 && (code == MULT_EXPR
5149 /* If not multiplication, we can only do this if both operands
5150 are divisible by c. */
5151 || (multiple_of_p (ctype, op0, c)
5152 && multiple_of_p (ctype, op1, c))))
5153 return fold (build2 (tcode, ctype, fold_convert (ctype, t1),
5154 fold_convert (ctype, t2)));
5156 /* If this was a subtraction, negate OP1 and set it to be an addition.
5157 This simplifies the logic below. */
5158 if (tcode == MINUS_EXPR)
5159 tcode = PLUS_EXPR, op1 = negate_expr (op1);
5161 if (TREE_CODE (op1) != INTEGER_CST)
5164 /* If either OP1 or C are negative, this optimization is not safe for
5165 some of the division and remainder types while for others we need
5166 to change the code. */
5167 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5169 if (code == CEIL_DIV_EXPR)
5170 code = FLOOR_DIV_EXPR;
5171 else if (code == FLOOR_DIV_EXPR)
5172 code = CEIL_DIV_EXPR;
5173 else if (code != MULT_EXPR
5174 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5178 /* If it's a multiply or a division/modulus operation of a multiple
5179 of our constant, do the operation and verify it doesn't overflow. */
5180 if (code == MULT_EXPR
5181 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5183 op1 = const_binop (code, fold_convert (ctype, op1),
5184 fold_convert (ctype, c), 0);
5185 /* We allow the constant to overflow with wrapping semantics. */
5187 || (TREE_OVERFLOW (op1) && ! flag_wrapv))
5193 /* If we have an unsigned type is not a sizetype, we cannot widen
5194 the operation since it will change the result if the original
5195 computation overflowed. */
5196 if (TYPE_UNSIGNED (ctype)
5197 && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
5201 /* If we were able to eliminate our operation from the first side,
5202 apply our operation to the second side and reform the PLUS. */
5203 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5204 return fold (build2 (tcode, ctype, fold_convert (ctype, t1), op1));
5206 /* The last case is if we are a multiply. In that case, we can
5207 apply the distributive law to commute the multiply and addition
5208 if the multiplication of the constants doesn't overflow. */
5209 if (code == MULT_EXPR)
5210 return fold (build2 (tcode, ctype,
5211 fold (build2 (code, ctype,
5212 fold_convert (ctype, op0),
5213 fold_convert (ctype, c))),
5219 /* We have a special case here if we are doing something like
5220 (C * 8) % 4 since we know that's zero. */
5221 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
5222 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
5223 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
5224 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5225 return omit_one_operand (type, integer_zero_node, op0);
5227 /* ... fall through ... */
5229 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
5230 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
5231 /* If we can extract our operation from the LHS, do so and return a
5232 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
5233 do something only if the second operand is a constant. */
5235 && (t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
5236 return fold (build2 (tcode, ctype, fold_convert (ctype, t1),
5237 fold_convert (ctype, op1)));
5238 else if (tcode == MULT_EXPR && code == MULT_EXPR
5239 && (t1 = extract_muldiv (op1, c, code, wide_type)) != 0)
5240 return fold (build2 (tcode, ctype, fold_convert (ctype, op0),
5241 fold_convert (ctype, t1)));
5242 else if (TREE_CODE (op1) != INTEGER_CST)
5245 /* If these are the same operation types, we can associate them
5246 assuming no overflow. */
5248 && 0 != (t1 = const_binop (MULT_EXPR, fold_convert (ctype, op1),
5249 fold_convert (ctype, c), 0))
5250 && ! TREE_OVERFLOW (t1))
5251 return fold (build2 (tcode, ctype, fold_convert (ctype, op0), t1));
5253 /* If these operations "cancel" each other, we have the main
5254 optimizations of this pass, which occur when either constant is a
5255 multiple of the other, in which case we replace this with either an
5256 operation or CODE or TCODE.
5258 If we have an unsigned type that is not a sizetype, we cannot do
5259 this since it will change the result if the original computation
5261 if ((! TYPE_UNSIGNED (ctype)
5262 || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
5264 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
5265 || (tcode == MULT_EXPR
5266 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
5267 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR)))
5269 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5270 return fold (build2 (tcode, ctype, fold_convert (ctype, op0),
5271 fold_convert (ctype,
5272 const_binop (TRUNC_DIV_EXPR,
5274 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
5275 return fold (build2 (code, ctype, fold_convert (ctype, op0),
5276 fold_convert (ctype,
5277 const_binop (TRUNC_DIV_EXPR,
5289 /* Return a node which has the indicated constant VALUE (either 0 or
5290 1), and is of the indicated TYPE. */
5293 constant_boolean_node (int value, tree type)
5295 if (type == integer_type_node)
5296 return value ? integer_one_node : integer_zero_node;
5297 else if (type == boolean_type_node)
5298 return value ? boolean_true_node : boolean_false_node;
5299 else if (TREE_CODE (type) == BOOLEAN_TYPE)
5300 return lang_hooks.truthvalue_conversion (value ? integer_one_node
5301 : integer_zero_node);
5304 tree t = build_int_2 (value, 0);
5306 TREE_TYPE (t) = type;
5311 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
5312 Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
5313 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
5314 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
5315 COND is the first argument to CODE; otherwise (as in the example
5316 given here), it is the second argument. TYPE is the type of the
5317 original expression. Return NULL_TREE if no simplification is
5321 fold_binary_op_with_conditional_arg (enum tree_code code, tree type,
5322 tree cond, tree arg, int cond_first_p)
5324 tree test, true_value, false_value;
5325 tree lhs = NULL_TREE;
5326 tree rhs = NULL_TREE;
5328 /* This transformation is only worthwhile if we don't have to wrap
5329 arg in a SAVE_EXPR, and the operation can be simplified on atleast
5330 one of the branches once its pushed inside the COND_EXPR. */
5331 if (!TREE_CONSTANT (arg))
5334 if (TREE_CODE (cond) == COND_EXPR)
5336 test = TREE_OPERAND (cond, 0);
5337 true_value = TREE_OPERAND (cond, 1);
5338 false_value = TREE_OPERAND (cond, 2);
5339 /* If this operand throws an expression, then it does not make
5340 sense to try to perform a logical or arithmetic operation
5342 if (VOID_TYPE_P (TREE_TYPE (true_value)))
5344 if (VOID_TYPE_P (TREE_TYPE (false_value)))
5349 tree testtype = TREE_TYPE (cond);
5351 true_value = constant_boolean_node (true, testtype);
5352 false_value = constant_boolean_node (false, testtype);
5356 lhs = fold (cond_first_p ? build2 (code, type, true_value, arg)
5357 : build2 (code, type, arg, true_value));
5359 rhs = fold (cond_first_p ? build2 (code, type, false_value, arg)
5360 : build2 (code, type, arg, false_value));
5362 test = fold (build3 (COND_EXPR, type, test, lhs, rhs));
5363 return fold_convert (type, test);
5367 /* Subroutine of fold() that checks for the addition of +/- 0.0.
5369 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
5370 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
5371 ADDEND is the same as X.
5373 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
5374 and finite. The problematic cases are when X is zero, and its mode
5375 has signed zeros. In the case of rounding towards -infinity,
5376 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
5377 modes, X + 0 is not the same as X because -0 + 0 is 0. */
5380 fold_real_zero_addition_p (tree type, tree addend, int negate)
5382 if (!real_zerop (addend))
5385 /* Don't allow the fold with -fsignaling-nans. */
5386 if (HONOR_SNANS (TYPE_MODE (type)))
5389 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
5390 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
5393 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
5394 if (TREE_CODE (addend) == REAL_CST
5395 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
5398 /* The mode has signed zeros, and we have to honor their sign.
5399 In this situation, there is only one case we can return true for.
5400 X - 0 is the same as X unless rounding towards -infinity is
5402 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
5405 /* Subroutine of fold() that checks comparisons of built-in math
5406 functions against real constants.
5408 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
5409 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
5410 is the type of the result and ARG0 and ARG1 are the operands of the
5411 comparison. ARG1 must be a TREE_REAL_CST.
5413 The function returns the constant folded tree if a simplification
5414 can be made, and NULL_TREE otherwise. */
5417 fold_mathfn_compare (enum built_in_function fcode, enum tree_code code,
5418 tree type, tree arg0, tree arg1)
5422 if (BUILTIN_SQRT_P (fcode))
5424 tree arg = TREE_VALUE (TREE_OPERAND (arg0, 1));
5425 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
5427 c = TREE_REAL_CST (arg1);
5428 if (REAL_VALUE_NEGATIVE (c))
5430 /* sqrt(x) < y is always false, if y is negative. */
5431 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
5432 return omit_one_operand (type, integer_zero_node, arg);
5434 /* sqrt(x) > y is always true, if y is negative and we
5435 don't care about NaNs, i.e. negative values of x. */
5436 if (code == NE_EXPR || !HONOR_NANS (mode))
5437 return omit_one_operand (type, integer_one_node, arg);
5439 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
5440 return fold (build2 (GE_EXPR, type, arg,
5441 build_real (TREE_TYPE (arg), dconst0)));
5443 else if (code == GT_EXPR || code == GE_EXPR)
5447 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
5448 real_convert (&c2, mode, &c2);
5450 if (REAL_VALUE_ISINF (c2))
5452 /* sqrt(x) > y is x == +Inf, when y is very large. */
5453 if (HONOR_INFINITIES (mode))
5454 return fold (build2 (EQ_EXPR, type, arg,
5455 build_real (TREE_TYPE (arg), c2)));
5457 /* sqrt(x) > y is always false, when y is very large
5458 and we don't care about infinities. */
5459 return omit_one_operand (type, integer_zero_node, arg);
5462 /* sqrt(x) > c is the same as x > c*c. */
5463 return fold (build2 (code, type, arg,
5464 build_real (TREE_TYPE (arg), c2)));
5466 else if (code == LT_EXPR || code == LE_EXPR)
5470 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
5471 real_convert (&c2, mode, &c2);
5473 if (REAL_VALUE_ISINF (c2))
5475 /* sqrt(x) < y is always true, when y is a very large
5476 value and we don't care about NaNs or Infinities. */
5477 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
5478 return omit_one_operand (type, integer_one_node, arg);
5480 /* sqrt(x) < y is x != +Inf when y is very large and we
5481 don't care about NaNs. */
5482 if (! HONOR_NANS (mode))
5483 return fold (build2 (NE_EXPR, type, arg,
5484 build_real (TREE_TYPE (arg), c2)));
5486 /* sqrt(x) < y is x >= 0 when y is very large and we
5487 don't care about Infinities. */
5488 if (! HONOR_INFINITIES (mode))
5489 return fold (build2 (GE_EXPR, type, arg,
5490 build_real (TREE_TYPE (arg), dconst0)));
5492 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
5493 if (lang_hooks.decls.global_bindings_p () != 0
5494 || CONTAINS_PLACEHOLDER_P (arg))
5497 arg = save_expr (arg);
5498 return fold (build2 (TRUTH_ANDIF_EXPR, type,
5499 fold (build2 (GE_EXPR, type, arg,
5500 build_real (TREE_TYPE (arg),
5502 fold (build2 (NE_EXPR, type, arg,
5503 build_real (TREE_TYPE (arg),
5507 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
5508 if (! HONOR_NANS (mode))
5509 return fold (build2 (code, type, arg,
5510 build_real (TREE_TYPE (arg), c2)));
5512 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
5513 if (lang_hooks.decls.global_bindings_p () == 0
5514 && ! CONTAINS_PLACEHOLDER_P (arg))
5516 arg = save_expr (arg);
5517 return fold (build2 (TRUTH_ANDIF_EXPR, type,
5518 fold (build2 (GE_EXPR, type, arg,
5519 build_real (TREE_TYPE (arg),
5521 fold (build2 (code, type, arg,
5522 build_real (TREE_TYPE (arg),
5531 /* Subroutine of fold() that optimizes comparisons against Infinities,
5532 either +Inf or -Inf.
5534 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
5535 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
5536 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
5538 The function returns the constant folded tree if a simplification
5539 can be made, and NULL_TREE otherwise. */
5542 fold_inf_compare (enum tree_code code, tree type, tree arg0, tree arg1)
5544 enum machine_mode mode;
5545 REAL_VALUE_TYPE max;
5549 mode = TYPE_MODE (TREE_TYPE (arg0));
5551 /* For negative infinity swap the sense of the comparison. */
5552 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
5554 code = swap_tree_comparison (code);
5559 /* x > +Inf is always false, if with ignore sNANs. */
5560 if (HONOR_SNANS (mode))
5562 return omit_one_operand (type, integer_zero_node, arg0);
5565 /* x <= +Inf is always true, if we don't case about NaNs. */
5566 if (! HONOR_NANS (mode))
5567 return omit_one_operand (type, integer_one_node, arg0);
5569 /* x <= +Inf is the same as x == x, i.e. isfinite(x). */
5570 if (lang_hooks.decls.global_bindings_p () == 0
5571 && ! CONTAINS_PLACEHOLDER_P (arg0))
5573 arg0 = save_expr (arg0);
5574 return fold (build2 (EQ_EXPR, type, arg0, arg0));
5580 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
5581 real_maxval (&max, neg, mode);
5582 return fold (build2 (neg ? LT_EXPR : GT_EXPR, type,
5583 arg0, build_real (TREE_TYPE (arg0), max)));
5586 /* x < +Inf is always equal to x <= DBL_MAX. */
5587 real_maxval (&max, neg, mode);
5588 return fold (build2 (neg ? GE_EXPR : LE_EXPR, type,
5589 arg0, build_real (TREE_TYPE (arg0), max)));
5592 /* x != +Inf is always equal to !(x > DBL_MAX). */
5593 real_maxval (&max, neg, mode);
5594 if (! HONOR_NANS (mode))
5595 return fold (build2 (neg ? GE_EXPR : LE_EXPR, type,
5596 arg0, build_real (TREE_TYPE (arg0), max)));
5598 /* The transformation below creates non-gimple code and thus is
5599 not appropriate if we are in gimple form. */
5603 temp = fold (build2 (neg ? LT_EXPR : GT_EXPR, type,
5604 arg0, build_real (TREE_TYPE (arg0), max)));
5605 return fold (build1 (TRUTH_NOT_EXPR, type, temp));
5614 /* Subroutine of fold() that optimizes comparisons of a division by
5615 a nonzero integer constant against an integer constant, i.e.
5618 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
5619 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
5620 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
5622 The function returns the constant folded tree if a simplification
5623 can be made, and NULL_TREE otherwise. */
5626 fold_div_compare (enum tree_code code, tree type, tree arg0, tree arg1)
5628 tree prod, tmp, hi, lo;
5629 tree arg00 = TREE_OPERAND (arg0, 0);
5630 tree arg01 = TREE_OPERAND (arg0, 1);
5631 unsigned HOST_WIDE_INT lpart;
5632 HOST_WIDE_INT hpart;
5635 /* We have to do this the hard way to detect unsigned overflow.
5636 prod = int_const_binop (MULT_EXPR, arg01, arg1, 0); */
5637 overflow = mul_double (TREE_INT_CST_LOW (arg01),
5638 TREE_INT_CST_HIGH (arg01),
5639 TREE_INT_CST_LOW (arg1),
5640 TREE_INT_CST_HIGH (arg1), &lpart, &hpart);
5641 prod = build_int_2 (lpart, hpart);
5642 TREE_TYPE (prod) = TREE_TYPE (arg00);
5643 TREE_OVERFLOW (prod) = force_fit_type (prod, overflow)
5644 || TREE_INT_CST_HIGH (prod) != hpart
5645 || TREE_INT_CST_LOW (prod) != lpart;
5646 TREE_CONSTANT_OVERFLOW (prod) = TREE_OVERFLOW (prod);
5648 if (TYPE_UNSIGNED (TREE_TYPE (arg0)))
5650 tmp = int_const_binop (MINUS_EXPR, arg01, integer_one_node, 0);
5653 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0). */
5654 overflow = add_double (TREE_INT_CST_LOW (prod),
5655 TREE_INT_CST_HIGH (prod),
5656 TREE_INT_CST_LOW (tmp),
5657 TREE_INT_CST_HIGH (tmp),
5659 hi = build_int_2 (lpart, hpart);
5660 TREE_TYPE (hi) = TREE_TYPE (arg00);
5661 TREE_OVERFLOW (hi) = force_fit_type (hi, overflow)
5662 || TREE_INT_CST_HIGH (hi) != hpart
5663 || TREE_INT_CST_LOW (hi) != lpart
5664 || TREE_OVERFLOW (prod);
5665 TREE_CONSTANT_OVERFLOW (hi) = TREE_OVERFLOW (hi);
5667 else if (tree_int_cst_sgn (arg01) >= 0)
5669 tmp = int_const_binop (MINUS_EXPR, arg01, integer_one_node, 0);
5670 switch (tree_int_cst_sgn (arg1))
5673 lo = int_const_binop (MINUS_EXPR, prod, tmp, 0);
5678 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
5683 hi = int_const_binop (PLUS_EXPR, prod, tmp, 0);
5693 tmp = int_const_binop (PLUS_EXPR, arg01, integer_one_node, 0);
5694 switch (tree_int_cst_sgn (arg1))
5697 hi = int_const_binop (MINUS_EXPR, prod, tmp, 0);
5702 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
5707 lo = int_const_binop (PLUS_EXPR, prod, tmp, 0);
5719 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
5720 return omit_one_operand (type, integer_zero_node, arg00);
5721 if (TREE_OVERFLOW (hi))
5722 return fold (build2 (GE_EXPR, type, arg00, lo));
5723 if (TREE_OVERFLOW (lo))
5724 return fold (build2 (LE_EXPR, type, arg00, hi));
5725 return build_range_check (type, arg00, 1, lo, hi);
5728 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
5729 return omit_one_operand (type, integer_one_node, arg00);
5730 if (TREE_OVERFLOW (hi))
5731 return fold (build2 (LT_EXPR, type, arg00, lo));
5732 if (TREE_OVERFLOW (lo))
5733 return fold (build2 (GT_EXPR, type, arg00, hi));
5734 return build_range_check (type, arg00, 0, lo, hi);
5737 if (TREE_OVERFLOW (lo))
5738 return omit_one_operand (type, integer_zero_node, arg00);
5739 return fold (build2 (LT_EXPR, type, arg00, lo));
5742 if (TREE_OVERFLOW (hi))
5743 return omit_one_operand (type, integer_one_node, arg00);
5744 return fold (build2 (LE_EXPR, type, arg00, hi));
5747 if (TREE_OVERFLOW (hi))
5748 return omit_one_operand (type, integer_zero_node, arg00);
5749 return fold (build2 (GT_EXPR, type, arg00, hi));
5752 if (TREE_OVERFLOW (lo))
5753 return omit_one_operand (type, integer_one_node, arg00);
5754 return fold (build2 (GE_EXPR, type, arg00, lo));
5764 /* If CODE with arguments ARG0 and ARG1 represents a single bit
5765 equality/inequality test, then return a simplified form of
5766 the test using shifts and logical operations. Otherwise return
5767 NULL. TYPE is the desired result type. */
5770 fold_single_bit_test (enum tree_code code, tree arg0, tree arg1,
5773 /* If this is a TRUTH_NOT_EXPR, it may have a single bit test inside
5775 if (code == TRUTH_NOT_EXPR)
5777 code = TREE_CODE (arg0);
5778 if (code != NE_EXPR && code != EQ_EXPR)
5781 /* Extract the arguments of the EQ/NE. */
5782 arg1 = TREE_OPERAND (arg0, 1);
5783 arg0 = TREE_OPERAND (arg0, 0);
5785 /* This requires us to invert the code. */
5786 code = (code == EQ_EXPR ? NE_EXPR : EQ_EXPR);
5789 /* If this is testing a single bit, we can optimize the test. */
5790 if ((code == NE_EXPR || code == EQ_EXPR)
5791 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
5792 && integer_pow2p (TREE_OPERAND (arg0, 1)))
5794 tree inner = TREE_OPERAND (arg0, 0);
5795 tree type = TREE_TYPE (arg0);
5796 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
5797 enum machine_mode operand_mode = TYPE_MODE (type);
5799 tree signed_type, unsigned_type, intermediate_type;
5802 /* If we have (A & C) != 0 where C is the sign bit of A, convert
5803 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
5804 arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
5805 if (arg00 != NULL_TREE
5806 /* This is only a win if casting to a signed type is cheap,
5807 i.e. when arg00's type is not a partial mode. */
5808 && TYPE_PRECISION (TREE_TYPE (arg00))
5809 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
5811 tree stype = lang_hooks.types.signed_type (TREE_TYPE (arg00));
5812 return fold (build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
5813 result_type, fold_convert (stype, arg00),
5814 fold_convert (stype, integer_zero_node)));
5817 /* Otherwise we have (A & C) != 0 where C is a single bit,
5818 convert that into ((A >> C2) & 1). Where C2 = log2(C).
5819 Similarly for (A & C) == 0. */
5821 /* If INNER is a right shift of a constant and it plus BITNUM does
5822 not overflow, adjust BITNUM and INNER. */
5823 if (TREE_CODE (inner) == RSHIFT_EXPR
5824 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
5825 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
5826 && bitnum < TYPE_PRECISION (type)
5827 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
5828 bitnum - TYPE_PRECISION (type)))
5830 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
5831 inner = TREE_OPERAND (inner, 0);
5834 /* If we are going to be able to omit the AND below, we must do our
5835 operations as unsigned. If we must use the AND, we have a choice.
5836 Normally unsigned is faster, but for some machines signed is. */
5837 #ifdef LOAD_EXTEND_OP
5838 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1);
5843 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
5844 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
5845 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
5846 inner = fold_convert (intermediate_type, inner);
5849 inner = build2 (RSHIFT_EXPR, intermediate_type,
5850 inner, size_int (bitnum));
5852 if (code == EQ_EXPR)
5853 inner = fold (build2 (BIT_XOR_EXPR, intermediate_type,
5854 inner, integer_one_node));
5856 /* Put the AND last so it can combine with more things. */
5857 inner = build2 (BIT_AND_EXPR, intermediate_type,
5858 inner, integer_one_node);
5860 /* Make sure to return the proper type. */
5861 inner = fold_convert (result_type, inner);
5868 /* Check whether we are allowed to reorder operands arg0 and arg1,
5869 such that the evaluation of arg1 occurs before arg0. */
5872 reorder_operands_p (tree arg0, tree arg1)
5874 if (! flag_evaluation_order)
5876 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
5878 return ! TREE_SIDE_EFFECTS (arg0)
5879 && ! TREE_SIDE_EFFECTS (arg1);
5882 /* Test whether it is preferable two swap two operands, ARG0 and
5883 ARG1, for example because ARG0 is an integer constant and ARG1
5884 isn't. If REORDER is true, only recommend swapping if we can
5885 evaluate the operands in reverse order. */
5888 tree_swap_operands_p (tree arg0, tree arg1, bool reorder)
5890 STRIP_SIGN_NOPS (arg0);
5891 STRIP_SIGN_NOPS (arg1);
5893 if (TREE_CODE (arg1) == INTEGER_CST)
5895 if (TREE_CODE (arg0) == INTEGER_CST)
5898 if (TREE_CODE (arg1) == REAL_CST)
5900 if (TREE_CODE (arg0) == REAL_CST)
5903 if (TREE_CODE (arg1) == COMPLEX_CST)
5905 if (TREE_CODE (arg0) == COMPLEX_CST)
5908 if (TREE_CONSTANT (arg1))
5910 if (TREE_CONSTANT (arg0))
5916 if (reorder && flag_evaluation_order
5917 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
5925 if (reorder && flag_evaluation_order
5926 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
5934 /* It is preferable to swap two SSA_NAME to ensure a canonical form
5935 for commutative and comparison operators. Ensuring a canonical
5936 form allows the optimizers to find additional redundancies without
5937 having to explicitly check for both orderings. */
5938 if (TREE_CODE (arg0) == SSA_NAME
5939 && TREE_CODE (arg1) == SSA_NAME
5940 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
5946 /* Perform constant folding and related simplification of EXPR.
5947 The related simplifications include x*1 => x, x*0 => 0, etc.,
5948 and application of the associative law.
5949 NOP_EXPR conversions may be removed freely (as long as we
5950 are careful not to change the type of the overall expression).
5951 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
5952 but we can constant-fold them if they have constant operands. */
5954 #ifdef ENABLE_FOLD_CHECKING
5955 # define fold(x) fold_1 (x)
5956 static tree fold_1 (tree);
5962 const tree t = expr;
5963 const tree type = TREE_TYPE (expr);
5964 tree t1 = NULL_TREE;
5966 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
5967 enum tree_code code = TREE_CODE (t);
5968 int kind = TREE_CODE_CLASS (code);
5970 /* WINS will be nonzero when the switch is done
5971 if all operands are constant. */
5974 /* Return right away if a constant. */
5978 if (code == NOP_EXPR || code == FLOAT_EXPR || code == CONVERT_EXPR)
5982 /* Special case for conversion ops that can have fixed point args. */
5983 arg0 = TREE_OPERAND (t, 0);
5985 /* Don't use STRIP_NOPS, because signedness of argument type matters. */
5987 STRIP_SIGN_NOPS (arg0);
5989 if (arg0 != 0 && TREE_CODE (arg0) == COMPLEX_CST)
5990 subop = TREE_REALPART (arg0);
5994 if (subop != 0 && TREE_CODE (subop) != INTEGER_CST
5995 && TREE_CODE (subop) != REAL_CST)
5996 /* Note that TREE_CONSTANT isn't enough:
5997 static var addresses are constant but we can't
5998 do arithmetic on them. */
6001 else if (IS_EXPR_CODE_CLASS (kind))
6003 int len = first_rtl_op (code);
6005 for (i = 0; i < len; i++)
6007 tree op = TREE_OPERAND (t, i);
6011 continue; /* Valid for CALL_EXPR, at least. */
6013 /* Strip any conversions that don't change the mode. This is
6014 safe for every expression, except for a comparison expression
6015 because its signedness is derived from its operands. So, in
6016 the latter case, only strip conversions that don't change the
6019 Note that this is done as an internal manipulation within the
6020 constant folder, in order to find the simplest representation
6021 of the arguments so that their form can be studied. In any
6022 cases, the appropriate type conversions should be put back in
6023 the tree that will get out of the constant folder. */
6025 STRIP_SIGN_NOPS (op);
6029 if (TREE_CODE (op) == COMPLEX_CST)
6030 subop = TREE_REALPART (op);
6034 if (TREE_CODE (subop) != INTEGER_CST
6035 && TREE_CODE (subop) != REAL_CST)
6036 /* Note that TREE_CONSTANT isn't enough:
6037 static var addresses are constant but we can't
6038 do arithmetic on them. */
6048 /* If this is a commutative operation, and ARG0 is a constant, move it
6049 to ARG1 to reduce the number of tests below. */
6050 if (commutative_tree_code (code)
6051 && tree_swap_operands_p (arg0, arg1, true))
6052 return fold (build2 (code, type, TREE_OPERAND (t, 1),
6053 TREE_OPERAND (t, 0)));
6055 /* Now WINS is set as described above,
6056 ARG0 is the first operand of EXPR,
6057 and ARG1 is the second operand (if it has more than one operand).
6059 First check for cases where an arithmetic operation is applied to a
6060 compound, conditional, or comparison operation. Push the arithmetic
6061 operation inside the compound or conditional to see if any folding
6062 can then be done. Convert comparison to conditional for this purpose.
6063 The also optimizes non-constant cases that used to be done in
6066 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
6067 one of the operands is a comparison and the other is a comparison, a
6068 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
6069 code below would make the expression more complex. Change it to a
6070 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
6071 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
6073 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
6074 || code == EQ_EXPR || code == NE_EXPR)
6075 && ((truth_value_p (TREE_CODE (arg0))
6076 && (truth_value_p (TREE_CODE (arg1))
6077 || (TREE_CODE (arg1) == BIT_AND_EXPR
6078 && integer_onep (TREE_OPERAND (arg1, 1)))))
6079 || (truth_value_p (TREE_CODE (arg1))
6080 && (truth_value_p (TREE_CODE (arg0))
6081 || (TREE_CODE (arg0) == BIT_AND_EXPR
6082 && integer_onep (TREE_OPERAND (arg0, 1)))))))
6084 tem = fold (build2 (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
6085 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
6087 type, fold_convert (boolean_type_node, arg0),
6088 fold_convert (boolean_type_node, arg1)));
6090 if (code == EQ_EXPR)
6091 tem = invert_truthvalue (tem);
6096 if (TREE_CODE_CLASS (code) == '1')
6098 if (TREE_CODE (arg0) == COMPOUND_EXPR)
6099 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
6100 fold (build1 (code, type, TREE_OPERAND (arg0, 1))));
6101 else if (TREE_CODE (arg0) == COND_EXPR)
6103 tree arg01 = TREE_OPERAND (arg0, 1);
6104 tree arg02 = TREE_OPERAND (arg0, 2);
6105 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
6106 arg01 = fold (build1 (code, type, arg01));
6107 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
6108 arg02 = fold (build1 (code, type, arg02));
6109 tem = fold (build3 (COND_EXPR, type, TREE_OPERAND (arg0, 0),
6112 /* If this was a conversion, and all we did was to move into
6113 inside the COND_EXPR, bring it back out. But leave it if
6114 it is a conversion from integer to integer and the
6115 result precision is no wider than a word since such a
6116 conversion is cheap and may be optimized away by combine,
6117 while it couldn't if it were outside the COND_EXPR. Then return
6118 so we don't get into an infinite recursion loop taking the
6119 conversion out and then back in. */
6121 if ((code == NOP_EXPR || code == CONVERT_EXPR
6122 || code == NON_LVALUE_EXPR)
6123 && TREE_CODE (tem) == COND_EXPR
6124 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
6125 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
6126 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
6127 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
6128 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
6129 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
6130 && ! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
6132 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
6133 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD))
6134 tem = build1 (code, type,
6136 TREE_TYPE (TREE_OPERAND
6137 (TREE_OPERAND (tem, 1), 0)),
6138 TREE_OPERAND (tem, 0),
6139 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
6140 TREE_OPERAND (TREE_OPERAND (tem, 2), 0)));
6143 else if (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<')
6145 if (TREE_CODE (type) == BOOLEAN_TYPE)
6147 arg0 = copy_node (arg0);
6148 TREE_TYPE (arg0) = type;
6151 else if (TREE_CODE (type) != INTEGER_TYPE)
6152 return fold (build3 (COND_EXPR, type, arg0,
6153 fold (build1 (code, type,
6155 fold (build1 (code, type,
6156 integer_zero_node))));
6159 else if (TREE_CODE_CLASS (code) == '<'
6160 && TREE_CODE (arg0) == COMPOUND_EXPR)
6161 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
6162 fold (build2 (code, type, TREE_OPERAND (arg0, 1), arg1)));
6163 else if (TREE_CODE_CLASS (code) == '<'
6164 && TREE_CODE (arg1) == COMPOUND_EXPR)
6165 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
6166 fold (build2 (code, type, arg0, TREE_OPERAND (arg1, 1))));
6167 else if (TREE_CODE_CLASS (code) == '2'
6168 || TREE_CODE_CLASS (code) == '<')
6170 if (TREE_CODE (arg0) == COMPOUND_EXPR)
6171 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
6172 fold (build2 (code, type, TREE_OPERAND (arg0, 1),
6174 if (TREE_CODE (arg1) == COMPOUND_EXPR
6175 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
6176 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
6177 fold (build2 (code, type,
6178 arg0, TREE_OPERAND (arg1, 1))));
6180 if (TREE_CODE (arg0) == COND_EXPR
6181 || TREE_CODE_CLASS (TREE_CODE (arg0)) == '<')
6183 tem = fold_binary_op_with_conditional_arg (code, type, arg0, arg1,
6184 /*cond_first_p=*/1);
6185 if (tem != NULL_TREE)
6189 if (TREE_CODE (arg1) == COND_EXPR
6190 || TREE_CODE_CLASS (TREE_CODE (arg1)) == '<')
6192 tem = fold_binary_op_with_conditional_arg (code, type, arg1, arg0,
6193 /*cond_first_p=*/0);
6194 if (tem != NULL_TREE)
6202 return fold (DECL_INITIAL (t));
6207 case FIX_TRUNC_EXPR:
6209 case FIX_FLOOR_EXPR:
6210 case FIX_ROUND_EXPR:
6211 if (TREE_TYPE (TREE_OPERAND (t, 0)) == type)
6212 return TREE_OPERAND (t, 0);
6214 /* Handle cases of two conversions in a row. */
6215 if (TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
6216 || TREE_CODE (TREE_OPERAND (t, 0)) == CONVERT_EXPR)
6218 tree inside_type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
6219 tree inter_type = TREE_TYPE (TREE_OPERAND (t, 0));
6220 int inside_int = INTEGRAL_TYPE_P (inside_type);
6221 int inside_ptr = POINTER_TYPE_P (inside_type);
6222 int inside_float = FLOAT_TYPE_P (inside_type);
6223 unsigned int inside_prec = TYPE_PRECISION (inside_type);
6224 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
6225 int inter_int = INTEGRAL_TYPE_P (inter_type);
6226 int inter_ptr = POINTER_TYPE_P (inter_type);
6227 int inter_float = FLOAT_TYPE_P (inter_type);
6228 unsigned int inter_prec = TYPE_PRECISION (inter_type);
6229 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
6230 int final_int = INTEGRAL_TYPE_P (type);
6231 int final_ptr = POINTER_TYPE_P (type);
6232 int final_float = FLOAT_TYPE_P (type);
6233 unsigned int final_prec = TYPE_PRECISION (type);
6234 int final_unsignedp = TYPE_UNSIGNED (type);
6236 /* In addition to the cases of two conversions in a row
6237 handled below, if we are converting something to its own
6238 type via an object of identical or wider precision, neither
6239 conversion is needed. */
6240 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
6241 && ((inter_int && final_int) || (inter_float && final_float))
6242 && inter_prec >= final_prec)
6243 return fold (build1 (code, type,
6244 TREE_OPERAND (TREE_OPERAND (t, 0), 0)));
6246 /* Likewise, if the intermediate and final types are either both
6247 float or both integer, we don't need the middle conversion if
6248 it is wider than the final type and doesn't change the signedness
6249 (for integers). Avoid this if the final type is a pointer
6250 since then we sometimes need the inner conversion. Likewise if
6251 the outer has a precision not equal to the size of its mode. */
6252 if ((((inter_int || inter_ptr) && (inside_int || inside_ptr))
6253 || (inter_float && inside_float))
6254 && inter_prec >= inside_prec
6255 && (inter_float || inter_unsignedp == inside_unsignedp)
6256 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
6257 && TYPE_MODE (type) == TYPE_MODE (inter_type))
6259 return fold (build1 (code, type,
6260 TREE_OPERAND (TREE_OPERAND (t, 0), 0)));
6262 /* If we have a sign-extension of a zero-extended value, we can
6263 replace that by a single zero-extension. */
6264 if (inside_int && inter_int && final_int
6265 && inside_prec < inter_prec && inter_prec < final_prec
6266 && inside_unsignedp && !inter_unsignedp)
6267 return fold (build1 (code, type,
6268 TREE_OPERAND (TREE_OPERAND (t, 0), 0)));
6270 /* Two conversions in a row are not needed unless:
6271 - some conversion is floating-point (overstrict for now), or
6272 - the intermediate type is narrower than both initial and
6274 - the intermediate type and innermost type differ in signedness,
6275 and the outermost type is wider than the intermediate, or
6276 - the initial type is a pointer type and the precisions of the
6277 intermediate and final types differ, or
6278 - the final type is a pointer type and the precisions of the
6279 initial and intermediate types differ. */
6280 if (! inside_float && ! inter_float && ! final_float
6281 && (inter_prec > inside_prec || inter_prec > final_prec)
6282 && ! (inside_int && inter_int
6283 && inter_unsignedp != inside_unsignedp
6284 && inter_prec < final_prec)
6285 && ((inter_unsignedp && inter_prec > inside_prec)
6286 == (final_unsignedp && final_prec > inter_prec))
6287 && ! (inside_ptr && inter_prec != final_prec)
6288 && ! (final_ptr && inside_prec != inter_prec)
6289 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
6290 && TYPE_MODE (type) == TYPE_MODE (inter_type))
6292 return fold (build1 (code, type,
6293 TREE_OPERAND (TREE_OPERAND (t, 0), 0)));
6296 if (TREE_CODE (TREE_OPERAND (t, 0)) == MODIFY_EXPR
6297 && TREE_CONSTANT (TREE_OPERAND (TREE_OPERAND (t, 0), 1))
6298 /* Detect assigning a bitfield. */
6299 && !(TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 0)) == COMPONENT_REF
6300 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t, 0), 0), 1))))
6302 /* Don't leave an assignment inside a conversion
6303 unless assigning a bitfield. */
6304 tree prev = TREE_OPERAND (t, 0);
6305 tem = copy_node (t);
6306 TREE_OPERAND (tem, 0) = TREE_OPERAND (prev, 1);
6307 /* First do the assignment, then return converted constant. */
6308 tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem), prev, fold (tem));
6309 TREE_NO_WARNING (tem) = 1;
6310 TREE_USED (tem) = 1;
6314 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
6315 constants (if x has signed type, the sign bit cannot be set
6316 in c). This folds extension into the BIT_AND_EXPR. */
6317 if (INTEGRAL_TYPE_P (type)
6318 && TREE_CODE (type) != BOOLEAN_TYPE
6319 && TREE_CODE (TREE_OPERAND (t, 0)) == BIT_AND_EXPR
6320 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 1)) == INTEGER_CST)
6322 tree and = TREE_OPERAND (t, 0);
6323 tree and0 = TREE_OPERAND (and, 0), and1 = TREE_OPERAND (and, 1);
6326 if (TYPE_UNSIGNED (TREE_TYPE (and))
6327 || (TYPE_PRECISION (type)
6328 <= TYPE_PRECISION (TREE_TYPE (and))))
6330 else if (TYPE_PRECISION (TREE_TYPE (and1))
6331 <= HOST_BITS_PER_WIDE_INT
6332 && host_integerp (and1, 1))
6334 unsigned HOST_WIDE_INT cst;
6336 cst = tree_low_cst (and1, 1);
6337 cst &= (HOST_WIDE_INT) -1
6338 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
6339 change = (cst == 0);
6340 #ifdef LOAD_EXTEND_OP
6342 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
6345 tree uns = lang_hooks.types.unsigned_type (TREE_TYPE (and0));
6346 and0 = fold_convert (uns, and0);
6347 and1 = fold_convert (uns, and1);
6352 return fold (build2 (BIT_AND_EXPR, type,
6353 fold_convert (type, and0),
6354 fold_convert (type, and1)));
6357 /* Convert (T1)((T2)X op Y) into (T1)X op Y, for pointer types T1 and
6358 T2 being pointers to types of the same size. */
6359 if (POINTER_TYPE_P (TREE_TYPE (t))
6360 && TREE_CODE_CLASS (TREE_CODE (arg0)) == '2'
6361 && TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
6362 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 0))))
6364 tree arg00 = TREE_OPERAND (arg0, 0);
6365 tree t0 = TREE_TYPE (t);
6366 tree t1 = TREE_TYPE (arg00);
6367 tree tt0 = TREE_TYPE (t0);
6368 tree tt1 = TREE_TYPE (t1);
6369 tree s0 = TYPE_SIZE (tt0);
6370 tree s1 = TYPE_SIZE (tt1);
6372 if (s0 && s1 && operand_equal_p (s0, s1, OEP_ONLY_CONST))
6373 return build2 (TREE_CODE (arg0), t0, fold_convert (t0, arg00),
6374 TREE_OPERAND (arg0, 1));
6377 tem = fold_convert_const (code, type, arg0);
6378 return tem ? tem : t;
6380 case VIEW_CONVERT_EXPR:
6381 if (TREE_CODE (TREE_OPERAND (t, 0)) == VIEW_CONVERT_EXPR)
6382 return build1 (VIEW_CONVERT_EXPR, type,
6383 TREE_OPERAND (TREE_OPERAND (t, 0), 0));
6387 if (TREE_CODE (arg0) == CONSTRUCTOR
6388 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
6390 tree m = purpose_member (arg1, CONSTRUCTOR_ELTS (arg0));
6392 return TREE_VALUE (m);
6397 if (TREE_CONSTANT (t) != wins)
6399 tem = copy_node (t);
6400 TREE_CONSTANT (tem) = wins;
6401 TREE_INVARIANT (tem) = wins;
6407 if (negate_expr_p (arg0))
6408 return fold_convert (type, negate_expr (arg0));
6412 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
6413 return fold_abs_const (arg0, type);
6414 else if (TREE_CODE (arg0) == NEGATE_EXPR)
6415 return fold (build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0)));
6416 /* Convert fabs((double)float) into (double)fabsf(float). */
6417 else if (TREE_CODE (arg0) == NOP_EXPR
6418 && TREE_CODE (type) == REAL_TYPE)
6420 tree targ0 = strip_float_extensions (arg0);
6422 return fold_convert (type, fold (build1 (ABS_EXPR,
6426 else if (tree_expr_nonnegative_p (arg0))
6431 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
6432 return fold_convert (type, arg0);
6433 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
6434 return build2 (COMPLEX_EXPR, type,
6435 TREE_OPERAND (arg0, 0),
6436 negate_expr (TREE_OPERAND (arg0, 1)));
6437 else if (TREE_CODE (arg0) == COMPLEX_CST)
6438 return build_complex (type, TREE_REALPART (arg0),
6439 negate_expr (TREE_IMAGPART (arg0)));
6440 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
6441 return fold (build2 (TREE_CODE (arg0), type,
6442 fold (build1 (CONJ_EXPR, type,
6443 TREE_OPERAND (arg0, 0))),
6444 fold (build1 (CONJ_EXPR, type,
6445 TREE_OPERAND (arg0, 1)))));
6446 else if (TREE_CODE (arg0) == CONJ_EXPR)
6447 return TREE_OPERAND (arg0, 0);
6451 if (TREE_CODE (arg0) == INTEGER_CST)
6452 return fold_not_const (arg0, type);
6453 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
6454 return TREE_OPERAND (arg0, 0);
6458 /* A + (-B) -> A - B */
6459 if (TREE_CODE (arg1) == NEGATE_EXPR)
6460 return fold (build2 (MINUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
6461 /* (-A) + B -> B - A */
6462 if (TREE_CODE (arg0) == NEGATE_EXPR
6463 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
6464 return fold (build2 (MINUS_EXPR, type, arg1, TREE_OPERAND (arg0, 0)));
6465 if (! FLOAT_TYPE_P (type))
6467 if (integer_zerop (arg1))
6468 return non_lvalue (fold_convert (type, arg0));
6470 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
6471 with a constant, and the two constants have no bits in common,
6472 we should treat this as a BIT_IOR_EXPR since this may produce more
6474 if (TREE_CODE (arg0) == BIT_AND_EXPR
6475 && TREE_CODE (arg1) == BIT_AND_EXPR
6476 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
6477 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
6478 && integer_zerop (const_binop (BIT_AND_EXPR,
6479 TREE_OPERAND (arg0, 1),
6480 TREE_OPERAND (arg1, 1), 0)))
6482 code = BIT_IOR_EXPR;
6486 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
6487 (plus (plus (mult) (mult)) (foo)) so that we can
6488 take advantage of the factoring cases below. */
6489 if ((TREE_CODE (arg0) == PLUS_EXPR
6490 && TREE_CODE (arg1) == MULT_EXPR)
6491 || (TREE_CODE (arg1) == PLUS_EXPR
6492 && TREE_CODE (arg0) == MULT_EXPR))
6494 tree parg0, parg1, parg, marg;
6496 if (TREE_CODE (arg0) == PLUS_EXPR)
6497 parg = arg0, marg = arg1;
6499 parg = arg1, marg = arg0;
6500 parg0 = TREE_OPERAND (parg, 0);
6501 parg1 = TREE_OPERAND (parg, 1);
6505 if (TREE_CODE (parg0) == MULT_EXPR
6506 && TREE_CODE (parg1) != MULT_EXPR)
6507 return fold (build2 (PLUS_EXPR, type,
6508 fold (build2 (PLUS_EXPR, type,
6509 fold_convert (type, parg0),
6510 fold_convert (type, marg))),
6511 fold_convert (type, parg1)));
6512 if (TREE_CODE (parg0) != MULT_EXPR
6513 && TREE_CODE (parg1) == MULT_EXPR)
6514 return fold (build2 (PLUS_EXPR, type,
6515 fold (build2 (PLUS_EXPR, type,
6516 fold_convert (type, parg1),
6517 fold_convert (type, marg))),
6518 fold_convert (type, parg0)));
6521 if (TREE_CODE (arg0) == MULT_EXPR && TREE_CODE (arg1) == MULT_EXPR)
6523 tree arg00, arg01, arg10, arg11;
6524 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
6526 /* (A * C) + (B * C) -> (A+B) * C.
6527 We are most concerned about the case where C is a constant,
6528 but other combinations show up during loop reduction. Since
6529 it is not difficult, try all four possibilities. */
6531 arg00 = TREE_OPERAND (arg0, 0);
6532 arg01 = TREE_OPERAND (arg0, 1);
6533 arg10 = TREE_OPERAND (arg1, 0);
6534 arg11 = TREE_OPERAND (arg1, 1);
6537 if (operand_equal_p (arg01, arg11, 0))
6538 same = arg01, alt0 = arg00, alt1 = arg10;
6539 else if (operand_equal_p (arg00, arg10, 0))
6540 same = arg00, alt0 = arg01, alt1 = arg11;
6541 else if (operand_equal_p (arg00, arg11, 0))
6542 same = arg00, alt0 = arg01, alt1 = arg10;
6543 else if (operand_equal_p (arg01, arg10, 0))
6544 same = arg01, alt0 = arg00, alt1 = arg11;
6546 /* No identical multiplicands; see if we can find a common
6547 power-of-two factor in non-power-of-two multiplies. This
6548 can help in multi-dimensional array access. */
6549 else if (TREE_CODE (arg01) == INTEGER_CST
6550 && TREE_CODE (arg11) == INTEGER_CST
6551 && TREE_INT_CST_HIGH (arg01) == 0
6552 && TREE_INT_CST_HIGH (arg11) == 0)
6554 HOST_WIDE_INT int01, int11, tmp;
6555 int01 = TREE_INT_CST_LOW (arg01);
6556 int11 = TREE_INT_CST_LOW (arg11);
6558 /* Move min of absolute values to int11. */
6559 if ((int01 >= 0 ? int01 : -int01)
6560 < (int11 >= 0 ? int11 : -int11))
6562 tmp = int01, int01 = int11, int11 = tmp;
6563 alt0 = arg00, arg00 = arg10, arg10 = alt0;
6564 alt0 = arg01, arg01 = arg11, arg11 = alt0;
6567 if (exact_log2 (int11) > 0 && int01 % int11 == 0)
6569 alt0 = fold (build2 (MULT_EXPR, type, arg00,
6570 build_int_2 (int01 / int11, 0)));
6577 return fold (build2 (MULT_EXPR, type,
6578 fold (build2 (PLUS_EXPR, type,
6585 /* See if ARG1 is zero and X + ARG1 reduces to X. */
6586 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
6587 return non_lvalue (fold_convert (type, arg0));
6589 /* Likewise if the operands are reversed. */
6590 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
6591 return non_lvalue (fold_convert (type, arg1));
6593 /* Convert X + -C into X - C. */
6594 if (TREE_CODE (arg1) == REAL_CST
6595 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
6597 tem = fold_negate_const (arg1, type);
6598 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
6599 return fold (build2 (MINUS_EXPR, type,
6600 fold_convert (type, arg0),
6601 fold_convert (type, tem)));
6604 /* Convert x+x into x*2.0. */
6605 if (operand_equal_p (arg0, arg1, 0)
6606 && SCALAR_FLOAT_TYPE_P (type))
6607 return fold (build2 (MULT_EXPR, type, arg0,
6608 build_real (type, dconst2)));
6610 /* Convert x*c+x into x*(c+1). */
6611 if (flag_unsafe_math_optimizations
6612 && TREE_CODE (arg0) == MULT_EXPR
6613 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
6614 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg0, 1))
6615 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
6619 c = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
6620 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
6621 return fold (build2 (MULT_EXPR, type, arg1,
6622 build_real (type, c)));
6625 /* Convert x+x*c into x*(c+1). */
6626 if (flag_unsafe_math_optimizations
6627 && TREE_CODE (arg1) == MULT_EXPR
6628 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST
6629 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg1, 1))
6630 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
6634 c = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
6635 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
6636 return fold (build2 (MULT_EXPR, type, arg0,
6637 build_real (type, c)));
6640 /* Convert x*c1+x*c2 into x*(c1+c2). */
6641 if (flag_unsafe_math_optimizations
6642 && TREE_CODE (arg0) == MULT_EXPR
6643 && TREE_CODE (arg1) == MULT_EXPR
6644 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
6645 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg0, 1))
6646 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST
6647 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg1, 1))
6648 && operand_equal_p (TREE_OPERAND (arg0, 0),
6649 TREE_OPERAND (arg1, 0), 0))
6651 REAL_VALUE_TYPE c1, c2;
6653 c1 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
6654 c2 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
6655 real_arithmetic (&c1, PLUS_EXPR, &c1, &c2);
6656 return fold (build2 (MULT_EXPR, type,
6657 TREE_OPERAND (arg0, 0),
6658 build_real (type, c1)));
6660 /* Convert a + (b*c + d*e) into (a + b*c) + d*e */
6661 if (flag_unsafe_math_optimizations
6662 && TREE_CODE (arg1) == PLUS_EXPR
6663 && TREE_CODE (arg0) != MULT_EXPR)
6665 tree tree10 = TREE_OPERAND (arg1, 0);
6666 tree tree11 = TREE_OPERAND (arg1, 1);
6667 if (TREE_CODE (tree11) == MULT_EXPR
6668 && TREE_CODE (tree10) == MULT_EXPR)
6671 tree0 = fold (build2 (PLUS_EXPR, type, arg0, tree10));
6672 return fold (build2 (PLUS_EXPR, type, tree0, tree11));
6675 /* Convert (b*c + d*e) + a into b*c + (d*e +a) */
6676 if (flag_unsafe_math_optimizations
6677 && TREE_CODE (arg0) == PLUS_EXPR
6678 && TREE_CODE (arg1) != MULT_EXPR)
6680 tree tree00 = TREE_OPERAND (arg0, 0);
6681 tree tree01 = TREE_OPERAND (arg0, 1);
6682 if (TREE_CODE (tree01) == MULT_EXPR
6683 && TREE_CODE (tree00) == MULT_EXPR)
6686 tree0 = fold (build2 (PLUS_EXPR, type, tree01, arg1));
6687 return fold (build2 (PLUS_EXPR, type, tree00, tree0));
6693 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
6694 is a rotate of A by C1 bits. */
6695 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
6696 is a rotate of A by B bits. */
6698 enum tree_code code0, code1;
6699 code0 = TREE_CODE (arg0);
6700 code1 = TREE_CODE (arg1);
6701 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
6702 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
6703 && operand_equal_p (TREE_OPERAND (arg0, 0),
6704 TREE_OPERAND (arg1, 0), 0)
6705 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
6707 tree tree01, tree11;
6708 enum tree_code code01, code11;
6710 tree01 = TREE_OPERAND (arg0, 1);
6711 tree11 = TREE_OPERAND (arg1, 1);
6712 STRIP_NOPS (tree01);
6713 STRIP_NOPS (tree11);
6714 code01 = TREE_CODE (tree01);
6715 code11 = TREE_CODE (tree11);
6716 if (code01 == INTEGER_CST
6717 && code11 == INTEGER_CST
6718 && TREE_INT_CST_HIGH (tree01) == 0
6719 && TREE_INT_CST_HIGH (tree11) == 0
6720 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
6721 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
6722 return build2 (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
6723 code0 == LSHIFT_EXPR ? tree01 : tree11);
6724 else if (code11 == MINUS_EXPR)
6726 tree tree110, tree111;
6727 tree110 = TREE_OPERAND (tree11, 0);
6728 tree111 = TREE_OPERAND (tree11, 1);
6729 STRIP_NOPS (tree110);
6730 STRIP_NOPS (tree111);
6731 if (TREE_CODE (tree110) == INTEGER_CST
6732 && 0 == compare_tree_int (tree110,
6734 (TREE_TYPE (TREE_OPERAND
6736 && operand_equal_p (tree01, tree111, 0))
6737 return build2 ((code0 == LSHIFT_EXPR
6740 type, TREE_OPERAND (arg0, 0), tree01);
6742 else if (code01 == MINUS_EXPR)
6744 tree tree010, tree011;
6745 tree010 = TREE_OPERAND (tree01, 0);
6746 tree011 = TREE_OPERAND (tree01, 1);
6747 STRIP_NOPS (tree010);
6748 STRIP_NOPS (tree011);
6749 if (TREE_CODE (tree010) == INTEGER_CST
6750 && 0 == compare_tree_int (tree010,
6752 (TREE_TYPE (TREE_OPERAND
6754 && operand_equal_p (tree11, tree011, 0))
6755 return build2 ((code0 != LSHIFT_EXPR
6758 type, TREE_OPERAND (arg0, 0), tree11);
6764 /* In most languages, can't associate operations on floats through
6765 parentheses. Rather than remember where the parentheses were, we
6766 don't associate floats at all, unless the user has specified
6767 -funsafe-math-optimizations. */
6770 && (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
6772 tree var0, con0, lit0, minus_lit0;
6773 tree var1, con1, lit1, minus_lit1;
6775 /* Split both trees into variables, constants, and literals. Then
6776 associate each group together, the constants with literals,
6777 then the result with variables. This increases the chances of
6778 literals being recombined later and of generating relocatable
6779 expressions for the sum of a constant and literal. */
6780 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
6781 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
6782 code == MINUS_EXPR);
6784 /* Only do something if we found more than two objects. Otherwise,
6785 nothing has changed and we risk infinite recursion. */
6786 if (2 < ((var0 != 0) + (var1 != 0)
6787 + (con0 != 0) + (con1 != 0)
6788 + (lit0 != 0) + (lit1 != 0)
6789 + (minus_lit0 != 0) + (minus_lit1 != 0)))
6791 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
6792 if (code == MINUS_EXPR)
6795 var0 = associate_trees (var0, var1, code, type);
6796 con0 = associate_trees (con0, con1, code, type);
6797 lit0 = associate_trees (lit0, lit1, code, type);
6798 minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type);
6800 /* Preserve the MINUS_EXPR if the negative part of the literal is
6801 greater than the positive part. Otherwise, the multiplicative
6802 folding code (i.e extract_muldiv) may be fooled in case
6803 unsigned constants are subtracted, like in the following
6804 example: ((X*2 + 4) - 8U)/2. */
6805 if (minus_lit0 && lit0)
6807 if (TREE_CODE (lit0) == INTEGER_CST
6808 && TREE_CODE (minus_lit0) == INTEGER_CST
6809 && tree_int_cst_lt (lit0, minus_lit0))
6811 minus_lit0 = associate_trees (minus_lit0, lit0,
6817 lit0 = associate_trees (lit0, minus_lit0,
6825 return fold_convert (type,
6826 associate_trees (var0, minus_lit0,
6830 con0 = associate_trees (con0, minus_lit0,
6832 return fold_convert (type,
6833 associate_trees (var0, con0,
6838 con0 = associate_trees (con0, lit0, code, type);
6839 return fold_convert (type, associate_trees (var0, con0,
6846 t1 = const_binop (code, arg0, arg1, 0);
6847 if (t1 != NULL_TREE)
6849 /* The return value should always have
6850 the same type as the original expression. */
6851 if (TREE_TYPE (t1) != type)
6852 t1 = fold_convert (type, t1);
6859 /* A - (-B) -> A + B */
6860 if (TREE_CODE (arg1) == NEGATE_EXPR)
6861 return fold (build2 (PLUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
6862 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
6863 if (TREE_CODE (arg0) == NEGATE_EXPR
6864 && (FLOAT_TYPE_P (type)
6865 || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv))
6866 && negate_expr_p (arg1)
6867 && reorder_operands_p (arg0, arg1))
6868 return fold (build2 (MINUS_EXPR, type, negate_expr (arg1),
6869 TREE_OPERAND (arg0, 0)));
6871 if (! FLOAT_TYPE_P (type))
6873 if (! wins && integer_zerop (arg0))
6874 return negate_expr (fold_convert (type, arg1));
6875 if (integer_zerop (arg1))
6876 return non_lvalue (fold_convert (type, arg0));
6878 /* Fold A - (A & B) into ~B & A. */
6879 if (!TREE_SIDE_EFFECTS (arg0)
6880 && TREE_CODE (arg1) == BIT_AND_EXPR)
6882 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
6883 return fold (build2 (BIT_AND_EXPR, type,
6884 fold (build1 (BIT_NOT_EXPR, type,
6885 TREE_OPERAND (arg1, 0))),
6887 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
6888 return fold (build2 (BIT_AND_EXPR, type,
6889 fold (build1 (BIT_NOT_EXPR, type,
6890 TREE_OPERAND (arg1, 1))),
6894 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
6895 any power of 2 minus 1. */
6896 if (TREE_CODE (arg0) == BIT_AND_EXPR
6897 && TREE_CODE (arg1) == BIT_AND_EXPR
6898 && operand_equal_p (TREE_OPERAND (arg0, 0),
6899 TREE_OPERAND (arg1, 0), 0))
6901 tree mask0 = TREE_OPERAND (arg0, 1);
6902 tree mask1 = TREE_OPERAND (arg1, 1);
6903 tree tem = fold (build1 (BIT_NOT_EXPR, type, mask0));
6905 if (operand_equal_p (tem, mask1, 0))
6907 tem = fold (build2 (BIT_XOR_EXPR, type,
6908 TREE_OPERAND (arg0, 0), mask1));
6909 return fold (build2 (MINUS_EXPR, type, tem, mask1));
6914 /* See if ARG1 is zero and X - ARG1 reduces to X. */
6915 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
6916 return non_lvalue (fold_convert (type, arg0));
6918 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
6919 ARG0 is zero and X + ARG0 reduces to X, since that would mean
6920 (-ARG1 + ARG0) reduces to -ARG1. */
6921 else if (!wins && fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
6922 return negate_expr (fold_convert (type, arg1));
6924 /* Fold &x - &x. This can happen from &x.foo - &x.
6925 This is unsafe for certain floats even in non-IEEE formats.
6926 In IEEE, it is unsafe because it does wrong for NaNs.
6927 Also note that operand_equal_p is always false if an operand
6930 if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
6931 && operand_equal_p (arg0, arg1, 0))
6932 return fold_convert (type, integer_zero_node);
6934 /* A - B -> A + (-B) if B is easily negatable. */
6935 if (!wins && negate_expr_p (arg1)
6936 && ((FLOAT_TYPE_P (type)
6937 /* Avoid this transformation if B is a positive REAL_CST. */
6938 && (TREE_CODE (arg1) != REAL_CST
6939 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
6940 || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv)))
6941 return fold (build2 (PLUS_EXPR, type, arg0, negate_expr (arg1)));
6943 if (TREE_CODE (arg0) == MULT_EXPR
6944 && TREE_CODE (arg1) == MULT_EXPR
6945 && (INTEGRAL_TYPE_P (type) || flag_unsafe_math_optimizations))
6947 /* (A * C) - (B * C) -> (A-B) * C. */
6948 if (operand_equal_p (TREE_OPERAND (arg0, 1),
6949 TREE_OPERAND (arg1, 1), 0))
6950 return fold (build2 (MULT_EXPR, type,
6951 fold (build2 (MINUS_EXPR, type,
6952 TREE_OPERAND (arg0, 0),
6953 TREE_OPERAND (arg1, 0))),
6954 TREE_OPERAND (arg0, 1)));
6955 /* (A * C1) - (A * C2) -> A * (C1-C2). */
6956 if (operand_equal_p (TREE_OPERAND (arg0, 0),
6957 TREE_OPERAND (arg1, 0), 0))
6958 return fold (build2 (MULT_EXPR, type,
6959 TREE_OPERAND (arg0, 0),
6960 fold (build2 (MINUS_EXPR, type,
6961 TREE_OPERAND (arg0, 1),
6962 TREE_OPERAND (arg1, 1)))));
6968 /* (-A) * (-B) -> A * B */
6969 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
6970 return fold (build2 (MULT_EXPR, type,
6971 TREE_OPERAND (arg0, 0),
6972 negate_expr (arg1)));
6973 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
6974 return fold (build2 (MULT_EXPR, type,
6976 TREE_OPERAND (arg1, 0)));
6978 if (! FLOAT_TYPE_P (type))
6980 if (integer_zerop (arg1))
6981 return omit_one_operand (type, arg1, arg0);
6982 if (integer_onep (arg1))
6983 return non_lvalue (fold_convert (type, arg0));
6985 /* (a * (1 << b)) is (a << b) */
6986 if (TREE_CODE (arg1) == LSHIFT_EXPR
6987 && integer_onep (TREE_OPERAND (arg1, 0)))
6988 return fold (build2 (LSHIFT_EXPR, type, arg0,
6989 TREE_OPERAND (arg1, 1)));
6990 if (TREE_CODE (arg0) == LSHIFT_EXPR
6991 && integer_onep (TREE_OPERAND (arg0, 0)))
6992 return fold (build2 (LSHIFT_EXPR, type, arg1,
6993 TREE_OPERAND (arg0, 1)));
6995 if (TREE_CODE (arg1) == INTEGER_CST
6996 && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0),
6997 fold_convert (type, arg1),
6999 return fold_convert (type, tem);
7004 /* Maybe fold x * 0 to 0. The expressions aren't the same
7005 when x is NaN, since x * 0 is also NaN. Nor are they the
7006 same in modes with signed zeros, since multiplying a
7007 negative value by 0 gives -0, not +0. */
7008 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
7009 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
7010 && real_zerop (arg1))
7011 return omit_one_operand (type, arg1, arg0);
7012 /* In IEEE floating point, x*1 is not equivalent to x for snans. */
7013 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
7014 && real_onep (arg1))
7015 return non_lvalue (fold_convert (type, arg0));
7017 /* Transform x * -1.0 into -x. */
7018 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
7019 && real_minus_onep (arg1))
7020 return fold_convert (type, negate_expr (arg0));
7022 /* Convert (C1/X)*C2 into (C1*C2)/X. */
7023 if (flag_unsafe_math_optimizations
7024 && TREE_CODE (arg0) == RDIV_EXPR
7025 && TREE_CODE (arg1) == REAL_CST
7026 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
7028 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
7031 return fold (build2 (RDIV_EXPR, type, tem,
7032 TREE_OPERAND (arg0, 1)));
7035 if (flag_unsafe_math_optimizations)
7037 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
7038 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
7040 /* Optimizations of root(...)*root(...). */
7041 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
7043 tree rootfn, arg, arglist;
7044 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
7045 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
7047 /* Optimize sqrt(x)*sqrt(x) as x. */
7048 if (BUILTIN_SQRT_P (fcode0)
7049 && operand_equal_p (arg00, arg10, 0)
7050 && ! HONOR_SNANS (TYPE_MODE (type)))
7053 /* Optimize root(x)*root(y) as root(x*y). */
7054 rootfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7055 arg = fold (build2 (MULT_EXPR, type, arg00, arg10));
7056 arglist = build_tree_list (NULL_TREE, arg);
7057 return build_function_call_expr (rootfn, arglist);
7060 /* Optimize expN(x)*expN(y) as expN(x+y). */
7061 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
7063 tree expfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7064 tree arg = build2 (PLUS_EXPR, type,
7065 TREE_VALUE (TREE_OPERAND (arg0, 1)),
7066 TREE_VALUE (TREE_OPERAND (arg1, 1)));
7067 tree arglist = build_tree_list (NULL_TREE, fold (arg));
7068 return build_function_call_expr (expfn, arglist);
7071 /* Optimizations of pow(...)*pow(...). */
7072 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
7073 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
7074 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
7076 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
7077 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
7079 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
7080 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
7083 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
7084 if (operand_equal_p (arg01, arg11, 0))
7086 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7087 tree arg = build2 (MULT_EXPR, type, arg00, arg10);
7088 tree arglist = tree_cons (NULL_TREE, fold (arg),
7089 build_tree_list (NULL_TREE,
7091 return build_function_call_expr (powfn, arglist);
7094 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
7095 if (operand_equal_p (arg00, arg10, 0))
7097 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7098 tree arg = fold (build2 (PLUS_EXPR, type, arg01, arg11));
7099 tree arglist = tree_cons (NULL_TREE, arg00,
7100 build_tree_list (NULL_TREE,
7102 return build_function_call_expr (powfn, arglist);
7106 /* Optimize tan(x)*cos(x) as sin(x). */
7107 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
7108 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
7109 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
7110 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
7111 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
7112 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
7113 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
7114 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
7116 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
7118 if (sinfn != NULL_TREE)
7119 return build_function_call_expr (sinfn,
7120 TREE_OPERAND (arg0, 1));
7123 /* Optimize x*pow(x,c) as pow(x,c+1). */
7124 if (fcode1 == BUILT_IN_POW
7125 || fcode1 == BUILT_IN_POWF
7126 || fcode1 == BUILT_IN_POWL)
7128 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
7129 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
7131 if (TREE_CODE (arg11) == REAL_CST
7132 && ! TREE_CONSTANT_OVERFLOW (arg11)
7133 && operand_equal_p (arg0, arg10, 0))
7135 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
7139 c = TREE_REAL_CST (arg11);
7140 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
7141 arg = build_real (type, c);
7142 arglist = build_tree_list (NULL_TREE, arg);
7143 arglist = tree_cons (NULL_TREE, arg0, arglist);
7144 return build_function_call_expr (powfn, arglist);
7148 /* Optimize pow(x,c)*x as pow(x,c+1). */
7149 if (fcode0 == BUILT_IN_POW
7150 || fcode0 == BUILT_IN_POWF
7151 || fcode0 == BUILT_IN_POWL)
7153 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
7154 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
7156 if (TREE_CODE (arg01) == REAL_CST
7157 && ! TREE_CONSTANT_OVERFLOW (arg01)
7158 && operand_equal_p (arg1, arg00, 0))
7160 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7164 c = TREE_REAL_CST (arg01);
7165 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
7166 arg = build_real (type, c);
7167 arglist = build_tree_list (NULL_TREE, arg);
7168 arglist = tree_cons (NULL_TREE, arg1, arglist);
7169 return build_function_call_expr (powfn, arglist);
7173 /* Optimize x*x as pow(x,2.0), which is expanded as x*x. */
7175 && operand_equal_p (arg0, arg1, 0))
7177 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7181 tree arg = build_real (type, dconst2);
7182 tree arglist = build_tree_list (NULL_TREE, arg);
7183 arglist = tree_cons (NULL_TREE, arg0, arglist);
7184 return build_function_call_expr (powfn, arglist);
7193 if (integer_all_onesp (arg1))
7194 return omit_one_operand (type, arg1, arg0);
7195 if (integer_zerop (arg1))
7196 return non_lvalue (fold_convert (type, arg0));
7197 if (operand_equal_p (arg0, arg1, 0))
7198 return non_lvalue (fold_convert (type, arg0));
7201 if (TREE_CODE (arg0) == BIT_NOT_EXPR
7202 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
7204 t1 = build_int_2 (-1, -1);
7205 TREE_TYPE (t1) = type;
7206 force_fit_type (t1, 0);
7207 return omit_one_operand (type, t1, arg1);
7211 if (TREE_CODE (arg1) == BIT_NOT_EXPR
7212 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
7214 t1 = build_int_2 (-1, -1);
7215 TREE_TYPE (t1) = type;
7216 force_fit_type (t1, 0);
7217 return omit_one_operand (type, t1, arg0);
7220 t1 = distribute_bit_expr (code, type, arg0, arg1);
7221 if (t1 != NULL_TREE)
7224 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
7226 This results in more efficient code for machines without a NAND
7227 instruction. Combine will canonicalize to the first form
7228 which will allow use of NAND instructions provided by the
7229 backend if they exist. */
7230 if (TREE_CODE (arg0) == BIT_NOT_EXPR
7231 && TREE_CODE (arg1) == BIT_NOT_EXPR)
7233 return fold (build1 (BIT_NOT_EXPR, type,
7234 build2 (BIT_AND_EXPR, type,
7235 TREE_OPERAND (arg0, 0),
7236 TREE_OPERAND (arg1, 0))));
7239 /* See if this can be simplified into a rotate first. If that
7240 is unsuccessful continue in the association code. */
7244 if (integer_zerop (arg1))
7245 return non_lvalue (fold_convert (type, arg0));
7246 if (integer_all_onesp (arg1))
7247 return fold (build1 (BIT_NOT_EXPR, type, arg0));
7248 if (operand_equal_p (arg0, arg1, 0))
7249 return omit_one_operand (type, integer_zero_node, arg0);
7252 if (TREE_CODE (arg0) == BIT_NOT_EXPR
7253 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
7255 t1 = build_int_2 (-1, -1);
7256 TREE_TYPE (t1) = type;
7257 force_fit_type (t1, 0);
7258 return omit_one_operand (type, t1, arg1);
7262 if (TREE_CODE (arg1) == BIT_NOT_EXPR
7263 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
7265 t1 = build_int_2 (-1, -1);
7266 TREE_TYPE (t1) = type;
7267 force_fit_type (t1, 0);
7268 return omit_one_operand (type, t1, arg0);
7271 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
7272 with a constant, and the two constants have no bits in common,
7273 we should treat this as a BIT_IOR_EXPR since this may produce more
7275 if (TREE_CODE (arg0) == BIT_AND_EXPR
7276 && TREE_CODE (arg1) == BIT_AND_EXPR
7277 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7278 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
7279 && integer_zerop (const_binop (BIT_AND_EXPR,
7280 TREE_OPERAND (arg0, 1),
7281 TREE_OPERAND (arg1, 1), 0)))
7283 code = BIT_IOR_EXPR;
7287 /* See if this can be simplified into a rotate first. If that
7288 is unsuccessful continue in the association code. */
7292 if (integer_all_onesp (arg1))
7293 return non_lvalue (fold_convert (type, arg0));
7294 if (integer_zerop (arg1))
7295 return omit_one_operand (type, arg1, arg0);
7296 if (operand_equal_p (arg0, arg1, 0))
7297 return non_lvalue (fold_convert (type, arg0));
7299 /* ~X & X is always zero. */
7300 if (TREE_CODE (arg0) == BIT_NOT_EXPR
7301 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
7302 return omit_one_operand (type, integer_zero_node, arg1);
7304 /* X & ~X is always zero. */
7305 if (TREE_CODE (arg1) == BIT_NOT_EXPR
7306 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
7307 return omit_one_operand (type, integer_zero_node, arg0);
7309 t1 = distribute_bit_expr (code, type, arg0, arg1);
7310 if (t1 != NULL_TREE)
7312 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
7313 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
7314 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
7317 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
7319 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
7320 && (~TREE_INT_CST_LOW (arg1)
7321 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
7322 return fold_convert (type, TREE_OPERAND (arg0, 0));
7325 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
7327 This results in more efficient code for machines without a NOR
7328 instruction. Combine will canonicalize to the first form
7329 which will allow use of NOR instructions provided by the
7330 backend if they exist. */
7331 if (TREE_CODE (arg0) == BIT_NOT_EXPR
7332 && TREE_CODE (arg1) == BIT_NOT_EXPR)
7334 return fold (build1 (BIT_NOT_EXPR, type,
7335 build2 (BIT_IOR_EXPR, type,
7336 TREE_OPERAND (arg0, 0),
7337 TREE_OPERAND (arg1, 0))));
7343 /* Don't touch a floating-point divide by zero unless the mode
7344 of the constant can represent infinity. */
7345 if (TREE_CODE (arg1) == REAL_CST
7346 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
7347 && real_zerop (arg1))
7350 /* (-A) / (-B) -> A / B */
7351 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
7352 return fold (build2 (RDIV_EXPR, type,
7353 TREE_OPERAND (arg0, 0),
7354 negate_expr (arg1)));
7355 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
7356 return fold (build2 (RDIV_EXPR, type,
7358 TREE_OPERAND (arg1, 0)));
7360 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
7361 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
7362 && real_onep (arg1))
7363 return non_lvalue (fold_convert (type, arg0));
7365 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
7366 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
7367 && real_minus_onep (arg1))
7368 return non_lvalue (fold_convert (type, negate_expr (arg0)));
7370 /* If ARG1 is a constant, we can convert this to a multiply by the
7371 reciprocal. This does not have the same rounding properties,
7372 so only do this if -funsafe-math-optimizations. We can actually
7373 always safely do it if ARG1 is a power of two, but it's hard to
7374 tell if it is or not in a portable manner. */
7375 if (TREE_CODE (arg1) == REAL_CST)
7377 if (flag_unsafe_math_optimizations
7378 && 0 != (tem = const_binop (code, build_real (type, dconst1),
7380 return fold (build2 (MULT_EXPR, type, arg0, tem));
7381 /* Find the reciprocal if optimizing and the result is exact. */
7385 r = TREE_REAL_CST (arg1);
7386 if (exact_real_inverse (TYPE_MODE(TREE_TYPE(arg0)), &r))
7388 tem = build_real (type, r);
7389 return fold (build2 (MULT_EXPR, type, arg0, tem));
7393 /* Convert A/B/C to A/(B*C). */
7394 if (flag_unsafe_math_optimizations
7395 && TREE_CODE (arg0) == RDIV_EXPR)
7396 return fold (build2 (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
7397 fold (build2 (MULT_EXPR, type,
7398 TREE_OPERAND (arg0, 1), arg1))));
7400 /* Convert A/(B/C) to (A/B)*C. */
7401 if (flag_unsafe_math_optimizations
7402 && TREE_CODE (arg1) == RDIV_EXPR)
7403 return fold (build2 (MULT_EXPR, type,
7404 fold (build2 (RDIV_EXPR, type, arg0,
7405 TREE_OPERAND (arg1, 0))),
7406 TREE_OPERAND (arg1, 1)));
7408 /* Convert C1/(X*C2) into (C1/C2)/X. */
7409 if (flag_unsafe_math_optimizations
7410 && TREE_CODE (arg1) == MULT_EXPR
7411 && TREE_CODE (arg0) == REAL_CST
7412 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
7414 tree tem = const_binop (RDIV_EXPR, arg0,
7415 TREE_OPERAND (arg1, 1), 0);
7417 return fold (build2 (RDIV_EXPR, type, tem,
7418 TREE_OPERAND (arg1, 0)));
7421 if (flag_unsafe_math_optimizations)
7423 enum built_in_function fcode = builtin_mathfn_code (arg1);
7424 /* Optimize x/expN(y) into x*expN(-y). */
7425 if (BUILTIN_EXPONENT_P (fcode))
7427 tree expfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
7428 tree arg = negate_expr (TREE_VALUE (TREE_OPERAND (arg1, 1)));
7429 tree arglist = build_tree_list (NULL_TREE,
7430 fold_convert (type, arg));
7431 arg1 = build_function_call_expr (expfn, arglist);
7432 return fold (build2 (MULT_EXPR, type, arg0, arg1));
7435 /* Optimize x/pow(y,z) into x*pow(y,-z). */
7436 if (fcode == BUILT_IN_POW
7437 || fcode == BUILT_IN_POWF
7438 || fcode == BUILT_IN_POWL)
7440 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
7441 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
7442 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1, 1)));
7443 tree neg11 = fold_convert (type, negate_expr (arg11));
7444 tree arglist = tree_cons(NULL_TREE, arg10,
7445 build_tree_list (NULL_TREE, neg11));
7446 arg1 = build_function_call_expr (powfn, arglist);
7447 return fold (build2 (MULT_EXPR, type, arg0, arg1));
7451 if (flag_unsafe_math_optimizations)
7453 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
7454 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
7456 /* Optimize sin(x)/cos(x) as tan(x). */
7457 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
7458 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
7459 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
7460 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
7461 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
7463 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
7465 if (tanfn != NULL_TREE)
7466 return build_function_call_expr (tanfn,
7467 TREE_OPERAND (arg0, 1));
7470 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
7471 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
7472 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
7473 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
7474 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
7475 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
7477 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
7479 if (tanfn != NULL_TREE)
7481 tree tmp = TREE_OPERAND (arg0, 1);
7482 tmp = build_function_call_expr (tanfn, tmp);
7483 return fold (build2 (RDIV_EXPR, type,
7484 build_real (type, dconst1), tmp));
7488 /* Optimize pow(x,c)/x as pow(x,c-1). */
7489 if (fcode0 == BUILT_IN_POW
7490 || fcode0 == BUILT_IN_POWF
7491 || fcode0 == BUILT_IN_POWL)
7493 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
7494 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0, 1)));
7495 if (TREE_CODE (arg01) == REAL_CST
7496 && ! TREE_CONSTANT_OVERFLOW (arg01)
7497 && operand_equal_p (arg1, arg00, 0))
7499 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7503 c = TREE_REAL_CST (arg01);
7504 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
7505 arg = build_real (type, c);
7506 arglist = build_tree_list (NULL_TREE, arg);
7507 arglist = tree_cons (NULL_TREE, arg1, arglist);
7508 return build_function_call_expr (powfn, arglist);
7514 case TRUNC_DIV_EXPR:
7515 case ROUND_DIV_EXPR:
7516 case FLOOR_DIV_EXPR:
7518 case EXACT_DIV_EXPR:
7519 if (integer_onep (arg1))
7520 return non_lvalue (fold_convert (type, arg0));
7521 if (integer_zerop (arg1))
7524 if (!TYPE_UNSIGNED (type)
7525 && TREE_CODE (arg1) == INTEGER_CST
7526 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
7527 && TREE_INT_CST_HIGH (arg1) == -1)
7528 return fold_convert (type, negate_expr (arg0));
7530 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
7531 operation, EXACT_DIV_EXPR.
7533 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
7534 At one time others generated faster code, it's not clear if they do
7535 after the last round to changes to the DIV code in expmed.c. */
7536 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
7537 && multiple_of_p (type, arg0, arg1))
7538 return fold (build2 (EXACT_DIV_EXPR, type, arg0, arg1));
7540 if (TREE_CODE (arg1) == INTEGER_CST
7541 && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0), arg1,
7543 return fold_convert (type, tem);
7548 case FLOOR_MOD_EXPR:
7549 case ROUND_MOD_EXPR:
7550 case TRUNC_MOD_EXPR:
7551 if (integer_onep (arg1))
7552 return omit_one_operand (type, integer_zero_node, arg0);
7553 if (integer_zerop (arg1))
7556 /* X % -1 is zero. */
7557 if (!TYPE_UNSIGNED (type)
7558 && TREE_CODE (arg1) == INTEGER_CST
7559 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
7560 && TREE_INT_CST_HIGH (arg1) == -1)
7561 return omit_one_operand (type, integer_zero_node, arg0);
7563 /* Optimize unsigned TRUNC_MOD_EXPR by a power of two into a
7564 BIT_AND_EXPR, i.e. "X % C" into "X & C2". */
7565 if (code == TRUNC_MOD_EXPR
7566 && TYPE_UNSIGNED (type)
7567 && integer_pow2p (arg1))
7569 unsigned HOST_WIDE_INT high, low;
7573 l = tree_log2 (arg1);
7574 if (l >= HOST_BITS_PER_WIDE_INT)
7576 high = ((unsigned HOST_WIDE_INT) 1
7577 << (l - HOST_BITS_PER_WIDE_INT)) - 1;
7583 low = ((unsigned HOST_WIDE_INT) 1 << l) - 1;
7586 mask = build_int_2 (low, high);
7587 TREE_TYPE (mask) = type;
7588 return fold (build2 (BIT_AND_EXPR, type,
7589 fold_convert (type, arg0), mask));
7592 /* X % -C is the same as X % C (for all rounding moduli). */
7593 if (!TYPE_UNSIGNED (type)
7594 && TREE_CODE (arg1) == INTEGER_CST
7595 && TREE_INT_CST_HIGH (arg1) < 0
7597 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
7598 && !sign_bit_p (arg1, arg1))
7599 return fold (build2 (code, type, fold_convert (type, arg0),
7600 fold_convert (type, negate_expr (arg1))));
7602 /* X % -Y is the same as X % Y (for all rounding moduli). */
7603 if (!TYPE_UNSIGNED (type)
7604 && TREE_CODE (arg1) == NEGATE_EXPR
7606 return fold (build2 (code, type, fold_convert (type, arg0),
7607 fold_convert (type, TREE_OPERAND (arg1, 0))));
7609 if (TREE_CODE (arg1) == INTEGER_CST
7610 && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0), arg1,
7612 return fold_convert (type, tem);
7618 if (integer_all_onesp (arg0))
7619 return omit_one_operand (type, arg0, arg1);
7623 /* Optimize -1 >> x for arithmetic right shifts. */
7624 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type))
7625 return omit_one_operand (type, arg0, arg1);
7626 /* ... fall through ... */
7630 if (integer_zerop (arg1))
7631 return non_lvalue (fold_convert (type, arg0));
7632 if (integer_zerop (arg0))
7633 return omit_one_operand (type, arg0, arg1);
7635 /* Since negative shift count is not well-defined,
7636 don't try to compute it in the compiler. */
7637 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
7639 /* Rewrite an LROTATE_EXPR by a constant into an
7640 RROTATE_EXPR by a new constant. */
7641 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
7643 tree tem = build_int_2 (GET_MODE_BITSIZE (TYPE_MODE (type)), 0);
7644 tem = fold_convert (TREE_TYPE (arg1), tem);
7645 tem = const_binop (MINUS_EXPR, tem, arg1, 0);
7646 return fold (build2 (RROTATE_EXPR, type, arg0, tem));
7649 /* If we have a rotate of a bit operation with the rotate count and
7650 the second operand of the bit operation both constant,
7651 permute the two operations. */
7652 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
7653 && (TREE_CODE (arg0) == BIT_AND_EXPR
7654 || TREE_CODE (arg0) == BIT_IOR_EXPR
7655 || TREE_CODE (arg0) == BIT_XOR_EXPR)
7656 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
7657 return fold (build2 (TREE_CODE (arg0), type,
7658 fold (build2 (code, type,
7659 TREE_OPERAND (arg0, 0), arg1)),
7660 fold (build2 (code, type,
7661 TREE_OPERAND (arg0, 1), arg1))));
7663 /* Two consecutive rotates adding up to the width of the mode can
7665 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
7666 && TREE_CODE (arg0) == RROTATE_EXPR
7667 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7668 && TREE_INT_CST_HIGH (arg1) == 0
7669 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
7670 && ((TREE_INT_CST_LOW (arg1)
7671 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
7672 == (unsigned int) GET_MODE_BITSIZE (TYPE_MODE (type))))
7673 return TREE_OPERAND (arg0, 0);
7678 if (operand_equal_p (arg0, arg1, 0))
7679 return omit_one_operand (type, arg0, arg1);
7680 if (INTEGRAL_TYPE_P (type)
7681 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
7682 return omit_one_operand (type, arg1, arg0);
7686 if (operand_equal_p (arg0, arg1, 0))
7687 return omit_one_operand (type, arg0, arg1);
7688 if (INTEGRAL_TYPE_P (type)
7689 && TYPE_MAX_VALUE (type)
7690 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
7691 return omit_one_operand (type, arg1, arg0);
7694 case TRUTH_NOT_EXPR:
7695 /* The argument to invert_truthvalue must have Boolean type. */
7696 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
7697 arg0 = fold_convert (boolean_type_node, arg0);
7699 /* Note that the operand of this must be an int
7700 and its values must be 0 or 1.
7701 ("true" is a fixed value perhaps depending on the language,
7702 but we don't handle values other than 1 correctly yet.) */
7703 tem = invert_truthvalue (arg0);
7704 /* Avoid infinite recursion. */
7705 if (TREE_CODE (tem) == TRUTH_NOT_EXPR)
7707 tem = fold_single_bit_test (code, arg0, arg1, type);
7712 return fold_convert (type, tem);
7714 case TRUTH_ANDIF_EXPR:
7715 /* Note that the operands of this must be ints
7716 and their values must be 0 or 1.
7717 ("true" is a fixed value perhaps depending on the language.) */
7718 /* If first arg is constant zero, return it. */
7719 if (integer_zerop (arg0))
7720 return fold_convert (type, arg0);
7721 case TRUTH_AND_EXPR:
7722 /* If either arg is constant true, drop it. */
7723 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
7724 return non_lvalue (fold_convert (type, arg1));
7725 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
7726 /* Preserve sequence points. */
7727 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
7728 return non_lvalue (fold_convert (type, arg0));
7729 /* If second arg is constant zero, result is zero, but first arg
7730 must be evaluated. */
7731 if (integer_zerop (arg1))
7732 return omit_one_operand (type, arg1, arg0);
7733 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
7734 case will be handled here. */
7735 if (integer_zerop (arg0))
7736 return omit_one_operand (type, arg0, arg1);
7738 /* !X && X is always false. */
7739 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
7740 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
7741 return omit_one_operand (type, integer_zero_node, arg1);
7742 /* X && !X is always false. */
7743 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
7744 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
7745 return omit_one_operand (type, integer_zero_node, arg0);
7748 /* We only do these simplifications if we are optimizing. */
7752 /* Check for things like (A || B) && (A || C). We can convert this
7753 to A || (B && C). Note that either operator can be any of the four
7754 truth and/or operations and the transformation will still be
7755 valid. Also note that we only care about order for the
7756 ANDIF and ORIF operators. If B contains side effects, this
7757 might change the truth-value of A. */
7758 if (TREE_CODE (arg0) == TREE_CODE (arg1)
7759 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
7760 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
7761 || TREE_CODE (arg0) == TRUTH_AND_EXPR
7762 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
7763 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
7765 tree a00 = TREE_OPERAND (arg0, 0);
7766 tree a01 = TREE_OPERAND (arg0, 1);
7767 tree a10 = TREE_OPERAND (arg1, 0);
7768 tree a11 = TREE_OPERAND (arg1, 1);
7769 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
7770 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
7771 && (code == TRUTH_AND_EXPR
7772 || code == TRUTH_OR_EXPR));
7774 if (operand_equal_p (a00, a10, 0))
7775 return fold (build2 (TREE_CODE (arg0), type, a00,
7776 fold (build2 (code, type, a01, a11))));
7777 else if (commutative && operand_equal_p (a00, a11, 0))
7778 return fold (build2 (TREE_CODE (arg0), type, a00,
7779 fold (build2 (code, type, a01, a10))));
7780 else if (commutative && operand_equal_p (a01, a10, 0))
7781 return fold (build2 (TREE_CODE (arg0), type, a01,
7782 fold (build2 (code, type, a00, a11))));
7784 /* This case if tricky because we must either have commutative
7785 operators or else A10 must not have side-effects. */
7787 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
7788 && operand_equal_p (a01, a11, 0))
7789 return fold (build2 (TREE_CODE (arg0), type,
7790 fold (build2 (code, type, a00, a10)),
7794 /* See if we can build a range comparison. */
7795 if (0 != (tem = fold_range_test (t)))
7798 /* Check for the possibility of merging component references. If our
7799 lhs is another similar operation, try to merge its rhs with our
7800 rhs. Then try to merge our lhs and rhs. */
7801 if (TREE_CODE (arg0) == code
7802 && 0 != (tem = fold_truthop (code, type,
7803 TREE_OPERAND (arg0, 1), arg1)))
7804 return fold (build2 (code, type, TREE_OPERAND (arg0, 0), tem));
7806 if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
7811 case TRUTH_ORIF_EXPR:
7812 /* Note that the operands of this must be ints
7813 and their values must be 0 or true.
7814 ("true" is a fixed value perhaps depending on the language.) */
7815 /* If first arg is constant true, return it. */
7816 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
7817 return fold_convert (type, arg0);
7819 /* If either arg is constant zero, drop it. */
7820 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
7821 return non_lvalue (fold_convert (type, arg1));
7822 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
7823 /* Preserve sequence points. */
7824 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
7825 return non_lvalue (fold_convert (type, arg0));
7826 /* If second arg is constant true, result is true, but we must
7827 evaluate first arg. */
7828 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
7829 return omit_one_operand (type, arg1, arg0);
7830 /* Likewise for first arg, but note this only occurs here for
7832 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
7833 return omit_one_operand (type, arg0, arg1);
7835 /* !X || X is always true. */
7836 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
7837 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
7838 return omit_one_operand (type, integer_one_node, arg1);
7839 /* X || !X is always true. */
7840 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
7841 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
7842 return omit_one_operand (type, integer_one_node, arg0);
7846 case TRUTH_XOR_EXPR:
7847 /* If the second arg is constant zero, drop it. */
7848 if (integer_zerop (arg1))
7849 return non_lvalue (fold_convert (type, arg0));
7850 /* If the second arg is constant true, this is a logical inversion. */
7851 if (integer_onep (arg1))
7852 return non_lvalue (fold_convert (type, invert_truthvalue (arg0)));
7853 /* Identical arguments cancel to zero. */
7854 if (operand_equal_p (arg0, arg1, 0))
7855 return omit_one_operand (type, integer_zero_node, arg0);
7857 /* !X ^ X is always true. */
7858 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
7859 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
7860 return omit_one_operand (type, integer_one_node, arg1);
7862 /* X ^ !X is always true. */
7863 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
7864 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
7865 return omit_one_operand (type, integer_one_node, arg0);
7875 /* If one arg is a real or integer constant, put it last. */
7876 if (tree_swap_operands_p (arg0, arg1, true))
7877 return fold (build2 (swap_tree_comparison (code), type, arg1, arg0));
7879 /* If this is an equality comparison of the address of a non-weak
7880 object against zero, then we know the result. */
7881 if ((code == EQ_EXPR || code == NE_EXPR)
7882 && TREE_CODE (arg0) == ADDR_EXPR
7883 && DECL_P (TREE_OPERAND (arg0, 0))
7884 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
7885 && integer_zerop (arg1))
7886 return constant_boolean_node (code != EQ_EXPR, type);
7888 /* If this is an equality comparison of the address of two non-weak,
7889 unaliased symbols neither of which are extern (since we do not
7890 have access to attributes for externs), then we know the result. */
7891 if ((code == EQ_EXPR || code == NE_EXPR)
7892 && TREE_CODE (arg0) == ADDR_EXPR
7893 && DECL_P (TREE_OPERAND (arg0, 0))
7894 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
7895 && ! lookup_attribute ("alias",
7896 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
7897 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
7898 && TREE_CODE (arg1) == ADDR_EXPR
7899 && DECL_P (TREE_OPERAND (arg1, 0))
7900 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
7901 && ! lookup_attribute ("alias",
7902 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
7903 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
7904 return constant_boolean_node (operand_equal_p (arg0, arg1, 0)
7905 ? code == EQ_EXPR : code != EQ_EXPR,
7908 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
7910 tree targ0 = strip_float_extensions (arg0);
7911 tree targ1 = strip_float_extensions (arg1);
7912 tree newtype = TREE_TYPE (targ0);
7914 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
7915 newtype = TREE_TYPE (targ1);
7917 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
7918 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
7919 return fold (build2 (code, type, fold_convert (newtype, targ0),
7920 fold_convert (newtype, targ1)));
7922 /* (-a) CMP (-b) -> b CMP a */
7923 if (TREE_CODE (arg0) == NEGATE_EXPR
7924 && TREE_CODE (arg1) == NEGATE_EXPR)
7925 return fold (build2 (code, type, TREE_OPERAND (arg1, 0),
7926 TREE_OPERAND (arg0, 0)));
7928 if (TREE_CODE (arg1) == REAL_CST)
7930 REAL_VALUE_TYPE cst;
7931 cst = TREE_REAL_CST (arg1);
7933 /* (-a) CMP CST -> a swap(CMP) (-CST) */
7934 if (TREE_CODE (arg0) == NEGATE_EXPR)
7936 fold (build2 (swap_tree_comparison (code), type,
7937 TREE_OPERAND (arg0, 0),
7938 build_real (TREE_TYPE (arg1),
7939 REAL_VALUE_NEGATE (cst))));
7941 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
7942 /* a CMP (-0) -> a CMP 0 */
7943 if (REAL_VALUE_MINUS_ZERO (cst))
7944 return fold (build2 (code, type, arg0,
7945 build_real (TREE_TYPE (arg1), dconst0)));
7947 /* x != NaN is always true, other ops are always false. */
7948 if (REAL_VALUE_ISNAN (cst)
7949 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
7951 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
7952 return omit_one_operand (type, tem, arg0);
7955 /* Fold comparisons against infinity. */
7956 if (REAL_VALUE_ISINF (cst))
7958 tem = fold_inf_compare (code, type, arg0, arg1);
7959 if (tem != NULL_TREE)
7964 /* If this is a comparison of a real constant with a PLUS_EXPR
7965 or a MINUS_EXPR of a real constant, we can convert it into a
7966 comparison with a revised real constant as long as no overflow
7967 occurs when unsafe_math_optimizations are enabled. */
7968 if (flag_unsafe_math_optimizations
7969 && TREE_CODE (arg1) == REAL_CST
7970 && (TREE_CODE (arg0) == PLUS_EXPR
7971 || TREE_CODE (arg0) == MINUS_EXPR)
7972 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
7973 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
7974 ? MINUS_EXPR : PLUS_EXPR,
7975 arg1, TREE_OPERAND (arg0, 1), 0))
7976 && ! TREE_CONSTANT_OVERFLOW (tem))
7977 return fold (build2 (code, type, TREE_OPERAND (arg0, 0), tem));
7979 /* Likewise, we can simplify a comparison of a real constant with
7980 a MINUS_EXPR whose first operand is also a real constant, i.e.
7981 (c1 - x) < c2 becomes x > c1-c2. */
7982 if (flag_unsafe_math_optimizations
7983 && TREE_CODE (arg1) == REAL_CST
7984 && TREE_CODE (arg0) == MINUS_EXPR
7985 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
7986 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
7988 && ! TREE_CONSTANT_OVERFLOW (tem))
7989 return fold (build2 (swap_tree_comparison (code), type,
7990 TREE_OPERAND (arg0, 1), tem));
7992 /* Fold comparisons against built-in math functions. */
7993 if (TREE_CODE (arg1) == REAL_CST
7994 && flag_unsafe_math_optimizations
7995 && ! flag_errno_math)
7997 enum built_in_function fcode = builtin_mathfn_code (arg0);
7999 if (fcode != END_BUILTINS)
8001 tem = fold_mathfn_compare (fcode, code, type, arg0, arg1);
8002 if (tem != NULL_TREE)
8008 /* Convert foo++ == CONST into ++foo == CONST + INCR. */
8009 if (TREE_CONSTANT (arg1)
8010 && (TREE_CODE (arg0) == POSTINCREMENT_EXPR
8011 || TREE_CODE (arg0) == POSTDECREMENT_EXPR)
8012 /* This optimization is invalid for ordered comparisons
8013 if CONST+INCR overflows or if foo+incr might overflow.
8014 This optimization is invalid for floating point due to rounding.
8015 For pointer types we assume overflow doesn't happen. */
8016 && (POINTER_TYPE_P (TREE_TYPE (arg0))
8017 || (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8018 && (code == EQ_EXPR || code == NE_EXPR))))
8020 tree varop, newconst;
8022 if (TREE_CODE (arg0) == POSTINCREMENT_EXPR)
8024 newconst = fold (build2 (PLUS_EXPR, TREE_TYPE (arg0),
8025 arg1, TREE_OPERAND (arg0, 1)));
8026 varop = build2 (PREINCREMENT_EXPR, TREE_TYPE (arg0),
8027 TREE_OPERAND (arg0, 0),
8028 TREE_OPERAND (arg0, 1));
8032 newconst = fold (build2 (MINUS_EXPR, TREE_TYPE (arg0),
8033 arg1, TREE_OPERAND (arg0, 1)));
8034 varop = build2 (PREDECREMENT_EXPR, TREE_TYPE (arg0),
8035 TREE_OPERAND (arg0, 0),
8036 TREE_OPERAND (arg0, 1));
8040 /* If VAROP is a reference to a bitfield, we must mask
8041 the constant by the width of the field. */
8042 if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
8043 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (varop, 0), 1))
8044 && host_integerp (DECL_SIZE (TREE_OPERAND
8045 (TREE_OPERAND (varop, 0), 1)), 1))
8047 tree fielddecl = TREE_OPERAND (TREE_OPERAND (varop, 0), 1);
8048 HOST_WIDE_INT size = tree_low_cst (DECL_SIZE (fielddecl), 1);
8049 tree folded_compare, shift;
8051 /* First check whether the comparison would come out
8052 always the same. If we don't do that we would
8053 change the meaning with the masking. */
8054 folded_compare = fold (build2 (code, type,
8055 TREE_OPERAND (varop, 0), arg1));
8056 if (integer_zerop (folded_compare)
8057 || integer_onep (folded_compare))
8058 return omit_one_operand (type, folded_compare, varop);
8060 shift = build_int_2 (TYPE_PRECISION (TREE_TYPE (varop)) - size,
8062 shift = fold_convert (TREE_TYPE (varop), shift);
8063 newconst = fold (build2 (LSHIFT_EXPR, TREE_TYPE (varop),
8065 newconst = fold (build2 (RSHIFT_EXPR, TREE_TYPE (varop),
8069 return fold (build2 (code, type, varop, newconst));
8072 /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
8073 This transformation affects the cases which are handled in later
8074 optimizations involving comparisons with non-negative constants. */
8075 if (TREE_CODE (arg1) == INTEGER_CST
8076 && TREE_CODE (arg0) != INTEGER_CST
8077 && tree_int_cst_sgn (arg1) > 0)
8082 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
8083 return fold (build2 (GT_EXPR, type, arg0, arg1));
8086 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
8087 return fold (build2 (LE_EXPR, type, arg0, arg1));
8094 /* Comparisons with the highest or lowest possible integer of
8095 the specified size will have known values.
8097 This is quite similar to fold_relational_hi_lo; however, my
8098 attempts to share the code have been nothing but trouble.
8099 I give up for now. */
8101 int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg1)));
8103 if (TREE_CODE (arg1) == INTEGER_CST
8104 && ! TREE_CONSTANT_OVERFLOW (arg1)
8105 && width <= HOST_BITS_PER_WIDE_INT
8106 && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
8107 || POINTER_TYPE_P (TREE_TYPE (arg1))))
8109 unsigned HOST_WIDE_INT signed_max;
8110 unsigned HOST_WIDE_INT max, min;
8112 signed_max = ((unsigned HOST_WIDE_INT) 1 << (width - 1)) - 1;
8114 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
8116 max = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
8122 min = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
8125 if (TREE_INT_CST_HIGH (arg1) == 0
8126 && TREE_INT_CST_LOW (arg1) == max)
8130 return omit_one_operand (type, integer_zero_node, arg0);
8133 return fold (build2 (EQ_EXPR, type, arg0, arg1));
8136 return omit_one_operand (type, integer_one_node, arg0);
8139 return fold (build2 (NE_EXPR, type, arg0, arg1));
8141 /* The GE_EXPR and LT_EXPR cases above are not normally
8142 reached because of previous transformations. */
8147 else if (TREE_INT_CST_HIGH (arg1) == 0
8148 && TREE_INT_CST_LOW (arg1) == max - 1)
8152 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
8153 return fold (build2 (EQ_EXPR, type, arg0, arg1));
8155 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
8156 return fold (build2 (NE_EXPR, type, arg0, arg1));
8160 else if (TREE_INT_CST_HIGH (arg1) == (min ? -1 : 0)
8161 && TREE_INT_CST_LOW (arg1) == min)
8165 return omit_one_operand (type, integer_zero_node, arg0);
8168 return fold (build2 (EQ_EXPR, type, arg0, arg1));
8171 return omit_one_operand (type, integer_one_node, arg0);
8174 return fold (build2 (NE_EXPR, type, arg0, arg1));
8179 else if (TREE_INT_CST_HIGH (arg1) == (min ? -1 : 0)
8180 && TREE_INT_CST_LOW (arg1) == min + 1)
8184 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
8185 return fold (build2 (NE_EXPR, type, arg0, arg1));
8187 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
8188 return fold (build2 (EQ_EXPR, type, arg0, arg1));
8193 else if (!in_gimple_form
8194 && TREE_INT_CST_HIGH (arg1) == 0
8195 && TREE_INT_CST_LOW (arg1) == signed_max
8196 && TYPE_UNSIGNED (TREE_TYPE (arg1))
8197 /* signed_type does not work on pointer types. */
8198 && INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
8200 /* The following case also applies to X < signed_max+1
8201 and X >= signed_max+1 because previous transformations. */
8202 if (code == LE_EXPR || code == GT_EXPR)
8205 st0 = lang_hooks.types.signed_type (TREE_TYPE (arg0));
8206 st1 = lang_hooks.types.signed_type (TREE_TYPE (arg1));
8208 (build2 (code == LE_EXPR ? GE_EXPR: LT_EXPR,
8209 type, fold_convert (st0, arg0),
8210 fold_convert (st1, integer_zero_node)));
8216 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
8217 a MINUS_EXPR of a constant, we can convert it into a comparison with
8218 a revised constant as long as no overflow occurs. */
8219 if ((code == EQ_EXPR || code == NE_EXPR)
8220 && TREE_CODE (arg1) == INTEGER_CST
8221 && (TREE_CODE (arg0) == PLUS_EXPR
8222 || TREE_CODE (arg0) == MINUS_EXPR)
8223 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8224 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
8225 ? MINUS_EXPR : PLUS_EXPR,
8226 arg1, TREE_OPERAND (arg0, 1), 0))
8227 && ! TREE_CONSTANT_OVERFLOW (tem))
8228 return fold (build2 (code, type, TREE_OPERAND (arg0, 0), tem));
8230 /* Similarly for a NEGATE_EXPR. */
8231 else if ((code == EQ_EXPR || code == NE_EXPR)
8232 && TREE_CODE (arg0) == NEGATE_EXPR
8233 && TREE_CODE (arg1) == INTEGER_CST
8234 && 0 != (tem = negate_expr (arg1))
8235 && TREE_CODE (tem) == INTEGER_CST
8236 && ! TREE_CONSTANT_OVERFLOW (tem))
8237 return fold (build2 (code, type, TREE_OPERAND (arg0, 0), tem));
8239 /* If we have X - Y == 0, we can convert that to X == Y and similarly
8240 for !=. Don't do this for ordered comparisons due to overflow. */
8241 else if ((code == NE_EXPR || code == EQ_EXPR)
8242 && integer_zerop (arg1) && TREE_CODE (arg0) == MINUS_EXPR)
8243 return fold (build2 (code, type,
8244 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1)));
8246 /* If we are widening one operand of an integer comparison,
8247 see if the other operand is similarly being widened. Perhaps we
8248 can do the comparison in the narrower type. */
8249 else if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
8250 && TREE_CODE (arg0) == NOP_EXPR
8251 && (tem = get_unwidened (arg0, NULL_TREE)) != arg0
8252 && (code == EQ_EXPR || code == NE_EXPR
8253 || TYPE_UNSIGNED (TREE_TYPE (arg0))
8254 == TYPE_UNSIGNED (TREE_TYPE (tem)))
8255 && (t1 = get_unwidened (arg1, TREE_TYPE (tem))) != 0
8256 && (TREE_TYPE (t1) == TREE_TYPE (tem)
8257 || (TREE_CODE (t1) == INTEGER_CST
8258 && int_fits_type_p (t1, TREE_TYPE (tem)))))
8259 return fold (build2 (code, type, tem,
8260 fold_convert (TREE_TYPE (tem), t1)));
8262 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
8263 constant, we can simplify it. */
8264 else if (TREE_CODE (arg1) == INTEGER_CST
8265 && (TREE_CODE (arg0) == MIN_EXPR
8266 || TREE_CODE (arg0) == MAX_EXPR)
8267 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8268 return optimize_minmax_comparison (t);
8270 /* If we are comparing an ABS_EXPR with a constant, we can
8271 convert all the cases into explicit comparisons, but they may
8272 well not be faster than doing the ABS and one comparison.
8273 But ABS (X) <= C is a range comparison, which becomes a subtraction
8274 and a comparison, and is probably faster. */
8275 else if (code == LE_EXPR && TREE_CODE (arg1) == INTEGER_CST
8276 && TREE_CODE (arg0) == ABS_EXPR
8277 && ! TREE_SIDE_EFFECTS (arg0)
8278 && (0 != (tem = negate_expr (arg1)))
8279 && TREE_CODE (tem) == INTEGER_CST
8280 && ! TREE_CONSTANT_OVERFLOW (tem))
8281 return fold (build2 (TRUTH_ANDIF_EXPR, type,
8282 build2 (GE_EXPR, type,
8283 TREE_OPERAND (arg0, 0), tem),
8284 build2 (LE_EXPR, type,
8285 TREE_OPERAND (arg0, 0), arg1)));
8287 /* If this is an EQ or NE comparison with zero and ARG0 is
8288 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
8289 two operations, but the latter can be done in one less insn
8290 on machines that have only two-operand insns or on which a
8291 constant cannot be the first operand. */
8292 if (integer_zerop (arg1) && (code == EQ_EXPR || code == NE_EXPR)
8293 && TREE_CODE (arg0) == BIT_AND_EXPR)
8295 tree arg00 = TREE_OPERAND (arg0, 0);
8296 tree arg01 = TREE_OPERAND (arg0, 1);
8297 if (TREE_CODE (arg00) == LSHIFT_EXPR
8298 && integer_onep (TREE_OPERAND (arg00, 0)))
8300 fold (build2 (code, type,
8301 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
8302 build2 (RSHIFT_EXPR, TREE_TYPE (arg00),
8303 arg01, TREE_OPERAND (arg00, 1)),
8304 fold_convert (TREE_TYPE (arg0),
8307 else if (TREE_CODE (TREE_OPERAND (arg0, 1)) == LSHIFT_EXPR
8308 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 1), 0)))
8310 fold (build2 (code, type,
8311 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
8312 build2 (RSHIFT_EXPR, TREE_TYPE (arg01),
8313 arg00, TREE_OPERAND (arg01, 1)),
8314 fold_convert (TREE_TYPE (arg0),
8319 /* If this is an NE or EQ comparison of zero against the result of a
8320 signed MOD operation whose second operand is a power of 2, make
8321 the MOD operation unsigned since it is simpler and equivalent. */
8322 if ((code == NE_EXPR || code == EQ_EXPR)
8323 && integer_zerop (arg1)
8324 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
8325 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
8326 || TREE_CODE (arg0) == CEIL_MOD_EXPR
8327 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
8328 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
8329 && integer_pow2p (TREE_OPERAND (arg0, 1)))
8331 tree newtype = lang_hooks.types.unsigned_type (TREE_TYPE (arg0));
8332 tree newmod = fold (build2 (TREE_CODE (arg0), newtype,
8333 fold_convert (newtype,
8334 TREE_OPERAND (arg0, 0)),
8335 fold_convert (newtype,
8336 TREE_OPERAND (arg0, 1))));
8338 return fold (build2 (code, type, newmod,
8339 fold_convert (newtype, arg1)));
8342 /* If this is an NE comparison of zero with an AND of one, remove the
8343 comparison since the AND will give the correct value. */
8344 if (code == NE_EXPR && integer_zerop (arg1)
8345 && TREE_CODE (arg0) == BIT_AND_EXPR
8346 && integer_onep (TREE_OPERAND (arg0, 1)))
8347 return fold_convert (type, arg0);
8349 /* If we have (A & C) == C where C is a power of 2, convert this into
8350 (A & C) != 0. Similarly for NE_EXPR. */
8351 if ((code == EQ_EXPR || code == NE_EXPR)
8352 && TREE_CODE (arg0) == BIT_AND_EXPR
8353 && integer_pow2p (TREE_OPERAND (arg0, 1))
8354 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
8355 return fold (build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
8356 arg0, integer_zero_node));
8358 /* If we have (A & C) != 0 or (A & C) == 0 and C is a power of
8359 2, then fold the expression into shifts and logical operations. */
8360 tem = fold_single_bit_test (code, arg0, arg1, type);
8364 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
8365 Similarly for NE_EXPR. */
8366 if ((code == EQ_EXPR || code == NE_EXPR)
8367 && TREE_CODE (arg0) == BIT_AND_EXPR
8368 && TREE_CODE (arg1) == INTEGER_CST
8369 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8372 = fold (build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
8373 arg1, build1 (BIT_NOT_EXPR,
8374 TREE_TYPE (TREE_OPERAND (arg0, 1)),
8375 TREE_OPERAND (arg0, 1))));
8376 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
8377 if (integer_nonzerop (dandnotc))
8378 return omit_one_operand (type, rslt, arg0);
8381 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
8382 Similarly for NE_EXPR. */
8383 if ((code == EQ_EXPR || code == NE_EXPR)
8384 && TREE_CODE (arg0) == BIT_IOR_EXPR
8385 && TREE_CODE (arg1) == INTEGER_CST
8386 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8389 = fold (build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
8390 TREE_OPERAND (arg0, 1),
8391 build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1)));
8392 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
8393 if (integer_nonzerop (candnotd))
8394 return omit_one_operand (type, rslt, arg0);
8397 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
8398 and similarly for >= into !=. */
8399 if ((code == LT_EXPR || code == GE_EXPR)
8400 && TYPE_UNSIGNED (TREE_TYPE (arg0))
8401 && TREE_CODE (arg1) == LSHIFT_EXPR
8402 && integer_onep (TREE_OPERAND (arg1, 0)))
8403 return build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
8404 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
8405 TREE_OPERAND (arg1, 1)),
8406 fold_convert (TREE_TYPE (arg0), integer_zero_node));
8408 else if ((code == LT_EXPR || code == GE_EXPR)
8409 && TYPE_UNSIGNED (TREE_TYPE (arg0))
8410 && (TREE_CODE (arg1) == NOP_EXPR
8411 || TREE_CODE (arg1) == CONVERT_EXPR)
8412 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
8413 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
8415 build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
8416 fold_convert (TREE_TYPE (arg0),
8417 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
8418 TREE_OPERAND (TREE_OPERAND (arg1, 0),
8420 fold_convert (TREE_TYPE (arg0), integer_zero_node));
8422 /* Simplify comparison of something with itself. (For IEEE
8423 floating-point, we can only do some of these simplifications.) */
8424 if (operand_equal_p (arg0, arg1, 0))
8429 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
8430 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8431 return constant_boolean_node (1, type);
8436 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
8437 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8438 return constant_boolean_node (1, type);
8439 return fold (build2 (EQ_EXPR, type, arg0, arg1));
8442 /* For NE, we can only do this simplification if integer
8443 or we don't honor IEEE floating point NaNs. */
8444 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
8445 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8447 /* ... fall through ... */
8450 return constant_boolean_node (0, type);
8456 /* If we are comparing an expression that just has comparisons
8457 of two integer values, arithmetic expressions of those comparisons,
8458 and constants, we can simplify it. There are only three cases
8459 to check: the two values can either be equal, the first can be
8460 greater, or the second can be greater. Fold the expression for
8461 those three values. Since each value must be 0 or 1, we have
8462 eight possibilities, each of which corresponds to the constant 0
8463 or 1 or one of the six possible comparisons.
8465 This handles common cases like (a > b) == 0 but also handles
8466 expressions like ((x > y) - (y > x)) > 0, which supposedly
8467 occur in macroized code. */
8469 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
8471 tree cval1 = 0, cval2 = 0;
8474 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
8475 /* Don't handle degenerate cases here; they should already
8476 have been handled anyway. */
8477 && cval1 != 0 && cval2 != 0
8478 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
8479 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
8480 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
8481 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
8482 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
8483 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
8484 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
8486 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
8487 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
8489 /* We can't just pass T to eval_subst in case cval1 or cval2
8490 was the same as ARG1. */
8493 = fold (build2 (code, type,
8494 eval_subst (arg0, cval1, maxval,
8498 = fold (build2 (code, type,
8499 eval_subst (arg0, cval1, maxval,
8503 = fold (build2 (code, type,
8504 eval_subst (arg0, cval1, minval,
8508 /* All three of these results should be 0 or 1. Confirm they
8509 are. Then use those values to select the proper code
8512 if ((integer_zerop (high_result)
8513 || integer_onep (high_result))
8514 && (integer_zerop (equal_result)
8515 || integer_onep (equal_result))
8516 && (integer_zerop (low_result)
8517 || integer_onep (low_result)))
8519 /* Make a 3-bit mask with the high-order bit being the
8520 value for `>', the next for '=', and the low for '<'. */
8521 switch ((integer_onep (high_result) * 4)
8522 + (integer_onep (equal_result) * 2)
8523 + integer_onep (low_result))
8527 return omit_one_operand (type, integer_zero_node, arg0);
8548 return omit_one_operand (type, integer_one_node, arg0);
8551 tem = build2 (code, type, cval1, cval2);
8553 return save_expr (tem);
8560 /* If this is a comparison of a field, we may be able to simplify it. */
8561 if (((TREE_CODE (arg0) == COMPONENT_REF
8562 && lang_hooks.can_use_bit_fields_p ())
8563 || TREE_CODE (arg0) == BIT_FIELD_REF)
8564 && (code == EQ_EXPR || code == NE_EXPR)
8565 /* Handle the constant case even without -O
8566 to make sure the warnings are given. */
8567 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
8569 t1 = optimize_bit_field_compare (code, type, arg0, arg1);
8574 /* If this is a comparison of complex values and either or both sides
8575 are a COMPLEX_EXPR or COMPLEX_CST, it is best to split up the
8576 comparisons and join them with a TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR.
8577 This may prevent needless evaluations. */
8578 if ((code == EQ_EXPR || code == NE_EXPR)
8579 && TREE_CODE (TREE_TYPE (arg0)) == COMPLEX_TYPE
8580 && (TREE_CODE (arg0) == COMPLEX_EXPR
8581 || TREE_CODE (arg1) == COMPLEX_EXPR
8582 || TREE_CODE (arg0) == COMPLEX_CST
8583 || TREE_CODE (arg1) == COMPLEX_CST))
8585 tree subtype = TREE_TYPE (TREE_TYPE (arg0));
8586 tree real0, imag0, real1, imag1;
8588 arg0 = save_expr (arg0);
8589 arg1 = save_expr (arg1);
8590 real0 = fold (build1 (REALPART_EXPR, subtype, arg0));
8591 imag0 = fold (build1 (IMAGPART_EXPR, subtype, arg0));
8592 real1 = fold (build1 (REALPART_EXPR, subtype, arg1));
8593 imag1 = fold (build1 (IMAGPART_EXPR, subtype, arg1));
8595 return fold (build2 ((code == EQ_EXPR ? TRUTH_ANDIF_EXPR
8598 fold (build2 (code, type, real0, real1)),
8599 fold (build2 (code, type, imag0, imag1))));
8602 /* Optimize comparisons of strlen vs zero to a compare of the
8603 first character of the string vs zero. To wit,
8604 strlen(ptr) == 0 => *ptr == 0
8605 strlen(ptr) != 0 => *ptr != 0
8606 Other cases should reduce to one of these two (or a constant)
8607 due to the return value of strlen being unsigned. */
8608 if ((code == EQ_EXPR || code == NE_EXPR)
8609 && integer_zerop (arg1)
8610 && TREE_CODE (arg0) == CALL_EXPR)
8612 tree fndecl = get_callee_fndecl (arg0);
8616 && DECL_BUILT_IN (fndecl)
8617 && DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_MD
8618 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
8619 && (arglist = TREE_OPERAND (arg0, 1))
8620 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE
8621 && ! TREE_CHAIN (arglist))
8622 return fold (build2 (code, type,
8623 build1 (INDIRECT_REF, char_type_node,
8624 TREE_VALUE(arglist)),
8625 integer_zero_node));
8628 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
8629 into a single range test. */
8630 if (TREE_CODE (arg0) == TRUNC_DIV_EXPR
8631 && TREE_CODE (arg1) == INTEGER_CST
8632 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8633 && !integer_zerop (TREE_OPERAND (arg0, 1))
8634 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8635 && !TREE_OVERFLOW (arg1))
8637 t1 = fold_div_compare (code, type, arg0, arg1);
8638 if (t1 != NULL_TREE)
8642 if ((code == EQ_EXPR || code == NE_EXPR)
8643 && !TREE_SIDE_EFFECTS (arg0)
8644 && integer_zerop (arg1)
8645 && tree_expr_nonzero_p (arg0))
8646 return constant_boolean_node (code==NE_EXPR, type);
8648 t1 = fold_relational_const (code, type, arg0, arg1);
8649 return t1 == NULL_TREE ? t : t1;
8651 case UNORDERED_EXPR:
8659 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
8661 t1 = fold_relational_const (code, type, arg0, arg1);
8662 if (t1 != NULL_TREE)
8666 /* If the first operand is NaN, the result is constant. */
8667 if (TREE_CODE (arg0) == REAL_CST
8668 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
8669 && (code != LTGT_EXPR || ! flag_trapping_math))
8671 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
8674 return omit_one_operand (type, t1, arg1);
8677 /* If the second operand is NaN, the result is constant. */
8678 if (TREE_CODE (arg1) == REAL_CST
8679 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
8680 && (code != LTGT_EXPR || ! flag_trapping_math))
8682 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
8685 return omit_one_operand (type, t1, arg0);
8688 /* Simplify unordered comparison of something with itself. */
8689 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
8690 && operand_equal_p (arg0, arg1, 0))
8691 return constant_boolean_node (1, type);
8693 if (code == LTGT_EXPR
8694 && !flag_trapping_math
8695 && operand_equal_p (arg0, arg1, 0))
8696 return constant_boolean_node (0, type);
8698 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
8700 tree targ0 = strip_float_extensions (arg0);
8701 tree targ1 = strip_float_extensions (arg1);
8702 tree newtype = TREE_TYPE (targ0);
8704 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
8705 newtype = TREE_TYPE (targ1);
8707 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
8708 return fold (build2 (code, type, fold_convert (newtype, targ0),
8709 fold_convert (newtype, targ1)));
8715 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
8716 so all simple results must be passed through pedantic_non_lvalue. */
8717 if (TREE_CODE (arg0) == INTEGER_CST)
8719 tem = TREE_OPERAND (t, (integer_zerop (arg0) ? 2 : 1));
8720 /* Only optimize constant conditions when the selected branch
8721 has the same type as the COND_EXPR. This avoids optimizing
8722 away "c ? x : throw", where the throw has a void type. */
8723 if (! VOID_TYPE_P (TREE_TYPE (tem))
8724 || VOID_TYPE_P (type))
8725 return pedantic_non_lvalue (tem);
8728 if (operand_equal_p (arg1, TREE_OPERAND (t, 2), 0))
8729 return pedantic_omit_one_operand (type, arg1, arg0);
8731 /* If we have A op B ? A : C, we may be able to convert this to a
8732 simpler expression, depending on the operation and the values
8733 of B and C. Signed zeros prevent all of these transformations,
8734 for reasons given above each one.
8736 Also try swapping the arguments and inverting the conditional. */
8737 if (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
8738 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
8739 arg1, TREE_OPERAND (arg0, 1))
8740 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
8742 tem = fold_cond_expr_with_comparison (type, arg0,
8743 TREE_OPERAND (t, 1),
8744 TREE_OPERAND (t, 2));
8749 if (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
8750 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
8751 TREE_OPERAND (t, 2),
8752 TREE_OPERAND (arg0, 1))
8753 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (t, 2)))))
8755 tem = invert_truthvalue (arg0);
8756 if (TREE_CODE_CLASS (TREE_CODE (tem)) == '<')
8758 tem = fold_cond_expr_with_comparison (type, tem,
8759 TREE_OPERAND (t, 2),
8760 TREE_OPERAND (t, 1));
8766 /* If the second operand is simpler than the third, swap them
8767 since that produces better jump optimization results. */
8768 if (tree_swap_operands_p (TREE_OPERAND (t, 1),
8769 TREE_OPERAND (t, 2), false))
8771 /* See if this can be inverted. If it can't, possibly because
8772 it was a floating-point inequality comparison, don't do
8774 tem = invert_truthvalue (arg0);
8776 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
8777 return fold (build3 (code, type, tem,
8778 TREE_OPERAND (t, 2), TREE_OPERAND (t, 1)));
8781 /* Convert A ? 1 : 0 to simply A. */
8782 if (integer_onep (TREE_OPERAND (t, 1))
8783 && integer_zerop (TREE_OPERAND (t, 2))
8784 /* If we try to convert TREE_OPERAND (t, 0) to our type, the
8785 call to fold will try to move the conversion inside
8786 a COND, which will recurse. In that case, the COND_EXPR
8787 is probably the best choice, so leave it alone. */
8788 && type == TREE_TYPE (arg0))
8789 return pedantic_non_lvalue (arg0);
8791 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
8792 over COND_EXPR in cases such as floating point comparisons. */
8793 if (integer_zerop (TREE_OPERAND (t, 1))
8794 && integer_onep (TREE_OPERAND (t, 2))
8795 && truth_value_p (TREE_CODE (arg0)))
8796 return pedantic_non_lvalue (fold_convert (type,
8797 invert_truthvalue (arg0)));
8799 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
8800 if (TREE_CODE (arg0) == LT_EXPR
8801 && integer_zerop (TREE_OPERAND (arg0, 1))
8802 && integer_zerop (TREE_OPERAND (t, 2))
8803 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
8804 return fold_convert (type, fold (build2 (BIT_AND_EXPR,
8805 TREE_TYPE (tem), tem, arg1)));
8807 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
8808 already handled above. */
8809 if (TREE_CODE (arg0) == BIT_AND_EXPR
8810 && integer_onep (TREE_OPERAND (arg0, 1))
8811 && integer_zerop (TREE_OPERAND (t, 2))
8812 && integer_pow2p (arg1))
8814 tree tem = TREE_OPERAND (arg0, 0);
8816 if (TREE_CODE (tem) == RSHIFT_EXPR
8817 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
8818 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
8819 return fold (build2 (BIT_AND_EXPR, type,
8820 TREE_OPERAND (tem, 0), arg1));
8823 /* A & N ? N : 0 is simply A & N if N is a power of two. This
8824 is probably obsolete because the first operand should be a
8825 truth value (that's why we have the two cases above), but let's
8826 leave it in until we can confirm this for all front-ends. */
8827 if (integer_zerop (TREE_OPERAND (t, 2))
8828 && TREE_CODE (arg0) == NE_EXPR
8829 && integer_zerop (TREE_OPERAND (arg0, 1))
8830 && integer_pow2p (arg1)
8831 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
8832 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
8833 arg1, OEP_ONLY_CONST))
8834 return pedantic_non_lvalue (fold_convert (type,
8835 TREE_OPERAND (arg0, 0)));
8837 /* Convert A ? B : 0 into A && B if A and B are truth values. */
8838 if (integer_zerop (TREE_OPERAND (t, 2))
8839 && truth_value_p (TREE_CODE (arg0))
8840 && truth_value_p (TREE_CODE (arg1)))
8841 return fold (build2 (TRUTH_ANDIF_EXPR, type, arg0, arg1));
8843 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
8844 if (integer_onep (TREE_OPERAND (t, 2))
8845 && truth_value_p (TREE_CODE (arg0))
8846 && truth_value_p (TREE_CODE (arg1)))
8848 /* Only perform transformation if ARG0 is easily inverted. */
8849 tem = invert_truthvalue (arg0);
8850 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
8851 return fold (build2 (TRUTH_ORIF_EXPR, type, tem, arg1));
8854 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
8855 if (integer_zerop (arg1)
8856 && truth_value_p (TREE_CODE (arg0))
8857 && truth_value_p (TREE_CODE (TREE_OPERAND (t, 2))))
8859 /* Only perform transformation if ARG0 is easily inverted. */
8860 tem = invert_truthvalue (arg0);
8861 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
8862 return fold (build2 (TRUTH_ANDIF_EXPR, type, tem,
8863 TREE_OPERAND (t, 2)));
8866 /* Convert A ? 1 : B into A || B if A and B are truth values. */
8867 if (integer_onep (arg1)
8868 && truth_value_p (TREE_CODE (arg0))
8869 && truth_value_p (TREE_CODE (TREE_OPERAND (t, 2))))
8870 return fold (build2 (TRUTH_ORIF_EXPR, type, arg0,
8871 TREE_OPERAND (t, 2)));
8876 /* When pedantic, a compound expression can be neither an lvalue
8877 nor an integer constant expression. */
8878 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
8880 /* Don't let (0, 0) be null pointer constant. */
8881 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
8882 : fold_convert (type, arg1);
8883 return pedantic_non_lvalue (tem);
8887 return build_complex (type, arg0, arg1);
8891 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8893 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
8894 return omit_one_operand (type, TREE_OPERAND (arg0, 0),
8895 TREE_OPERAND (arg0, 1));
8896 else if (TREE_CODE (arg0) == COMPLEX_CST)
8897 return TREE_REALPART (arg0);
8898 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8899 return fold (build2 (TREE_CODE (arg0), type,
8900 fold (build1 (REALPART_EXPR, type,
8901 TREE_OPERAND (arg0, 0))),
8902 fold (build1 (REALPART_EXPR, type,
8903 TREE_OPERAND (arg0, 1)))));
8907 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8908 return fold_convert (type, integer_zero_node);
8909 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
8910 return omit_one_operand (type, TREE_OPERAND (arg0, 1),
8911 TREE_OPERAND (arg0, 0));
8912 else if (TREE_CODE (arg0) == COMPLEX_CST)
8913 return TREE_IMAGPART (arg0);
8914 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8915 return fold (build2 (TREE_CODE (arg0), type,
8916 fold (build1 (IMAGPART_EXPR, type,
8917 TREE_OPERAND (arg0, 0))),
8918 fold (build1 (IMAGPART_EXPR, type,
8919 TREE_OPERAND (arg0, 1)))));
8922 /* Pull arithmetic ops out of the CLEANUP_POINT_EXPR where
8924 case CLEANUP_POINT_EXPR:
8925 if (! has_cleanups (arg0))
8926 return TREE_OPERAND (t, 0);
8929 enum tree_code code0 = TREE_CODE (arg0);
8930 int kind0 = TREE_CODE_CLASS (code0);
8931 tree arg00 = TREE_OPERAND (arg0, 0);
8934 if (kind0 == '1' || code0 == TRUTH_NOT_EXPR)
8935 return fold (build1 (code0, type,
8936 fold (build1 (CLEANUP_POINT_EXPR,
8937 TREE_TYPE (arg00), arg00))));
8939 if (kind0 == '<' || kind0 == '2'
8940 || code0 == TRUTH_ANDIF_EXPR || code0 == TRUTH_ORIF_EXPR
8941 || code0 == TRUTH_AND_EXPR || code0 == TRUTH_OR_EXPR
8942 || code0 == TRUTH_XOR_EXPR)
8944 arg01 = TREE_OPERAND (arg0, 1);
8946 if (TREE_CONSTANT (arg00)
8947 || ((code0 == TRUTH_ANDIF_EXPR || code0 == TRUTH_ORIF_EXPR)
8948 && ! has_cleanups (arg00)))
8949 return fold (build2 (code0, type, arg00,
8950 fold (build1 (CLEANUP_POINT_EXPR,
8951 TREE_TYPE (arg01), arg01))));
8953 if (TREE_CONSTANT (arg01))
8954 return fold (build2 (code0, type,
8955 fold (build1 (CLEANUP_POINT_EXPR,
8956 TREE_TYPE (arg00), arg00)),
8964 /* Check for a built-in function. */
8965 if (TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR
8966 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 0))
8968 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (t, 0), 0)))
8970 tree tmp = fold_builtin (t, false);
8978 } /* switch (code) */
8981 #ifdef ENABLE_FOLD_CHECKING
8984 static void fold_checksum_tree (tree, struct md5_ctx *, htab_t);
8985 static void fold_check_failed (tree, tree);
8986 void print_fold_checksum (tree);
8988 /* When --enable-checking=fold, compute a digest of expr before
8989 and after actual fold call to see if fold did not accidentally
8990 change original expr. */
8997 unsigned char checksum_before[16], checksum_after[16];
9000 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
9001 md5_init_ctx (&ctx);
9002 fold_checksum_tree (expr, &ctx, ht);
9003 md5_finish_ctx (&ctx, checksum_before);
9006 ret = fold_1 (expr);
9008 md5_init_ctx (&ctx);
9009 fold_checksum_tree (expr, &ctx, ht);
9010 md5_finish_ctx (&ctx, checksum_after);
9013 if (memcmp (checksum_before, checksum_after, 16))
9014 fold_check_failed (expr, ret);
9020 print_fold_checksum (tree expr)
9023 unsigned char checksum[16], cnt;
9026 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
9027 md5_init_ctx (&ctx);
9028 fold_checksum_tree (expr, &ctx, ht);
9029 md5_finish_ctx (&ctx, checksum);
9031 for (cnt = 0; cnt < 16; ++cnt)
9032 fprintf (stderr, "%02x", checksum[cnt]);
9033 putc ('\n', stderr);
9037 fold_check_failed (tree expr ATTRIBUTE_UNUSED, tree ret ATTRIBUTE_UNUSED)
9039 internal_error ("fold check: original tree changed by fold");
9043 fold_checksum_tree (tree expr, struct md5_ctx *ctx, htab_t ht)
9046 enum tree_code code;
9047 char buf[sizeof (struct tree_decl)];
9050 if (sizeof (struct tree_exp) + 5 * sizeof (tree)
9051 > sizeof (struct tree_decl)
9052 || sizeof (struct tree_type) > sizeof (struct tree_decl))
9056 slot = htab_find_slot (ht, expr, INSERT);
9060 code = TREE_CODE (expr);
9061 if (TREE_CODE_CLASS (code) == 'd' && DECL_ASSEMBLER_NAME_SET_P (expr))
9063 /* Allow DECL_ASSEMBLER_NAME to be modified. */
9064 memcpy (buf, expr, tree_size (expr));
9066 SET_DECL_ASSEMBLER_NAME (expr, NULL);
9068 else if (TREE_CODE_CLASS (code) == 't'
9069 && (TYPE_POINTER_TO (expr) || TYPE_REFERENCE_TO (expr)))
9071 /* Allow TYPE_POINTER_TO and TYPE_REFERENCE_TO to be modified. */
9072 memcpy (buf, expr, tree_size (expr));
9074 TYPE_POINTER_TO (expr) = NULL;
9075 TYPE_REFERENCE_TO (expr) = NULL;
9077 md5_process_bytes (expr, tree_size (expr), ctx);
9078 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
9079 if (TREE_CODE_CLASS (code) != 't' && TREE_CODE_CLASS (code) != 'd')
9080 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
9081 switch (TREE_CODE_CLASS (code))
9087 md5_process_bytes (TREE_STRING_POINTER (expr),
9088 TREE_STRING_LENGTH (expr), ctx);
9091 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
9092 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
9095 fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
9105 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
9106 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
9109 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
9110 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
9122 len = first_rtl_op (code);
9123 for (i = 0; i < len; ++i)
9124 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
9127 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
9128 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
9129 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
9130 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
9131 fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
9132 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
9133 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
9134 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
9135 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
9136 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
9137 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
9140 if (TREE_CODE (expr) == ENUMERAL_TYPE)
9141 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
9142 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
9143 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
9144 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
9145 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
9146 if (INTEGRAL_TYPE_P (expr)
9147 || SCALAR_FLOAT_TYPE_P (expr))
9149 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
9150 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
9152 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
9153 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
9154 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
9163 /* Perform constant folding and related simplification of initializer
9164 expression EXPR. This behaves identically to "fold" but ignores
9165 potential run-time traps and exceptions that fold must preserve. */
9168 fold_initializer (tree expr)
9170 int saved_signaling_nans = flag_signaling_nans;
9171 int saved_trapping_math = flag_trapping_math;
9172 int saved_trapv = flag_trapv;
9175 flag_signaling_nans = 0;
9176 flag_trapping_math = 0;
9179 result = fold (expr);
9181 flag_signaling_nans = saved_signaling_nans;
9182 flag_trapping_math = saved_trapping_math;
9183 flag_trapv = saved_trapv;
9188 /* Determine if first argument is a multiple of second argument. Return 0 if
9189 it is not, or we cannot easily determined it to be.
9191 An example of the sort of thing we care about (at this point; this routine
9192 could surely be made more general, and expanded to do what the *_DIV_EXPR's
9193 fold cases do now) is discovering that
9195 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
9201 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
9203 This code also handles discovering that
9205 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
9207 is a multiple of 8 so we don't have to worry about dealing with a
9210 Note that we *look* inside a SAVE_EXPR only to determine how it was
9211 calculated; it is not safe for fold to do much of anything else with the
9212 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
9213 at run time. For example, the latter example above *cannot* be implemented
9214 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
9215 evaluation time of the original SAVE_EXPR is not necessarily the same at
9216 the time the new expression is evaluated. The only optimization of this
9217 sort that would be valid is changing
9219 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
9223 SAVE_EXPR (I) * SAVE_EXPR (J)
9225 (where the same SAVE_EXPR (J) is used in the original and the
9226 transformed version). */
9229 multiple_of_p (tree type, tree top, tree bottom)
9231 if (operand_equal_p (top, bottom, 0))
9234 if (TREE_CODE (type) != INTEGER_TYPE)
9237 switch (TREE_CODE (top))
9240 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
9241 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
9245 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
9246 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
9249 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
9253 op1 = TREE_OPERAND (top, 1);
9254 /* const_binop may not detect overflow correctly,
9255 so check for it explicitly here. */
9256 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
9257 > TREE_INT_CST_LOW (op1)
9258 && TREE_INT_CST_HIGH (op1) == 0
9259 && 0 != (t1 = fold_convert (type,
9260 const_binop (LSHIFT_EXPR,
9263 && ! TREE_OVERFLOW (t1))
9264 return multiple_of_p (type, t1, bottom);
9269 /* Can't handle conversions from non-integral or wider integral type. */
9270 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
9271 || (TYPE_PRECISION (type)
9272 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
9275 /* .. fall through ... */
9278 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
9281 if (TREE_CODE (bottom) != INTEGER_CST
9282 || (TYPE_UNSIGNED (type)
9283 && (tree_int_cst_sgn (top) < 0
9284 || tree_int_cst_sgn (bottom) < 0)))
9286 return integer_zerop (const_binop (TRUNC_MOD_EXPR,
9294 /* Return true if `t' is known to be non-negative. */
9297 tree_expr_nonnegative_p (tree t)
9299 switch (TREE_CODE (t))
9305 return tree_int_cst_sgn (t) >= 0;
9308 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
9311 if (FLOAT_TYPE_P (TREE_TYPE (t)))
9312 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
9313 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
9315 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
9316 both unsigned and at least 2 bits shorter than the result. */
9317 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
9318 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
9319 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
9321 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
9322 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
9323 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
9324 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
9326 unsigned int prec = MAX (TYPE_PRECISION (inner1),
9327 TYPE_PRECISION (inner2)) + 1;
9328 return prec < TYPE_PRECISION (TREE_TYPE (t));
9334 if (FLOAT_TYPE_P (TREE_TYPE (t)))
9336 /* x * x for floating point x is always non-negative. */
9337 if (operand_equal_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1), 0))
9339 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
9340 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
9343 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
9344 both unsigned and their total bits is shorter than the result. */
9345 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
9346 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
9347 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
9349 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
9350 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
9351 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
9352 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
9353 return TYPE_PRECISION (inner1) + TYPE_PRECISION (inner2)
9354 < TYPE_PRECISION (TREE_TYPE (t));
9358 case TRUNC_DIV_EXPR:
9360 case FLOOR_DIV_EXPR:
9361 case ROUND_DIV_EXPR:
9362 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
9363 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
9365 case TRUNC_MOD_EXPR:
9367 case FLOOR_MOD_EXPR:
9368 case ROUND_MOD_EXPR:
9369 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
9372 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
9373 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
9376 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1))
9377 || tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
9380 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
9381 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
9385 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
9386 tree outer_type = TREE_TYPE (t);
9388 if (TREE_CODE (outer_type) == REAL_TYPE)
9390 if (TREE_CODE (inner_type) == REAL_TYPE)
9391 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
9392 if (TREE_CODE (inner_type) == INTEGER_TYPE)
9394 if (TYPE_UNSIGNED (inner_type))
9396 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
9399 else if (TREE_CODE (outer_type) == INTEGER_TYPE)
9401 if (TREE_CODE (inner_type) == REAL_TYPE)
9402 return tree_expr_nonnegative_p (TREE_OPERAND (t,0));
9403 if (TREE_CODE (inner_type) == INTEGER_TYPE)
9404 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
9405 && TYPE_UNSIGNED (inner_type);
9411 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1))
9412 && tree_expr_nonnegative_p (TREE_OPERAND (t, 2));
9414 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
9416 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
9417 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
9419 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
9420 || tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
9422 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
9424 return tree_expr_nonnegative_p (expr_last (TREE_OPERAND (t, 1)));
9426 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
9427 case NON_LVALUE_EXPR:
9428 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
9430 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
9434 tree temp = TARGET_EXPR_SLOT (t);
9435 t = TARGET_EXPR_INITIAL (t);
9437 /* If the initializer is non-void, then it's a normal expression
9438 that will be assigned to the slot. */
9439 if (!VOID_TYPE_P (t))
9440 return tree_expr_nonnegative_p (t);
9442 /* Otherwise, the initializer sets the slot in some way. One common
9443 way is an assignment statement at the end of the initializer. */
9446 if (TREE_CODE (t) == BIND_EXPR)
9447 t = expr_last (BIND_EXPR_BODY (t));
9448 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
9449 || TREE_CODE (t) == TRY_CATCH_EXPR)
9450 t = expr_last (TREE_OPERAND (t, 0));
9451 else if (TREE_CODE (t) == STATEMENT_LIST)
9456 if (TREE_CODE (t) == MODIFY_EXPR
9457 && TREE_OPERAND (t, 0) == temp)
9458 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
9465 tree fndecl = get_callee_fndecl (t);
9466 tree arglist = TREE_OPERAND (t, 1);
9468 && DECL_BUILT_IN (fndecl)
9469 && DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_MD)
9470 switch (DECL_FUNCTION_CODE (fndecl))
9472 #define CASE_BUILTIN_F(BUILT_IN_FN) \
9473 case BUILT_IN_FN: case BUILT_IN_FN##F: case BUILT_IN_FN##L:
9474 #define CASE_BUILTIN_I(BUILT_IN_FN) \
9475 case BUILT_IN_FN: case BUILT_IN_FN##L: case BUILT_IN_FN##LL:
9477 CASE_BUILTIN_F (BUILT_IN_ACOS)
9478 CASE_BUILTIN_F (BUILT_IN_ACOSH)
9479 CASE_BUILTIN_F (BUILT_IN_CABS)
9480 CASE_BUILTIN_F (BUILT_IN_COSH)
9481 CASE_BUILTIN_F (BUILT_IN_ERFC)
9482 CASE_BUILTIN_F (BUILT_IN_EXP)
9483 CASE_BUILTIN_F (BUILT_IN_EXP10)
9484 CASE_BUILTIN_F (BUILT_IN_EXP2)
9485 CASE_BUILTIN_F (BUILT_IN_FABS)
9486 CASE_BUILTIN_F (BUILT_IN_FDIM)
9487 CASE_BUILTIN_F (BUILT_IN_FREXP)
9488 CASE_BUILTIN_F (BUILT_IN_HYPOT)
9489 CASE_BUILTIN_F (BUILT_IN_POW10)
9490 CASE_BUILTIN_I (BUILT_IN_FFS)
9491 CASE_BUILTIN_I (BUILT_IN_PARITY)
9492 CASE_BUILTIN_I (BUILT_IN_POPCOUNT)
9496 CASE_BUILTIN_F (BUILT_IN_SQRT)
9497 /* sqrt(-0.0) is -0.0. */
9498 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (t))))
9500 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
9502 CASE_BUILTIN_F (BUILT_IN_ASINH)
9503 CASE_BUILTIN_F (BUILT_IN_ATAN)
9504 CASE_BUILTIN_F (BUILT_IN_ATANH)
9505 CASE_BUILTIN_F (BUILT_IN_CBRT)
9506 CASE_BUILTIN_F (BUILT_IN_CEIL)
9507 CASE_BUILTIN_F (BUILT_IN_ERF)
9508 CASE_BUILTIN_F (BUILT_IN_EXPM1)
9509 CASE_BUILTIN_F (BUILT_IN_FLOOR)
9510 CASE_BUILTIN_F (BUILT_IN_FMOD)
9511 CASE_BUILTIN_F (BUILT_IN_LDEXP)
9512 CASE_BUILTIN_F (BUILT_IN_LLRINT)
9513 CASE_BUILTIN_F (BUILT_IN_LLROUND)
9514 CASE_BUILTIN_F (BUILT_IN_LRINT)
9515 CASE_BUILTIN_F (BUILT_IN_LROUND)
9516 CASE_BUILTIN_F (BUILT_IN_MODF)
9517 CASE_BUILTIN_F (BUILT_IN_NEARBYINT)
9518 CASE_BUILTIN_F (BUILT_IN_POW)
9519 CASE_BUILTIN_F (BUILT_IN_RINT)
9520 CASE_BUILTIN_F (BUILT_IN_ROUND)
9521 CASE_BUILTIN_F (BUILT_IN_SIGNBIT)
9522 CASE_BUILTIN_F (BUILT_IN_SINH)
9523 CASE_BUILTIN_F (BUILT_IN_TANH)
9524 CASE_BUILTIN_F (BUILT_IN_TRUNC)
9525 /* True if the 1st argument is nonnegative. */
9526 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
9528 CASE_BUILTIN_F (BUILT_IN_FMAX)
9529 /* True if the 1st OR 2nd arguments are nonnegative. */
9530 return tree_expr_nonnegative_p (TREE_VALUE (arglist))
9531 || tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
9533 CASE_BUILTIN_F (BUILT_IN_FMIN)
9534 /* True if the 1st AND 2nd arguments are nonnegative. */
9535 return tree_expr_nonnegative_p (TREE_VALUE (arglist))
9536 && tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
9538 CASE_BUILTIN_F (BUILT_IN_COPYSIGN)
9539 /* True if the 2nd argument is nonnegative. */
9540 return tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
9544 #undef CASE_BUILTIN_F
9545 #undef CASE_BUILTIN_I
9549 /* ... fall through ... */
9552 if (truth_value_p (TREE_CODE (t)))
9553 /* Truth values evaluate to 0 or 1, which is nonnegative. */
9557 /* We don't know sign of `t', so be conservative and return false. */
9561 /* Return true when T is an address and is known to be nonzero.
9562 For floating point we further ensure that T is not denormal.
9563 Similar logic is present in nonzero_address in rtlanal.h */
9566 tree_expr_nonzero_p (tree t)
9568 tree type = TREE_TYPE (t);
9570 /* Doing something useful for floating point would need more work. */
9571 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
9574 switch (TREE_CODE (t))
9577 if (!TYPE_UNSIGNED (type) && !flag_wrapv)
9578 return tree_expr_nonzero_p (TREE_OPERAND (t, 0));
9581 return !integer_zerop (t);
9584 if (!TYPE_UNSIGNED (type) && !flag_wrapv)
9586 /* With the presence of negative values it is hard
9587 to say something. */
9588 if (!tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
9589 || !tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
9591 /* One of operands must be positive and the other non-negative. */
9592 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
9593 || tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
9598 if (!TYPE_UNSIGNED (type) && !flag_wrapv)
9600 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
9601 && tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
9607 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
9608 tree outer_type = TREE_TYPE (t);
9610 return (TYPE_PRECISION (inner_type) >= TYPE_PRECISION (outer_type)
9611 && tree_expr_nonzero_p (TREE_OPERAND (t, 0)));
9616 /* Weak declarations may link to NULL. */
9617 if (DECL_P (TREE_OPERAND (t, 0)))
9618 return !DECL_WEAK (TREE_OPERAND (t, 0));
9619 /* Constants and all other cases are never weak. */
9623 return (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
9624 && tree_expr_nonzero_p (TREE_OPERAND (t, 2)));
9627 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
9628 && tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
9631 if (tree_expr_nonzero_p (TREE_OPERAND (t, 0)))
9633 /* When both operands are nonzero, then MAX must be too. */
9634 if (tree_expr_nonzero_p (TREE_OPERAND (t, 1)))
9637 /* MAX where operand 0 is positive is positive. */
9638 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
9640 /* MAX where operand 1 is positive is positive. */
9641 else if (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
9642 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
9649 return tree_expr_nonzero_p (TREE_OPERAND (t, 1));
9652 case NON_LVALUE_EXPR:
9653 return tree_expr_nonzero_p (TREE_OPERAND (t, 0));
9656 return tree_expr_nonzero_p (TREE_OPERAND (t, 1))
9657 || tree_expr_nonzero_p (TREE_OPERAND (t, 0));
9665 /* Return true if `r' is known to be non-negative.
9666 Only handles constants at the moment. */
9669 rtl_expr_nonnegative_p (rtx r)
9671 switch (GET_CODE (r))
9674 return INTVAL (r) >= 0;
9677 if (GET_MODE (r) == VOIDmode)
9678 return CONST_DOUBLE_HIGH (r) >= 0;
9686 units = CONST_VECTOR_NUNITS (r);
9688 for (i = 0; i < units; ++i)
9690 elt = CONST_VECTOR_ELT (r, i);
9691 if (!rtl_expr_nonnegative_p (elt))
9700 /* These are always nonnegative. */
9709 /* See if we are applying CODE, a relational to the highest or lowest
9710 possible integer of TYPE. If so, then the result is a compile
9714 fold_relational_hi_lo (enum tree_code *code_p, const tree type, tree *op0_p,
9719 enum tree_code code = *code_p;
9720 int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (op1)));
9722 if (TREE_CODE (op1) == INTEGER_CST
9723 && ! TREE_CONSTANT_OVERFLOW (op1)
9724 && width <= HOST_BITS_PER_WIDE_INT
9725 && (INTEGRAL_TYPE_P (TREE_TYPE (op1))
9726 || POINTER_TYPE_P (TREE_TYPE (op1))))
9728 unsigned HOST_WIDE_INT signed_max;
9729 unsigned HOST_WIDE_INT max, min;
9731 signed_max = ((unsigned HOST_WIDE_INT) 1 << (width - 1)) - 1;
9733 if (TYPE_UNSIGNED (TREE_TYPE (op1)))
9735 max = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
9741 min = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
9744 if (TREE_INT_CST_HIGH (op1) == 0
9745 && TREE_INT_CST_LOW (op1) == max)
9749 return omit_one_operand (type, integer_zero_node, op0);
9755 return omit_one_operand (type, integer_one_node, op0);
9761 /* The GE_EXPR and LT_EXPR cases above are not normally
9762 reached because of previous transformations. */
9767 else if (TREE_INT_CST_HIGH (op1) == 0
9768 && TREE_INT_CST_LOW (op1) == max - 1)
9773 *op1_p = const_binop (PLUS_EXPR, op1, integer_one_node, 0);
9777 *op1_p = const_binop (PLUS_EXPR, op1, integer_one_node, 0);
9782 else if (TREE_INT_CST_HIGH (op1) == (min ? -1 : 0)
9783 && TREE_INT_CST_LOW (op1) == min)
9787 return omit_one_operand (type, integer_zero_node, op0);
9794 return omit_one_operand (type, integer_one_node, op0);
9803 else if (TREE_INT_CST_HIGH (op1) == (min ? -1 : 0)
9804 && TREE_INT_CST_LOW (op1) == min + 1)
9809 *op1_p = const_binop (MINUS_EXPR, op1, integer_one_node, 0);
9813 *op1_p = const_binop (MINUS_EXPR, op1, integer_one_node, 0);
9819 else if (TREE_INT_CST_HIGH (op1) == 0
9820 && TREE_INT_CST_LOW (op1) == signed_max
9821 && TYPE_UNSIGNED (TREE_TYPE (op1))
9822 /* signed_type does not work on pointer types. */
9823 && INTEGRAL_TYPE_P (TREE_TYPE (op1)))
9825 /* The following case also applies to X < signed_max+1
9826 and X >= signed_max+1 because previous transformations. */
9827 if (code == LE_EXPR || code == GT_EXPR)
9829 tree st0, st1, exp, retval;
9830 st0 = lang_hooks.types.signed_type (TREE_TYPE (op0));
9831 st1 = lang_hooks.types.signed_type (TREE_TYPE (op1));
9833 exp = build2 (code == LE_EXPR ? GE_EXPR: LT_EXPR,
9835 fold_convert (st0, op0),
9836 fold_convert (st1, integer_zero_node));
9839 = nondestructive_fold_binary_to_constant (TREE_CODE (exp),
9841 TREE_OPERAND (exp, 0),
9842 TREE_OPERAND (exp, 1));
9844 /* If we are in gimple form, then returning EXP would create
9845 non-gimple expressions. Clearing it is safe and insures
9846 we do not allow a non-gimple expression to escape. */
9850 return (retval ? retval : exp);
9859 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
9860 attempt to fold the expression to a constant without modifying TYPE,
9863 If the expression could be simplified to a constant, then return
9864 the constant. If the expression would not be simplified to a
9865 constant, then return NULL_TREE.
9867 Note this is primarily designed to be called after gimplification
9868 of the tree structures and when at least one operand is a constant.
9869 As a result of those simplifying assumptions this routine is far
9870 simpler than the generic fold routine. */
9873 nondestructive_fold_binary_to_constant (enum tree_code code, tree type,
9881 /* If this is a commutative operation, and ARG0 is a constant, move it
9882 to ARG1 to reduce the number of tests below. */
9883 if (commutative_tree_code (code)
9884 && (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST))
9891 /* If either operand is a complex type, extract its real component. */
9892 if (TREE_CODE (op0) == COMPLEX_CST)
9893 subop0 = TREE_REALPART (op0);
9897 if (TREE_CODE (op1) == COMPLEX_CST)
9898 subop1 = TREE_REALPART (op1);
9902 /* Note if either argument is not a real or integer constant.
9903 With a few exceptions, simplification is limited to cases
9904 where both arguments are constants. */
9905 if ((TREE_CODE (subop0) != INTEGER_CST
9906 && TREE_CODE (subop0) != REAL_CST)
9907 || (TREE_CODE (subop1) != INTEGER_CST
9908 && TREE_CODE (subop1) != REAL_CST))
9914 /* (plus (address) (const_int)) is a constant. */
9915 if (TREE_CODE (op0) == PLUS_EXPR
9916 && TREE_CODE (op1) == INTEGER_CST
9917 && (TREE_CODE (TREE_OPERAND (op0, 0)) == ADDR_EXPR
9918 || (TREE_CODE (TREE_OPERAND (op0, 0)) == NOP_EXPR
9919 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (op0, 0), 0))
9921 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
9923 return build2 (PLUS_EXPR, type, TREE_OPERAND (op0, 0),
9924 const_binop (PLUS_EXPR, op1,
9925 TREE_OPERAND (op0, 1), 0));
9933 /* Both arguments are constants. Simplify. */
9934 tem = const_binop (code, op0, op1, 0);
9935 if (tem != NULL_TREE)
9937 /* The return value should always have the same type as
9938 the original expression. */
9939 if (TREE_TYPE (tem) != type)
9940 tem = fold_convert (type, tem);
9947 /* Fold &x - &x. This can happen from &x.foo - &x.
9948 This is unsafe for certain floats even in non-IEEE formats.
9949 In IEEE, it is unsafe because it does wrong for NaNs.
9950 Also note that operand_equal_p is always false if an
9951 operand is volatile. */
9952 if (! FLOAT_TYPE_P (type) && operand_equal_p (op0, op1, 0))
9953 return fold_convert (type, integer_zero_node);
9959 /* Special case multiplication or bitwise AND where one argument
9961 if (! FLOAT_TYPE_P (type) && integer_zerop (op1))
9962 return omit_one_operand (type, op1, op0);
9964 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (op0)))
9965 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op0)))
9966 && real_zerop (op1))
9967 return omit_one_operand (type, op1, op0);
9972 /* Special case when we know the result will be all ones. */
9973 if (integer_all_onesp (op1))
9974 return omit_one_operand (type, op1, op0);
9978 case TRUNC_DIV_EXPR:
9979 case ROUND_DIV_EXPR:
9980 case FLOOR_DIV_EXPR:
9982 case EXACT_DIV_EXPR:
9983 case TRUNC_MOD_EXPR:
9984 case ROUND_MOD_EXPR:
9985 case FLOOR_MOD_EXPR:
9988 /* Division by zero is undefined. */
9989 if (integer_zerop (op1))
9992 if (TREE_CODE (op1) == REAL_CST
9993 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (op1)))
9994 && real_zerop (op1))
10000 if (INTEGRAL_TYPE_P (type)
10001 && operand_equal_p (op1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
10002 return omit_one_operand (type, op1, op0);
10007 if (INTEGRAL_TYPE_P (type)
10008 && TYPE_MAX_VALUE (type)
10009 && operand_equal_p (op1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
10010 return omit_one_operand (type, op1, op0);
10015 /* Optimize -1 >> x for arithmetic right shifts. */
10016 if (integer_all_onesp (op0) && ! TYPE_UNSIGNED (type))
10017 return omit_one_operand (type, op0, op1);
10018 /* ... fall through ... */
10021 if (integer_zerop (op0))
10022 return omit_one_operand (type, op0, op1);
10024 /* Since negative shift count is not well-defined, don't
10025 try to compute it in the compiler. */
10026 if (TREE_CODE (op1) == INTEGER_CST && tree_int_cst_sgn (op1) < 0)
10033 /* -1 rotated either direction by any amount is still -1. */
10034 if (integer_all_onesp (op0))
10035 return omit_one_operand (type, op0, op1);
10037 /* 0 rotated either direction by any amount is still zero. */
10038 if (integer_zerop (op0))
10039 return omit_one_operand (type, op0, op1);
10045 return build_complex (type, op0, op1);
10054 /* If one arg is a real or integer constant, put it last. */
10055 if ((TREE_CODE (op0) == INTEGER_CST
10056 && TREE_CODE (op1) != INTEGER_CST)
10057 || (TREE_CODE (op0) == REAL_CST
10058 && TREE_CODE (op0) != REAL_CST))
10065 code = swap_tree_comparison (code);
10068 /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
10069 This transformation affects the cases which are handled in later
10070 optimizations involving comparisons with non-negative constants. */
10071 if (TREE_CODE (op1) == INTEGER_CST
10072 && TREE_CODE (op0) != INTEGER_CST
10073 && tree_int_cst_sgn (op1) > 0)
10079 op1 = const_binop (MINUS_EXPR, op1, integer_one_node, 0);
10084 op1 = const_binop (MINUS_EXPR, op1, integer_one_node, 0);
10092 tem = fold_relational_hi_lo (&code, type, &op0, &op1);
10096 /* Fall through. */
10099 case UNORDERED_EXPR:
10109 return fold_relational_const (code, type, op0, op1);
10112 /* This could probably be handled. */
10115 case TRUTH_AND_EXPR:
10116 /* If second arg is constant zero, result is zero, but first arg
10117 must be evaluated. */
10118 if (integer_zerop (op1))
10119 return omit_one_operand (type, op1, op0);
10120 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
10121 case will be handled here. */
10122 if (integer_zerop (op0))
10123 return omit_one_operand (type, op0, op1);
10124 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
10125 return constant_boolean_node (true, type);
10128 case TRUTH_OR_EXPR:
10129 /* If second arg is constant true, result is true, but we must
10130 evaluate first arg. */
10131 if (TREE_CODE (op1) == INTEGER_CST && ! integer_zerop (op1))
10132 return omit_one_operand (type, op1, op0);
10133 /* Likewise for first arg, but note this only occurs here for
10135 if (TREE_CODE (op0) == INTEGER_CST && ! integer_zerop (op0))
10136 return omit_one_operand (type, op0, op1);
10137 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
10138 return constant_boolean_node (false, type);
10141 case TRUTH_XOR_EXPR:
10142 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
10144 int x = ! integer_zerop (op0) ^ ! integer_zerop (op1);
10145 return constant_boolean_node (x, type);
10154 /* Given the components of a unary expression CODE, TYPE and OP0,
10155 attempt to fold the expression to a constant without modifying
10158 If the expression could be simplified to a constant, then return
10159 the constant. If the expression would not be simplified to a
10160 constant, then return NULL_TREE.
10162 Note this is primarily designed to be called after gimplification
10163 of the tree structures and when op0 is a constant. As a result
10164 of those simplifying assumptions this routine is far simpler than
10165 the generic fold routine. */
10168 nondestructive_fold_unary_to_constant (enum tree_code code, tree type,
10171 /* Make sure we have a suitable constant argument. */
10172 if (code == NOP_EXPR || code == FLOAT_EXPR || code == CONVERT_EXPR)
10176 if (TREE_CODE (op0) == COMPLEX_CST)
10177 subop = TREE_REALPART (op0);
10181 if (TREE_CODE (subop) != INTEGER_CST && TREE_CODE (subop) != REAL_CST)
10190 case FIX_TRUNC_EXPR:
10191 case FIX_FLOOR_EXPR:
10192 case FIX_CEIL_EXPR:
10193 return fold_convert_const (code, type, op0);
10196 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
10197 return fold_negate_const (op0, type);
10202 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
10203 return fold_abs_const (op0, type);
10208 if (TREE_CODE (op0) == INTEGER_CST)
10209 return fold_not_const (op0, type);
10213 case REALPART_EXPR:
10214 if (TREE_CODE (op0) == COMPLEX_CST)
10215 return TREE_REALPART (op0);
10219 case IMAGPART_EXPR:
10220 if (TREE_CODE (op0) == COMPLEX_CST)
10221 return TREE_IMAGPART (op0);
10226 if (TREE_CODE (op0) == COMPLEX_CST
10227 && TREE_CODE (TREE_TYPE (op0)) == COMPLEX_TYPE)
10228 return build_complex (type, TREE_REALPART (op0),
10229 negate_expr (TREE_IMAGPART (op0)));
10237 /* If EXP represents referencing an element in a constant string
10238 (either via pointer arithmetic or array indexing), return the
10239 tree representing the value accessed, otherwise return NULL. */
10242 fold_read_from_constant_string (tree exp)
10244 if (TREE_CODE (exp) == INDIRECT_REF || TREE_CODE (exp) == ARRAY_REF)
10246 tree exp1 = TREE_OPERAND (exp, 0);
10250 if (TREE_CODE (exp) == INDIRECT_REF)
10251 string = string_constant (exp1, &index);
10254 tree low_bound = array_ref_low_bound (exp);
10255 index = fold_convert (sizetype, TREE_OPERAND (exp, 1));
10257 /* Optimize the special-case of a zero lower bound.
10259 We convert the low_bound to sizetype to avoid some problems
10260 with constant folding. (E.g. suppose the lower bound is 1,
10261 and its mode is QI. Without the conversion,l (ARRAY
10262 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
10263 +INDEX), which becomes (ARRAY+255+INDEX). Opps!) */
10264 if (! integer_zerop (low_bound))
10265 index = size_diffop (index, fold_convert (sizetype, low_bound));
10271 && TREE_TYPE (exp) == TREE_TYPE (TREE_TYPE (string))
10272 && TREE_CODE (string) == STRING_CST
10273 && TREE_CODE (index) == INTEGER_CST
10274 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
10275 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
10277 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
10278 return fold_convert (TREE_TYPE (exp),
10279 build_int_2 ((TREE_STRING_POINTER (string)
10280 [TREE_INT_CST_LOW (index)]), 0));
10285 /* Return the tree for neg (ARG0) when ARG0 is known to be either
10286 an integer constant or real constant.
10288 TYPE is the type of the result. */
10291 fold_negate_const (tree arg0, tree type)
10293 tree t = NULL_TREE;
10295 if (TREE_CODE (arg0) == INTEGER_CST)
10297 unsigned HOST_WIDE_INT low;
10298 HOST_WIDE_INT high;
10299 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
10300 TREE_INT_CST_HIGH (arg0),
10302 t = build_int_2 (low, high);
10303 TREE_TYPE (t) = type;
10305 = (TREE_OVERFLOW (arg0)
10306 | force_fit_type (t, overflow && !TYPE_UNSIGNED (type)));
10307 TREE_CONSTANT_OVERFLOW (t)
10308 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg0);
10310 else if (TREE_CODE (arg0) == REAL_CST)
10311 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
10312 #ifdef ENABLE_CHECKING
10320 /* Return the tree for abs (ARG0) when ARG0 is known to be either
10321 an integer constant or real constant.
10323 TYPE is the type of the result. */
10326 fold_abs_const (tree arg0, tree type)
10328 tree t = NULL_TREE;
10330 if (TREE_CODE (arg0) == INTEGER_CST)
10332 /* If the value is unsigned, then the absolute value is
10333 the same as the ordinary value. */
10334 if (TYPE_UNSIGNED (type))
10336 /* Similarly, if the value is non-negative. */
10337 else if (INT_CST_LT (integer_minus_one_node, arg0))
10339 /* If the value is negative, then the absolute value is
10343 unsigned HOST_WIDE_INT low;
10344 HOST_WIDE_INT high;
10345 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
10346 TREE_INT_CST_HIGH (arg0),
10348 t = build_int_2 (low, high);
10349 TREE_TYPE (t) = type;
10351 = (TREE_OVERFLOW (arg0)
10352 | force_fit_type (t, overflow));
10353 TREE_CONSTANT_OVERFLOW (t)
10354 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg0);
10358 else if (TREE_CODE (arg0) == REAL_CST)
10360 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
10361 return build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
10365 #ifdef ENABLE_CHECKING
10373 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
10374 constant. TYPE is the type of the result. */
10377 fold_not_const (tree arg0, tree type)
10379 tree t = NULL_TREE;
10381 if (TREE_CODE (arg0) == INTEGER_CST)
10383 t = build_int_2 (~ TREE_INT_CST_LOW (arg0),
10384 ~ TREE_INT_CST_HIGH (arg0));
10385 TREE_TYPE (t) = type;
10386 force_fit_type (t, 0);
10387 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg0);
10388 TREE_CONSTANT_OVERFLOW (t) = TREE_CONSTANT_OVERFLOW (arg0);
10390 #ifdef ENABLE_CHECKING
10398 /* Given CODE, a relational operator, the target type, TYPE and two
10399 constant operands OP0 and OP1, return the result of the
10400 relational operation. If the result is not a compile time
10401 constant, then return NULL_TREE. */
10404 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
10406 int result, invert;
10408 /* From here on, the only cases we handle are when the result is
10409 known to be a constant. */
10411 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
10413 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
10414 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
10416 /* Handle the cases where either operand is a NaN. */
10417 if (real_isnan (c0) || real_isnan (c1))
10427 case UNORDERED_EXPR:
10441 if (flag_trapping_math)
10450 return constant_boolean_node (result, type);
10453 return constant_boolean_node (real_compare (code, c0, c1), type);
10456 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
10458 To compute GT, swap the arguments and do LT.
10459 To compute GE, do LT and invert the result.
10460 To compute LE, swap the arguments, do LT and invert the result.
10461 To compute NE, do EQ and invert the result.
10463 Therefore, the code below must handle only EQ and LT. */
10465 if (code == LE_EXPR || code == GT_EXPR)
10470 code = swap_tree_comparison (code);
10473 /* Note that it is safe to invert for real values here because we
10474 have already handled the one case that it matters. */
10477 if (code == NE_EXPR || code == GE_EXPR)
10480 code = invert_tree_comparison (code, false);
10483 /* Compute a result for LT or EQ if args permit;
10484 Otherwise return T. */
10485 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
10487 if (code == EQ_EXPR)
10488 result = tree_int_cst_equal (op0, op1);
10489 else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
10490 result = INT_CST_LT_UNSIGNED (op0, op1);
10492 result = INT_CST_LT (op0, op1);
10499 return constant_boolean_node (result, type);
10502 /* Build an expression for the address of T. Folds away INDIRECT_REF to
10503 avoid confusing the gimplify process. */
10506 build_fold_addr_expr_with_type (tree t, tree ptrtype)
10508 if (TREE_CODE (t) == INDIRECT_REF)
10510 t = TREE_OPERAND (t, 0);
10511 if (TREE_TYPE (t) != ptrtype)
10512 t = build1 (NOP_EXPR, ptrtype, t);
10518 while (handled_component_p (base)
10519 || TREE_CODE (base) == REALPART_EXPR
10520 || TREE_CODE (base) == IMAGPART_EXPR)
10521 base = TREE_OPERAND (base, 0);
10523 TREE_ADDRESSABLE (base) = 1;
10525 t = build1 (ADDR_EXPR, ptrtype, t);
10532 build_fold_addr_expr (tree t)
10534 return build_fold_addr_expr_with_type (t, build_pointer_type (TREE_TYPE (t)));
10537 /* Builds an expression for an indirection through T, simplifying some
10541 build_fold_indirect_ref (tree t)
10543 tree type = TREE_TYPE (TREE_TYPE (t));
10548 if (TREE_CODE (sub) == ADDR_EXPR)
10550 tree op = TREE_OPERAND (sub, 0);
10551 tree optype = TREE_TYPE (op);
10553 if (lang_hooks.types_compatible_p (type, optype))
10555 /* *(foo *)&fooarray => fooarray[0] */
10556 else if (TREE_CODE (optype) == ARRAY_TYPE
10557 && lang_hooks.types_compatible_p (type, TREE_TYPE (optype)))
10558 return build4 (ARRAY_REF, type, op, size_zero_node, NULL_TREE, NULL_TREE);
10561 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
10562 subtype = TREE_TYPE (sub);
10563 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
10564 && lang_hooks.types_compatible_p (type, TREE_TYPE (TREE_TYPE (subtype))))
10566 sub = build_fold_indirect_ref (sub);
10567 return build4 (ARRAY_REF, type, sub, size_zero_node, NULL_TREE, NULL_TREE);
10570 return build1 (INDIRECT_REF, type, t);
10573 /* Strip non-trapping, non-side-effecting tree nodes from an expression
10574 whose result is ignored. The type of the returned tree need not be
10575 the same as the original expression. */
10578 fold_ignored_result (tree t)
10580 if (!TREE_SIDE_EFFECTS (t))
10581 return integer_zero_node;
10584 switch (TREE_CODE_CLASS (TREE_CODE (t)))
10587 t = TREE_OPERAND (t, 0);
10592 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
10593 t = TREE_OPERAND (t, 0);
10594 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
10595 t = TREE_OPERAND (t, 1);
10601 switch (TREE_CODE (t))
10603 case COMPOUND_EXPR:
10604 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
10606 t = TREE_OPERAND (t, 0);
10610 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
10611 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
10613 t = TREE_OPERAND (t, 0);
10626 #include "gt-fold-const.h"