1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2002 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
22 /*@@ This file should be rewritten to use an arbitrary precision
23 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
24 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
25 @@ The routines that translate from the ap rep should
26 @@ warn if precision et. al. is lost.
27 @@ This would also make life easier when this technology is used
28 @@ for cross-compilers. */
30 /* The entry points in this file are fold, size_int_wide, size_binop
33 fold takes a tree as argument and returns a simplified tree.
35 size_binop takes a tree code for an arithmetic operation
36 and two operands that are trees, and produces a tree for the
37 result, assuming the type comes from `sizetype'.
39 size_int takes an integer value, and creates a tree constant
40 with type from `sizetype'.
42 force_fit_type takes a constant and prior overflow indicator, and
43 forces the value to fit the type. It returns an overflow indicator. */
56 static void encode PARAMS ((HOST_WIDE_INT *,
57 unsigned HOST_WIDE_INT,
59 static void decode PARAMS ((HOST_WIDE_INT *,
60 unsigned HOST_WIDE_INT *,
62 static tree negate_expr PARAMS ((tree));
63 static tree split_tree PARAMS ((tree, enum tree_code, tree *, tree *,
65 static tree associate_trees PARAMS ((tree, tree, enum tree_code, tree));
66 static tree int_const_binop PARAMS ((enum tree_code, tree, tree, int));
67 static void const_binop_1 PARAMS ((PTR));
68 static tree const_binop PARAMS ((enum tree_code, tree, tree, int));
69 static hashval_t size_htab_hash PARAMS ((const void *));
70 static int size_htab_eq PARAMS ((const void *, const void *));
71 static void fold_convert_1 PARAMS ((PTR));
72 static tree fold_convert PARAMS ((tree, tree));
73 static enum tree_code invert_tree_comparison PARAMS ((enum tree_code));
74 static enum tree_code swap_tree_comparison PARAMS ((enum tree_code));
75 static int truth_value_p PARAMS ((enum tree_code));
76 static int operand_equal_for_comparison_p PARAMS ((tree, tree, tree));
77 static int twoval_comparison_p PARAMS ((tree, tree *, tree *, int *));
78 static tree eval_subst PARAMS ((tree, tree, tree, tree, tree));
79 static tree omit_one_operand PARAMS ((tree, tree, tree));
80 static tree pedantic_omit_one_operand PARAMS ((tree, tree, tree));
81 static tree distribute_bit_expr PARAMS ((enum tree_code, tree, tree, tree));
82 static tree make_bit_field_ref PARAMS ((tree, tree, int, int, int));
83 static tree optimize_bit_field_compare PARAMS ((enum tree_code, tree,
85 static tree decode_field_reference PARAMS ((tree, HOST_WIDE_INT *,
87 enum machine_mode *, int *,
88 int *, tree *, tree *));
89 static int all_ones_mask_p PARAMS ((tree, int));
90 static int simple_operand_p PARAMS ((tree));
91 static tree range_binop PARAMS ((enum tree_code, tree, tree, int,
93 static tree make_range PARAMS ((tree, int *, tree *, tree *));
94 static tree build_range_check PARAMS ((tree, tree, int, tree, tree));
95 static int merge_ranges PARAMS ((int *, tree *, tree *, int, tree, tree,
97 static tree fold_range_test PARAMS ((tree));
98 static tree unextend PARAMS ((tree, int, int, tree));
99 static tree fold_truthop PARAMS ((enum tree_code, tree, tree, tree));
100 static tree optimize_minmax_comparison PARAMS ((tree));
101 static tree extract_muldiv PARAMS ((tree, tree, enum tree_code, tree));
102 static tree strip_compound_expr PARAMS ((tree, tree));
103 static int multiple_of_p PARAMS ((tree, tree, tree));
104 static tree constant_boolean_node PARAMS ((int, tree));
105 static int count_cond PARAMS ((tree, int));
106 static tree fold_binary_op_with_conditional_arg
107 PARAMS ((enum tree_code, tree, tree, tree, int));
110 #define BRANCH_COST 1
113 #if defined(HOST_EBCDIC)
114 /* bit 8 is significant in EBCDIC */
115 #define CHARMASK 0xff
117 #define CHARMASK 0x7f
120 /* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
121 overflow. Suppose A, B and SUM have the same respective signs as A1, B1,
122 and SUM1. Then this yields nonzero if overflow occurred during the
125 Overflow occurs if A and B have the same sign, but A and SUM differ in
126 sign. Use `^' to test whether signs differ, and `< 0' to isolate the
128 #define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
130 /* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
131 We do that by representing the two-word integer in 4 words, with only
132 HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
133 number. The value of the word is LOWPART + HIGHPART * BASE. */
136 ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
137 #define HIGHPART(x) \
138 ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
139 #define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)
141 /* Unpack a two-word integer into 4 words.
142 LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
143 WORDS points to the array of HOST_WIDE_INTs. */
/* Unpack the two-word integer (LOW, HI) into the 4-element array WORDS,
   one half-word per element, using the LOWPART/HIGHPART macros above.
   NOTE(review): interleaved source lines are elided in this chunk (the
   embedded original line numbers jump); the return type, the HI parameter
   declaration and the braces are not visible here.  */
146 encode (words, low, hi)
147 HOST_WIDE_INT *words;
148 unsigned HOST_WIDE_INT low;
/* Low word split into its two halves...  */
151 words[0] = LOWPART (low);
152 words[1] = HIGHPART (low);
/* ...then the high word.  */
153 words[2] = LOWPART (hi);
154 words[3] = HIGHPART (hi);
157 /* Pack an array of 4 words into a two-word integer.
158 WORDS points to the array of words.
159 The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces. */
/* Inverse of encode: pack the 4 half-words in WORDS back into the
   two-word integer stored through *LOW and *HI (value of each pair is
   LOWPART + HIGHPART * BASE, per the comment above the macros).
   NOTE(review): interleaved source lines are elided in this chunk; the
   return type, the HI parameter declaration and braces are not visible.  */
162 decode (words, low, hi)
163 HOST_WIDE_INT *words;
164 unsigned HOST_WIDE_INT *low;
167 *low = words[0] + words[1] * BASE;
168 *hi = words[2] + words[3] * BASE;
171 /* Make the integer constant T valid for its type by setting to 0 or 1 all
172 the bits in the constant that don't belong in the type.
174 Return 1 if a signed overflow occurs, 0 otherwise. If OVERFLOW is
175 nonzero, a signed overflow has already occurred in calculating T, so
178 Make the real constant T valid for its type by calling CHECK_FLOAT_VALUE,
/* Make the constant T valid for its type (see the block comment above):
   truncate an INTEGER_CST to its type's precision, sign-extending when the
   type is signed, and return nonzero if a signed overflow occurred
   (OVERFLOW is a prior overflow indicator to be OR-ed in).  REAL_CSTs are
   range-checked via CHECK_FLOAT_VALUE when that macro is defined.
   NOTE(review): interleaved source lines are elided in this chunk — the
   declarations of `high' and `prec', several braces/else arms, and parts of
   the POINTER_TYPE_P and return paths are not visible below.  */
182 force_fit_type (t, overflow)
186 unsigned HOST_WIDE_INT low;
/* Non-integer constants: reals get an optional range check, anything
   else is left alone.  */
190 if (TREE_CODE (t) == REAL_CST)
192 #ifdef CHECK_FLOAT_VALUE
193 CHECK_FLOAT_VALUE (TYPE_MODE (TREE_TYPE (t)), TREE_REAL_CST (t),
199 else if (TREE_CODE (t) != INTEGER_CST)
/* Save the original value so the return expression below can detect
   whether truncation changed it.  */
202 low = TREE_INT_CST_LOW (t);
203 high = TREE_INT_CST_HIGH (t);
205 if (POINTER_TYPE_P (TREE_TYPE (t)))
208 prec = TYPE_PRECISION (TREE_TYPE (t));
210 /* First clear all bits that are beyond the type's precision. */
212 if (prec == 2 * HOST_BITS_PER_WIDE_INT)
214 else if (prec > HOST_BITS_PER_WIDE_INT)
215 TREE_INT_CST_HIGH (t)
216 &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
219 TREE_INT_CST_HIGH (t) = 0;
220 if (prec < HOST_BITS_PER_WIDE_INT)
221 TREE_INT_CST_LOW (t) &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
224 /* Unsigned types do not suffer sign extension or overflow unless they
226 if (TREE_UNSIGNED (TREE_TYPE (t))
227 && ! (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
228 && TYPE_IS_SIZETYPE (TREE_TYPE (t))))
231 /* If the value's sign bit is set, extend the sign. */
232 if (prec != 2 * HOST_BITS_PER_WIDE_INT
233 && (prec > HOST_BITS_PER_WIDE_INT
234 ? 0 != (TREE_INT_CST_HIGH (t)
236 << (prec - HOST_BITS_PER_WIDE_INT - 1)))
237 : 0 != (TREE_INT_CST_LOW (t)
238 & ((unsigned HOST_WIDE_INT) 1 << (prec - 1)))))
240 /* Value is negative:
241 set to 1 all the bits that are outside this type's precision. */
242 if (prec > HOST_BITS_PER_WIDE_INT)
243 TREE_INT_CST_HIGH (t)
244 |= ((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
247 TREE_INT_CST_HIGH (t) = -1;
248 if (prec < HOST_BITS_PER_WIDE_INT)
249 TREE_INT_CST_LOW (t) |= ((unsigned HOST_WIDE_INT) (-1) << prec);
253 /* Return nonzero if signed overflow occurred. */
/* Overflow is detected by comparing the truncated words against the
   saved originals (XOR is nonzero iff a word changed).  */
255 ((overflow | (low ^ TREE_INT_CST_LOW (t)) | (high ^ TREE_INT_CST_HIGH (t)))
259 /* Add two doubleword integers with doubleword result.
260 Each argument is given as two `HOST_WIDE_INT' pieces.
261 One argument is L1 and H1; the other, L2 and H2.
262 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
/* Doubleword addition: (L1,H1) + (L2,H2) -> (*LV,*HV); returns nonzero on
   signed overflow via OVERFLOW_SUM_SIGN (defined above).  The carry out of
   the low word is (l < l1) — unsigned wraparound detection.
   NOTE(review): interleaved source lines are elided in this chunk — the
   return type, the *hv parameter declaration, braces, and the stores into
   *lv/*hv are not visible below.  */
265 add_double (l1, h1, l2, h2, lv, hv)
266 unsigned HOST_WIDE_INT l1, l2;
267 HOST_WIDE_INT h1, h2;
268 unsigned HOST_WIDE_INT *lv;
271 unsigned HOST_WIDE_INT l;
/* (l < l1) is 1 exactly when the low-word addition wrapped.  */
275 h = h1 + h2 + (l < l1);
279 return OVERFLOW_SUM_SIGN (h1, h2, h);
282 /* Negate a doubleword integer with doubleword result.
283 Return nonzero if the operation overflows, assuming it's signed.
284 The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
285 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
/* Doubleword negation: -(L1,H1) -> (*LV,*HV); returns nonzero on signed
   overflow (only the minimum representable value overflows, in which case
   the result's high word equals h1 with the sign bit set — hence the
   (*hv & h1) < 0 test).
   NOTE(review): interleaved source lines are elided in this chunk — the
   h1/*hv declarations, braces, and the actual negation stores are not
   visible below.  */
288 neg_double (l1, h1, lv, hv)
289 unsigned HOST_WIDE_INT l1;
291 unsigned HOST_WIDE_INT *lv;
298 return (*hv & h1) < 0;
308 /* Multiply two doubleword integers with doubleword result.
309 Return nonzero if the operation overflows, assuming it's signed.
310 Each argument is given as two `HOST_WIDE_INT' pieces.
311 One argument is L1 and H1; the other, L2 and H2.
312 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
/* Doubleword multiply: (L1,H1) * (L2,H2) -> (*LV,*HV); returns nonzero on
   signed overflow.  Works in half-word "digits" via encode/decode: the full
   4x4-digit product is accumulated into prod[8], the low 4 digits become
   the result, and the high 4 digits are compared against the expected sign
   extension to detect overflow (adjusted by -l2/-l1 for negative inputs).
   NOTE(review): interleaved source lines are elided in this chunk — loop
   braces, the index variables i/j/k, and the `if (h1 < 0)' / `if (h2 < 0)'
   guards around the two neg_double corrections are not visible below.  */
315 mul_double (l1, h1, l2, h2, lv, hv)
316 unsigned HOST_WIDE_INT l1, l2;
317 HOST_WIDE_INT h1, h2;
318 unsigned HOST_WIDE_INT *lv;
321 HOST_WIDE_INT arg1[4];
322 HOST_WIDE_INT arg2[4];
323 HOST_WIDE_INT prod[4 * 2];
324 unsigned HOST_WIDE_INT carry;
326 unsigned HOST_WIDE_INT toplow, neglow;
327 HOST_WIDE_INT tophigh, neghigh;
/* Split both operands into half-word digits.  */
329 encode (arg1, l1, h1);
330 encode (arg2, l2, h2);
332 memset ((char *) prod, 0, sizeof prod);
/* Schoolbook digit-by-digit multiply with carry propagation.  */
334 for (i = 0; i < 4; i++)
337 for (j = 0; j < 4; j++)
340 /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000. */
341 carry += arg1[i] * arg2[j];
342 /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF. */
344 prod[k] = LOWPART (carry);
345 carry = HIGHPART (carry);
350 decode (prod, lv, hv); /* This ignores prod[4] through prod[4*2-1] */
352 /* Check for overflow by calculating the top half of the answer in full;
353 it should agree with the low half's sign bit. */
354 decode (prod + 4, &toplow, &tophigh);
/* For negative operands, correct the unsigned top half by subtracting
   the other operand (two's-complement multiply identity).  */
357 neg_double (l2, h2, &neglow, &neghigh);
358 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
362 neg_double (l1, h1, &neglow, &neghigh);
363 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
/* Overflow iff the top half is not the pure sign extension of *hv.  */
365 return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
368 /* Shift the doubleword integer in L1, H1 left by COUNT places
369 keeping only PREC bits of result.
370 Shift right if COUNT is negative.
371 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
372 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
/* Doubleword left shift of (L1,H1) by COUNT, keeping PREC bits, result in
   (*LV,*HV).  Negative COUNT delegates to rshift_double; ARITH selects
   arithmetic vs. logical semantics for the final extension step.
   NOTE(review): interleaved source lines are elided in this chunk — the
   prec/arith parameter declarations, several braces/else arms, the *lv
   stores for each shift case, and the SHIFT_COUNT_TRUNCATED masking body
   are not visible below.  */
375 lshift_double (l1, h1, count, prec, lv, hv, arith)
376 unsigned HOST_WIDE_INT l1;
377 HOST_WIDE_INT h1, count;
379 unsigned HOST_WIDE_INT *lv;
383 unsigned HOST_WIDE_INT signmask;
/* A negative shift count means shift the other way.  */
387 rshift_double (l1, h1, -count, prec, lv, hv, arith);
391 #ifdef SHIFT_COUNT_TRUNCATED
392 if (SHIFT_COUNT_TRUNCATED)
/* Three cases: shift >= 2 words, >= 1 word, or within one word.  */
396 if (count >= 2 * HOST_BITS_PER_WIDE_INT)
398 /* Shifting by the host word size is undefined according to the
399 ANSI standard, so we must handle this as a special case. */
403 else if (count >= HOST_BITS_PER_WIDE_INT)
405 *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
/* The double-shift `>> ... - 1 >> 1' avoids an undefined shift by the
   full word width when count == 0.  */
410 *hv = (((unsigned HOST_WIDE_INT) h1 << count)
411 | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
415 /* Sign extend all bits that are beyond the precision. */
417 signmask = -((prec > HOST_BITS_PER_WIDE_INT
418 ? (*hv >> (prec - HOST_BITS_PER_WIDE_INT - 1))
419 : (*lv >> (prec - 1))) & 1);
421 if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
423 else if (prec >= HOST_BITS_PER_WIDE_INT)
425 *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
426 *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
431 *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
432 *lv |= signmask << prec;
436 /* Shift the doubleword integer in L1, H1 right by COUNT places
437 keeping only PREC bits of result. COUNT must be positive.
438 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
439 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
/* Doubleword right shift of (L1,H1) by COUNT (must be positive), keeping
   PREC bits, result in (*LV,*HV).  ARITH nonzero = arithmetic shift; the
   signmask (sign-replicated h1 when ARITH, else 0 — head of the
   conditional is elided) fills the vacated bits in the extension step.
   NOTE(review): interleaved source lines are elided in this chunk — the
   prec/arith declarations, the `signmask = arith ? ... : 0' head, braces,
   and several *lv/*hv stores are not visible below.  */
442 rshift_double (l1, h1, count, prec, lv, hv, arith)
443 unsigned HOST_WIDE_INT l1;
444 HOST_WIDE_INT h1, count;
446 unsigned HOST_WIDE_INT *lv;
450 unsigned HOST_WIDE_INT signmask;
453 ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
456 #ifdef SHIFT_COUNT_TRUNCATED
457 if (SHIFT_COUNT_TRUNCATED)
/* Same three cases as lshift_double, mirrored.  */
461 if (count >= 2 * HOST_BITS_PER_WIDE_INT)
463 /* Shifting by the host word size is undefined according to the
464 ANSI standard, so we must handle this as a special case. */
468 else if (count >= HOST_BITS_PER_WIDE_INT)
471 *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
475 *hv = (unsigned HOST_WIDE_INT) h1 >> count;
/* `<< ... - 1 << 1' again avoids an undefined full-width shift.  */
477 | ((unsigned HOST_WIDE_INT) h1 << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
480 /* Zero / sign extend all bits that are beyond the precision. */
482 if (count >= (HOST_WIDE_INT)prec)
487 else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
489 else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
491 *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
492 *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
497 *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
498 *lv |= signmask << (prec - count);
502 /* Rotate the doubleword integer in L1, H1 left by COUNT places
503 keeping only PREC bits of result.
504 Rotate right if COUNT is negative.
505 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
/* Doubleword left rotate of (L1,H1) by COUNT within PREC bits: built as
   (x << count) | (x >> (prec - count)) using the two shift helpers; the
   final OR into (*LV,*HV) is elided from this chunk.
   NOTE(review): interleaved source lines are elided — count normalization,
   braces, and the combining stores are not visible below.  */
508 lrotate_double (l1, h1, count, prec, lv, hv)
509 unsigned HOST_WIDE_INT l1;
510 HOST_WIDE_INT h1, count;
512 unsigned HOST_WIDE_INT *lv;
515 unsigned HOST_WIDE_INT s1l, s2l;
516 HOST_WIDE_INT s1h, s2h;
522 lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
523 rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
528 /* Rotate the doubleword integer in L1, H1 left by COUNT places
529 keeping only PREC bits of result. COUNT must be positive.
530 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
/* Doubleword right rotate of (L1,H1) by COUNT within PREC bits: mirror of
   lrotate_double — (x >> count) | (x << (prec - count)); the final OR into
   (*LV,*HV) is elided from this chunk.
   NOTE(review): interleaved source lines are elided — count normalization,
   braces, and the combining stores are not visible below.  */
533 rrotate_double (l1, h1, count, prec, lv, hv)
534 unsigned HOST_WIDE_INT l1;
535 HOST_WIDE_INT h1, count;
537 unsigned HOST_WIDE_INT *lv;
540 unsigned HOST_WIDE_INT s1l, s2l;
541 HOST_WIDE_INT s1h, s2h;
547 rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
548 lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
553 /* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
554 for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
555 CODE is a tree code for a kind of division, one of
556 TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
558 It controls how the quotient is rounded to an integer.
559 Return nonzero if the operation overflows.
560 UNS nonzero says do unsigned division. */
/* Doubleword division with rounding (see the block comment above): divides
   (LNUM,HNUM) by (LDEN,HDEN) per the rounding mode in CODE, storing the
   quotient in (*LQUO,*HQUO) and remainder in (*LREM,*HREM); returns
   nonzero on overflow.  UNS nonzero = unsigned division.  The core is
   Knuth's Algorithm D over half-word digits (encode/decode), with a fast
   path for single-precision operands and for divisors < BASE.
   NOTE(review): interleaved source lines are elided throughout this chunk —
   the code/uns parameter declarations, `overflow'/`quo_neg'/`i'/`j'
   declarations, the switch head for the rounding cases, many braces/else
   arms, and the division-by-zero abort path are not visible below.  */
563 div_and_round_double (code, uns,
564 lnum_orig, hnum_orig, lden_orig, hden_orig,
565 lquo, hquo, lrem, hrem)
568 unsigned HOST_WIDE_INT lnum_orig; /* num == numerator == dividend */
569 HOST_WIDE_INT hnum_orig;
570 unsigned HOST_WIDE_INT lden_orig; /* den == denominator == divisor */
571 HOST_WIDE_INT hden_orig;
572 unsigned HOST_WIDE_INT *lquo, *lrem;
573 HOST_WIDE_INT *hquo, *hrem;
576 HOST_WIDE_INT num[4 + 1]; /* extra element for scaling. */
577 HOST_WIDE_INT den[4], quo[4];
579 unsigned HOST_WIDE_INT work;
580 unsigned HOST_WIDE_INT carry = 0;
581 unsigned HOST_WIDE_INT lnum = lnum_orig;
582 HOST_WIDE_INT hnum = hnum_orig;
583 unsigned HOST_WIDE_INT lden = lden_orig;
584 HOST_WIDE_INT hden = hden_orig;
/* Division by zero: flag overflow and divide by 1 instead of trapping.  */
587 if (hden == 0 && lden == 0)
588 overflow = 1, lden = 1;
590 /* calculate quotient sign and convert operands to unsigned. */
596 /* (minimum integer) / (-1) is the only overflow case. */
597 if (neg_double (lnum, hnum, &lnum, &hnum)
598 && ((HOST_WIDE_INT) lden & hden) == -1)
604 neg_double (lden, hden, &lden, &hden);
608 if (hnum == 0 && hden == 0)
609 { /* single precision */
611 /* This unsigned division rounds toward zero. */
617 { /* trivial case: dividend < divisor */
618 /* hden != 0 already checked. */
625 memset ((char *) quo, 0, sizeof quo);
627 memset ((char *) num, 0, sizeof num); /* to zero 9th element */
628 memset ((char *) den, 0, sizeof den);
630 encode (num, lnum, hnum);
631 encode (den, lden, hden);
633 /* Special code for when the divisor < BASE. */
634 if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
636 /* hnum != 0 already checked. */
637 for (i = 4 - 1; i >= 0; i--)
639 work = num[i] + carry * BASE;
640 quo[i] = work / lden;
646 /* Full double precision division,
647 with thanks to Don Knuth's "Seminumerical Algorithms". */
648 int num_hi_sig, den_hi_sig;
649 unsigned HOST_WIDE_INT quo_est, scale;
651 /* Find the highest non-zero divisor digit. */
652 for (i = 4 - 1;; i--)
659 /* Insure that the first digit of the divisor is at least BASE/2.
660 This is required by the quotient digit estimation algorithm. */
662 scale = BASE / (den[den_hi_sig] + 1);
664 { /* scale divisor and dividend */
666 for (i = 0; i <= 4 - 1; i++)
668 work = (num[i] * scale) + carry;
669 num[i] = LOWPART (work);
670 carry = HIGHPART (work);
675 for (i = 0; i <= 4 - 1; i++)
677 work = (den[i] * scale) + carry;
678 den[i] = LOWPART (work);
679 carry = HIGHPART (work);
680 if (den[i] != 0) den_hi_sig = i;
/* Main quotient-digit loop (Knuth Algorithm D steps D3..D6).  */
687 for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
689 /* Guess the next quotient digit, quo_est, by dividing the first
690 two remaining dividend digits by the high order quotient digit.
691 quo_est is never low and is at most 2 high. */
692 unsigned HOST_WIDE_INT tmp;
694 num_hi_sig = i + den_hi_sig + 1;
695 work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
696 if (num[num_hi_sig] != den[den_hi_sig])
697 quo_est = work / den[den_hi_sig];
701 /* Refine quo_est so it's usually correct, and at most one high. */
702 tmp = work - quo_est * den[den_hi_sig];
704 && (den[den_hi_sig - 1] * quo_est
705 > (tmp * BASE + num[num_hi_sig - 2])))
708 /* Try QUO_EST as the quotient digit, by multiplying the
709 divisor by QUO_EST and subtracting from the remaining dividend.
710 Keep in mind that QUO_EST is the I - 1st digit. */
713 for (j = 0; j <= den_hi_sig; j++)
715 work = quo_est * den[j] + carry;
716 carry = HIGHPART (work);
717 work = num[i + j] - LOWPART (work);
718 num[i + j] = LOWPART (work);
719 carry += HIGHPART (work) != 0;
722 /* If quo_est was high by one, then num[i] went negative and
723 we need to correct things. */
724 if (num[num_hi_sig] < carry)
727 carry = 0; /* add divisor back in */
728 for (j = 0; j <= den_hi_sig; j++)
730 work = num[i + j] + den[j] + carry;
731 carry = HIGHPART (work);
732 num[i + j] = LOWPART (work);
735 num [num_hi_sig] += carry;
738 /* Store the quotient digit. */
743 decode (quo, lquo, hquo);
746 /* if result is negative, make it so. */
748 neg_double (*lquo, *hquo, lquo, hquo);
750 /* compute trial remainder: rem = num - (quo * den) */
751 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
752 neg_double (*lrem, *hrem, lrem, hrem);
753 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
/* Rounding adjustment — switch over CODE (switch head elided).  */
758 case TRUNC_MOD_EXPR: /* round toward zero */
759 case EXACT_DIV_EXPR: /* for this one, it shouldn't matter */
763 case FLOOR_MOD_EXPR: /* round toward negative infinity */
764 if (quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio < 0 && rem != 0 */
767 add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
775 case CEIL_MOD_EXPR: /* round toward positive infinity */
776 if (!quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio > 0 && rem != 0 */
778 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
786 case ROUND_MOD_EXPR: /* round to closest integer */
788 unsigned HOST_WIDE_INT labs_rem = *lrem;
789 HOST_WIDE_INT habs_rem = *hrem;
790 unsigned HOST_WIDE_INT labs_den = lden, ltwice;
791 HOST_WIDE_INT habs_den = hden, htwice;
793 /* Get absolute values */
795 neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
797 neg_double (lden, hden, &labs_den, &habs_den);
799 /* If (2 * abs (lrem) >= abs (lden)) */
800 mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
801 labs_rem, habs_rem, &ltwice, &htwice);
803 if (((unsigned HOST_WIDE_INT) habs_den
804 < (unsigned HOST_WIDE_INT) htwice)
805 || (((unsigned HOST_WIDE_INT) habs_den
806 == (unsigned HOST_WIDE_INT) htwice)
807 && (labs_den < ltwice)))
811 add_double (*lquo, *hquo,
812 (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
815 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
827 /* compute true remainder: rem = num - (quo * den) */
828 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
829 neg_double (*lrem, *hrem, lrem, hrem);
830 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
834 /* Given T, an expression, return the negation of T. Allow for T to be
835 null, in which case return null. */
/* Body of negate_expr (per the comment above: return the negation of T,
   or null for null T).  Dispatches on TREE_CODE (t): constants are folded
   through a NEGATE_EXPR when signed and non-overflowing; - (A - B) becomes
   B - A for non-float (or unsafe-math) types; the fallback builds a
   NEGATE_EXPR and folds it.
   NOTE(review): the function header, the `tem' declaration, the case
   labels of the switch, and several closing braces are elided from this
   chunk — only fragments of the body are visible below.  */
847 type = TREE_TYPE (t);
850 switch (TREE_CODE (t))
854 if (! TREE_UNSIGNED (type)
855 && 0 != (tem = fold (build1 (NEGATE_EXPR, type, t)))
856 && ! TREE_OVERFLOW (tem))
861 return convert (type, TREE_OPERAND (t, 0));
864 /* - (A - B) -> B - A */
865 if (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
866 return convert (type,
867 fold (build (MINUS_EXPR, TREE_TYPE (t),
869 TREE_OPERAND (t, 0))));
/* Default: fold an explicit negation.  */
876 return convert (type, fold (build1 (NEGATE_EXPR, TREE_TYPE (t), t)));
879 /* Split a tree IN into a constant, literal and variable parts that could be
880 combined with CODE to make IN. "constant" means an expression with
881 TREE_CONSTANT but that isn't an actual constant. CODE must be a
882 commutative arithmetic operation. Store the constant part into *CONP,
883 the literal in &LITP and return the variable part. If a part isn't
884 present, set it to null. If the tree does not decompose in this way,
885 return the entire tree as the variable part and the other parts as null.
887 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR. In that
888 case, we negate an operand that was subtracted. If NEGATE_P is true, we
889 are negating all of IN.
891 If IN is itself a literal or constant, return it as appropriate.
893 Note that we do not guarantee that any of the three values will be the
894 same type as IN, but they will have the same signedness and mode. */
/* Decompose IN into literal (*LITP), constant (*CONP) and variable parts
   combinable with CODE — see the detailed contract in the comment block
   above.  NEGATE_P nonzero means all three parts are negated at the end.
   NOTE(review): interleaved source lines are elided in this chunk — the
   parameter declarations, the `var' declaration/initialization, several
   assignments to *litp for the IN-is-a-constant case, braces, and the
   final `return var;' are not visible below.  */
897 split_tree (in, code, conp, litp, negate_p)
908 /* Strip any conversions that don't change the machine mode or signedness. */
909 STRIP_SIGN_NOPS (in);
911 if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST)
913 else if (TREE_CODE (in) == code
914 || (! FLOAT_TYPE_P (TREE_TYPE (in))
915 /* We can associate addition and subtraction together (even
916 though the C standard doesn't say so) for integers because
917 the value is not affected. For reals, the value might be
918 affected, so we can't. */
919 && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
920 || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
922 tree op0 = TREE_OPERAND (in, 0);
923 tree op1 = TREE_OPERAND (in, 1);
/* For MINUS_EXPR the second operand is implicitly negated.  */
924 int neg1_p = TREE_CODE (in) == MINUS_EXPR;
925 int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;
927 /* First see if either of the operands is a literal, then a constant. */
928 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
929 *litp = op0, op0 = 0;
930 else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST)
931 *litp = op1, neg_litp_p = neg1_p, op1 = 0;
933 if (op0 != 0 && TREE_CONSTANT (op0))
934 *conp = op0, op0 = 0;
935 else if (op1 != 0 && TREE_CONSTANT (op1))
936 *conp = op1, neg_conp_p = neg1_p, op1 = 0;
938 /* If we haven't dealt with either operand, this is not a case we can
939 decompose. Otherwise, VAR is either of the ones remaining, if any. */
940 if (op0 != 0 && op1 != 0)
945 var = op1, neg_var_p = neg1_p;
947 /* Now do any needed negations. */
948 if (neg_litp_p) *litp = negate_expr (*litp);
949 if (neg_conp_p) *conp = negate_expr (*conp);
950 if (neg_var_p) var = negate_expr (var);
952 else if (TREE_CONSTANT (in))
/* NEGATE_P handling: negate every decomposed part.  */
959 var = negate_expr (var);
960 *conp = negate_expr (*conp);
961 *litp = negate_expr (*litp);
967 /* Re-associate trees split by the above function. T1 and T2 are either
968 expressions to associate or null. Return the new expression, if any. If
969 we build an operation, do it in TYPE and with CODE, except if CODE is a
970 MINUS_EXPR, in which case we use PLUS_EXPR since split_tree will already
971 have taken care of the negations. */
/* Recombine trees split by split_tree — see the comment block above.  T1
   and T2 may be null (null-handling lines are elided here); MINUS_EXPR is
   mapped to PLUS_EXPR since split_tree already applied the negations.
   Builds without folding when folding would recurse infinitely, turning a
   NEGATE_EXPR operand into a MINUS_EXPR instead.
   NOTE(review): interleaved source lines are elided — parameter
   declarations, the null checks for t1/t2, and braces are not visible.  */
974 associate_trees (t1, t2, code, type)
984 if (code == MINUS_EXPR)
987 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
988 try to fold this since we will have infinite recursion. But do
989 deal with any NEGATE_EXPRs. */
990 if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
991 || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
993 if (TREE_CODE (t1) == NEGATE_EXPR)
994 return build (MINUS_EXPR, type, convert (type, t2),
995 convert (type, TREE_OPERAND (t1, 0)));
996 else if (TREE_CODE (t2) == NEGATE_EXPR)
997 return build (MINUS_EXPR, type, convert (type, t1),
998 convert (type, TREE_OPERAND (t2, 0)));
1000 return build (code, type, convert (type, t1), convert (type, t2));
/* Normal path: fold the rebuilt expression.  */
1003 return fold (build (code, type, convert (type, t1), convert (type, t2)));
1006 /* Combine two integer constants ARG1 and ARG2 under operation CODE
1007 to produce a new constant.
1009 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
/* Fold two INTEGER_CSTs ARG1 and ARG2 under CODE into a new constant (see
   the comment block above).  NOTRUNC nonzero skips truncation to the type.
   Works on the doubleword pieces via the *_double helpers, tracks
   overflow, and sets TREE_OVERFLOW/TREE_CONSTANT_OVERFLOW on the result.
   NOTE(review): interleaved source lines are elided in this chunk — the
   arg1/arg2/notrunc declarations, `hi'/`t'/`overflow' declarations, the
   `int is_sizetype' declarator, the switch head and most case labels for
   the bitwise/shift/arith groups, `break's, braces, and the final
   `return t;' are not visible below.  */
1012 int_const_binop (code, arg1, arg2, notrunc)
1013 enum tree_code code;
1017 unsigned HOST_WIDE_INT int1l, int2l;
1018 HOST_WIDE_INT int1h, int2h;
1019 unsigned HOST_WIDE_INT low;
1021 unsigned HOST_WIDE_INT garbagel;
1022 HOST_WIDE_INT garbageh;
1024 tree type = TREE_TYPE (arg1);
1025 int uns = TREE_UNSIGNED (type);
1027 = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
1029 int no_overflow = 0;
1031 int1l = TREE_INT_CST_LOW (arg1);
1032 int1h = TREE_INT_CST_HIGH (arg1);
1033 int2l = TREE_INT_CST_LOW (arg2);
1034 int2h = TREE_INT_CST_HIGH (arg2);
/* Bitwise cases: operate independently on each word.  */
1039 low = int1l | int2l, hi = int1h | int2h;
1043 low = int1l ^ int2l, hi = int1h ^ int2h;
1047 low = int1l & int2l, hi = int1h & int2h;
1050 case BIT_ANDTC_EXPR:
1051 low = int1l & ~int2l, hi = int1h & ~int2h;
1057 /* It's unclear from the C standard whether shifts can overflow.
1058 The following code ignores overflow; perhaps a C standard
1059 interpretation ruling is needed. */
1060 lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
1068 lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
/* Additive cases: subtraction is add of the negation.  */
1073 overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
1077 neg_double (int2l, int2h, &low, &hi);
1078 add_double (int1l, int1h, low, hi, &low, &hi);
1079 overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
1083 overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
1086 case TRUNC_DIV_EXPR:
1087 case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
1088 case EXACT_DIV_EXPR:
1089 /* This is a shortcut for a common special case. */
1090 if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1091 && ! TREE_CONSTANT_OVERFLOW (arg1)
1092 && ! TREE_CONSTANT_OVERFLOW (arg2)
1093 && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1095 if (code == CEIL_DIV_EXPR)
1098 low = int1l / int2l, hi = 0;
1102 /* ... fall through ... */
1104 case ROUND_DIV_EXPR:
1105 if (int2h == 0 && int2l == 1)
1107 low = int1l, hi = int1h;
/* x / x == 1 (for nonzero x) without calling the full divider.  */
1110 if (int1l == int2l && int1h == int2h
1111 && ! (int1l == 0 && int1h == 0))
1116 overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
1117 &low, &hi, &garbagel, &garbageh);
1120 case TRUNC_MOD_EXPR:
1121 case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
1122 /* This is a shortcut for a common special case. */
1123 if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1124 && ! TREE_CONSTANT_OVERFLOW (arg1)
1125 && ! TREE_CONSTANT_OVERFLOW (arg2)
1126 && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1128 if (code == CEIL_MOD_EXPR)
1130 low = int1l % int2l, hi = 0;
1134 /* ... fall through ... */
1136 case ROUND_MOD_EXPR:
1137 overflow = div_and_round_double (code, uns,
1138 int1l, int1h, int2l, int2h,
1139 &garbagel, &garbageh, &low, &hi);
/* MIN/MAX: doubleword compare, unsigned or signed per `uns'.  */
1145 low = (((unsigned HOST_WIDE_INT) int1h
1146 < (unsigned HOST_WIDE_INT) int2h)
1147 || (((unsigned HOST_WIDE_INT) int1h
1148 == (unsigned HOST_WIDE_INT) int2h)
1151 low = (int1h < int2h
1152 || (int1h == int2h && int1l < int2l));
1154 if (low == (code == MIN_EXPR))
1155 low = int1l, hi = int1h;
1157 low = int2l, hi = int2h;
1164 /* If this is for a sizetype, can be represented as one (signed)
1165 HOST_WIDE_INT word, and doesn't overflow, use size_int since it caches
1168 && ((hi == 0 && (HOST_WIDE_INT) low >= 0)
1169 || (hi == -1 && (HOST_WIDE_INT) low < 0))
1170 && overflow == 0 && ! TREE_OVERFLOW (arg1) && ! TREE_OVERFLOW (arg2))
1171 return size_int_type_wide (low, type);
1174 t = build_int_2 (low, hi);
1175 TREE_TYPE (t) = TREE_TYPE (arg1);
/* Overflow bookkeeping: when truncating, force_fit_type reports signed
   overflow, OR-ed with the operands' overflow flags.  */
1180 ? (!uns || is_sizetype) && overflow
1181 : (force_fit_type (t, (!uns || is_sizetype) && overflow)
1183 | TREE_OVERFLOW (arg1)
1184 | TREE_OVERFLOW (arg2));
1186 /* If we're doing a size calculation, unsigned arithmetic does overflow.
1187 So check if force_fit_type truncated the value. */
1189 && ! TREE_OVERFLOW (t)
1190 && (TREE_INT_CST_HIGH (t) != hi
1191 || TREE_INT_CST_LOW (t) != low))
1192 TREE_OVERFLOW (t) = 1;
1194 TREE_CONSTANT_OVERFLOW (t) = (TREE_OVERFLOW (t)
1195 | TREE_CONSTANT_OVERFLOW (arg1)
1196 | TREE_CONSTANT_OVERFLOW (arg2));
1200 /* Define input and output argument for const_binop_1. */
/* NOTE(review): the `struct cb_args {' head and closing `};' are elided
   from this chunk; only the members are visible.  The struct carries the
   operation through do_float_handler to const_binop_1 below.  */
1203 enum tree_code code; /* Input: tree code for operation. */
1204 tree type; /* Input: tree type for operation. */
1205 REAL_VALUE_TYPE d1, d2; /* Input: floating point operands. */
1206 tree t; /* Output: constant for result. */
1209 /* Do the real arithmetic for const_binop while protected by a
1210 float overflow handler. */
/* Callback run under do_float_handler (see const_binop below): performs
   the real arithmetic for a cb_args request and stores the resulting
   REAL_CST into args->t, truncated to the target mode.
   NOTE(review): the return type, the DATA parameter declaration, braces,
   and the `args->t' left-hand side of the final assignment are elided
   from this chunk.  */
1213 const_binop_1 (data)
1216 struct cb_args *args = (struct cb_args *) data;
1217 REAL_VALUE_TYPE value;
1219 REAL_ARITHMETIC (value, args->code, args->d1, args->d2);
1222 = build_real (args->type,
1223 real_value_truncate (TYPE_MODE (args->type), value));
1226 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1227 constant. We assume ARG1 and ARG2 have the same data type, or at least
1228 are the same kind of constant and the same machine mode.
1230 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
/* Fold two constants ARG1, ARG2 (same kind/mode — see comment above)
   under CODE.  INTEGER_CSTs go to int_const_binop; REAL_CSTs go through
   do_float_handler/const_binop_1 with NaN short-circuiting and overflow
   on trap; COMPLEX_CSTs are handled componentwise, including full complex
   multiply and divide via the magnitude-squared denominator.
   NOTE(review): interleaved source lines are elided in this chunk — the
   arg1/arg2/notrunc declarations, `t'/`overflow'/`d1'/`d2' declarations,
   the switch over CODE for the complex cases, several operand arguments
   inside the nested const_binop calls, braces, returns, and the trailing
   `return 0;' are not visible below.  */
1233 const_binop (code, arg1, arg2, notrunc)
1234 enum tree_code code;
1241 if (TREE_CODE (arg1) == INTEGER_CST)
1242 return int_const_binop (code, arg1, arg2, notrunc);
1244 if (TREE_CODE (arg1) == REAL_CST)
1250 struct cb_args args;
1252 d1 = TREE_REAL_CST (arg1);
1253 d2 = TREE_REAL_CST (arg2);
1255 /* If either operand is a NaN, just return it. Otherwise, set up
1256 for floating-point trap; we return an overflow. */
1257 if (REAL_VALUE_ISNAN (d1))
1259 else if (REAL_VALUE_ISNAN (d2))
1262 /* Setup input for const_binop_1() */
1263 args.type = TREE_TYPE (arg1);
1268 if (do_float_handler (const_binop_1, (PTR) &args))
1269 /* Receive output from const_binop_1. */
1273 /* We got an exception from const_binop_1. */
1274 t = copy_node (arg1);
/* Propagate overflow state into the result node.  */
1279 = (force_fit_type (t, overflow)
1280 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
1281 TREE_CONSTANT_OVERFLOW (t)
1283 | TREE_CONSTANT_OVERFLOW (arg1)
1284 | TREE_CONSTANT_OVERFLOW (arg2);
1287 if (TREE_CODE (arg1) == COMPLEX_CST)
1289 tree type = TREE_TYPE (arg1);
1290 tree r1 = TREE_REALPART (arg1);
1291 tree i1 = TREE_IMAGPART (arg1);
1292 tree r2 = TREE_REALPART (arg2);
1293 tree i2 = TREE_IMAGPART (arg2);
/* (r1+i1*I) + (r2+i2*I) — componentwise add.  */
1299 t = build_complex (type,
1300 const_binop (PLUS_EXPR, r1, r2, notrunc),
1301 const_binop (PLUS_EXPR, i1, i2, notrunc));
/* Componentwise subtract.  */
1305 t = build_complex (type,
1306 const_binop (MINUS_EXPR, r1, r2, notrunc),
1307 const_binop (MINUS_EXPR, i1, i2, notrunc));
/* Complex multiply: (r1*r2 - i1*i2, r1*i2 + i1*r2) — operand lines
   inside these nested calls are partly elided.  */
1311 t = build_complex (type,
1312 const_binop (MINUS_EXPR,
1313 const_binop (MULT_EXPR,
1315 const_binop (MULT_EXPR,
1318 const_binop (PLUS_EXPR,
1319 const_binop (MULT_EXPR,
1321 const_binop (MULT_EXPR,
/* Complex divide: denominator is |z2|^2 = r2*r2 + i2*i2.  */
1329 = const_binop (PLUS_EXPR,
1330 const_binop (MULT_EXPR, r2, r2, notrunc),
1331 const_binop (MULT_EXPR, i2, i2, notrunc),
1334 t = build_complex (type,
1336 (INTEGRAL_TYPE_P (TREE_TYPE (r1))
1337 ? TRUNC_DIV_EXPR : RDIV_EXPR,
1338 const_binop (PLUS_EXPR,
1339 const_binop (MULT_EXPR, r1, r2,
1341 const_binop (MULT_EXPR, i1, i2,
1344 magsquared, notrunc),
1346 (INTEGRAL_TYPE_P (TREE_TYPE (r1))
1347 ? TRUNC_DIV_EXPR : RDIV_EXPR,
1348 const_binop (MINUS_EXPR,
1349 const_binop (MULT_EXPR, i1, r2,
1351 const_binop (MULT_EXPR, r1, i2,
1354 magsquared, notrunc));
1366 /* These are the hash table functions for the hash table of INTEGER_CST
1367 nodes of a sizetype. */
1369 /* Return the hash code of X, an INTEGER_CST. */
/* Mix both halves of the value, the type pointer, and the overflow bit,
   mirroring the fields compared in size_htab_eq.  */
1377 return (TREE_INT_CST_HIGH (t) ^ TREE_INT_CST_LOW (t)
1378 ^ (hashval_t) ((long) TREE_TYPE (t) >> 3)
1379 ^ (TREE_OVERFLOW (t) << 20));
1382 /* Return non-zero if the value represented by *X (an INTEGER_CST tree node)
1383 is the same as that given by *Y, another INTEGER_CST tree node. */
/* Equality must cover exactly the fields hashed by size_htab_hash:
   both value halves, the type, and the overflow flag.  */
1393 return (TREE_INT_CST_HIGH (xt) == TREE_INT_CST_HIGH (yt)
1394 && TREE_INT_CST_LOW (xt) == TREE_INT_CST_LOW (yt)
1395 && TREE_TYPE (xt) == TREE_TYPE (yt)
1396 && TREE_OVERFLOW (xt) == TREE_OVERFLOW (yt));
1399 /* Return an INTEGER_CST with value whose low-order HOST_BITS_PER_WIDE_INT
1400 bits are given by NUMBER and of the sizetype represented by KIND. */
1403 size_int_wide (number, kind)
1404 HOST_WIDE_INT number;
1405 enum size_type_kind kind;
/* Map KIND to the corresponding cached sizetype and delegate.  */
1407 return size_int_type_wide (number, sizetype_tab[(int) kind]);
1410 /* Likewise, but the desired type is specified explicitly. */
1413 size_int_type_wide (number, type)
1414 HOST_WIDE_INT number;
/* Lazily-created cache of INTEGER_CST nodes, plus a scratch node that is
   filled in, probed, and only kept if it was not already in the table.  */
1417 static htab_t size_htab = 0;
1418 static tree new_const = 0;
1423 size_htab = htab_create (1024, size_htab_hash, size_htab_eq, NULL);
1424 ggc_add_deletable_htab (size_htab, NULL, NULL);
1425 new_const = make_node (INTEGER_CST);
/* Register the scratch node as a GC root so it survives collection.  */
1426 ggc_add_tree_root (&new_const, 1);
1429 /* Adjust NEW_CONST to be the constant we want. If it's already in the
1430 hash table, we return the value from the hash table. Otherwise, we
1431 place that in the hash table and make a new node for the next time. */
1432 TREE_INT_CST_LOW (new_const) = number;
/* Sign-extend NUMBER into the high word.  */
1433 TREE_INT_CST_HIGH (new_const) = number < 0 ? -1 : 0;
1434 TREE_TYPE (new_const) = type;
1435 TREE_OVERFLOW (new_const) = TREE_CONSTANT_OVERFLOW (new_const)
1436 = force_fit_type (new_const, 0);
1438 slot = htab_find_slot (size_htab, new_const, INSERT);
/* Not found: publish the scratch node and allocate a fresh scratch.  */
1443 *slot = (PTR) new_const;
1444 new_const = make_node (INTEGER_CST);
1448 return (tree) *slot;
1451 /* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE
1452 is a tree code. The type of the result is taken from the operands.
1453 Both must be the same type integer type and it must be a size type.
1454 If the operands are constant, so is the result. */
1457 size_binop (code, arg0, arg1)
1458 enum tree_code code;
1461 tree type = TREE_TYPE (arg0);
/* Sanity check: both operands must share one sizetype (the failing
   branch is on a line missing from this view -- presumably abort).  */
1463 if (TREE_CODE (type) != INTEGER_TYPE || ! TYPE_IS_SIZETYPE (type)
1464 || type != TREE_TYPE (arg1))
1467 /* Handle the special case of two integer constants faster. */
1468 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
1470 /* And some specific cases even faster than that. */
1471 if (code == PLUS_EXPR && integer_zerop (arg0))
1473 else if ((code == MINUS_EXPR || code == PLUS_EXPR)
1474 && integer_zerop (arg1))
1476 else if (code == MULT_EXPR && integer_onep (arg0))
1479 /* Handle general case of two integer constants. */
1480 return int_const_binop (code, arg0, arg1, 0);
1483 if (arg0 == error_mark_node || arg1 == error_mark_node)
1484 return error_mark_node;
/* Non-constant operands: build the expression and let fold simplify.  */
1486 return fold (build (code, type, arg0, arg1));
1489 /* Given two values, either both of sizetype or both of bitsizetype,
1490 compute the difference between the two values. Return the value
1491 in signed type corresponding to the type of the operands. */
1494 size_diffop (arg0, arg1)
1497 tree type = TREE_TYPE (arg0);
1500 if (TREE_CODE (type) != INTEGER_TYPE || ! TYPE_IS_SIZETYPE (type)
1501 || type != TREE_TYPE (arg1))
1504 /* If the type is already signed, just do the simple thing. */
1505 if (! TREE_UNSIGNED (type))
1506 return size_binop (MINUS_EXPR, arg0, arg1);
/* Pick the signed counterpart of the operands' (bit)sizetype.  */
1508 ctype = (type == bitsizetype || type == ubitsizetype
1509 ? sbitsizetype : ssizetype);
1511 /* If either operand is not a constant, do the conversions to the signed
1512 type and subtract. The hardware will do the right thing with any
1513 overflow in the subtraction. */
1514 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
1515 return size_binop (MINUS_EXPR, convert (ctype, arg0),
1516 convert (ctype, arg1));
1518 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
1519 Otherwise, subtract the other way, convert to CTYPE (we know that can't
1520 overflow) and negate (which can't either). Special-case a result
1521 of zero while we're here. */
1522 if (tree_int_cst_equal (arg0, arg1))
1523 return convert (ctype, integer_zero_node);
1524 else if (tree_int_cst_lt (arg1, arg0))
1525 return convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
/* arg0 < arg1: compute -(arg1 - arg0) as 0 - (arg1 - arg0) in CTYPE.  */
1527 return size_binop (MINUS_EXPR, convert (ctype, integer_zero_node),
1528 convert (ctype, size_binop (MINUS_EXPR, arg1, arg0)));
1531 /* This structure is used to communicate arguments to fold_convert_1. */
1534 tree arg1; /* Input: value to convert. */
1535 tree type; /* Input: type to convert value to. */
1536 tree t; /* Output: result of conversion. */
1539 /* Function to convert floating-point constants, protected by floating
1540 point exception handler. */
/* NOTE(review): function header/braces fall on lines missing from this
   decimated view; code kept byte-identical.  */
1543 fold_convert_1 (data)
1546 struct fc_args *args = (struct fc_args *) data;
/* Truncate the REAL_CST value to the target mode and store the result
   back through the argument block for the caller to pick up.  */
1548 args->t = build_real (args->type,
1549 real_value_truncate (TYPE_MODE (args->type),
1550 TREE_REAL_CST (args->arg1)));
1553 /* Given T, a tree representing type conversion of ARG1, a constant,
1554 return a constant tree representing the result of conversion. */
/* NOTE(review): decimated extract -- several lvalues and braces fall on
   lines missing from this view; code kept byte-identical.  */
1557 fold_convert (t, arg1)
1561 tree type = TREE_TYPE (t);
1564 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
1566 if (TREE_CODE (arg1) == INTEGER_CST)
1568 /* If we would build a constant wider than GCC supports,
1569 leave the conversion unfolded. */
1570 if (TYPE_PRECISION (type) > 2 * HOST_BITS_PER_WIDE_INT)
1573 /* If we are trying to make a sizetype for a small integer, use
1574 size_int to pick up cached types to reduce duplicate nodes. */
1575 if (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
1576 && !TREE_CONSTANT_OVERFLOW (arg1)
1577 && compare_tree_int (arg1, 10000) < 0)
1578 return size_int_type_wide (TREE_INT_CST_LOW (arg1), type);
1580 /* Given an integer constant, make new constant with new type,
1581 appropriately sign-extended or truncated. */
1582 t = build_int_2 (TREE_INT_CST_LOW (arg1),
1583 TREE_INT_CST_HIGH (arg1));
1584 TREE_TYPE (t) = type;
1585 /* Indicate an overflow if (1) ARG1 already overflowed,
1586 or (2) force_fit_type indicates an overflow.
1587 Tell force_fit_type that an overflow has already occurred
1588 if ARG1 is a too-large unsigned value and T is signed.
1589 But don't indicate an overflow if converting a pointer. */
1591 = ((force_fit_type (t,
1592 (TREE_INT_CST_HIGH (arg1) < 0
1593 && (TREE_UNSIGNED (type)
1594 < TREE_UNSIGNED (TREE_TYPE (arg1)))))
1595 && ! POINTER_TYPE_P (TREE_TYPE (arg1)))
1596 || TREE_OVERFLOW (arg1));
1597 TREE_CONSTANT_OVERFLOW (t)
1598 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
1600 else if (TREE_CODE (arg1) == REAL_CST)
1602 /* Don't initialize these, use assignments.
1603 Initialized local aggregates don't work on old compilers. */
1607 tree type1 = TREE_TYPE (arg1);
1610 x = TREE_REAL_CST (arg1);
/* L and U become the integer type's bounds expressed as reals.  */
1611 l = real_value_from_int_cst (type1, TYPE_MIN_VALUE (type));
1613 no_upper_bound = (TYPE_MAX_VALUE (type) == NULL);
1614 if (!no_upper_bound)
1615 u = real_value_from_int_cst (type1, TYPE_MAX_VALUE (type));
1617 /* See if X will be in range after truncation towards 0.
1618 To compensate for truncation, move the bounds away from 0,
1619 but reject if X exactly equals the adjusted bounds. */
1620 REAL_ARITHMETIC (l, MINUS_EXPR, l, dconst1);
1621 if (!no_upper_bound)
1622 REAL_ARITHMETIC (u, PLUS_EXPR, u, dconst1);
1623 /* If X is a NaN, use zero instead and show we have an overflow.
1624 Otherwise, range check. */
1625 if (REAL_VALUE_ISNAN (x))
1626 overflow = 1, x = dconst0;
1627 else if (! (REAL_VALUES_LESS (l, x)
1629 && REAL_VALUES_LESS (x, u)))
1633 HOST_WIDE_INT low, high;
1634 REAL_VALUE_TO_INT (&low, &high, x);
1635 t = build_int_2 (low, high);
1637 TREE_TYPE (t) = type;
1639 = TREE_OVERFLOW (arg1) | force_fit_type (t, overflow);
1640 TREE_CONSTANT_OVERFLOW (t)
1641 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
1643 TREE_TYPE (t) = type;
1645 else if (TREE_CODE (type) == REAL_TYPE)
1647 if (TREE_CODE (arg1) == INTEGER_CST)
1648 return build_real_from_int_cst (type, arg1);
1649 if (TREE_CODE (arg1) == REAL_CST)
1651 struct fc_args args;
/* NaN converts by just retagging the node's type -- no arithmetic.  */
1653 if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1)))
1656 TREE_TYPE (arg1) = type;
1660 /* Setup input for fold_convert_1() */
1664 if (do_float_handler (fold_convert_1, (PTR) &args))
1666 /* Receive output from fold_convert_1() */
1671 /* We got an exception from fold_convert_1() */
1673 t = copy_node (arg1);
1677 = TREE_OVERFLOW (arg1) | force_fit_type (t, overflow);
1678 TREE_CONSTANT_OVERFLOW (t)
1679 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
1683 TREE_CONSTANT (t) = 1;
1687 /* Return an expr equal to X but certainly not valid as an lvalue. */
1695 /* These things are certainly not lvalues. */
1696 if (TREE_CODE (x) == NON_LVALUE_EXPR
1697 || TREE_CODE (x) == INTEGER_CST
1698 || TREE_CODE (x) == REAL_CST
1699 || TREE_CODE (x) == STRING_CST
1700 || TREE_CODE (x) == ADDR_EXPR)
/* Otherwise wrap X so it cannot be assigned to; preserve constness.  */
1703 result = build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
1704 TREE_CONSTANT (result) = TREE_CONSTANT (x);
1708 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
1709 Zero means allow extended lvalues. */
1711 int pedantic_lvalues;
1713 /* When pedantic, return an expr equal to X but certainly not valid as a
1714 pedantic lvalue. Otherwise, return X. */
1717 pedantic_non_lvalue (x)
1720 if (pedantic_lvalues)
1721 return non_lvalue (x);
1726 /* Given a tree comparison code, return the code that is the logical inverse
1727 of the given code. It is not safe to do this for floating-point
1728 comparisons, except for NE_EXPR and EQ_EXPR. */
/* NOTE(review): the switch body mapping each code (EQ<->NE, GT<->LE, ...)
   falls on lines missing from this decimated view.  */
1730 static enum tree_code
1731 invert_tree_comparison (code)
1732 enum tree_code code;
1753 /* Similar, but return the comparison that results if the operands are
1754 swapped. This is safe for floating-point. */
/* NOTE(review): the switch body (GT<->LT, GE<->LE, EQ/NE unchanged)
   falls on lines missing from this decimated view.  */
1756 static enum tree_code
1757 swap_tree_comparison (code)
1758 enum tree_code code;
1778 /* Return nonzero if CODE is a tree code that represents a truth value. */
1781 truth_value_p (code)
1782 enum tree_code code;
/* Comparisons ('<' class) and the boolean connectives all yield 0/1.  */
1784 return (TREE_CODE_CLASS (code) == '<'
1785 || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
1786 || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
1787 || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
1790 /* Return nonzero if two operands are necessarily equal.
1791 If ONLY_CONST is non-zero, only return non-zero for constants.
1792 This function tests whether the operands are indistinguishable;
1793 it does not test whether they are equal using C's == operation.
1794 The distinction is important for IEEE floating point, because
1795 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
1796 (2) two NaNs may be indistinguishable, but NaN!=NaN. */
/* NOTE(review): decimated extract -- braces, returns and several case
   labels fall on missing lines; code kept byte-identical.  */
1799 operand_equal_p (arg0, arg1, only_const)
1803 /* If both types don't have the same signedness, then we can't consider
1804 them equal. We must check this before the STRIP_NOPS calls
1805 because they may change the signedness of the arguments. */
1806 if (TREE_UNSIGNED (TREE_TYPE (arg0)) != TREE_UNSIGNED (TREE_TYPE (arg1)))
1812 if (TREE_CODE (arg0) != TREE_CODE (arg1)
1813 /* This is needed for conversions and for COMPONENT_REF.
1814 Might as well play it safe and always test this. */
1815 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
1816 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
1817 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
1820 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
1821 We don't care about side effects in that case because the SAVE_EXPR
1822 takes care of that for us. In all other cases, two expressions are
1823 equal if they have no side effects. If we have two identical
1824 expressions with side effects that should be treated the same due
1825 to the only side effects being identical SAVE_EXPR's, that will
1826 be detected in the recursive calls below. */
1827 if (arg0 == arg1 && ! only_const
1828 && (TREE_CODE (arg0) == SAVE_EXPR
1829 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
1832 /* Next handle constant cases, those for which we can return 1 even
1833 if ONLY_CONST is set. */
1834 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
1835 switch (TREE_CODE (arg0))
/* INTEGER_CST: equal only if neither overflowed and values match.  */
1838 return (! TREE_CONSTANT_OVERFLOW (arg0)
1839 && ! TREE_CONSTANT_OVERFLOW (arg1)
1840 && tree_int_cst_equal (arg0, arg1));
/* REAL_CST: bitwise-identical reals (distinguishes -0.0 from 0.0).  */
1843 return (! TREE_CONSTANT_OVERFLOW (arg0)
1844 && ! TREE_CONSTANT_OVERFLOW (arg1)
1845 && REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
1846 TREE_REAL_CST (arg1)));
/* VECTOR_CST: walk both element chains in parallel.  */
1852 if (TREE_CONSTANT_OVERFLOW (arg0)
1853 || TREE_CONSTANT_OVERFLOW (arg1))
1856 v1 = TREE_VECTOR_CST_ELTS (arg0);
1857 v2 = TREE_VECTOR_CST_ELTS (arg1);
1860 if (!operand_equal_p (v1, v2, only_const))
1862 v1 = TREE_CHAIN (v1);
1863 v2 = TREE_CHAIN (v2);
/* COMPLEX_CST: compare real and imaginary parts separately.  */
1870 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
1872 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
/* STRING_CST: same length and same bytes.  */
1876 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
1877 && ! memcmp (TREE_STRING_POINTER (arg0),
1878 TREE_STRING_POINTER (arg1),
1879 TREE_STRING_LENGTH (arg0)));
1882 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
/* Non-constant expressions: dispatch on the tree-code class.  */
1891 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
1894 /* Two conversions are equal only if signedness and modes match. */
1895 if ((TREE_CODE (arg0) == NOP_EXPR || TREE_CODE (arg0) == CONVERT_EXPR)
1896 && (TREE_UNSIGNED (TREE_TYPE (arg0))
1897 != TREE_UNSIGNED (TREE_TYPE (arg1))))
1900 return operand_equal_p (TREE_OPERAND (arg0, 0),
1901 TREE_OPERAND (arg1, 0), 0);
1905 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0)
1906 && operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1),
1910 /* For commutative ops, allow the other order. */
1911 return ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MULT_EXPR
1912 || TREE_CODE (arg0) == MIN_EXPR || TREE_CODE (arg0) == MAX_EXPR
1913 || TREE_CODE (arg0) == BIT_IOR_EXPR
1914 || TREE_CODE (arg0) == BIT_XOR_EXPR
1915 || TREE_CODE (arg0) == BIT_AND_EXPR
1916 || TREE_CODE (arg0) == NE_EXPR || TREE_CODE (arg0) == EQ_EXPR)
1917 && operand_equal_p (TREE_OPERAND (arg0, 0),
1918 TREE_OPERAND (arg1, 1), 0)
1919 && operand_equal_p (TREE_OPERAND (arg0, 1),
1920 TREE_OPERAND (arg1, 0), 0));
1923 /* If either of the pointer (or reference) expressions we are dereferencing
1924 contain a side effect, these cannot be equal. */
1925 if (TREE_SIDE_EFFECTS (arg0)
1926 || TREE_SIDE_EFFECTS (arg1))
1929 switch (TREE_CODE (arg0))
1932 return operand_equal_p (TREE_OPERAND (arg0, 0),
1933 TREE_OPERAND (arg1, 0), 0);
1937 case ARRAY_RANGE_REF:
1938 return (operand_equal_p (TREE_OPERAND (arg0, 0),
1939 TREE_OPERAND (arg1, 0), 0)
1940 && operand_equal_p (TREE_OPERAND (arg0, 1),
1941 TREE_OPERAND (arg1, 1), 0));
/* Presumably BIT_FIELD_REF: all three operands must match.  */
1944 return (operand_equal_p (TREE_OPERAND (arg0, 0),
1945 TREE_OPERAND (arg1, 0), 0)
1946 && operand_equal_p (TREE_OPERAND (arg0, 1),
1947 TREE_OPERAND (arg1, 1), 0)
1948 && operand_equal_p (TREE_OPERAND (arg0, 2),
1949 TREE_OPERAND (arg1, 2), 0));
1955 if (TREE_CODE (arg0) == RTL_EXPR)
1956 return rtx_equal_p (RTL_EXPR_RTL (arg0), RTL_EXPR_RTL (arg1));
1964 /* Similar to operand_equal_p, but see if ARG0 might have been made by
1965 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
1967 When in doubt, return 0. */
1970 operand_equal_for_comparison_p (arg0, arg1, other)
1974 int unsignedp1, unsignedpo;
1975 tree primarg0, primarg1, primother;
1976 unsigned int correct_width;
/* Trivially equal operands need no further analysis.  */
1978 if (operand_equal_p (arg0, arg1, 0))
1981 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
1982 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
1985 /* Discard any conversions that don't change the modes of ARG0 and ARG1
1986 and see if the inner values are the same. This removes any
1987 signedness comparison, which doesn't matter here. */
1988 primarg0 = arg0, primarg1 = arg1;
1989 STRIP_NOPS (primarg0);
1990 STRIP_NOPS (primarg1);
1991 if (operand_equal_p (primarg0, primarg1, 0))
1994 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
1995 actual comparison operand, ARG0.
1997 First throw away any conversions to wider types
1998 already present in the operands. */
2000 primarg1 = get_narrower (arg1, &unsignedp1);
2001 primother = get_narrower (other, &unsignedpo);
2003 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2004 if (unsignedp1 == unsignedpo
2005 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2006 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2008 tree type = TREE_TYPE (arg0);
2010 /* Make sure shorter operand is extended the right way
2011 to match the longer operand. */
2012 primarg1 = convert (signed_or_unsigned_type (unsignedp1,
2013 TREE_TYPE (primarg1)),
2016 if (operand_equal_p (arg0, convert (type, primarg1), 0))
2023 /* See if ARG is an expression that is either a comparison or is performing
2024 arithmetic on comparisons. The comparisons must only be comparing
2025 two different values, which will be stored in *CVAL1 and *CVAL2; if
2026 they are non-zero it means that some operands have already been found.
2027 No variables may be used anywhere else in the expression except in the
2028 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2029 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2031 If this is true, return 1. Otherwise, return zero. */
2034 twoval_comparison_p (arg, cval1, cval2, save_p)
2036 tree *cval1, *cval2;
2039 enum tree_code code = TREE_CODE (arg);
2040 char class = TREE_CODE_CLASS (code);
2042 /* We can handle some of the 'e' cases here. */
2043 if (class == 'e' && code == TRUTH_NOT_EXPR)
2045 else if (class == 'e'
2046 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2047 || code == COMPOUND_EXPR))
2050 else if (class == 'e' && code == SAVE_EXPR && SAVE_EXPR_RTL (arg) == 0
2051 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2053 /* If we've already found a CVAL1 or CVAL2, this expression is
2054 too complex to handle. */
2055 if (*cval1 || *cval2)
/* Unary case: recurse into the single operand.  */
2065 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
/* Binary case: both operands must satisfy the predicate.  */
2068 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2069 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2070 cval1, cval2, save_p);
2076 if (code == COND_EXPR)
2077 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2078 cval1, cval2, save_p)
2079 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2080 cval1, cval2, save_p)
2081 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2082 cval1, cval2, save_p));
2086 /* First see if we can handle the first operand, then the second. For
2087 the second operand, we know *CVAL1 can't be zero. It must be that
2088 one side of the comparison is each of the values; test for the
2089 case where this isn't true by failing if the two operands
2092 if (operand_equal_p (TREE_OPERAND (arg, 0),
2093 TREE_OPERAND (arg, 1), 0))
2097 *cval1 = TREE_OPERAND (arg, 0);
2098 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2100 else if (*cval2 == 0)
2101 *cval2 = TREE_OPERAND (arg, 0);
2102 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
/* Now classify the second comparison operand the same way.  */
2107 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2109 else if (*cval2 == 0)
2110 *cval2 = TREE_OPERAND (arg, 1);
2111 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2123 /* ARG is a tree that is known to contain just arithmetic operations and
2124 comparisons. Evaluate the operations in the tree substituting NEW0 for
2125 any occurrence of OLD0 as an operand of a comparison and likewise for
2129 eval_subst (arg, old0, new0, old1, new1)
2131 tree old0, new0, old1, new1;
2133 tree type = TREE_TYPE (arg);
2134 enum tree_code code = TREE_CODE (arg);
2135 char class = TREE_CODE_CLASS (code);
2137 /* We can handle some of the 'e' cases here. */
2138 if (class == 'e' && code == TRUTH_NOT_EXPR)
2140 else if (class == 'e'
2141 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
/* Unary: rebuild with the substituted operand and refold.  */
2147 return fold (build1 (code, type,
2148 eval_subst (TREE_OPERAND (arg, 0),
2149 old0, new0, old1, new1)));
/* Binary: substitute in both operands.  */
2152 return fold (build (code, type,
2153 eval_subst (TREE_OPERAND (arg, 0),
2154 old0, new0, old1, new1),
2155 eval_subst (TREE_OPERAND (arg, 1),
2156 old0, new0, old1, new1)));
/* COMPOUND_EXPR-like cases: only one operand carries the value.  */
2162 return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);
2165 return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);
/* Ternary (presumably COND_EXPR): substitute in all three operands.  */
2168 return fold (build (code, type,
2169 eval_subst (TREE_OPERAND (arg, 0),
2170 old0, new0, old1, new1),
2171 eval_subst (TREE_OPERAND (arg, 1),
2172 old0, new0, old1, new1),
2173 eval_subst (TREE_OPERAND (arg, 2),
2174 old0, new0, old1, new1)));
2178 /* fall through - ??? */
2182 tree arg0 = TREE_OPERAND (arg, 0);
2183 tree arg1 = TREE_OPERAND (arg, 1);
2185 /* We need to check both for exact equality and tree equality. The
2186 former will be true if the operand has a side-effect. In that
2187 case, we know the operand occurred exactly once. */
2189 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
2191 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
2194 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
2196 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
2199 return fold (build (code, type, arg0, arg1));
2207 /* Return a tree for the case when the result of an expression is RESULT
2208 converted to TYPE and OMITTED was previously an operand of the expression
2209 but is now not needed (e.g., we folded OMITTED * 0).
2211 If OMITTED has side effects, we must evaluate it. Otherwise, just do
2212 the conversion of RESULT to TYPE. */
2215 omit_one_operand (type, result, omitted)
2216 tree type, result, omitted;
2218 tree t = convert (type, result);
/* Keep OMITTED alive via a COMPOUND_EXPR so its side effects still run.  */
2220 if (TREE_SIDE_EFFECTS (omitted))
2221 return build (COMPOUND_EXPR, type, omitted, t);
2223 return non_lvalue (t);
2226 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
2229 pedantic_omit_one_operand (type, result, omitted)
2230 tree type, result, omitted;
2232 tree t = convert (type, result);
/* Same side-effect preservation as omit_one_operand.  */
2234 if (TREE_SIDE_EFFECTS (omitted))
2235 return build (COMPOUND_EXPR, type, omitted, t);
2237 return pedantic_non_lvalue (t);
2240 /* Return a simplified tree node for the truth-negation of ARG. This
2241 never alters ARG itself. We assume that ARG is an operation that
2242 returns a truth value (0 or 1). */
/* NOTE(review): decimated extract -- several case labels and braces fall
   on missing lines; code kept byte-identical.  */
2245 invert_truthvalue (arg)
2248 tree type = TREE_TYPE (arg);
2249 enum tree_code code = TREE_CODE (arg);
2251 if (code == ERROR_MARK)
2254 /* If this is a comparison, we can simply invert it, except for
2255 floating-point non-equality comparisons, in which case we just
2256 enclose a TRUTH_NOT_EXPR around what we have. */
2258 if (TREE_CODE_CLASS (code) == '<')
2260 if (FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (arg, 0)))
2261 && !flag_unsafe_math_optimizations
2264 return build1 (TRUTH_NOT_EXPR, type, arg);
2266 return build (invert_tree_comparison (code), type,
2267 TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
/* Presumably the INTEGER_CST case: !c is (c == 0).  */
2273 return convert (type, build_int_2 (integer_zerop (arg), 0));
2275 case TRUTH_AND_EXPR:
/* De Morgan: !(a && b) == !a || !b.  */
2276 return build (TRUTH_OR_EXPR, type,
2277 invert_truthvalue (TREE_OPERAND (arg, 0)),
2278 invert_truthvalue (TREE_OPERAND (arg, 1)));
/* De Morgan: !(a || b) == !a && !b.  */
2281 return build (TRUTH_AND_EXPR, type,
2282 invert_truthvalue (TREE_OPERAND (arg, 0)),
2283 invert_truthvalue (TREE_OPERAND (arg, 1)));
2285 case TRUTH_XOR_EXPR:
2286 /* Here we can invert either operand. We invert the first operand
2287 unless the second operand is a TRUTH_NOT_EXPR in which case our
2288 result is the XOR of the first operand with the inside of the
2289 negation of the second operand. */
2291 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
2292 return build (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
2293 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
2295 return build (TRUTH_XOR_EXPR, type,
2296 invert_truthvalue (TREE_OPERAND (arg, 0)),
2297 TREE_OPERAND (arg, 1));
2299 case TRUTH_ANDIF_EXPR:
2300 return build (TRUTH_ORIF_EXPR, type,
2301 invert_truthvalue (TREE_OPERAND (arg, 0)),
2302 invert_truthvalue (TREE_OPERAND (arg, 1)));
2304 case TRUTH_ORIF_EXPR:
2305 return build (TRUTH_ANDIF_EXPR, type,
2306 invert_truthvalue (TREE_OPERAND (arg, 0)),
2307 invert_truthvalue (TREE_OPERAND (arg, 1)));
2309 case TRUTH_NOT_EXPR:
/* Double negation cancels.  */
2310 return TREE_OPERAND (arg, 0);
/* Presumably COND_EXPR: push the negation into both arms.  */
2313 return build (COND_EXPR, type, TREE_OPERAND (arg, 0),
2314 invert_truthvalue (TREE_OPERAND (arg, 1)),
2315 invert_truthvalue (TREE_OPERAND (arg, 2)));
/* Presumably COMPOUND_EXPR: negate only the value operand.  */
2318 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
2319 invert_truthvalue (TREE_OPERAND (arg, 1)));
2321 case WITH_RECORD_EXPR:
2322 return build (WITH_RECORD_EXPR, type,
2323 invert_truthvalue (TREE_OPERAND (arg, 0)),
2324 TREE_OPERAND (arg, 1));
2326 case NON_LVALUE_EXPR:
2327 return invert_truthvalue (TREE_OPERAND (arg, 0));
/* Conversions (NOP/CONVERT, per missing case labels): negate inside.  */
2332 return build1 (TREE_CODE (arg), type,
2333 invert_truthvalue (TREE_OPERAND (arg, 0)));
/* Presumably BIT_AND_EXPR: only (x & 1) can be inverted as (x == 0).  */
2336 if (!integer_onep (TREE_OPERAND (arg, 1)))
2338 return build (EQ_EXPR, type, arg, convert (type, integer_zero_node));
2341 return build1 (TRUTH_NOT_EXPR, type, arg);
2343 case CLEANUP_POINT_EXPR:
2344 return build1 (CLEANUP_POINT_EXPR, type,
2345 invert_truthvalue (TREE_OPERAND (arg, 0)));
2350 if (TREE_CODE (TREE_TYPE (arg)) != BOOLEAN_TYPE)
2352 return build1 (TRUTH_NOT_EXPR, type, arg);
2355 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
2356 operands are another bit-wise operation with a common input. If so,
2357 distribute the bit operations to save an operation and possibly two if
2358 constants are involved. For example, convert
2359 (A | B) & (A | C) into A | (B & C)
2360 Further simplification will occur if B and C are constants.
2362 If this optimization cannot be done, 0 will be returned. */
2365 distribute_bit_expr (code, type, arg0, arg1)
2366 enum tree_code code;
/* Both operands must be the same inner bit-op, different from CODE.  */
2373 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2374 || TREE_CODE (arg0) == code
2375 || (TREE_CODE (arg0) != BIT_AND_EXPR
2376 && TREE_CODE (arg0) != BIT_IOR_EXPR))
/* The four cases below locate the shared operand in either position
   of either inner expression (bit-ops are commutative).  */
2379 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
2381 common = TREE_OPERAND (arg0, 0);
2382 left = TREE_OPERAND (arg0, 1);
2383 right = TREE_OPERAND (arg1, 1);
2385 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
2387 common = TREE_OPERAND (arg0, 0);
2388 left = TREE_OPERAND (arg0, 1);
2389 right = TREE_OPERAND (arg1, 0);
2391 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
2393 common = TREE_OPERAND (arg0, 1);
2394 left = TREE_OPERAND (arg0, 0);
2395 right = TREE_OPERAND (arg1, 1);
2397 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
2399 common = TREE_OPERAND (arg0, 1);
2400 left = TREE_OPERAND (arg0, 0);
2401 right = TREE_OPERAND (arg1, 0);
/* Rebuild as COMMON <inner-op> (LEFT code RIGHT) and refold.  */
2406 return fold (build (TREE_CODE (arg0), type, common,
2407 fold (build (code, type, left, right))));
2410 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
2411 starting at BITPOS. The field is unsigned if UNSIGNEDP is non-zero. */
2414 make_bit_field_ref (inner, type, bitsize, bitpos, unsignedp)
2417 int bitsize, bitpos;
/* Operand 1 is the width, operand 2 the bit offset, per tree.def.  */
2420 tree result = build (BIT_FIELD_REF, type, inner,
2421 size_int (bitsize), bitsize_int (bitpos));
2423 TREE_UNSIGNED (result) = unsignedp;
2428 /* Optimize a bit-field compare.
2430 There are two cases: First is a compare against a constant and the
2431 second is a comparison of two items where the fields are at the same
2432 bit position relative to the start of a chunk (byte, halfword, word)
2433 large enough to contain it. In these cases we can avoid the shift
2434 implicit in bitfield extractions.
2436 For constants, we emit a compare of the shifted constant with the
2437 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
2438 compared. For two fields at the same position, we do the ANDs with the
2439 similar mask and compare the result of the ANDs.
2441 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
2442 COMPARE_TYPE is the type of the comparison, and LHS and RHS
2443 are the left and right operands of the comparison, respectively.
2445 If the optimization described above can be done, we return the resulting
2446 tree. Otherwise we return zero. */
2449 optimize_bit_field_compare (code, compare_type, lhs, rhs)
2450 enum tree_code code;
2454 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
2455 tree type = TREE_TYPE (lhs);
2456 tree signed_type, unsigned_type;
2457 int const_p = TREE_CODE (rhs) == INTEGER_CST;
2458 enum machine_mode lmode, rmode, nmode;
2459 int lunsignedp, runsignedp;
2460 int lvolatilep = 0, rvolatilep = 0;
2461 tree linner, rinner = NULL_TREE;
2465 /* Get all the information about the extractions being done. If the bit size
2466 if the same as the size of the underlying object, we aren't doing an
2467 extraction at all and so can do nothing. We also don't want to
2468 do anything if the inner expression is a PLACEHOLDER_EXPR since we
2469 then will no longer be able to replace it. */
2470 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
2471 &lunsignedp, &lvolatilep);
2472 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
2473 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
2478 /* If this is not a constant, we can only do something if bit positions,
2479 sizes, and signedness are the same. */
2480 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
2481 &runsignedp, &rvolatilep);
2483 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
2484 || lunsignedp != runsignedp || offset != 0
2485 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
2489 /* See if we can find a mode to refer to this field. We should be able to,
2490 but fail if we can't. */
2491 nmode = get_best_mode (lbitsize, lbitpos,
2492 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
2493 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
2494 TYPE_ALIGN (TREE_TYPE (rinner))),
2495 word_mode, lvolatilep || rvolatilep);
2496 if (nmode == VOIDmode)
2499 /* Set signed and unsigned types of the precision of this mode for the
2501 signed_type = type_for_mode (nmode, 0);
2502 unsigned_type = type_for_mode (nmode, 1);
2504 /* Compute the bit position and size for the new reference and our offset
2505 within it. If the new reference is the same size as the original, we
2506 won't optimize anything, so return zero. */
2507 nbitsize = GET_MODE_BITSIZE (nmode);
2508 nbitpos = lbitpos & ~ (nbitsize - 1);
2510 if (nbitsize == lbitsize)
2513 if (BYTES_BIG_ENDIAN)
2514 lbitpos = nbitsize - lbitsize - lbitpos;
2516 /* Make the mask to be used against the extracted field. */
2517 mask = build_int_2 (~0, ~0);
2518 TREE_TYPE (mask) = unsigned_type;
2519 force_fit_type (mask, 0);
2520 mask = convert (unsigned_type, mask);
2521 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
2522 mask = const_binop (RSHIFT_EXPR, mask,
2523 size_int (nbitsize - lbitsize - lbitpos), 0);
2526 /* If not comparing with constant, just rework the comparison
2528 return build (code, compare_type,
2529 build (BIT_AND_EXPR, unsigned_type,
2530 make_bit_field_ref (linner, unsigned_type,
2531 nbitsize, nbitpos, 1),
2533 build (BIT_AND_EXPR, unsigned_type,
2534 make_bit_field_ref (rinner, unsigned_type,
2535 nbitsize, nbitpos, 1),
2538 /* Otherwise, we are handling the constant case. See if the constant is too
2539 big for the field. Warn and return a tree for 0 (false) if so. We do
2540 this not only for its own sake, but to avoid having to test for this
2541 error case below. If we didn't, we might generate wrong code.
2543 For unsigned fields, the constant shifted right by the field length should
2544 be all zero. For signed fields, the high-order bits should agree with
2549 if (! integer_zerop (const_binop (RSHIFT_EXPR,
2550 convert (unsigned_type, rhs),
2551 size_int (lbitsize), 0)))
2553 warning ("comparison is always %d due to width of bit-field",
2555 return convert (compare_type,
2557 ? integer_one_node : integer_zero_node));
2562 tree tem = const_binop (RSHIFT_EXPR, convert (signed_type, rhs),
2563 size_int (lbitsize - 1), 0);
2564 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
2566 warning ("comparison is always %d due to width of bit-field",
2568 return convert (compare_type,
2570 ? integer_one_node : integer_zero_node));
2574 /* Single-bit compares should always be against zero. */
2575 if (lbitsize == 1 && ! integer_zerop (rhs))
2577 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
2578 rhs = convert (type, integer_zero_node);
2581 /* Make a new bitfield reference, shift the constant over the
2582 appropriate number of bits and mask it with the computed mask
2583 (in case this was a signed field). If we changed it, make a new one. */
2584 lhs = make_bit_field_ref (linner, unsigned_type, nbitsize, nbitpos, 1);
2587 TREE_SIDE_EFFECTS (lhs) = 1;
2588 TREE_THIS_VOLATILE (lhs) = 1;
2591 rhs = fold (const_binop (BIT_AND_EXPR,
2592 const_binop (LSHIFT_EXPR,
2593 convert (unsigned_type, rhs),
2594 size_int (lbitpos), 0),
2597 return build (code, compare_type,
2598 build (BIT_AND_EXPR, unsigned_type, lhs, mask),
2602 /* Subroutine for fold_truthop: decode a field reference.
2604 If EXP is a comparison reference, we return the innermost reference.
2606 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
2607 set to the starting bit number.
2609 If the innermost field can be completely contained in a mode-sized
2610 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
2612 *PVOLATILEP is set to 1 if any expression encountered is volatile;
2613 otherwise it is not changed.
2615 *PUNSIGNEDP is set to the signedness of the field.
2617 *PMASK is set to the mask used. This is either contained in a
2618 BIT_AND_EXPR or derived from the width of the field.
2620 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
2622 Return 0 if this is not a component reference or is one that we can't
2623 do anything with. */
/* NOTE(review): the source-line numbers embedded in this listing skip
   (e.g. 2643 -> 2648), so some statements of this function are elided
   here; comments below describe only the visible code.  */
2626 decode_field_reference (exp, pbitsize, pbitpos, pmode, punsignedp,
2627 pvolatilep, pmask, pand_mask)
2629 HOST_WIDE_INT *pbitsize, *pbitpos;
2630 enum machine_mode *pmode;
2631 int *punsignedp, *pvolatilep;
2636 tree mask, inner, offset;
2638 unsigned int precision;
2640 /* All the optimizations using this function assume integer fields.
2641 There are problems with FP fields since the type_for_size call
2642 below can fail for, e.g., XFmode. */
2643 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
/* Peel off an outer BIT_AND_EXPR, remembering its constant mask in
   AND_MASK so it can be merged into the field mask below.  */
2648 if (TREE_CODE (exp) == BIT_AND_EXPR)
2650 and_mask = TREE_OPERAND (exp, 1);
2651 exp = TREE_OPERAND (exp, 0);
2652 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
2653 if (TREE_CODE (and_mask) != INTEGER_CST)
2657 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
2658 punsignedp, pvolatilep);
/* Reject: no real field found (unless an AND mask supplies one),
   negative size, a variable offset, or a PLACEHOLDER_EXPR base.  */
2659 if ((inner == exp && and_mask == 0)
2660 || *pbitsize < 0 || offset != 0
2661 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
2664 /* Compute the mask to access the bitfield. */
2665 unsigned_type = type_for_size (*pbitsize, 1);
2666 precision = TYPE_PRECISION (unsigned_type);
/* Build an all-ones constant, then shift left and right by
   PRECISION - *PBITSIZE to leave exactly *PBITSIZE low-order ones.  */
2668 mask = build_int_2 (~0, ~0);
2669 TREE_TYPE (mask) = unsigned_type;
2670 force_fit_type (mask, 0);
2671 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
2672 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
2674 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
2676 mask = fold (build (BIT_AND_EXPR, unsigned_type,
2677 convert (unsigned_type, and_mask), mask));
2680 *pand_mask = and_mask;
2684 /* Return non-zero if MASK represents a mask of SIZE ones in the low-order
/* Predicate: does MASK (an INTEGER_CST) consist of exactly SIZE
   low-order one bits?  Works by rebuilding that pattern from an
   all-ones constant and comparing with tree_int_cst_equal.
   NOTE(review): lines are elided in this listing (numbers skip).  */
2688 all_ones_mask_p (mask, size)
2692 tree type = TREE_TYPE (mask);
2693 unsigned int precision = TYPE_PRECISION (type);
/* Use the signed variant of TYPE so the right shift below is
   arithmetic, matching how the comparison constant was built.  */
2696 tmask = build_int_2 (~0, ~0);
2697 TREE_TYPE (tmask) = signed_type (type);
2698 force_fit_type (tmask, 0);
/* Shift all-ones left then right by PRECISION - SIZE, leaving SIZE
   low-order ones; equal to MASK iff MASK has exactly that form.  */
2700 tree_int_cst_equal (mask,
2701 const_binop (RSHIFT_EXPR,
2702 const_binop (LSHIFT_EXPR, tmask,
2703 size_int (precision - size),
2705 size_int (precision - size), 0));
2708 /* Subroutine for fold_truthop: determine if an operand is simple enough
2709 to be evaluated unconditionally. */
/* Return nonzero if EXP is cheap and safe to evaluate unconditionally:
   a constant, or (per the visible tests) a non-volatile, non-addressable,
   local, non-static object.  Used by fold_truthop/fold_range_test when
   turning short-circuit operators into unconditional ones.  */
2712 simple_operand_p (exp)
2715 /* Strip any conversions that don't change the machine mode. */
2716 while ((TREE_CODE (exp) == NOP_EXPR
2717 || TREE_CODE (exp) == CONVERT_EXPR)
2718 && (TYPE_MODE (TREE_TYPE (exp))
2719 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
2720 exp = TREE_OPERAND (exp, 0);
/* 'c' is the tree-code class for constants; otherwise require a
   simple object reference (elided line between 2722 and 2724).  */
2722 return (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c'
2724 && ! TREE_ADDRESSABLE (exp)
2725 && ! TREE_THIS_VOLATILE (exp)
2726 && ! DECL_NONLOCAL (exp)
2727 /* Don't regard global variables as simple. They may be
2728 allocated in ways unknown to the compiler (shared memory,
2729 #pragma weak, etc). */
2730 && ! TREE_PUBLIC (exp)
2731 && ! DECL_EXTERNAL (exp)
2732 /* Loading a static variable is unduly expensive, but global
2733 registers aren't expensive. */
2734 && (! TREE_STATIC (exp) || DECL_REGISTER (exp)));
2737 /* The following functions are subroutines to fold_range_test and allow it to
2738 try to change a logical combination of comparisons into a range test.
2741 X == 2 || X == 3 || X == 4 || X == 5
2745 (unsigned) (X - 2) <= 3
2747 We describe each set of comparisons as being either inside or outside
2748 a range, using a variable named like IN_P, and then describe the
2749 range with a lower and upper bound. If one of the bounds is omitted,
2750 it represents either the highest or lowest value of the type.
2752 In the comments below, we represent a range by two numbers in brackets
2753 preceded by a "+" to designate being inside that range, or a "-" to
2754 designate being outside that range, so the condition can be inverted by
2755 flipping the prefix. An omitted bound is represented by a "-". For
2756 example, "- [-, 10]" means being outside the range starting at the lowest
2757 possible value and ending at 10, in other words, being greater than 10.
2758 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
2761 We set up things so that the missing bounds are handled in a consistent
2762 manner so neither a missing bound nor "true" and "false" need to be
2763 handled using a special case. */
2765 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
2766 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
2767 and UPPER1_P are nonzero if the respective argument is an upper bound
2768 and zero for a lower. TYPE, if nonzero, is the type of the result; it
2769 must be specified for a comparison. ARG1 will be converted to ARG0's
2770 type if both are specified. */
/* Apply CODE to ARG0/ARG1 where either may be 0 meaning an omitted
   (infinite) range bound; UPPER0_P/UPPER1_P say whether each omitted
   bound is an upper or lower one.  Returns an INTEGER_CST result or 0.
   NOTE(review): several lines (including the switch header and some
   case labels) are elided in this listing.  */
2773 range_binop (code, type, arg0, upper0_p, arg1, upper1_p)
2774 enum tree_code code;
2777 int upper0_p, upper1_p;
2783 /* If neither arg represents infinity, do the normal operation.
2784 Else, if not a comparison, return infinity. Else handle the special
2785 comparison rules. Note that most of the cases below won't occur, but
2786 are handled for consistency. */
2788 if (arg0 != 0 && arg1 != 0)
2790 tem = fold (build (code, type != 0 ? type : TREE_TYPE (arg0),
2791 arg0, convert (TREE_TYPE (arg0), arg1)));
/* fold may not reduce to a constant; report failure with 0 then.  */
2793 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
2796 if (TREE_CODE_CLASS (code) != '<')
2799 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
2800 for neither. In real maths, we cannot assume open ended ranges are
2801 the same. But, this is computer arithmetic, where numbers are finite.
2802 We can therefore make the transformation of any unbounded range with
2803 the value Z, Z being greater than any representable number. This permits
2804 us to treat unbounded ranges as equal. */
2805 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
2806 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
/* Per-comparison results on the two sign codes (the enclosing
   switch's case labels are elided between these lines).  */
2810 result = sgn0 == sgn1;
2813 result = sgn0 != sgn1;
2816 result = sgn0 < sgn1;
2819 result = sgn0 <= sgn1;
2822 result = sgn0 > sgn1;
2825 result = sgn0 >= sgn1;
2831 return convert (type, result ? integer_one_node : integer_zero_node);
2834 /* Given EXP, a logical expression, set the range it is testing into
2835 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
2836 actually being tested. *PLOW and *PHIGH will be made of the same type
2837 as the returned expression. If EXP is not a comparison, we will most
2838 likely not be returning a useful value and range. */
/* Walk down EXP (a logical expression), refining an (in_p, low, high)
   range description at each step; store the final range through
   PIN_P/PLOW/PHIGH and return the expression actually tested.
   NOTE(review): this listing elides many lines (loop/switch headers,
   several case labels and closing braces); comments describe only the
   visible statements.  */
2841 make_range (exp, pin_p, plow, phigh)
2846 enum tree_code code;
2847 tree arg0 = NULL_TREE, arg1 = NULL_TREE, type = NULL_TREE;
2848 tree orig_type = NULL_TREE;
2850 tree low, high, n_low, n_high;
2852 /* Start with simply saying "EXP != 0" and then look at the code of EXP
2853 and see if we can refine the range. Some of the cases below may not
2854 happen, but it doesn't seem worth worrying about this. We "continue"
2855 the outer loop when we've changed something; otherwise we "break"
2856 the switch, which will "break" the while. */
2858 in_p = 0, low = high = convert (TREE_TYPE (exp), integer_zero_node);
2862 code = TREE_CODE (exp);
2864 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
/* Pick up operands: arg0 always; arg1 only for binary ('2'),
   comparison ('<') or multi-operand expression ('e') codes.  */
2866 arg0 = TREE_OPERAND (exp, 0);
2867 if (TREE_CODE_CLASS (code) == '<'
2868 || TREE_CODE_CLASS (code) == '1'
2869 || TREE_CODE_CLASS (code) == '2')
2870 type = TREE_TYPE (arg0);
2871 if (TREE_CODE_CLASS (code) == '2'
2872 || TREE_CODE_CLASS (code) == '<'
2873 || (TREE_CODE_CLASS (code) == 'e'
2874 && TREE_CODE_LENGTH (code) > 1))
2875 arg1 = TREE_OPERAND (exp, 1);
2878 /* Set ORIG_TYPE as soon as TYPE is non-null so that we do not
2879 lose a cast by accident. */
2880 if (type != NULL_TREE && orig_type == NULL_TREE)
2885 case TRUTH_NOT_EXPR:
/* Logical NOT just flips in/out and recurses into the operand.  */
2886 in_p = ! in_p, exp = arg0;
2889 case EQ_EXPR: case NE_EXPR:
2890 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
2891 /* We can only do something if the range is testing for zero
2892 and if the second operand is an integer constant. Note that
2893 saying something is "in" the range we make is done by
2894 complementing IN_P since it will set in the initial case of
2895 being not equal to zero; "out" is leaving it alone. */
2896 if (low == 0 || high == 0
2897 || ! integer_zerop (low) || ! integer_zerop (high)
2898 || TREE_CODE (arg1) != INTEGER_CST)
/* Map each comparison against constant c to a bracketed range;
   the +/- notation is explained in the comment block above.  */
2903 case NE_EXPR: /* - [c, c] */
2906 case EQ_EXPR: /* + [c, c] */
2907 in_p = ! in_p, low = high = arg1;
2909 case GT_EXPR: /* - [-, c] */
2910 low = 0, high = arg1;
2912 case GE_EXPR: /* + [c, -] */
2913 in_p = ! in_p, low = arg1, high = 0;
2915 case LT_EXPR: /* - [c, -] */
2916 low = arg1, high = 0;
2918 case LE_EXPR: /* + [-, c] */
2919 in_p = ! in_p, low = 0, high = arg1;
2927 /* If this is an unsigned comparison, we also know that EXP is
2928 greater than or equal to zero. We base the range tests we make
2929 on that fact, so we record it here so we can parse existing
2931 if (TREE_UNSIGNED (type) && (low == 0 || high == 0))
2933 if (! merge_ranges (&n_in_p, &n_low, &n_high, in_p, low, high,
2934 1, convert (type, integer_zero_node),
2938 in_p = n_in_p, low = n_low, high = n_high;
2940 /* If the high bound is missing, but we
2941 have a low bound, reverse the range so
2942 it goes from zero to the low bound minus 1. */
2943 if (high == 0 && low)
2946 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
2947 integer_one_node, 0);
2948 low = convert (type, integer_zero_node);
2954 /* (-x) IN [a,b] -> x in [-b, -a] */
2955 n_low = range_binop (MINUS_EXPR, type,
2956 convert (type, integer_zero_node), 0, high, 1);
2957 n_high = range_binop (MINUS_EXPR, type,
2958 convert (type, integer_zero_node), 0, low, 0);
2959 low = n_low, high = n_high;
/* Presumably the BIT_NOT_EXPR case: ~x is rewritten as -x - 1 so the
   NEGATE_EXPR logic can handle it — case label elided, TODO confirm.  */
2965 exp = build (MINUS_EXPR, type, negate_expr (arg0),
2966 convert (type, integer_one_node));
2969 case PLUS_EXPR: case MINUS_EXPR:
2970 if (TREE_CODE (arg1) != INTEGER_CST)
2973 /* If EXP is signed, any overflow in the computation is undefined,
2974 so we don't worry about it so long as our computations on
2975 the bounds don't overflow. For unsigned, overflow is defined
2976 and this is exactly the right thing. */
2977 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
2978 type, low, 0, arg1, 0);
2979 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
2980 type, high, 1, arg1, 0);
2981 if ((n_low != 0 && TREE_OVERFLOW (n_low))
2982 || (n_high != 0 && TREE_OVERFLOW (n_high)))
2985 /* Check for an unsigned range which has wrapped around the maximum
2986 value thus making n_high < n_low, and normalize it. */
2987 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
2989 low = range_binop (PLUS_EXPR, type, n_high, 0,
2990 integer_one_node, 0);
2991 high = range_binop (MINUS_EXPR, type, n_low, 0,
2992 integer_one_node, 0);
2994 /* If the range is of the form +/- [ x+1, x ], we won't
2995 be able to normalize it. But then, it represents the
2996 whole range or the empty set, so make it
2998 if (tree_int_cst_equal (n_low, low)
2999 && tree_int_cst_equal (n_high, high))
3005 low = n_low, high = n_high;
3010 case NOP_EXPR: case NON_LVALUE_EXPR: case CONVERT_EXPR:
/* Conversions: give up on widening casts or bounds that do not fit
   the inner type; otherwise translate the bounds to that type.  */
3011 if (TYPE_PRECISION (type) > TYPE_PRECISION (orig_type))
3014 if (! INTEGRAL_TYPE_P (type)
3015 || (low != 0 && ! int_fits_type_p (low, type))
3016 || (high != 0 && ! int_fits_type_p (high, type)))
3019 n_low = low, n_high = high;
3022 n_low = convert (type, n_low);
3025 n_high = convert (type, n_high);
3027 /* If we're converting from an unsigned to a signed type,
3028 we will be doing the comparison as unsigned. The tests above
3029 have already verified that LOW and HIGH are both positive.
3031 So we have to make sure that the original unsigned value will
3032 be interpreted as positive. */
3033 if (TREE_UNSIGNED (type) && ! TREE_UNSIGNED (TREE_TYPE (exp)))
3035 tree equiv_type = type_for_mode (TYPE_MODE (type), 1);
3038 /* A range without an upper bound is, naturally, unbounded.
3039 Since convert would have cropped a very large value, use
3040 the max value for the destination type. */
3042 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
3043 : TYPE_MAX_VALUE (type);
/* high_positive becomes roughly the midpoint (max >> 1), i.e. the
   largest value still positive when reinterpreted as signed.  */
3045 high_positive = fold (build (RSHIFT_EXPR, type,
3046 convert (type, high_positive),
3047 convert (type, integer_one_node)));
3049 /* If the low bound is specified, "and" the range with the
3050 range for which the original unsigned value will be
3054 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3056 1, convert (type, integer_zero_node),
3060 in_p = (n_in_p == in_p);
3064 /* Otherwise, "or" the range with the range of the input
3065 that will be interpreted as negative. */
3066 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3068 1, convert (type, integer_zero_node),
3072 in_p = (in_p != n_in_p);
3077 low = n_low, high = n_high;
3087 /* If EXP is a constant, we can evaluate whether this is true or false. */
3088 if (TREE_CODE (exp) == INTEGER_CST)
3090 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
3092 && integer_onep (range_binop (LE_EXPR, integer_type_node,
3098 *pin_p = in_p, *plow = low, *phigh = high;
3102 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
3103 type, TYPE, return an expression to test if EXP is in (or out of, depending
3104 on IN_P) the range. */
/* Build an expression of type TYPE testing whether EXP lies in (IN_P
   nonzero) or out of (IN_P zero) the range [LOW, HIGH]; a 0 bound means
   unbounded.  Works by reducing to simpler cases recursively.
   NOTE(review): several guard lines are elided in this listing.  */
3107 build_range_check (type, exp, in_p, low, high)
3113 tree etype = TREE_TYPE (exp);
/* "Out of range" is handled by building the "in range" check and
   inverting it (the IN_P test line is elided above this call).  */
3117 && (0 != (value = build_range_check (type, exp, 1, low, high))))
3118 return invert_truthvalue (value);
/* Both bounds omitted: the range is the whole domain, always true.  */
3120 else if (low == 0 && high == 0)
3121 return convert (type, integer_one_node);
/* Only one bound present: a single comparison suffices.  */
3124 return fold (build (LE_EXPR, type, exp, high));
3127 return fold (build (GE_EXPR, type, exp, low));
/* Degenerate range [c, c]: equality test.  */
3129 else if (operand_equal_p (low, high, 0))
3130 return fold (build (EQ_EXPR, type, exp, low));
/* [0, high] on an unsigned EXP: the low bound is implied, drop it.  */
3132 else if (TREE_UNSIGNED (etype) && integer_zerop (low))
3133 return build_range_check (type, exp, 1, 0, high);
/* [0, high] on a signed EXP: redo the check in the unsigned type so
   negative values fall outside automatically.  */
3135 else if (integer_zerop (low))
3137 utype = unsigned_type (etype);
3138 return build_range_check (type, convert (utype, exp), 1, 0,
3139 convert (utype, high));
/* General case: shift the range to start at zero, i.e. test
   (EXP - LOW) in [0, HIGH - LOW], provided the subtraction folds
   without overflow.  */
3142 else if (0 != (value = const_binop (MINUS_EXPR, high, low, 0))
3143 && ! TREE_OVERFLOW (value))
3144 return build_range_check (type,
3145 fold (build (MINUS_EXPR, etype, exp, low)),
3146 1, convert (etype, integer_zero_node), value);
3151 /* Given two ranges, see if we can merge them into one. Return 1 if we
3152 can, 0 if we can't. Set the output range into the specified parameters. */
/* Try to combine two (in_p, low, high) ranges into one; return 1 and
   set *PIN_P/*PLOW/*PHIGH on success, 0 if the combination cannot be
   expressed as a single range.  NOTE(review): this listing elides a
   number of lines (if-headers, braces, return statements).  */
3155 merge_ranges (pin_p, plow, phigh, in0_p, low0, high0, in1_p, low1, high1)
3159 tree low0, high0, low1, high1;
/* Bound equality, treating two omitted bounds as equal.  */
3167 int lowequal = ((low0 == 0 && low1 == 0)
3168 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
3169 low0, 0, low1, 0)));
3170 int highequal = ((high0 == 0 && high1 == 0)
3171 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
3172 high0, 1, high1, 1)));
3174 /* Make range 0 be the range that starts first, or ends last if they
3175 start at the same value. Swap them if it isn't. */
3176 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
3179 && integer_onep (range_binop (GT_EXPR, integer_type_node,
3180 high1, 1, high0, 1))))
3182 temp = in0_p, in0_p = in1_p, in1_p = temp;
3183 tem = low0, low0 = low1, low1 = tem;
3184 tem = high0, high0 = high1, high1 = tem;
3187 /* Now flag two cases, whether the ranges are disjoint or whether the
3188 second range is totally subsumed in the first. Note that the tests
3189 below are simplified by the ones above. */
3190 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
3191 high0, 1, low1, 0));
3192 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
3193 high1, 1, high0, 1));
3195 /* We now have four cases, depending on whether we are including or
3196 excluding the two ranges. */
/* Case 1 (both included, the in0_p && in1_p branch — header elided).  */
3199 /* If they don't overlap, the result is false. If the second range
3200 is a subset it is the result. Otherwise, the range is from the start
3201 of the second to the end of the first. */
3203 in_p = 0, low = high = 0;
3205 in_p = 1, low = low1, high = high1;
3207 in_p = 1, low = low1, high = high0;
/* Case 2: first range included, second excluded.  */
3210 else if (in0_p && ! in1_p)
3212 /* If they don't overlap, the result is the first range. If they are
3213 equal, the result is false. If the second range is a subset of the
3214 first, and the ranges begin at the same place, we go from just after
3215 the end of the first range to the end of the second. If the second
3216 range is not a subset of the first, or if it is a subset and both
3217 ranges end at the same place, the range starts at the start of the
3218 first range and ends just before the second range.
3219 Otherwise, we can't describe this as a single range. */
3221 in_p = 1, low = low0, high = high0;
3222 else if (lowequal && highequal)
3223 in_p = 0, low = high = 0;
3224 else if (subset && lowequal)
3226 in_p = 1, high = high0;
3227 low = range_binop (PLUS_EXPR, NULL_TREE, high1, 0,
3228 integer_one_node, 0);
3230 else if (! subset || highequal)
3232 in_p = 1, low = low0;
3233 high = range_binop (MINUS_EXPR, NULL_TREE, low1, 0,
3234 integer_one_node, 0);
/* Case 3: first range excluded, second included.  */
3240 else if (! in0_p && in1_p)
3242 /* If they don't overlap, the result is the second range. If the second
3243 is a subset of the first, the result is false. Otherwise,
3244 the range starts just after the first range and ends at the
3245 end of the second. */
3247 in_p = 1, low = low1, high = high1;
3248 else if (subset || highequal)
3249 in_p = 0, low = high = 0;
3252 in_p = 1, high = high1;
3253 low = range_binop (PLUS_EXPR, NULL_TREE, high0, 1,
3254 integer_one_node, 0);
/* Case 4: both ranges excluded (the else branch — header elided).  */
3260 /* The case where we are excluding both ranges. Here the complex case
3261 is if they don't overlap. In that case, the only time we have a
3262 range is if they are adjacent. If the second is a subset of the
3263 first, the result is the first. Otherwise, the range to exclude
3264 starts at the beginning of the first range and ends at the end of the
/* Adjacency test: high0 + 1 == low1 means the union is contiguous.  */
3268 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
3269 range_binop (PLUS_EXPR, NULL_TREE,
3271 integer_one_node, 1),
3273 in_p = 0, low = low0, high = high1;
3278 in_p = 0, low = low0, high = high0;
3280 in_p = 0, low = low0, high = high1;
3283 *pin_p = in_p, *plow = low, *phigh = high;
3287 /* EXP is some logical combination of boolean tests. See if we can
3288 merge it into some range test. Return the new tree if so. */
/* Try to fold EXP, a TRUTH_AND/OR(_IF)_EXPR of comparisons, into a
   single range test via make_range/merge_ranges/build_range_check;
   also converts short-circuit forms to unconditional ones when cheap.
   NOTE(review): some lines are elided in this listing.  */
3291 fold_range_test (exp)
3294 int or_op = (TREE_CODE (exp) == TRUTH_ORIF_EXPR
3295 || TREE_CODE (exp) == TRUTH_OR_EXPR);
3296 int in0_p, in1_p, in_p;
3297 tree low0, low1, low, high0, high1, high;
3298 tree lhs = make_range (TREE_OPERAND (exp, 0), &in0_p, &low0, &high0);
3299 tree rhs = make_range (TREE_OPERAND (exp, 1), &in1_p, &low1, &high1);
3302 /* If this is an OR operation, invert both sides; we will invert
3303 again at the end. */
3305 in0_p = ! in0_p, in1_p = ! in1_p;
3307 /* If both expressions are the same, if we can merge the ranges, and we
3308 can build the range test, return it or it inverted. If one of the
3309 ranges is always true or always false, consider it to be the same
3310 expression as the other. */
3311 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
3312 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
3314 && 0 != (tem = (build_range_check (TREE_TYPE (exp),
3316 : rhs != 0 ? rhs : integer_zero_node,
3318 return or_op ? invert_truthvalue (tem) : tem;
3320 /* On machines where the branch cost is expensive, if this is a
3321 short-circuited branch and the underlying object on both sides
3322 is the same, make a non-short-circuit operation. */
3323 else if (BRANCH_COST >= 2
3324 && lhs != 0 && rhs != 0
3325 && (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
3326 || TREE_CODE (exp) == TRUTH_ORIF_EXPR)
3327 && operand_equal_p (lhs, rhs, 0))
3329 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
3330 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
3331 which cases we can't do this. */
3332 if (simple_operand_p (lhs))
3333 return build (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
3334 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
3335 TREE_TYPE (exp), TREE_OPERAND (exp, 0),
3336 TREE_OPERAND (exp, 1));
3338 else if (global_bindings_p () == 0
3339 && ! contains_placeholder_p (lhs))
/* Evaluate the common object once via SAVE_EXPR, then test each
   range against it.  */
3341 tree common = save_expr (lhs);
3343 if (0 != (lhs = build_range_check (TREE_TYPE (exp), common,
3344 or_op ? ! in0_p : in0_p,
3346 && (0 != (rhs = build_range_check (TREE_TYPE (exp), common,
3347 or_op ? ! in1_p : in1_p,
3349 return build (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
3350 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
3351 TREE_TYPE (exp), lhs, rhs);
3358 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
3359 bit value. Arrange things so the extra bits will be set to zero if and
3360 only if C is signed-extended to its full width. If MASK is nonzero,
3361 it is an INTEGER_CST that should be AND'ed with the extra bits. */
/* C is an INTEGER_CST viewed as a P-bit value.  Return a constant
   equal to C except that the bits above bit P-1 are zero iff C
   sign-extends to full width; MASK, if nonzero, is ANDed into the
   extra bits.  See the fuller comment above this function.  */
3364 unextend (c, p, unsignedp, mask)
3370 tree type = TREE_TYPE (c);
3371 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
/* Full-width or unsigned values need no adjustment.  */
3374 if (p == modesize || unsignedp)
3377 /* We work by getting just the sign bit into the low-order bit, then
3378 into the high-order bit, then sign-extend. We then XOR that value
3380 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
3381 temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
3383 /* We must use a signed type in order to get an arithmetic right shift.
3384 However, we must also avoid introducing accidental overflows, so that
3385 a subsequent call to integer_zerop will work. Hence we must
3386 do the type conversion here. At this point, the constant is either
3387 zero or one, and the conversion to a signed type can never overflow.
3388 We could get an overflow if this conversion is done anywhere else. */
3389 if (TREE_UNSIGNED (type))
3390 temp = convert (signed_type (type), temp);
/* Move the bit to the top, then arithmetic-shift it down so it fills
   every position from bit P upward.  */
3392 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
3393 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
3395 temp = const_binop (BIT_AND_EXPR, temp, convert (TREE_TYPE (c), mask), 0);
3396 /* If necessary, convert the type back to match the type of C. */
3397 if (TREE_UNSIGNED (type))
3398 temp = convert (type, temp);
3400 return convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
3403 /* Find ways of folding logical expressions of LHS and RHS:
3404 Try to merge two comparisons to the same innermost item.
3405 Look for range tests like "ch >= '0' && ch <= '9'".
3406 Look for combinations of simple terms on machines with expensive branches
3407 and evaluate the RHS unconditionally.
3409 For example, if we have p->a == 2 && p->b == 4 and we can make an
3410 object large enough to span both A and B, we can do this with a comparison
3411 against the object ANDed with the a mask.
3413 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
3414 operations to do this with one comparison.
3416 We check for both normal comparisons and the BIT_AND_EXPRs made by this
3417 function and the one above.
3419 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
3420 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
3422 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
3425 We return the simplified tree or 0 if no optimization is possible. */
3428 fold_truthop (code, truth_type, lhs, rhs)
3429 enum tree_code code;
3430 tree truth_type, lhs, rhs;
3432 /* If this is the "or" of two comparisons, we can do something if
3433 the comparisons are NE_EXPR. If this is the "and", we can do something
3434 if the comparisons are EQ_EXPR. I.e.,
3435 (a->b == 2 && a->c == 4) can become (a->new == NEW).
3437 WANTED_CODE is this operation code. For single bit fields, we can
3438 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
3439 comparison for one-bit fields. */
3441 enum tree_code wanted_code;
3442 enum tree_code lcode, rcode;
3443 tree ll_arg, lr_arg, rl_arg, rr_arg;
3444 tree ll_inner, lr_inner, rl_inner, rr_inner;
3445 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
3446 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
3447 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
3448 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
3449 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
3450 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
3451 enum machine_mode lnmode, rnmode;
3452 tree ll_mask, lr_mask, rl_mask, rr_mask;
3453 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
3454 tree l_const, r_const;
3455 tree lntype, rntype, result;
3456 int first_bit, end_bit;
3459 /* Start by getting the comparison codes. Fail if anything is volatile.
3460 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
3461 it were surrounded with a NE_EXPR. */
3463 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
3466 lcode = TREE_CODE (lhs);
3467 rcode = TREE_CODE (rhs);
3469 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
3470 lcode = NE_EXPR, lhs = build (NE_EXPR, truth_type, lhs, integer_zero_node);
3472 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
3473 rcode = NE_EXPR, rhs = build (NE_EXPR, truth_type, rhs, integer_zero_node);
3475 if (TREE_CODE_CLASS (lcode) != '<' || TREE_CODE_CLASS (rcode) != '<')
3478 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
3479 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
3481 ll_arg = TREE_OPERAND (lhs, 0);
3482 lr_arg = TREE_OPERAND (lhs, 1);
3483 rl_arg = TREE_OPERAND (rhs, 0);
3484 rr_arg = TREE_OPERAND (rhs, 1);
3486 /* If the RHS can be evaluated unconditionally and its operands are
3487 simple, it wins to evaluate the RHS unconditionally on machines
3488 with expensive branches. In this case, this isn't a comparison
3489 that can be merged. Avoid doing this if the RHS is a floating-point
3490 comparison since those can trap. */
3492 if (BRANCH_COST >= 2
3493 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
3494 && simple_operand_p (rl_arg)
3495 && simple_operand_p (rr_arg))
3496 return build (code, truth_type, lhs, rhs);
3498 /* See if the comparisons can be merged. Then get all the parameters for
3501 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
3502 || (rcode != EQ_EXPR && rcode != NE_EXPR))
3506 ll_inner = decode_field_reference (ll_arg,
3507 &ll_bitsize, &ll_bitpos, &ll_mode,
3508 &ll_unsignedp, &volatilep, &ll_mask,
3510 lr_inner = decode_field_reference (lr_arg,
3511 &lr_bitsize, &lr_bitpos, &lr_mode,
3512 &lr_unsignedp, &volatilep, &lr_mask,
3514 rl_inner = decode_field_reference (rl_arg,
3515 &rl_bitsize, &rl_bitpos, &rl_mode,
3516 &rl_unsignedp, &volatilep, &rl_mask,
3518 rr_inner = decode_field_reference (rr_arg,
3519 &rr_bitsize, &rr_bitpos, &rr_mode,
3520 &rr_unsignedp, &volatilep, &rr_mask,
3523 /* It must be true that the inner operation on the lhs of each
3524 comparison must be the same if we are to be able to do anything.
3525 Then see if we have constants. If not, the same must be true for
3527 if (volatilep || ll_inner == 0 || rl_inner == 0
3528 || ! operand_equal_p (ll_inner, rl_inner, 0))
3531 if (TREE_CODE (lr_arg) == INTEGER_CST
3532 && TREE_CODE (rr_arg) == INTEGER_CST)
3533 l_const = lr_arg, r_const = rr_arg;
3534 else if (lr_inner == 0 || rr_inner == 0
3535 || ! operand_equal_p (lr_inner, rr_inner, 0))
3538 l_const = r_const = 0;
3540 /* If either comparison code is not correct for our logical operation,
3541 fail. However, we can convert a one-bit comparison against zero into
3542 the opposite comparison against that bit being set in the field. */
3544 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
3545 if (lcode != wanted_code)
3547 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
3549 /* Make the left operand unsigned, since we are only interested
3550 in the value of one bit. Otherwise we are doing the wrong
3559 /* This is analogous to the code for l_const above. */
3560 if (rcode != wanted_code)
3562 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
3571 /* See if we can find a mode that contains both fields being compared on
3572 the left. If we can't, fail. Otherwise, update all constants and masks
3573 to be relative to a field of that size. */
3574 first_bit = MIN (ll_bitpos, rl_bitpos);
3575 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
3576 lnmode = get_best_mode (end_bit - first_bit, first_bit,
3577 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
3579 if (lnmode == VOIDmode)
3582 lnbitsize = GET_MODE_BITSIZE (lnmode);
3583 lnbitpos = first_bit & ~ (lnbitsize - 1);
3584 lntype = type_for_size (lnbitsize, 1);
3585 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
3587 if (BYTES_BIG_ENDIAN)
3589 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
3590 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
3593 ll_mask = const_binop (LSHIFT_EXPR, convert (lntype, ll_mask),
3594 size_int (xll_bitpos), 0);
3595 rl_mask = const_binop (LSHIFT_EXPR, convert (lntype, rl_mask),
3596 size_int (xrl_bitpos), 0);
3600 l_const = convert (lntype, l_const);
3601 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
3602 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
3603 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
3604 fold (build1 (BIT_NOT_EXPR,
3608 warning ("comparison is always %d", wanted_code == NE_EXPR);
3610 return convert (truth_type,
3611 wanted_code == NE_EXPR
3612 ? integer_one_node : integer_zero_node);
3617 r_const = convert (lntype, r_const);
3618 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
3619 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
3620 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
3621 fold (build1 (BIT_NOT_EXPR,
3625 warning ("comparison is always %d", wanted_code == NE_EXPR);
3627 return convert (truth_type,
3628 wanted_code == NE_EXPR
3629 ? integer_one_node : integer_zero_node);
3633 /* If the right sides are not constant, do the same for it. Also,
3634 disallow this optimization if a size or signedness mismatch occurs
3635 between the left and right sides. */
3638 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
3639 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
3640 /* Make sure the two fields on the right
3641 correspond to the left without being swapped. */
3642 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
3645 first_bit = MIN (lr_bitpos, rr_bitpos);
3646 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
3647 rnmode = get_best_mode (end_bit - first_bit, first_bit,
3648 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
3650 if (rnmode == VOIDmode)
3653 rnbitsize = GET_MODE_BITSIZE (rnmode);
3654 rnbitpos = first_bit & ~ (rnbitsize - 1);
3655 rntype = type_for_size (rnbitsize, 1);
3656 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
3658 if (BYTES_BIG_ENDIAN)
3660 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
3661 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
3664 lr_mask = const_binop (LSHIFT_EXPR, convert (rntype, lr_mask),
3665 size_int (xlr_bitpos), 0);
3666 rr_mask = const_binop (LSHIFT_EXPR, convert (rntype, rr_mask),
3667 size_int (xrr_bitpos), 0);
3669 /* Make a mask that corresponds to both fields being compared.
3670 Do this for both items being compared. If the operands are the
3671 same size and the bits being compared are in the same position
3672 then we can do this by masking both and comparing the masked
3674 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
3675 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
3676 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
3678 lhs = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
3679 ll_unsignedp || rl_unsignedp);
3680 if (! all_ones_mask_p (ll_mask, lnbitsize))
3681 lhs = build (BIT_AND_EXPR, lntype, lhs, ll_mask);
3683 rhs = make_bit_field_ref (lr_inner, rntype, rnbitsize, rnbitpos,
3684 lr_unsignedp || rr_unsignedp);
3685 if (! all_ones_mask_p (lr_mask, rnbitsize))
3686 rhs = build (BIT_AND_EXPR, rntype, rhs, lr_mask);
3688 return build (wanted_code, truth_type, lhs, rhs);
3691 /* There is still another way we can do something: If both pairs of
3692 fields being compared are adjacent, we may be able to make a wider
3693 field containing them both.
3695 Note that we still must mask the lhs/rhs expressions. Furthermore,
3696 the mask must be shifted to account for the shift done by
3697 make_bit_field_ref. */
3698 if ((ll_bitsize + ll_bitpos == rl_bitpos
3699 && lr_bitsize + lr_bitpos == rr_bitpos)
3700 || (ll_bitpos == rl_bitpos + rl_bitsize
3701 && lr_bitpos == rr_bitpos + rr_bitsize))
3705 lhs = make_bit_field_ref (ll_inner, lntype, ll_bitsize + rl_bitsize,
3706 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
3707 rhs = make_bit_field_ref (lr_inner, rntype, lr_bitsize + rr_bitsize,
3708 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
3710 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
3711 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
3712 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
3713 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);
3715 /* Convert to the smaller type before masking out unwanted bits. */
3717 if (lntype != rntype)
3719 if (lnbitsize > rnbitsize)
3721 lhs = convert (rntype, lhs);
3722 ll_mask = convert (rntype, ll_mask);
3725 else if (lnbitsize < rnbitsize)
3727 rhs = convert (lntype, rhs);
3728 lr_mask = convert (lntype, lr_mask);
3733 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
3734 lhs = build (BIT_AND_EXPR, type, lhs, ll_mask);
3736 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
3737 rhs = build (BIT_AND_EXPR, type, rhs, lr_mask);
3739 return build (wanted_code, truth_type, lhs, rhs);
3745 /* Handle the case of comparisons with constants. If there is something in
3746 common between the masks, those bits of the constants must be the same.
3747 If not, the condition is always false. Test for this to avoid generating
3748 incorrect code below. */
3749 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
3750 if (! integer_zerop (result)
3751 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
3752 const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
3754 if (wanted_code == NE_EXPR)
3756 warning ("`or' of unmatched not-equal tests is always 1");
3757 return convert (truth_type, integer_one_node);
3761 warning ("`and' of mutually exclusive equal-tests is always 0");
3762 return convert (truth_type, integer_zero_node);
3766 /* Construct the expression we will return. First get the component
3767 reference we will make. Unless the mask is all ones the width of
3768 that field, perform the mask operation. Then compare with the
3770 result = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
3771 ll_unsignedp || rl_unsignedp);
3773 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
3774 if (! all_ones_mask_p (ll_mask, lnbitsize))
3775 result = build (BIT_AND_EXPR, lntype, result, ll_mask);
3777 return build (wanted_code, truth_type, result,
3778 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
3781 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
3785 optimize_minmax_comparison (t)
3788 tree type = TREE_TYPE (t);
3789 tree arg0 = TREE_OPERAND (t, 0);
3790 enum tree_code op_code;
3791 tree comp_const = TREE_OPERAND (t, 1);
3793 int consts_equal, consts_lt;
/* Strip sign-preserving conversions so we look at the MIN_EXPR or
   MAX_EXPR itself.  */
3796 STRIP_SIGN_NOPS (arg0);
3798 op_code = TREE_CODE (arg0);
/* MINMAX_CONST is the constant bound inside the MIN/MAX; record how it
   orders relative to the constant we are comparing against.  */
3799 minmax_const = TREE_OPERAND (arg0, 1);
3800 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
3801 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
3802 inner = TREE_OPERAND (arg0, 0);
3804 /* If something does not permit us to optimize, return the original tree.
   Both constants must be overflow-free INTEGER_CSTs for the equality and
   ordering tests above to be meaningful.  */
3805 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
3806 || TREE_CODE (comp_const) != INTEGER_CST
3807 || TREE_CONSTANT_OVERFLOW (comp_const)
3808 || TREE_CODE (minmax_const) != INTEGER_CST
3809 || TREE_CONSTANT_OVERFLOW (minmax_const))
3812 /* Now handle all the various comparison codes. We only handle EQ_EXPR
3813 and GT_EXPR, doing the rest with recursive calls using logical
3815 switch (TREE_CODE (t))
/* For NE, LT, and LE: invert the comparison, optimize the inverse,
   then invert the result back.  */
3817 case NE_EXPR: case LT_EXPR: case LE_EXPR:
3819 invert_truthvalue (optimize_minmax_comparison (invert_truthvalue (t)));
/* Decompose into (arg0 == c) || (arg0 > c) and optimize each half.  */
3823 fold (build (TRUTH_ORIF_EXPR, type,
3824 optimize_minmax_comparison
3825 (build (EQ_EXPR, type, arg0, comp_const)),
3826 optimize_minmax_comparison
3827 (build (GT_EXPR, type, arg0, comp_const))));
3830 if (op_code == MAX_EXPR && consts_equal)
3831 /* MAX (X, 0) == 0 -> X <= 0 */
3832 return fold (build (LE_EXPR, type, inner, comp_const));
3834 else if (op_code == MAX_EXPR && consts_lt)
3835 /* MAX (X, 0) == 5 -> X == 5 */
3836 return fold (build (EQ_EXPR, type, inner, comp_const));
3838 else if (op_code == MAX_EXPR)
3839 /* MAX (X, 0) == -1 -> false */
3840 return omit_one_operand (type, integer_zero_node, inner);
3842 else if (consts_equal)
3843 /* MIN (X, 0) == 0 -> X >= 0 */
3844 return fold (build (GE_EXPR, type, inner, comp_const));
3847 /* MIN (X, 0) == 5 -> false */
3848 return omit_one_operand (type, integer_zero_node, inner);
3851 /* MIN (X, 0) == -1 -> X == -1 */
3852 return fold (build (EQ_EXPR, type, inner, comp_const));
3855 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
3856 /* MAX (X, 0) > 0 -> X > 0
3857 MAX (X, 0) > 5 -> X > 5 */
3858 return fold (build (GT_EXPR, type, inner, comp_const));
3860 else if (op_code == MAX_EXPR)
3861 /* MAX (X, 0) > -1 -> true */
3862 return omit_one_operand (type, integer_one_node, inner);
3864 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
3865 /* MIN (X, 0) > 0 -> false
3866 MIN (X, 0) > 5 -> false */
3867 return omit_one_operand (type, integer_zero_node, inner);
3870 /* MIN (X, 0) > -1 -> X > -1 */
3871 return fold (build (GT_EXPR, type, inner, comp_const));
3878 /* T is an integer expression that is being multiplied, divided, or taken a
3879 modulus (CODE says which and what kind of divide or modulus) by a
3880 constant C. See if we can eliminate that operation by folding it with
3881 other operations already in T. WIDE_TYPE, if non-null, is a type that
3882 should be used for the computation if wider than our type.
3884 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
3885 (X * 2) + (Y * 4). We must, however, be assured that either the original
3886 expression would not overflow or that overflow is undefined for the type
3887 in the language in question.
3889 We also canonicalize (X + 7) * 4 into X * 4 + 28 in the hope that either
3890 the machine has a multiply-accumulate insn or that this is part of an
3891 addressing calculation.
3893 If we return a non-null expression, it is an equivalent form of the
3894 original computation, but need not be in the original type. */
3897 extract_muldiv (t, c, code, wide_type)
3900 enum tree_code code;
3903 tree type = TREE_TYPE (t);
3904 enum tree_code tcode = TREE_CODE (t);
/* CTYPE is the type we compute in: WIDE_TYPE when it is strictly wider
   than T's type, otherwise T's own type.  */
3905 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
3906 > GET_MODE_SIZE (TYPE_MODE (type)))
3907 ? wide_type : type);
3909 int same_p = tcode == code;
3910 tree op0 = NULL_TREE, op1 = NULL_TREE;
3912 /* Don't deal with constants of zero here; they confuse the code below. */
3913 if (integer_zerop (c))
/* Fetch the operand(s) of T for the unary ('1') and binary ('2')
   cases handled below.  */
3916 if (TREE_CODE_CLASS (tcode) == '1')
3917 op0 = TREE_OPERAND (t, 0);
3919 if (TREE_CODE_CLASS (tcode) == '2')
3920 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
3922 /* Note that we need not handle conditional operations here since fold
3923 already handles those cases. So just do arithmetic here. */
3927 /* For a constant, we can always simplify if we are a multiply
3928 or (for divide and modulus) if it is a multiple of our constant. */
3929 if (code == MULT_EXPR
3930 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
3931 return const_binop (code, convert (ctype, t), convert (ctype, c), 0);
3934 case CONVERT_EXPR: case NON_LVALUE_EXPR: case NOP_EXPR:
3935 /* If op0 is an expression, and is unsigned, and the type is
3936 smaller than ctype, then we cannot widen the expression. */
3937 if ((TREE_CODE_CLASS (TREE_CODE (op0)) == '<'
3938 || TREE_CODE_CLASS (TREE_CODE (op0)) == '1'
3939 || TREE_CODE_CLASS (TREE_CODE (op0)) == '2'
3940 || TREE_CODE_CLASS (TREE_CODE (op0)) == 'e'
3941 && TREE_UNSIGNED (TREE_TYPE (op0))
3942 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
3943 && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
3944 && (GET_MODE_SIZE (TYPE_MODE (ctype))
3945 > GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0)))))
3948 /* Pass the constant down and see if we can make a simplification. If
3949 we can, replace this expression with the inner simplification for
3950 possible later conversion to our or some other type. */
3951 if (0 != (t1 = extract_muldiv (op0, convert (TREE_TYPE (op0), c), code,
3952 code == MULT_EXPR ? ctype : NULL_TREE)))
3956 case NEGATE_EXPR: case ABS_EXPR:
/* -X and |X| distribute over CODE: simplify the operand, then
   reapply the unary operation in CTYPE.  */
3957 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
3958 return fold (build1 (tcode, ctype, convert (ctype, t1)));
3961 case MIN_EXPR: case MAX_EXPR:
3962 /* If widening the type changes the signedness, then we can't perform
3963 this optimization as that changes the result. */
3964 if (TREE_UNSIGNED (ctype) != TREE_UNSIGNED (type))
3967 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
3968 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0
3969 && (t2 = extract_muldiv (op1, c, code, wide_type)) != 0)
/* Multiplying or dividing by a negative constant reverses the
   ordering, so MIN and MAX must be swapped.  */
3971 if (tree_int_cst_sgn (c) < 0)
3972 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
3974 return fold (build (tcode, ctype, convert (ctype, t1),
3975 convert (ctype, t2)));
3979 case WITH_RECORD_EXPR:
/* Simplify the value operand and keep the record operand as-is.  */
3980 if ((t1 = extract_muldiv (TREE_OPERAND (t, 0), c, code, wide_type)) != 0)
3981 return build (WITH_RECORD_EXPR, TREE_TYPE (t1), t1,
3982 TREE_OPERAND (t, 1));
3986 /* If this has not been evaluated and the operand has no side effects,
3987 we can see if we can do something inside it and make a new one.
3988 Note that this test is overly conservative since we can do this
3989 if the only reason it had side effects is that it was another
3990 similar SAVE_EXPR, but that isn't worth bothering with. */
3991 if (SAVE_EXPR_RTL (t) == 0 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0))
3992 && 0 != (t1 = extract_muldiv (TREE_OPERAND (t, 0), c, code,
/* Preserve the SAVE_EXPR's persistence and pending-size status on
   the replacement.  */
3995 t1 = save_expr (t1);
3996 if (SAVE_EXPR_PERSISTENT_P (t) && TREE_CODE (t1) == SAVE_EXPR)
3997 SAVE_EXPR_PERSISTENT_P (t1) = 1;
3998 if (is_pending_size (t))
3999 put_pending_size (t1);
4004 case LSHIFT_EXPR: case RSHIFT_EXPR:
4005 /* If the second operand is constant, this is a multiplication
4006 or floor division, by a power of two, so we can treat it that
4007 way unless the multiplier or divisor overflows. */
4008 if (TREE_CODE (op1) == INTEGER_CST
4009 /* const_binop may not detect overflow correctly,
4010 so check for it explicitly here. */
4011 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
4012 && TREE_INT_CST_HIGH (op1) == 0
4013 && 0 != (t1 = convert (ctype,
4014 const_binop (LSHIFT_EXPR, size_one_node,
4016 && ! TREE_OVERFLOW (t1))
4017 return extract_muldiv (build (tcode == LSHIFT_EXPR
4018 ? MULT_EXPR : FLOOR_DIV_EXPR,
4019 ctype, convert (ctype, op0), t1),
4020 c, code, wide_type);
4023 case PLUS_EXPR: case MINUS_EXPR:
4024 /* See if we can eliminate the operation on both sides. If we can, we
4025 can return a new PLUS or MINUS. If we can't, the only remaining
4026 cases where we can do anything are if the second operand is a
4028 t1 = extract_muldiv (op0, c, code, wide_type);
4029 t2 = extract_muldiv (op1, c, code, wide_type);
4030 if (t1 != 0 && t2 != 0
4031 && (code == MULT_EXPR
4032 /* If not multiplication, we can only do this if either operand
4033 is divisible by c. */
4034 || multiple_of_p (ctype, op0, c)
4035 || multiple_of_p (ctype, op1, c)))
4036 return fold (build (tcode, ctype, convert (ctype, t1),
4037 convert (ctype, t2)));
4039 /* If this was a subtraction, negate OP1 and set it to be an addition.
4040 This simplifies the logic below. */
4041 if (tcode == MINUS_EXPR)
4042 tcode = PLUS_EXPR, op1 = negate_expr (op1);
4044 if (TREE_CODE (op1) != INTEGER_CST)
4047 /* If either OP1 or C are negative, this optimization is not safe for
4048 some of the division and remainder types while for others we need
4049 to change the code. */
4050 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
4052 if (code == CEIL_DIV_EXPR)
4053 code = FLOOR_DIV_EXPR;
4054 else if (code == FLOOR_DIV_EXPR)
4055 code = CEIL_DIV_EXPR;
4056 else if (code != MULT_EXPR
4057 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
4061 /* If it's a multiply or a division/modulus operation of a multiple
4062 of our constant, do the operation and verify it doesn't overflow. */
4063 if (code == MULT_EXPR
4064 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
4066 op1 = const_binop (code, convert (ctype, op1), convert (ctype, c), 0);
4067 if (op1 == 0 || TREE_OVERFLOW (op1))
4073 /* If we have an unsigned type that is not a sizetype, we cannot widen
4074 the operation since it will change the result if the original
4075 computation overflowed. */
4076 if (TREE_UNSIGNED (ctype)
4077 && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
4081 /* If we were able to eliminate our operation from the first side,
4082 apply our operation to the second side and reform the PLUS. */
4083 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
4084 return fold (build (tcode, ctype, convert (ctype, t1), op1));
4086 /* The last case is if we are a multiply. In that case, we can
4087 apply the distributive law to commute the multiply and addition
4088 if the multiplication of the constants doesn't overflow. */
4089 if (code == MULT_EXPR)
4090 return fold (build (tcode, ctype, fold (build (code, ctype,
4091 convert (ctype, op0),
4092 convert (ctype, c))),
4098 /* We have a special case here if we are doing something like
4099 (C * 8) % 4 since we know that's zero. */
4100 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
4101 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
4102 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
4103 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
4104 return omit_one_operand (type, integer_zero_node, op0);
4106 /* ... fall through ... */
4108 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
4109 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
4110 /* If we can extract our operation from the LHS, do so and return a
4111 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
4112 do something only if the second operand is a constant. */
4114 && (t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
4115 return fold (build (tcode, ctype, convert (ctype, t1),
4116 convert (ctype, op1)));
4117 else if (tcode == MULT_EXPR && code == MULT_EXPR
4118 && (t1 = extract_muldiv (op1, c, code, wide_type)) != 0)
4119 return fold (build (tcode, ctype, convert (ctype, op0),
4120 convert (ctype, t1)));
4121 else if (TREE_CODE (op1) != INTEGER_CST)
4124 /* If these are the same operation types, we can associate them
4125 assuming no overflow. */
4127 && 0 != (t1 = const_binop (MULT_EXPR, convert (ctype, op1),
4128 convert (ctype, c), 0))
4129 && ! TREE_OVERFLOW (t1))
4130 return fold (build (tcode, ctype, convert (ctype, op0), t1));
4132 /* If these operations "cancel" each other, we have the main
4133 optimizations of this pass, which occur when either constant is a
4134 multiple of the other, in which case we replace this with either an
4135 operation of CODE or TCODE.
4137 If we have an unsigned type that is not a sizetype, we cannot do
4138 this since it will change the result if the original computation
4140 if ((! TREE_UNSIGNED (ctype)
4141 || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
4142 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
4143 || (tcode == MULT_EXPR
4144 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
4145 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR)))
4147 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
4148 return fold (build (tcode, ctype, convert (ctype, op0),
4150 const_binop (TRUNC_DIV_EXPR,
4152 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
4153 return fold (build (code, ctype, convert (ctype, op0),
4155 const_binop (TRUNC_DIV_EXPR,
4167 /* If T contains a COMPOUND_EXPR which was inserted merely to evaluate
4168 S, a SAVE_EXPR, return the expression actually being evaluated. Note
4169 that we may sometimes modify the tree. */
4172 strip_compound_expr (t, s)
4176 enum tree_code code = TREE_CODE (t);
4178 /* See if this is the COMPOUND_EXPR we want to eliminate: its first
   operand is a conversion of S itself, so the compound exists only to
   force S's evaluation and we can return the real value directly.  */
4179 if (code == COMPOUND_EXPR && TREE_CODE (TREE_OPERAND (t, 0)) == CONVERT_EXPR
4180 && TREE_OPERAND (TREE_OPERAND (t, 0), 0) == s)
4181 return TREE_OPERAND (t, 1);
4183 /* See if this is a COND_EXPR or a simple arithmetic operator. We
4184 don't bother handling any other types; recurse into each operand,
   rewriting the tree in place.  */
4185 else if (code == COND_EXPR)
4187 TREE_OPERAND (t, 0) = strip_compound_expr (TREE_OPERAND (t, 0), s);
4188 TREE_OPERAND (t, 1) = strip_compound_expr (TREE_OPERAND (t, 1), s);
4189 TREE_OPERAND (t, 2) = strip_compound_expr (TREE_OPERAND (t, 2), s);
4191 else if (TREE_CODE_CLASS (code) == '1')
4192 TREE_OPERAND (t, 0) = strip_compound_expr (TREE_OPERAND (t, 0), s);
4193 else if (TREE_CODE_CLASS (code) == '<'
4194 || TREE_CODE_CLASS (code) == '2')
4196 TREE_OPERAND (t, 0) = strip_compound_expr (TREE_OPERAND (t, 0), s);
4197 TREE_OPERAND (t, 1) = strip_compound_expr (TREE_OPERAND (t, 1), s);
4203 /* Return a node which has the indicated constant VALUE (either 0 or
4204 1), and is of the indicated TYPE. */
4207 constant_boolean_node (value, type)
/* For plain `int', reuse the shared constant nodes.  */
4211 if (type == integer_type_node)
4212 return value ? integer_one_node : integer_zero_node;
4213 else if (TREE_CODE (type) == BOOLEAN_TYPE)
/* For a true boolean type, let the language's truthvalue_conversion
   produce its canonical truth node.  */
4214 return truthvalue_conversion (value ? integer_one_node :
/* Otherwise build a fresh INTEGER_CST and give it the requested type.  */
4218 tree t = build_int_2 (value, 0);
4220 TREE_TYPE (t) = type;
4225 /* Utility function for the following routine, to see how complex a nesting of
4226 COND_EXPRs can be. EXPR is the expression and LIMIT is a count beyond which
4227 we don't care (to avoid spending too much time on complex expressions.). */
4230 count_cond (expr, lim)
/* A non-COND_EXPR contributes no conditionals.  */
4236 if (TREE_CODE (expr) != COND_EXPR)
/* Count both arms recursively, charging the true arm's count against
   the remaining limit before visiting the false arm.  */
4241 ctrue = count_cond (TREE_OPERAND (expr, 1), lim - 1);
4242 cfalse = count_cond (TREE_OPERAND (expr, 2), lim - 1 - ctrue);
/* Clamp the total (this node plus both arms) to the caller's limit.  */
4243 return MIN (lim, 1 + ctrue + cfalse);
4246 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
4247 Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
4248 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
4249 expression, and ARG to `a'. If COND_FIRST_P is non-zero, then the
4250 COND is the first argument to CODE; otherwise (as in the example
4251 given here), it is the second argument. TYPE is the type of the
4252 original expression. */
4255 fold_binary_op_with_conditional_arg (code, type, cond, arg, cond_first_p)
4256 enum tree_code code;
4262 tree test, true_value, false_value;
4263 tree lhs = NULL_TREE;
4264 tree rhs = NULL_TREE;
4265 /* In the end, we'll produce a COND_EXPR. Both arms of the
4266 conditional expression will be binary operations. The left-hand
4267 side of the expression to be executed if the condition is true
4268 will be pointed to by TRUE_LHS. Similarly, the right-hand side
4269 of the expression to be executed if the condition is true will be
4270 pointed to by TRUE_RHS. FALSE_LHS and FALSE_RHS are analogous --
4271 but apply to the expression to be executed if the conditional is
4277 /* These are the codes to use for the left-hand side and right-hand
4278 side of the COND_EXPR. Normally, they are the same as CODE. */
4279 enum tree_code lhs_code = code;
4280 enum tree_code rhs_code = code;
4281 /* And these are the types of the expressions. */
4282 tree lhs_type = type;
4283 tree rhs_type = type;
/* Wire the operand pointers so that COND's arm values and ARG land on
   the correct sides of CODE, as selected by COND_FIRST_P.  */
4287 true_rhs = false_rhs = &arg;
4288 true_lhs = &true_value;
4289 false_lhs = &false_value;
4293 true_lhs = false_lhs = &arg;
4294 true_rhs = &true_value;
4295 false_rhs = &false_value;
4298 if (TREE_CODE (cond) == COND_EXPR)
4300 test = TREE_OPERAND (cond, 0);
4301 true_value = TREE_OPERAND (cond, 1);
4302 false_value = TREE_OPERAND (cond, 2);
4303 /* If this operand throws an expression, then it does not make
4304 sense to try to perform a logical or arithmetic operation
4305 involving it. Instead of building `a + throw 3' for example,
4306 we simply build `a, throw 3'. */
4307 if (VOID_TYPE_P (TREE_TYPE (true_value)))
4309 lhs_code = COMPOUND_EXPR;
4311 lhs_type = void_type_node;
4313 if (VOID_TYPE_P (TREE_TYPE (false_value)))
4315 rhs_code = COMPOUND_EXPR;
4317 rhs_type = void_type_node;
/* COND is not a COND_EXPR (e.g. a comparison like `x < y'); treat it
   as `cond ? 1 : 0' in the condition's own type.  */
4322 tree testtype = TREE_TYPE (cond);
4324 true_value = convert (testtype, integer_one_node);
4325 false_value = convert (testtype, integer_zero_node);
4328 /* If ARG is complex we want to make sure we only evaluate
4329 it once. Though this is only required if it is volatile, it
4330 might be more efficient even if it is not. However, if we
4331 succeed in folding one part to a constant, we do not need
4332 to make this SAVE_EXPR. Since we do this optimization
4333 primarily to see if we do end up with constant and this
4334 SAVE_EXPR interferes with later optimizations, suppressing
4335 it when we can is important.
4337 If we are not in a function, we can't make a SAVE_EXPR, so don't
4338 try to do so. Don't try to see if the result is a constant
4339 if an arm is a COND_EXPR since we get exponential behavior
4342 if (TREE_CODE (arg) != SAVE_EXPR && ! TREE_CONSTANT (arg)
4343 && global_bindings_p () == 0
4344 && ((TREE_CODE (arg) != VAR_DECL
4345 && TREE_CODE (arg) != PARM_DECL)
4346 || TREE_SIDE_EFFECTS (arg)))
4348 if (TREE_CODE (true_value) != COND_EXPR)
4349 lhs = fold (build (lhs_code, lhs_type, *true_lhs, *true_rhs));
4351 if (TREE_CODE (false_value) != COND_EXPR)
4352 rhs = fold (build (rhs_code, rhs_type, *false_lhs, *false_rhs));
/* Neither arm folded to a constant: wrap ARG in a SAVE_EXPR so it is
   only evaluated once, and rebuild both arms below.  */
4354 if ((lhs == 0 || ! TREE_CONSTANT (lhs))
4355 && (rhs == 0 || !TREE_CONSTANT (rhs)))
4356 arg = save_expr (arg), lhs = rhs = 0;
4360 lhs = fold (build (lhs_code, lhs_type, *true_lhs, *true_rhs));
4362 rhs = fold (build (rhs_code, rhs_type, *false_lhs, *false_rhs));
4364 test = fold (build (COND_EXPR, type, test, lhs, rhs));
/* If we made a SAVE_EXPR, evaluate ARG first for its side effects and
   strip the now-redundant compound from the folded result.  */
4366 if (TREE_CODE (arg) == SAVE_EXPR)
4367 return build (COMPOUND_EXPR, type,
4368 convert (void_type_node, arg),
4369 strip_compound_expr (test, arg));
4371 return convert (type, test);
4375 /* Perform constant folding and related simplification of EXPR.
4376 The related simplifications include x*1 => x, x*0 => 0, etc.,
4377 and application of the associative law.
4378 NOP_EXPR conversions may be removed freely (as long as we
4379 are careful not to change the C type of the overall expression)
4380 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
4381 but we can constant-fold them if they have constant operands. */
4388 tree t1 = NULL_TREE;
4390 tree type = TREE_TYPE (expr);
4391 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
4392 enum tree_code code = TREE_CODE (t);
4393 int kind = TREE_CODE_CLASS (code);
4395 /* WINS will be nonzero when the switch is done
4396 if all operands are constant. */
4399 /* Don't try to process an RTL_EXPR since its operands aren't trees.
4400 Likewise for a SAVE_EXPR that's already been evaluated. */
4401 if (code == RTL_EXPR || (code == SAVE_EXPR && SAVE_EXPR_RTL (t) != 0))
4404 /* Return right away if a constant. */
4408 #ifdef MAX_INTEGER_COMPUTATION_MODE
4409 check_max_integer_computation_mode (expr);
4412 if (code == NOP_EXPR || code == FLOAT_EXPR || code == CONVERT_EXPR)
4416 /* Special case for conversion ops that can have fixed point args. */
4417 arg0 = TREE_OPERAND (t, 0);
4419 /* Don't use STRIP_NOPS, because signedness of argument type matters. */
4421 STRIP_SIGN_NOPS (arg0);
4423 if (arg0 != 0 && TREE_CODE (arg0) == COMPLEX_CST)
4424 subop = TREE_REALPART (arg0);
4428 if (subop != 0 && TREE_CODE (subop) != INTEGER_CST
4429 && TREE_CODE (subop) != REAL_CST
4431 /* Note that TREE_CONSTANT isn't enough:
4432 static var addresses are constant but we can't
4433 do arithmetic on them. */
4436 else if (IS_EXPR_CODE_CLASS (kind) || kind == 'r')
4438 int len = first_rtl_op (code);
4440 for (i = 0; i < len; i++)
4442 tree op = TREE_OPERAND (t, i);
4446 continue; /* Valid for CALL_EXPR, at least. */
4448 if (kind == '<' || code == RSHIFT_EXPR)
4450 /* Signedness matters here. Perhaps we can refine this
4452 STRIP_SIGN_NOPS (op);
4455 /* Strip any conversions that don't change the mode. */
4458 if (TREE_CODE (op) == COMPLEX_CST)
4459 subop = TREE_REALPART (op);
4463 if (TREE_CODE (subop) != INTEGER_CST
4464 && TREE_CODE (subop) != REAL_CST)
4465 /* Note that TREE_CONSTANT isn't enough:
4466 static var addresses are constant but we can't
4467 do arithmetic on them. */
4477 /* If this is a commutative operation, and ARG0 is a constant, move it
4478 to ARG1 to reduce the number of tests below. */
4479 if ((code == PLUS_EXPR || code == MULT_EXPR || code == MIN_EXPR
4480 || code == MAX_EXPR || code == BIT_IOR_EXPR || code == BIT_XOR_EXPR
4481 || code == BIT_AND_EXPR)
4482 && (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST))
4484 tem = arg0; arg0 = arg1; arg1 = tem;
4486 tem = TREE_OPERAND (t, 0); TREE_OPERAND (t, 0) = TREE_OPERAND (t, 1);
4487 TREE_OPERAND (t, 1) = tem;
4490 /* Now WINS is set as described above,
4491 ARG0 is the first operand of EXPR,
4492 and ARG1 is the second operand (if it has more than one operand).
4494 First check for cases where an arithmetic operation is applied to a
4495 compound, conditional, or comparison operation. Push the arithmetic
4496 operation inside the compound or conditional to see if any folding
4497 can then be done. Convert comparison to conditional for this purpose.
4498 This also optimizes non-constant cases that used to be done in
4501 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
4502 one of the operands is a comparison and the other is a comparison, a
4503 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
4504 code below would make the expression more complex. Change it to a
4505 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
4506 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
4508 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
4509 || code == EQ_EXPR || code == NE_EXPR)
4510 && ((truth_value_p (TREE_CODE (arg0))
4511 && (truth_value_p (TREE_CODE (arg1))
4512 || (TREE_CODE (arg1) == BIT_AND_EXPR
4513 && integer_onep (TREE_OPERAND (arg1, 1)))))
4514 || (truth_value_p (TREE_CODE (arg1))
4515 && (truth_value_p (TREE_CODE (arg0))
4516 || (TREE_CODE (arg0) == BIT_AND_EXPR
4517 && integer_onep (TREE_OPERAND (arg0, 1)))))))
4519 t = fold (build (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
4520 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
4524 if (code == EQ_EXPR)
4525 t = invert_truthvalue (t);
4530 if (TREE_CODE_CLASS (code) == '1')
4532 if (TREE_CODE (arg0) == COMPOUND_EXPR)
4533 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
4534 fold (build1 (code, type, TREE_OPERAND (arg0, 1))));
4535 else if (TREE_CODE (arg0) == COND_EXPR)
4537 t = fold (build (COND_EXPR, type, TREE_OPERAND (arg0, 0),
4538 fold (build1 (code, type, TREE_OPERAND (arg0, 1))),
4539 fold (build1 (code, type, TREE_OPERAND (arg0, 2)))));
4541 /* If this was a conversion, and all we did was to move into
4542 inside the COND_EXPR, bring it back out. But leave it if
4543 it is a conversion from integer to integer and the
4544 result precision is no wider than a word since such a
4545 conversion is cheap and may be optimized away by combine,
4546 while it couldn't if it were outside the COND_EXPR. Then return
4547 so we don't get into an infinite recursion loop taking the
4548 conversion out and then back in. */
4550 if ((code == NOP_EXPR || code == CONVERT_EXPR
4551 || code == NON_LVALUE_EXPR)
4552 && TREE_CODE (t) == COND_EXPR
4553 && TREE_CODE (TREE_OPERAND (t, 1)) == code
4554 && TREE_CODE (TREE_OPERAND (t, 2)) == code
4555 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0))
4556 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 2), 0)))
4557 && ! (INTEGRAL_TYPE_P (TREE_TYPE (t))
4559 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0))))
4560 && TYPE_PRECISION (TREE_TYPE (t)) <= BITS_PER_WORD))
4561 t = build1 (code, type,
4563 TREE_TYPE (TREE_OPERAND
4564 (TREE_OPERAND (t, 1), 0)),
4565 TREE_OPERAND (t, 0),
4566 TREE_OPERAND (TREE_OPERAND (t, 1), 0),
4567 TREE_OPERAND (TREE_OPERAND (t, 2), 0)));
4570 else if (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<')
4571 return fold (build (COND_EXPR, type, arg0,
4572 fold (build1 (code, type, integer_one_node)),
4573 fold (build1 (code, type, integer_zero_node))));
4575 else if (TREE_CODE_CLASS (code) == '2'
4576 || TREE_CODE_CLASS (code) == '<')
4578 if (TREE_CODE (arg1) == COMPOUND_EXPR)
4579 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
4580 fold (build (code, type,
4581 arg0, TREE_OPERAND (arg1, 1))));
4582 else if ((TREE_CODE (arg1) == COND_EXPR
4583 || (TREE_CODE_CLASS (TREE_CODE (arg1)) == '<'
4584 && TREE_CODE_CLASS (code) != '<'))
4585 && (TREE_CODE (arg0) != COND_EXPR
4586 || count_cond (arg0, 25) + count_cond (arg1, 25) <= 25)
4587 && (! TREE_SIDE_EFFECTS (arg0)
4588 || (global_bindings_p () == 0
4589 && ! contains_placeholder_p (arg0))))
4591 fold_binary_op_with_conditional_arg (code, type, arg1, arg0,
4592 /*cond_first_p=*/0);
4593 else if (TREE_CODE (arg0) == COMPOUND_EXPR)
4594 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
4595 fold (build (code, type, TREE_OPERAND (arg0, 1), arg1)));
4596 else if ((TREE_CODE (arg0) == COND_EXPR
4597 || (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
4598 && TREE_CODE_CLASS (code) != '<'))
4599 && (TREE_CODE (arg1) != COND_EXPR
4600 || count_cond (arg0, 25) + count_cond (arg1, 25) <= 25)
4601 && (! TREE_SIDE_EFFECTS (arg1)
4602 || (global_bindings_p () == 0
4603 && ! contains_placeholder_p (arg1))))
4605 fold_binary_op_with_conditional_arg (code, type, arg0, arg1,
4606 /*cond_first_p=*/1);
4608 else if (TREE_CODE_CLASS (code) == '<'
4609 && TREE_CODE (arg0) == COMPOUND_EXPR)
4610 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
4611 fold (build (code, type, TREE_OPERAND (arg0, 1), arg1)));
4612 else if (TREE_CODE_CLASS (code) == '<'
4613 && TREE_CODE (arg1) == COMPOUND_EXPR)
4614 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
4615 fold (build (code, type, arg0, TREE_OPERAND (arg1, 1))));
4628 return fold (DECL_INITIAL (t));
4633 case FIX_TRUNC_EXPR:
4634 /* Other kinds of FIX are not handled properly by fold_convert. */
4636 if (TREE_TYPE (TREE_OPERAND (t, 0)) == TREE_TYPE (t))
4637 return TREE_OPERAND (t, 0);
4639 /* Handle cases of two conversions in a row. */
4640 if (TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
4641 || TREE_CODE (TREE_OPERAND (t, 0)) == CONVERT_EXPR)
4643 tree inside_type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
4644 tree inter_type = TREE_TYPE (TREE_OPERAND (t, 0));
4645 tree final_type = TREE_TYPE (t);
4646 int inside_int = INTEGRAL_TYPE_P (inside_type);
4647 int inside_ptr = POINTER_TYPE_P (inside_type);
4648 int inside_float = FLOAT_TYPE_P (inside_type);
4649 unsigned int inside_prec = TYPE_PRECISION (inside_type);
4650 int inside_unsignedp = TREE_UNSIGNED (inside_type);
4651 int inter_int = INTEGRAL_TYPE_P (inter_type);
4652 int inter_ptr = POINTER_TYPE_P (inter_type);
4653 int inter_float = FLOAT_TYPE_P (inter_type);
4654 unsigned int inter_prec = TYPE_PRECISION (inter_type);
4655 int inter_unsignedp = TREE_UNSIGNED (inter_type);
4656 int final_int = INTEGRAL_TYPE_P (final_type);
4657 int final_ptr = POINTER_TYPE_P (final_type);
4658 int final_float = FLOAT_TYPE_P (final_type);
4659 unsigned int final_prec = TYPE_PRECISION (final_type);
4660 int final_unsignedp = TREE_UNSIGNED (final_type);
4662 /* In addition to the cases of two conversions in a row
4663 handled below, if we are converting something to its own
4664 type via an object of identical or wider precision, neither
4665 conversion is needed. */
4666 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (final_type)
4667 && ((inter_int && final_int) || (inter_float && final_float))
4668 && inter_prec >= final_prec)
4669 return convert (final_type, TREE_OPERAND (TREE_OPERAND (t, 0), 0));
4671 /* Likewise, if the intermediate and final types are either both
4672 float or both integer, we don't need the middle conversion if
4673 it is wider than the final type and doesn't change the signedness
4674 (for integers). Avoid this if the final type is a pointer
4675 since then we sometimes need the inner conversion. Likewise if
4676 the outer has a precision not equal to the size of its mode. */
4677 if ((((inter_int || inter_ptr) && (inside_int || inside_ptr))
4678 || (inter_float && inside_float))
4679 && inter_prec >= inside_prec
4680 && (inter_float || inter_unsignedp == inside_unsignedp)
4681 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (final_type))
4682 && TYPE_MODE (final_type) == TYPE_MODE (inter_type))
4684 return convert (final_type, TREE_OPERAND (TREE_OPERAND (t, 0), 0));
4686 /* If we have a sign-extension of a zero-extended value, we can
4687 replace that by a single zero-extension. */
4688 if (inside_int && inter_int && final_int
4689 && inside_prec < inter_prec && inter_prec < final_prec
4690 && inside_unsignedp && !inter_unsignedp)
4691 return convert (final_type, TREE_OPERAND (TREE_OPERAND (t, 0), 0));
4693 /* Two conversions in a row are not needed unless:
4694 - some conversion is floating-point (overstrict for now), or
4695 - the intermediate type is narrower than both initial and
4697 - the intermediate type and innermost type differ in signedness,
4698 and the outermost type is wider than the intermediate, or
4699 - the initial type is a pointer type and the precisions of the
4700 intermediate and final types differ, or
4701 - the final type is a pointer type and the precisions of the
4702 initial and intermediate types differ. */
4703 if (! inside_float && ! inter_float && ! final_float
4704 && (inter_prec > inside_prec || inter_prec > final_prec)
4705 && ! (inside_int && inter_int
4706 && inter_unsignedp != inside_unsignedp
4707 && inter_prec < final_prec)
4708 && ((inter_unsignedp && inter_prec > inside_prec)
4709 == (final_unsignedp && final_prec > inter_prec))
4710 && ! (inside_ptr && inter_prec != final_prec)
4711 && ! (final_ptr && inside_prec != inter_prec)
4712 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (final_type))
4713 && TYPE_MODE (final_type) == TYPE_MODE (inter_type))
4715 return convert (final_type, TREE_OPERAND (TREE_OPERAND (t, 0), 0));
4718 if (TREE_CODE (TREE_OPERAND (t, 0)) == MODIFY_EXPR
4719 && TREE_CONSTANT (TREE_OPERAND (TREE_OPERAND (t, 0), 1))
4720 /* Detect assigning a bitfield. */
4721 && !(TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 0)) == COMPONENT_REF
4722 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t, 0), 0), 1))))
4724 /* Don't leave an assignment inside a conversion
4725 unless assigning a bitfield. */
4726 tree prev = TREE_OPERAND (t, 0);
4727 TREE_OPERAND (t, 0) = TREE_OPERAND (prev, 1);
4728 /* First do the assignment, then return converted constant. */
4729 t = build (COMPOUND_EXPR, TREE_TYPE (t), prev, fold (t));
4735 TREE_CONSTANT (t) = TREE_CONSTANT (arg0);
4738 return fold_convert (t, arg0);
4740 case VIEW_CONVERT_EXPR:
4741 if (TREE_CODE (TREE_OPERAND (t, 0)) == VIEW_CONVERT_EXPR)
4742 return build1 (VIEW_CONVERT_EXPR, type,
4743 TREE_OPERAND (TREE_OPERAND (t, 0), 0));
4746 #if 0 /* This loses on &"foo"[0]. */
4751 /* Fold an expression like: "foo"[2] */
4752 if (TREE_CODE (arg0) == STRING_CST
4753 && TREE_CODE (arg1) == INTEGER_CST
4754 && compare_tree_int (arg1, TREE_STRING_LENGTH (arg0)) < 0)
4756 t = build_int_2 (TREE_STRING_POINTER (arg0)[TREE_INT_CST_LOW (arg))], 0);
4757 TREE_TYPE (t) = TREE_TYPE (TREE_TYPE (arg0));
4758 force_fit_type (t, 0);
4765 if (TREE_CODE (arg0) == CONSTRUCTOR)
4767 tree m = purpose_member (arg1, CONSTRUCTOR_ELTS (arg0));
4774 TREE_CONSTANT (t) = wins;
4780 if (TREE_CODE (arg0) == INTEGER_CST)
4782 unsigned HOST_WIDE_INT low;
4784 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
4785 TREE_INT_CST_HIGH (arg0),
4787 t = build_int_2 (low, high);
4788 TREE_TYPE (t) = type;
4790 = (TREE_OVERFLOW (arg0)
4791 | force_fit_type (t, overflow && !TREE_UNSIGNED (type)));
4792 TREE_CONSTANT_OVERFLOW (t)
4793 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg0);
4795 else if (TREE_CODE (arg0) == REAL_CST)
4796 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
4798 else if (TREE_CODE (arg0) == NEGATE_EXPR)
4799 return TREE_OPERAND (arg0, 0);
4801 /* Convert - (a - b) to (b - a) for non-floating-point. */
4802 else if (TREE_CODE (arg0) == MINUS_EXPR
4803 && (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
4804 return build (MINUS_EXPR, type, TREE_OPERAND (arg0, 1),
4805 TREE_OPERAND (arg0, 0));
4812 if (TREE_CODE (arg0) == INTEGER_CST)
4814 /* If the value is unsigned, then the absolute value is
4815 the same as the ordinary value. */
4816 if (TREE_UNSIGNED (type))
4818 /* Similarly, if the value is non-negative. */
4819 else if (INT_CST_LT (integer_minus_one_node, arg0))
4821 /* If the value is negative, then the absolute value is
4825 unsigned HOST_WIDE_INT low;
4827 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
4828 TREE_INT_CST_HIGH (arg0),
4830 t = build_int_2 (low, high);
4831 TREE_TYPE (t) = type;
4833 = (TREE_OVERFLOW (arg0)
4834 | force_fit_type (t, overflow));
4835 TREE_CONSTANT_OVERFLOW (t)
4836 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg0);
4839 else if (TREE_CODE (arg0) == REAL_CST)
4841 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
4842 t = build_real (type,
4843 REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
4846 else if (TREE_CODE (arg0) == ABS_EXPR || TREE_CODE (arg0) == NEGATE_EXPR)
4847 return build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0));
4851 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
4852 return convert (type, arg0);
4853 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
4854 return build (COMPLEX_EXPR, type,
4855 TREE_OPERAND (arg0, 0),
4856 negate_expr (TREE_OPERAND (arg0, 1)));
4857 else if (TREE_CODE (arg0) == COMPLEX_CST)
4858 return build_complex (type, TREE_REALPART (arg0),
4859 negate_expr (TREE_IMAGPART (arg0)));
4860 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
4861 return fold (build (TREE_CODE (arg0), type,
4862 fold (build1 (CONJ_EXPR, type,
4863 TREE_OPERAND (arg0, 0))),
4864 fold (build1 (CONJ_EXPR,
4865 type, TREE_OPERAND (arg0, 1)))));
4866 else if (TREE_CODE (arg0) == CONJ_EXPR)
4867 return TREE_OPERAND (arg0, 0);
4873 t = build_int_2 (~ TREE_INT_CST_LOW (arg0),
4874 ~ TREE_INT_CST_HIGH (arg0));
4875 TREE_TYPE (t) = type;
4876 force_fit_type (t, 0);
4877 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg0);
4878 TREE_CONSTANT_OVERFLOW (t) = TREE_CONSTANT_OVERFLOW (arg0);
4880 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
4881 return TREE_OPERAND (arg0, 0);
4885 /* A + (-B) -> A - B */
4886 if (TREE_CODE (arg1) == NEGATE_EXPR)
4887 return fold (build (MINUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
4888 /* (-A) + B -> B - A */
4889 if (TREE_CODE (arg0) == NEGATE_EXPR)
4890 return fold (build (MINUS_EXPR, type, arg1, TREE_OPERAND (arg0, 0)));
4891 else if (! FLOAT_TYPE_P (type))
4893 if (integer_zerop (arg1))
4894 return non_lvalue (convert (type, arg0));
4896 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
4897 with a constant, and the two constants have no bits in common,
4898 we should treat this as a BIT_IOR_EXPR since this may produce more
4900 if (TREE_CODE (arg0) == BIT_AND_EXPR
4901 && TREE_CODE (arg1) == BIT_AND_EXPR
4902 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
4903 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
4904 && integer_zerop (const_binop (BIT_AND_EXPR,
4905 TREE_OPERAND (arg0, 1),
4906 TREE_OPERAND (arg1, 1), 0)))
4908 code = BIT_IOR_EXPR;
4912 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
4913 (plus (plus (mult) (mult)) (foo)) so that we can
4914 take advantage of the factoring cases below. */
4915 if ((TREE_CODE (arg0) == PLUS_EXPR
4916 && TREE_CODE (arg1) == MULT_EXPR)
4917 || (TREE_CODE (arg1) == PLUS_EXPR
4918 && TREE_CODE (arg0) == MULT_EXPR))
4920 tree parg0, parg1, parg, marg;
4922 if (TREE_CODE (arg0) == PLUS_EXPR)
4923 parg = arg0, marg = arg1;
4925 parg = arg1, marg = arg0;
4926 parg0 = TREE_OPERAND (parg, 0);
4927 parg1 = TREE_OPERAND (parg, 1);
4931 if (TREE_CODE (parg0) == MULT_EXPR
4932 && TREE_CODE (parg1) != MULT_EXPR)
4933 return fold (build (PLUS_EXPR, type,
4934 fold (build (PLUS_EXPR, type, parg0, marg)),
4936 if (TREE_CODE (parg0) != MULT_EXPR
4937 && TREE_CODE (parg1) == MULT_EXPR)
4938 return fold (build (PLUS_EXPR, type,
4939 fold (build (PLUS_EXPR, type, parg1, marg)),
4943 if (TREE_CODE (arg0) == MULT_EXPR && TREE_CODE (arg1) == MULT_EXPR)
4945 tree arg00, arg01, arg10, arg11;
4946 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
4948 /* (A * C) + (B * C) -> (A+B) * C.
4949 We are most concerned about the case where C is a constant,
4950 but other combinations show up during loop reduction. Since
4951 it is not difficult, try all four possibilities. */
4953 arg00 = TREE_OPERAND (arg0, 0);
4954 arg01 = TREE_OPERAND (arg0, 1);
4955 arg10 = TREE_OPERAND (arg1, 0);
4956 arg11 = TREE_OPERAND (arg1, 1);
4959 if (operand_equal_p (arg01, arg11, 0))
4960 same = arg01, alt0 = arg00, alt1 = arg10;
4961 else if (operand_equal_p (arg00, arg10, 0))
4962 same = arg00, alt0 = arg01, alt1 = arg11;
4963 else if (operand_equal_p (arg00, arg11, 0))
4964 same = arg00, alt0 = arg01, alt1 = arg10;
4965 else if (operand_equal_p (arg01, arg10, 0))
4966 same = arg01, alt0 = arg00, alt1 = arg11;
4968 /* No identical multiplicands; see if we can find a common
4969 power-of-two factor in non-power-of-two multiplies. This
4970 can help in multi-dimensional array access. */
4971 else if (TREE_CODE (arg01) == INTEGER_CST
4972 && TREE_CODE (arg11) == INTEGER_CST
4973 && TREE_INT_CST_HIGH (arg01) == 0
4974 && TREE_INT_CST_HIGH (arg11) == 0)
4976 HOST_WIDE_INT int01, int11, tmp;
4977 int01 = TREE_INT_CST_LOW (arg01);
4978 int11 = TREE_INT_CST_LOW (arg11);
4980 /* Move min of absolute values to int11. */
4981 if ((int01 >= 0 ? int01 : -int01)
4982 < (int11 >= 0 ? int11 : -int11))
4984 tmp = int01, int01 = int11, int11 = tmp;
4985 alt0 = arg00, arg00 = arg10, arg10 = alt0;
4986 alt0 = arg01, arg01 = arg11, arg11 = alt0;
4989 if (exact_log2 (int11) > 0 && int01 % int11 == 0)
4991 alt0 = fold (build (MULT_EXPR, type, arg00,
4992 build_int_2 (int01 / int11, 0)));
4999 return fold (build (MULT_EXPR, type,
5000 fold (build (PLUS_EXPR, type, alt0, alt1)),
5004 /* In IEEE floating point, x+0 may not equal x. */
5005 else if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
5006 || flag_unsafe_math_optimizations)
5007 && real_zerop (arg1))
5008 return non_lvalue (convert (type, arg0));
5009 /* x+(-0) equals x, even for IEEE. */
5010 else if (TREE_CODE (arg1) == REAL_CST
5011 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (arg1)))
5012 return non_lvalue (convert (type, arg0));
5015 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
5016 is a rotate of A by C1 bits. */
5017 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
5018 is a rotate of A by B bits. */
5020 enum tree_code code0, code1;
5021 code0 = TREE_CODE (arg0);
5022 code1 = TREE_CODE (arg1);
5023 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
5024 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
5025 && operand_equal_p (TREE_OPERAND (arg0, 0),
5026 TREE_OPERAND (arg1, 0), 0)
5027 && TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
5029 tree tree01, tree11;
5030 enum tree_code code01, code11;
5032 tree01 = TREE_OPERAND (arg0, 1);
5033 tree11 = TREE_OPERAND (arg1, 1);
5034 STRIP_NOPS (tree01);
5035 STRIP_NOPS (tree11);
5036 code01 = TREE_CODE (tree01);
5037 code11 = TREE_CODE (tree11);
5038 if (code01 == INTEGER_CST
5039 && code11 == INTEGER_CST
5040 && TREE_INT_CST_HIGH (tree01) == 0
5041 && TREE_INT_CST_HIGH (tree11) == 0
5042 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
5043 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
5044 return build (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
5045 code0 == LSHIFT_EXPR ? tree01 : tree11);
5046 else if (code11 == MINUS_EXPR)
5048 tree tree110, tree111;
5049 tree110 = TREE_OPERAND (tree11, 0);
5050 tree111 = TREE_OPERAND (tree11, 1);
5051 STRIP_NOPS (tree110);
5052 STRIP_NOPS (tree111);
5053 if (TREE_CODE (tree110) == INTEGER_CST
5054 && 0 == compare_tree_int (tree110,
5056 (TREE_TYPE (TREE_OPERAND
5058 && operand_equal_p (tree01, tree111, 0))
5059 return build ((code0 == LSHIFT_EXPR
5062 type, TREE_OPERAND (arg0, 0), tree01);
5064 else if (code01 == MINUS_EXPR)
5066 tree tree010, tree011;
5067 tree010 = TREE_OPERAND (tree01, 0);
5068 tree011 = TREE_OPERAND (tree01, 1);
5069 STRIP_NOPS (tree010);
5070 STRIP_NOPS (tree011);
5071 if (TREE_CODE (tree010) == INTEGER_CST
5072 && 0 == compare_tree_int (tree010,
5074 (TREE_TYPE (TREE_OPERAND
5076 && operand_equal_p (tree11, tree011, 0))
5077 return build ((code0 != LSHIFT_EXPR
5080 type, TREE_OPERAND (arg0, 0), tree11);
5086 /* In most languages, can't associate operations on floats through
5087 parentheses. Rather than remember where the parentheses were, we
5088 don't associate floats at all. It shouldn't matter much. However,
5089 associating multiplications is only very slightly inaccurate, so do
5090 that if -funsafe-math-optimizations is specified. */
5093 && (! FLOAT_TYPE_P (type)
5094 || (flag_unsafe_math_optimizations && code == MULT_EXPR)))
5096 tree var0, con0, lit0, var1, con1, lit1;
5098 /* Split both trees into variables, constants, and literals. Then
5099 associate each group together, the constants with literals,
5100 then the result with variables. This increases the chances of
5101 literals being recombined later and of generating relocatable
5102 expressions for the sum of a constant and literal. */
5103 var0 = split_tree (arg0, code, &con0, &lit0, 0);
5104 var1 = split_tree (arg1, code, &con1, &lit1, code == MINUS_EXPR);
5106 /* Only do something if we found more than two objects. Otherwise,
5107 nothing has changed and we risk infinite recursion. */
5108 if (2 < ((var0 != 0) + (var1 != 0) + (con0 != 0) + (con1 != 0)
5109 + (lit0 != 0) + (lit1 != 0)))
5111 var0 = associate_trees (var0, var1, code, type);
5112 con0 = associate_trees (con0, con1, code, type);
5113 lit0 = associate_trees (lit0, lit1, code, type);
5114 con0 = associate_trees (con0, lit0, code, type);
5115 return convert (type, associate_trees (var0, con0, code, type));
5121 t1 = const_binop (code, arg0, arg1, 0);
5122 if (t1 != NULL_TREE)
5124 /* The return value should always have
5125 the same type as the original expression. */
5126 if (TREE_TYPE (t1) != TREE_TYPE (t))
5127 t1 = convert (TREE_TYPE (t), t1);
5134 /* A - (-B) -> A + B */
5135 if (TREE_CODE (arg1) == NEGATE_EXPR)
5136 return fold (build (PLUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
5137 /* (-A) - CST -> (-CST) - A for floating point (what about ints ?) */
5138 if (TREE_CODE (arg0) == NEGATE_EXPR && TREE_CODE (arg1) == REAL_CST)
5140 fold (build (MINUS_EXPR, type,
5141 build_real (TREE_TYPE (arg1),
5142 REAL_VALUE_NEGATE (TREE_REAL_CST (arg1))),
5143 TREE_OPERAND (arg0, 0)));
5145 if (! FLOAT_TYPE_P (type))
5147 if (! wins && integer_zerop (arg0))
5148 return negate_expr (convert (type, arg1));
5149 if (integer_zerop (arg1))
5150 return non_lvalue (convert (type, arg0));
5152 /* (A * C) - (B * C) -> (A-B) * C. Since we are most concerned
5153 about the case where C is a constant, just try one of the
5154 four possibilities. */
5156 if (TREE_CODE (arg0) == MULT_EXPR && TREE_CODE (arg1) == MULT_EXPR
5157 && operand_equal_p (TREE_OPERAND (arg0, 1),
5158 TREE_OPERAND (arg1, 1), 0))
5159 return fold (build (MULT_EXPR, type,
5160 fold (build (MINUS_EXPR, type,
5161 TREE_OPERAND (arg0, 0),
5162 TREE_OPERAND (arg1, 0))),
5163 TREE_OPERAND (arg0, 1)));
5166 else if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
5167 || flag_unsafe_math_optimizations)
5169 /* Except with IEEE floating point, 0-x equals -x. */
5170 if (! wins && real_zerop (arg0))
5171 return negate_expr (convert (type, arg1));
5172 /* Except with IEEE floating point, x-0 equals x. */
5173 if (real_zerop (arg1))
5174 return non_lvalue (convert (type, arg0));
5177 /* Fold &x - &x. This can happen from &x.foo - &x.
5178 This is unsafe for certain floats even in non-IEEE formats.
5179 In IEEE, it is unsafe because it does wrong for NaNs.
5180 Also note that operand_equal_p is always false if an operand
5183 if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
5184 && operand_equal_p (arg0, arg1, 0))
5185 return convert (type, integer_zero_node);
5190 /* (-A) * (-B) -> A * B */
5191 if (TREE_CODE (arg0) == NEGATE_EXPR && TREE_CODE (arg1) == NEGATE_EXPR)
5192 return fold (build (MULT_EXPR, type, TREE_OPERAND (arg0, 0),
5193 TREE_OPERAND (arg1, 0)));
5195 if (! FLOAT_TYPE_P (type))
5197 if (integer_zerop (arg1))
5198 return omit_one_operand (type, arg1, arg0);
5199 if (integer_onep (arg1))
5200 return non_lvalue (convert (type, arg0));
5202 /* (a * (1 << b)) is (a << b) */
5203 if (TREE_CODE (arg1) == LSHIFT_EXPR
5204 && integer_onep (TREE_OPERAND (arg1, 0)))
5205 return fold (build (LSHIFT_EXPR, type, arg0,
5206 TREE_OPERAND (arg1, 1)));
5207 if (TREE_CODE (arg0) == LSHIFT_EXPR
5208 && integer_onep (TREE_OPERAND (arg0, 0)))
5209 return fold (build (LSHIFT_EXPR, type, arg1,
5210 TREE_OPERAND (arg0, 1)));
5212 if (TREE_CODE (arg1) == INTEGER_CST
5213 && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0), arg1,
5215 return convert (type, tem);
5220 /* x*0 is 0, except for IEEE floating point. */
5221 if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
5222 || flag_unsafe_math_optimizations)
5223 && real_zerop (arg1))
5224 return omit_one_operand (type, arg1, arg0);
5225 /* In IEEE floating point, x*1 is not equivalent to x for snans.
5226 However, ANSI says we can drop signals,
5227 so we can do this anyway. */
5228 if (real_onep (arg1))
5229 return non_lvalue (convert (type, arg0));
5231 if (! wins && real_twop (arg1) && global_bindings_p () == 0
5232 && ! contains_placeholder_p (arg0))
5234 tree arg = save_expr (arg0);
5235 return build (PLUS_EXPR, type, arg, arg);
5242 if (integer_all_onesp (arg1))
5243 return omit_one_operand (type, arg1, arg0);
5244 if (integer_zerop (arg1))
5245 return non_lvalue (convert (type, arg0));
5246 t1 = distribute_bit_expr (code, type, arg0, arg1);
5247 if (t1 != NULL_TREE)
5250 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
5252 This results in more efficient code for machines without a NAND
5253 instruction. Combine will canonicalize to the first form
5254 which will allow use of NAND instructions provided by the
5255 backend if they exist. */
5256 if (TREE_CODE (arg0) == BIT_NOT_EXPR
5257 && TREE_CODE (arg1) == BIT_NOT_EXPR)
5259 return fold (build1 (BIT_NOT_EXPR, type,
5260 build (BIT_AND_EXPR, type,
5261 TREE_OPERAND (arg0, 0),
5262 TREE_OPERAND (arg1, 0))));
5265 /* See if this can be simplified into a rotate first. If that
5266 is unsuccessful continue in the association code. */
5270 if (integer_zerop (arg1))
5271 return non_lvalue (convert (type, arg0));
5272 if (integer_all_onesp (arg1))
5273 return fold (build1 (BIT_NOT_EXPR, type, arg0));
5275 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
5276 with a constant, and the two constants have no bits in common,
5277 we should treat this as a BIT_IOR_EXPR since this may produce more
5279 if (TREE_CODE (arg0) == BIT_AND_EXPR
5280 && TREE_CODE (arg1) == BIT_AND_EXPR
5281 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
5282 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
5283 && integer_zerop (const_binop (BIT_AND_EXPR,
5284 TREE_OPERAND (arg0, 1),
5285 TREE_OPERAND (arg1, 1), 0)))
5287 code = BIT_IOR_EXPR;
5291 /* See if this can be simplified into a rotate first. If that
5292 is unsuccessful continue in the association code. */
5297 if (integer_all_onesp (arg1))
5298 return non_lvalue (convert (type, arg0));
5299 if (integer_zerop (arg1))
5300 return omit_one_operand (type, arg1, arg0);
5301 t1 = distribute_bit_expr (code, type, arg0, arg1);
5302 if (t1 != NULL_TREE)
5304 /* Simplify ((int)c & 0x377) into (int)c, if c is unsigned char. */
5305 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == NOP_EXPR
5306 && TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0))))
5309 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0)));
5311 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
5312 && (~TREE_INT_CST_LOW (arg0)
5313 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
5314 return build1 (NOP_EXPR, type, TREE_OPERAND (arg1, 0));
5316 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
5317 && TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
5320 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
5322 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
5323 && (~TREE_INT_CST_LOW (arg1)
5324 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
5325 return build1 (NOP_EXPR, type, TREE_OPERAND (arg0, 0));
5328 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
5330 This results in more efficient code for machines without a NOR
5331 instruction. Combine will canonicalize to the first form
5332 which will allow use of NOR instructions provided by the
5333 backend if they exist. */
5334 if (TREE_CODE (arg0) == BIT_NOT_EXPR
5335 && TREE_CODE (arg1) == BIT_NOT_EXPR)
5337 return fold (build1 (BIT_NOT_EXPR, type,
5338 build (BIT_IOR_EXPR, type,
5339 TREE_OPERAND (arg0, 0),
5340 TREE_OPERAND (arg1, 0))));
5345 case BIT_ANDTC_EXPR:
5346 if (integer_all_onesp (arg0))
5347 return non_lvalue (convert (type, arg1));
5348 if (integer_zerop (arg0))
5349 return omit_one_operand (type, arg0, arg1);
5350 if (TREE_CODE (arg1) == INTEGER_CST)
5352 arg1 = fold (build1 (BIT_NOT_EXPR, type, arg1));
5353 code = BIT_AND_EXPR;
5359 /* In most cases, do nothing with a divide by zero. */
5360 #ifndef REAL_INFINITY
5361 if (TREE_CODE (arg1) == REAL_CST && real_zerop (arg1))
5365 /* (-A) / (-B) -> A / B */
5366 if (TREE_CODE (arg0) == NEGATE_EXPR && TREE_CODE (arg1) == NEGATE_EXPR)
5367 return fold (build (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
5368 TREE_OPERAND (arg1, 0)));
5370 /* In IEEE floating point, x/1 is not equivalent to x for snans.
5371 However, ANSI says we can drop signals, so we can do this anyway. */
5372 if (real_onep (arg1))
5373 return non_lvalue (convert (type, arg0));
5375 /* If ARG1 is a constant, we can convert this to a multiply by the
5376 reciprocal. This does not have the same rounding properties,
5377 so only do this if -funsafe-math-optimizations. We can actually
5378 always safely do it if ARG1 is a power of two, but it's hard to
5379 tell if it is or not in a portable manner. */
5380 if (TREE_CODE (arg1) == REAL_CST)
5382 if (flag_unsafe_math_optimizations
5383 && 0 != (tem = const_binop (code, build_real (type, dconst1),
5385 return fold (build (MULT_EXPR, type, arg0, tem));
5386 /* Find the reciprocal if optimizing and the result is exact. */
5390 r = TREE_REAL_CST (arg1);
5391 if (exact_real_inverse (TYPE_MODE(TREE_TYPE(arg0)), &r))
5393 tem = build_real (type, r);
5394 return fold (build (MULT_EXPR, type, arg0, tem));
5398 /* Convert A/B/C to A/(B*C). */
5399 if (flag_unsafe_math_optimizations
5400 && TREE_CODE (arg0) == RDIV_EXPR)
5402 return fold (build (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
5403 build (MULT_EXPR, type, TREE_OPERAND (arg0, 1),
5406 /* Convert A/(B/C) to (A/B)*C. */
5407 if (flag_unsafe_math_optimizations
5408 && TREE_CODE (arg1) == RDIV_EXPR)
5410 return fold (build (MULT_EXPR, type,
5411 build (RDIV_EXPR, type, arg0,
5412 TREE_OPERAND (arg1, 0)),
5413 TREE_OPERAND (arg1, 1)));
5417 case TRUNC_DIV_EXPR:
5418 case ROUND_DIV_EXPR:
5419 case FLOOR_DIV_EXPR:
5421 case EXACT_DIV_EXPR:
5422 if (integer_onep (arg1))
5423 return non_lvalue (convert (type, arg0));
5424 if (integer_zerop (arg1))
5427 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
5428 operation, EXACT_DIV_EXPR.
5430 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
5431 At one time others generated faster code, it's not clear if they do
5432 after the last round to changes to the DIV code in expmed.c. */
5433 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
5434 && multiple_of_p (type, arg0, arg1))
5435 return fold (build (EXACT_DIV_EXPR, type, arg0, arg1));
5437 if (TREE_CODE (arg1) == INTEGER_CST
5438 && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0), arg1,
5440 return convert (type, tem);
5445 case FLOOR_MOD_EXPR:
5446 case ROUND_MOD_EXPR:
5447 case TRUNC_MOD_EXPR:
5448 if (integer_onep (arg1))
5449 return omit_one_operand (type, integer_zero_node, arg0);
5450 if (integer_zerop (arg1))
5453 if (TREE_CODE (arg1) == INTEGER_CST
5454 && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0), arg1,
5456 return convert (type, tem);
5464 if (integer_zerop (arg1))
5465 return non_lvalue (convert (type, arg0));
5466 /* Since negative shift count is not well-defined,
5467 don't try to compute it in the compiler. */
5468 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
5470 /* Rewrite an LROTATE_EXPR by a constant into an
5471 RROTATE_EXPR by a new constant. */
5472 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
5474 TREE_SET_CODE (t, RROTATE_EXPR);
5475 code = RROTATE_EXPR;
5476 TREE_OPERAND (t, 1) = arg1
5479 convert (TREE_TYPE (arg1),
5480 build_int_2 (GET_MODE_BITSIZE (TYPE_MODE (type)), 0)),
5482 if (tree_int_cst_sgn (arg1) < 0)
5486 /* If we have a rotate of a bit operation with the rotate count and
5487 the second operand of the bit operation both constant,
5488 permute the two operations. */
5489 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
5490 && (TREE_CODE (arg0) == BIT_AND_EXPR
5491 || TREE_CODE (arg0) == BIT_ANDTC_EXPR
5492 || TREE_CODE (arg0) == BIT_IOR_EXPR
5493 || TREE_CODE (arg0) == BIT_XOR_EXPR)
5494 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
5495 return fold (build (TREE_CODE (arg0), type,
5496 fold (build (code, type,
5497 TREE_OPERAND (arg0, 0), arg1)),
5498 fold (build (code, type,
5499 TREE_OPERAND (arg0, 1), arg1))));
5501 /* Two consecutive rotates adding up to the width of the mode can
5503 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
5504 && TREE_CODE (arg0) == RROTATE_EXPR
5505 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
5506 && TREE_INT_CST_HIGH (arg1) == 0
5507 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
5508 && ((TREE_INT_CST_LOW (arg1)
5509 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
5510 == (unsigned int) GET_MODE_BITSIZE (TYPE_MODE (type))))
5511 return TREE_OPERAND (arg0, 0);
5516 if (operand_equal_p (arg0, arg1, 0))
5517 return omit_one_operand (type, arg0, arg1);
5518 if (INTEGRAL_TYPE_P (type)
5519 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), 1))
5520 return omit_one_operand (type, arg1, arg0);
5524 if (operand_equal_p (arg0, arg1, 0))
5525 return omit_one_operand (type, arg0, arg1);
5526 if (INTEGRAL_TYPE_P (type)
5527 && TYPE_MAX_VALUE (type)
5528 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), 1))
5529 return omit_one_operand (type, arg1, arg0);
5532 case TRUTH_NOT_EXPR:
5533 /* Note that the operand of this must be an int
5534 and its values must be 0 or 1.
5535 ("true" is a fixed value perhaps depending on the language,
5536 but we don't handle values other than 1 correctly yet.) */
5537 tem = invert_truthvalue (arg0);
5538 /* Avoid infinite recursion. */
5539 if (TREE_CODE (tem) == TRUTH_NOT_EXPR)
5541 return convert (type, tem);
5543 case TRUTH_ANDIF_EXPR:
5544 /* Note that the operands of this must be ints
5545 and their values must be 0 or 1.
5546 ("true" is a fixed value perhaps depending on the language.) */
5547 /* If first arg is constant zero, return it. */
5548 if (integer_zerop (arg0))
5549 return convert (type, arg0);
5550 case TRUTH_AND_EXPR:
5551 /* If either arg is constant true, drop it. */
5552 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
5553 return non_lvalue (convert (type, arg1));
5554 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
5555 /* Preserve sequence points. */
5556 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
5557 return non_lvalue (convert (type, arg0));
5558 /* If second arg is constant zero, result is zero, but first arg
5559 must be evaluated. */
5560 if (integer_zerop (arg1))
5561 return omit_one_operand (type, arg1, arg0);
5562 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
5563 case will be handled here. */
5564 if (integer_zerop (arg0))
5565 return omit_one_operand (type, arg0, arg1);
5568 /* We only do these simplifications if we are optimizing. */
5572 /* Check for things like (A || B) && (A || C). We can convert this
5573 to A || (B && C). Note that either operator can be any of the four
5574 truth and/or operations and the transformation will still be
5575 valid. Also note that we only care about order for the
5576 ANDIF and ORIF operators. If B contains side effects, this
5577 might change the truth-value of A. */
5578 if (TREE_CODE (arg0) == TREE_CODE (arg1)
5579 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
5580 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
5581 || TREE_CODE (arg0) == TRUTH_AND_EXPR
5582 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
5583 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
5585 tree a00 = TREE_OPERAND (arg0, 0);
5586 tree a01 = TREE_OPERAND (arg0, 1);
5587 tree a10 = TREE_OPERAND (arg1, 0);
5588 tree a11 = TREE_OPERAND (arg1, 1);
5589 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
5590 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
5591 && (code == TRUTH_AND_EXPR
5592 || code == TRUTH_OR_EXPR));
5594 if (operand_equal_p (a00, a10, 0))
5595 return fold (build (TREE_CODE (arg0), type, a00,
5596 fold (build (code, type, a01, a11))));
5597 else if (commutative && operand_equal_p (a00, a11, 0))
5598 return fold (build (TREE_CODE (arg0), type, a00,
5599 fold (build (code, type, a01, a10))));
5600 else if (commutative && operand_equal_p (a01, a10, 0))
5601 return fold (build (TREE_CODE (arg0), type, a01,
5602 fold (build (code, type, a00, a11))));
5604			  /* This case is tricky because we must either have commutative
5605 operators or else A10 must not have side-effects. */
5607 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
5608 && operand_equal_p (a01, a11, 0))
5609 return fold (build (TREE_CODE (arg0), type,
5610 fold (build (code, type, a00, a10)),
5614 /* See if we can build a range comparison. */
5615 if (0 != (tem = fold_range_test (t)))
5618 /* Check for the possibility of merging component references. If our
5619 lhs is another similar operation, try to merge its rhs with our
5620 rhs. Then try to merge our lhs and rhs. */
5621 if (TREE_CODE (arg0) == code
5622 && 0 != (tem = fold_truthop (code, type,
5623 TREE_OPERAND (arg0, 1), arg1)))
5624 return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));
5626 if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
5631 case TRUTH_ORIF_EXPR:
5632 /* Note that the operands of this must be ints
5633 and their values must be 0 or true.
5634 ("true" is a fixed value perhaps depending on the language.) */
5635 /* If first arg is constant true, return it. */
5636 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
5637 return convert (type, arg0);
5639 /* If either arg is constant zero, drop it. */
5640 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
5641 return non_lvalue (convert (type, arg1));
5642 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
5643 /* Preserve sequence points. */
5644 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
5645 return non_lvalue (convert (type, arg0));
5646 /* If second arg is constant true, result is true, but we must
5647 evaluate first arg. */
5648 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
5649 return omit_one_operand (type, arg1, arg0);
5650 /* Likewise for first arg, but note this only occurs here for
5652 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
5653 return omit_one_operand (type, arg0, arg1);
5656 case TRUTH_XOR_EXPR:
5657 /* If either arg is constant zero, drop it. */
5658 if (integer_zerop (arg0))
5659 return non_lvalue (convert (type, arg1));
5660 if (integer_zerop (arg1))
5661 return non_lvalue (convert (type, arg0));
5662 /* If either arg is constant true, this is a logical inversion. */
5663 if (integer_onep (arg0))
5664 return non_lvalue (convert (type, invert_truthvalue (arg1)));
5665 if (integer_onep (arg1))
5666 return non_lvalue (convert (type, invert_truthvalue (arg0)));
5675 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
5677 /* (-a) CMP (-b) -> b CMP a */
5678 if (TREE_CODE (arg0) == NEGATE_EXPR
5679 && TREE_CODE (arg1) == NEGATE_EXPR)
5680 return fold (build (code, type, TREE_OPERAND (arg1, 0),
5681 TREE_OPERAND (arg0, 0)));
5682 /* (-a) CMP CST -> a swap(CMP) (-CST) */
5683 if (TREE_CODE (arg0) == NEGATE_EXPR && TREE_CODE (arg1) == REAL_CST)
5686 (swap_tree_comparison (code), type,
5687 TREE_OPERAND (arg0, 0),
5688 build_real (TREE_TYPE (arg1),
5689 REAL_VALUE_NEGATE (TREE_REAL_CST (arg1)))));
5690 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
5691 /* a CMP (-0) -> a CMP 0 */
5692 if (TREE_CODE (arg1) == REAL_CST
5693 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (arg1)))
5694 return fold (build (code, type, arg0,
5695 build_real (TREE_TYPE (arg1), dconst0)));
5698 /* If one arg is a constant integer, put it last. */
5699 if (TREE_CODE (arg0) == INTEGER_CST
5700 && TREE_CODE (arg1) != INTEGER_CST)
5702 TREE_OPERAND (t, 0) = arg1;
5703 TREE_OPERAND (t, 1) = arg0;
5704 arg0 = TREE_OPERAND (t, 0);
5705 arg1 = TREE_OPERAND (t, 1);
5706 code = swap_tree_comparison (code);
5707 TREE_SET_CODE (t, code);
5710 /* Convert foo++ == CONST into ++foo == CONST + INCR.
5711 First, see if one arg is constant; find the constant arg
5712 and the other one. */
5714 tree constop = 0, varop = NULL_TREE;
5715 int constopnum = -1;
5717 if (TREE_CONSTANT (arg1))
5718 constopnum = 1, constop = arg1, varop = arg0;
5719 if (TREE_CONSTANT (arg0))
5720 constopnum = 0, constop = arg0, varop = arg1;
5722 if (constop && TREE_CODE (varop) == POSTINCREMENT_EXPR)
5724 /* This optimization is invalid for ordered comparisons
5725 if CONST+INCR overflows or if foo+incr might overflow.
5726 This optimization is invalid for floating point due to rounding.
5727 For pointer types we assume overflow doesn't happen. */
5728 if (POINTER_TYPE_P (TREE_TYPE (varop))
5729 || (! FLOAT_TYPE_P (TREE_TYPE (varop))
5730 && (code == EQ_EXPR || code == NE_EXPR)))
5733 = fold (build (PLUS_EXPR, TREE_TYPE (varop),
5734 constop, TREE_OPERAND (varop, 1)));
5736 /* Do not overwrite the current varop to be a preincrement,
5737 create a new node so that we won't confuse our caller who
5738 might create trees and throw them away, reusing the
5739 arguments that they passed to build. This shows up in
5740 the THEN or ELSE parts of ?: being postincrements. */
5741 varop = build (PREINCREMENT_EXPR, TREE_TYPE (varop),
5742 TREE_OPERAND (varop, 0),
5743 TREE_OPERAND (varop, 1));
5745 /* If VAROP is a reference to a bitfield, we must mask
5746 the constant by the width of the field. */
5747 if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
5748 && DECL_BIT_FIELD(TREE_OPERAND
5749 (TREE_OPERAND (varop, 0), 1)))
5752 = TREE_INT_CST_LOW (DECL_SIZE
5754 (TREE_OPERAND (varop, 0), 1)));
5755 tree mask, unsigned_type;
5756 unsigned int precision;
5757 tree folded_compare;
5759 /* First check whether the comparison would come out
5760 always the same. If we don't do that we would
5761 change the meaning with the masking. */
5762 if (constopnum == 0)
5763 folded_compare = fold (build (code, type, constop,
5764 TREE_OPERAND (varop, 0)));
5766 folded_compare = fold (build (code, type,
5767 TREE_OPERAND (varop, 0),
5769 if (integer_zerop (folded_compare)
5770 || integer_onep (folded_compare))
5771 return omit_one_operand (type, folded_compare, varop);
5773 unsigned_type = type_for_size (size, 1);
5774 precision = TYPE_PRECISION (unsigned_type);
5775 mask = build_int_2 (~0, ~0);
5776 TREE_TYPE (mask) = unsigned_type;
5777 force_fit_type (mask, 0);
5778 mask = const_binop (RSHIFT_EXPR, mask,
5779 size_int (precision - size), 0);
5780 newconst = fold (build (BIT_AND_EXPR,
5781 TREE_TYPE (varop), newconst,
5782 convert (TREE_TYPE (varop),
5786 t = build (code, type,
5787 (constopnum == 0) ? newconst : varop,
5788 (constopnum == 1) ? newconst : varop);
5792 else if (constop && TREE_CODE (varop) == POSTDECREMENT_EXPR)
5794 if (POINTER_TYPE_P (TREE_TYPE (varop))
5795 || (! FLOAT_TYPE_P (TREE_TYPE (varop))
5796 && (code == EQ_EXPR || code == NE_EXPR)))
5799 = fold (build (MINUS_EXPR, TREE_TYPE (varop),
5800 constop, TREE_OPERAND (varop, 1)));
5802 /* Do not overwrite the current varop to be a predecrement,
5803 create a new node so that we won't confuse our caller who
5804 might create trees and throw them away, reusing the
5805 arguments that they passed to build. This shows up in
5806 the THEN or ELSE parts of ?: being postdecrements. */
5807 varop = build (PREDECREMENT_EXPR, TREE_TYPE (varop),
5808 TREE_OPERAND (varop, 0),
5809 TREE_OPERAND (varop, 1));
5811 if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
5812 && DECL_BIT_FIELD(TREE_OPERAND
5813 (TREE_OPERAND (varop, 0), 1)))
5816 = TREE_INT_CST_LOW (DECL_SIZE
5818 (TREE_OPERAND (varop, 0), 1)));
5819 tree mask, unsigned_type;
5820 unsigned int precision;
5821 tree folded_compare;
5823 if (constopnum == 0)
5824 folded_compare = fold (build (code, type, constop,
5825 TREE_OPERAND (varop, 0)));
5827 folded_compare = fold (build (code, type,
5828 TREE_OPERAND (varop, 0),
5830 if (integer_zerop (folded_compare)
5831 || integer_onep (folded_compare))
5832 return omit_one_operand (type, folded_compare, varop);
5834 unsigned_type = type_for_size (size, 1);
5835 precision = TYPE_PRECISION (unsigned_type);
5836 mask = build_int_2 (~0, ~0);
5837 TREE_TYPE (mask) = TREE_TYPE (varop);
5838 force_fit_type (mask, 0);
5839 mask = const_binop (RSHIFT_EXPR, mask,
5840 size_int (precision - size), 0);
5841 newconst = fold (build (BIT_AND_EXPR,
5842 TREE_TYPE (varop), newconst,
5843 convert (TREE_TYPE (varop),
5847 t = build (code, type,
5848 (constopnum == 0) ? newconst : varop,
5849 (constopnum == 1) ? newconst : varop);
5855 /* Change X >= CST to X > (CST - 1) if CST is positive. */
5856 if (TREE_CODE (arg1) == INTEGER_CST
5857 && TREE_CODE (arg0) != INTEGER_CST
5858 && tree_int_cst_sgn (arg1) > 0)
5860 switch (TREE_CODE (t))
5864 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
5865 t = build (code, type, TREE_OPERAND (t, 0), arg1);
5870 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
5871 t = build (code, type, TREE_OPERAND (t, 0), arg1);
5879 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
5880 a MINUS_EXPR of a constant, we can convert it into a comparison with
5881 a revised constant as long as no overflow occurs. */
5882 if ((code == EQ_EXPR || code == NE_EXPR)
5883 && TREE_CODE (arg1) == INTEGER_CST
5884 && (TREE_CODE (arg0) == PLUS_EXPR
5885 || TREE_CODE (arg0) == MINUS_EXPR)
5886 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
5887 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
5888 ? MINUS_EXPR : PLUS_EXPR,
5889 arg1, TREE_OPERAND (arg0, 1), 0))
5890 && ! TREE_CONSTANT_OVERFLOW (tem))
5891 return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));
5893 /* Similarly for a NEGATE_EXPR. */
5894 else if ((code == EQ_EXPR || code == NE_EXPR)
5895 && TREE_CODE (arg0) == NEGATE_EXPR
5896 && TREE_CODE (arg1) == INTEGER_CST
5897 && 0 != (tem = negate_expr (arg1))
5898 && TREE_CODE (tem) == INTEGER_CST
5899 && ! TREE_CONSTANT_OVERFLOW (tem))
5900 return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));
5902 /* If we have X - Y == 0, we can convert that to X == Y and similarly
5903 for !=. Don't do this for ordered comparisons due to overflow. */
5904 else if ((code == NE_EXPR || code == EQ_EXPR)
5905 && integer_zerop (arg1) && TREE_CODE (arg0) == MINUS_EXPR)
5906 return fold (build (code, type,
5907 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1)));
5909 /* If we are widening one operand of an integer comparison,
5910 see if the other operand is similarly being widened. Perhaps we
5911 can do the comparison in the narrower type. */
5912 else if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
5913 && TREE_CODE (arg0) == NOP_EXPR
5914 && (tem = get_unwidened (arg0, NULL_TREE)) != arg0
5915 && (t1 = get_unwidened (arg1, TREE_TYPE (tem))) != 0
5916 && (TREE_TYPE (t1) == TREE_TYPE (tem)
5917 || (TREE_CODE (t1) == INTEGER_CST
5918 && int_fits_type_p (t1, TREE_TYPE (tem)))))
5919 return fold (build (code, type, tem, convert (TREE_TYPE (tem), t1)));
5921 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
5922 constant, we can simplify it. */
5923 else if (TREE_CODE (arg1) == INTEGER_CST
5924 && (TREE_CODE (arg0) == MIN_EXPR
5925 || TREE_CODE (arg0) == MAX_EXPR)
5926 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
5927 return optimize_minmax_comparison (t);
5929 /* If we are comparing an ABS_EXPR with a constant, we can
5930 convert all the cases into explicit comparisons, but they may
5931 well not be faster than doing the ABS and one comparison.
5932 But ABS (X) <= C is a range comparison, which becomes a subtraction
5933 and a comparison, and is probably faster. */
5934 else if (code == LE_EXPR && TREE_CODE (arg1) == INTEGER_CST
5935 && TREE_CODE (arg0) == ABS_EXPR
5936 && ! TREE_SIDE_EFFECTS (arg0)
5937 && (0 != (tem = negate_expr (arg1)))
5938 && TREE_CODE (tem) == INTEGER_CST
5939 && ! TREE_CONSTANT_OVERFLOW (tem))
5940 return fold (build (TRUTH_ANDIF_EXPR, type,
5941 build (GE_EXPR, type, TREE_OPERAND (arg0, 0), tem),
5942 build (LE_EXPR, type,
5943 TREE_OPERAND (arg0, 0), arg1)));
5945 /* If this is an EQ or NE comparison with zero and ARG0 is
5946 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
5947 two operations, but the latter can be done in one less insn
5948 on machines that have only two-operand insns or on which a
5949 constant cannot be the first operand. */
5950 if (integer_zerop (arg1) && (code == EQ_EXPR || code == NE_EXPR)
5951 && TREE_CODE (arg0) == BIT_AND_EXPR)
5953 if (TREE_CODE (TREE_OPERAND (arg0, 0)) == LSHIFT_EXPR
5954 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 0), 0)))
5956 fold (build (code, type,
5957 build (BIT_AND_EXPR, TREE_TYPE (arg0),
5959 TREE_TYPE (TREE_OPERAND (arg0, 0)),
5960 TREE_OPERAND (arg0, 1),
5961 TREE_OPERAND (TREE_OPERAND (arg0, 0), 1)),
5962 convert (TREE_TYPE (arg0),
5965 else if (TREE_CODE (TREE_OPERAND (arg0, 1)) == LSHIFT_EXPR
5966 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 1), 0)))
5968 fold (build (code, type,
5969 build (BIT_AND_EXPR, TREE_TYPE (arg0),
5971 TREE_TYPE (TREE_OPERAND (arg0, 1)),
5972 TREE_OPERAND (arg0, 0),
5973 TREE_OPERAND (TREE_OPERAND (arg0, 1), 1)),
5974 convert (TREE_TYPE (arg0),
5979 /* If this is an NE or EQ comparison of zero against the result of a
5980 signed MOD operation whose second operand is a power of 2, make
5981 the MOD operation unsigned since it is simpler and equivalent. */
5982 if ((code == NE_EXPR || code == EQ_EXPR)
5983 && integer_zerop (arg1)
5984 && ! TREE_UNSIGNED (TREE_TYPE (arg0))
5985 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
5986 || TREE_CODE (arg0) == CEIL_MOD_EXPR
5987 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
5988 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
5989 && integer_pow2p (TREE_OPERAND (arg0, 1)))
5991 tree newtype = unsigned_type (TREE_TYPE (arg0));
5992 tree newmod = build (TREE_CODE (arg0), newtype,
5993 convert (newtype, TREE_OPERAND (arg0, 0)),
5994 convert (newtype, TREE_OPERAND (arg0, 1)));
5996 return build (code, type, newmod, convert (newtype, arg1));
5999 /* If this is an NE comparison of zero with an AND of one, remove the
6000 comparison since the AND will give the correct value. */
6001 if (code == NE_EXPR && integer_zerop (arg1)
6002 && TREE_CODE (arg0) == BIT_AND_EXPR
6003 && integer_onep (TREE_OPERAND (arg0, 1)))
6004 return convert (type, arg0);
6006 /* If we have (A & C) == C where C is a power of 2, convert this into
6007 (A & C) != 0. Similarly for NE_EXPR. */
6008 if ((code == EQ_EXPR || code == NE_EXPR)
6009 && TREE_CODE (arg0) == BIT_AND_EXPR
6010 && integer_pow2p (TREE_OPERAND (arg0, 1))
6011 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
6012 return build (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
6013 arg0, integer_zero_node);
6015 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
6016 and similarly for >= into !=. */
6017 if ((code == LT_EXPR || code == GE_EXPR)
6018 && TREE_UNSIGNED (TREE_TYPE (arg0))
6019 && TREE_CODE (arg1) == LSHIFT_EXPR
6020 && integer_onep (TREE_OPERAND (arg1, 0)))
6021 return build (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
6022 build (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
6023 TREE_OPERAND (arg1, 1)),
6024 convert (TREE_TYPE (arg0), integer_zero_node));
6026 else if ((code == LT_EXPR || code == GE_EXPR)
6027 && TREE_UNSIGNED (TREE_TYPE (arg0))
6028 && (TREE_CODE (arg1) == NOP_EXPR
6029 || TREE_CODE (arg1) == CONVERT_EXPR)
6030 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
6031 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
6033 build (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
6034 convert (TREE_TYPE (arg0),
6035 build (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
6036 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1))),
6037 convert (TREE_TYPE (arg0), integer_zero_node));
6039 /* Simplify comparison of something with itself. (For IEEE
6040 floating-point, we can only do some of these simplifications.) */
6041 if (operand_equal_p (arg0, arg1, 0))
6048 if (! FLOAT_TYPE_P (TREE_TYPE (arg0)))
6049 return constant_boolean_node (1, type);
6051 TREE_SET_CODE (t, code);
6055 /* For NE, we can only do this simplification if integer. */
6056 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
6058 /* ... fall through ... */
6061 return constant_boolean_node (0, type);
6067 /* An unsigned comparison against 0 can be simplified. */
6068 if (integer_zerop (arg1)
6069 && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
6070 || POINTER_TYPE_P (TREE_TYPE (arg1)))
6071 && TREE_UNSIGNED (TREE_TYPE (arg1)))
6073 switch (TREE_CODE (t))
6077 TREE_SET_CODE (t, NE_EXPR);
6081 TREE_SET_CODE (t, EQ_EXPR);
6084 return omit_one_operand (type,
6085 convert (type, integer_one_node),
6088 return omit_one_operand (type,
6089 convert (type, integer_zero_node),
6096 /* Comparisons with the highest or lowest possible integer of
6097 the specified size will have known values and an unsigned
6098 <= 0x7fffffff can be simplified. */
6100 int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg1)));
6102 if (TREE_CODE (arg1) == INTEGER_CST
6103 && ! TREE_CONSTANT_OVERFLOW (arg1)
6104 && width <= HOST_BITS_PER_WIDE_INT
6105 && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
6106 || POINTER_TYPE_P (TREE_TYPE (arg1))))
6108 if (TREE_INT_CST_HIGH (arg1) == 0
6109 && (TREE_INT_CST_LOW (arg1)
6110 == ((unsigned HOST_WIDE_INT) 1 << (width - 1)) - 1)
6111 && ! TREE_UNSIGNED (TREE_TYPE (arg1)))
6112 switch (TREE_CODE (t))
6115 return omit_one_operand (type,
6116 convert (type, integer_zero_node),
6119 TREE_SET_CODE (t, EQ_EXPR);
6123 return omit_one_operand (type,
6124 convert (type, integer_one_node),
6127 TREE_SET_CODE (t, NE_EXPR);
6134 else if (TREE_INT_CST_HIGH (arg1) == -1
6135 && (TREE_INT_CST_LOW (arg1)
6136 == ((unsigned HOST_WIDE_INT) 1 << (width - 1)))
6137 && ! TREE_UNSIGNED (TREE_TYPE (arg1)))
6138 switch (TREE_CODE (t))
6141 return omit_one_operand (type,
6142 convert (type, integer_zero_node),
6145 TREE_SET_CODE (t, EQ_EXPR);
6149 return omit_one_operand (type,
6150 convert (type, integer_one_node),
6153 TREE_SET_CODE (t, NE_EXPR);
6160 else if (TREE_INT_CST_HIGH (arg1) == 0
6161 && (TREE_INT_CST_LOW (arg1)
6162 == ((unsigned HOST_WIDE_INT) 1 << (width - 1)) - 1)
6163 && TREE_UNSIGNED (TREE_TYPE (arg1))
6164 /* signed_type does not work on pointer types. */
6165 && INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
6166 switch (TREE_CODE (t))
6169 return fold (build (GE_EXPR, type,
6170 convert (signed_type (TREE_TYPE (arg0)),
6172 convert (signed_type (TREE_TYPE (arg1)),
6173 integer_zero_node)));
6175 return fold (build (LT_EXPR, type,
6176 convert (signed_type (TREE_TYPE (arg0)),
6178 convert (signed_type (TREE_TYPE (arg1)),
6179 integer_zero_node)));
6185 else if (TREE_INT_CST_HIGH (arg1) == 0
6186 && (TREE_INT_CST_LOW (arg1)
6187 == ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1)
6188 && TREE_UNSIGNED (TREE_TYPE (arg1)))
6189 switch (TREE_CODE (t))
6192 return omit_one_operand (type,
6193 convert (type, integer_zero_node),
6196 TREE_SET_CODE (t, EQ_EXPR);
6200 return omit_one_operand (type,
6201 convert (type, integer_one_node),
6204 TREE_SET_CODE (t, NE_EXPR);
6213 /* If we are comparing an expression that just has comparisons
6214 of two integer values, arithmetic expressions of those comparisons,
6215 and constants, we can simplify it. There are only three cases
6216 to check: the two values can either be equal, the first can be
6217 greater, or the second can be greater. Fold the expression for
6218 those three values. Since each value must be 0 or 1, we have
6219 eight possibilities, each of which corresponds to the constant 0
6220 or 1 or one of the six possible comparisons.
6222 This handles common cases like (a > b) == 0 but also handles
6223 expressions like ((x > y) - (y > x)) > 0, which supposedly
6224 occur in macroized code. */
6226 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
6228 tree cval1 = 0, cval2 = 0;
6231 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
6232 /* Don't handle degenerate cases here; they should already
6233 have been handled anyway. */
6234 && cval1 != 0 && cval2 != 0
6235 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
6236 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
6237 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
6238 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
6239 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
6240 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
6241 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
6243 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
6244 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
6246 /* We can't just pass T to eval_subst in case cval1 or cval2
6247 was the same as ARG1. */
6250 = fold (build (code, type,
6251 eval_subst (arg0, cval1, maxval, cval2, minval),
6254 = fold (build (code, type,
6255 eval_subst (arg0, cval1, maxval, cval2, maxval),
6258 = fold (build (code, type,
6259 eval_subst (arg0, cval1, minval, cval2, maxval),
6262 /* All three of these results should be 0 or 1. Confirm they
6263 are. Then use those values to select the proper code
6266 if ((integer_zerop (high_result)
6267 || integer_onep (high_result))
6268 && (integer_zerop (equal_result)
6269 || integer_onep (equal_result))
6270 && (integer_zerop (low_result)
6271 || integer_onep (low_result)))
6273 /* Make a 3-bit mask with the high-order bit being the
6274 value for `>', the next for '=', and the low for '<'. */
6275 switch ((integer_onep (high_result) * 4)
6276 + (integer_onep (equal_result) * 2)
6277 + integer_onep (low_result))
6281 return omit_one_operand (type, integer_zero_node, arg0);
6302 return omit_one_operand (type, integer_one_node, arg0);
6305 t = build (code, type, cval1, cval2);
6307 return save_expr (t);
6314 /* If this is a comparison of a field, we may be able to simplify it. */
6315 if ((TREE_CODE (arg0) == COMPONENT_REF
6316 || TREE_CODE (arg0) == BIT_FIELD_REF)
6317 && (code == EQ_EXPR || code == NE_EXPR)
6318 /* Handle the constant case even without -O
6319 to make sure the warnings are given. */
6320 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
6322 t1 = optimize_bit_field_compare (code, type, arg0, arg1);
6326 /* If this is a comparison of complex values and either or both sides
6327 are a COMPLEX_EXPR or COMPLEX_CST, it is best to split up the
6328 comparisons and join them with a TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR.
6329 This may prevent needless evaluations. */
6330 if ((code == EQ_EXPR || code == NE_EXPR)
6331 && TREE_CODE (TREE_TYPE (arg0)) == COMPLEX_TYPE
6332 && (TREE_CODE (arg0) == COMPLEX_EXPR
6333 || TREE_CODE (arg1) == COMPLEX_EXPR
6334 || TREE_CODE (arg0) == COMPLEX_CST
6335 || TREE_CODE (arg1) == COMPLEX_CST))
6337 tree subtype = TREE_TYPE (TREE_TYPE (arg0));
6338 tree real0, imag0, real1, imag1;
6340 arg0 = save_expr (arg0);
6341 arg1 = save_expr (arg1);
6342 real0 = fold (build1 (REALPART_EXPR, subtype, arg0));
6343 imag0 = fold (build1 (IMAGPART_EXPR, subtype, arg0));
6344 real1 = fold (build1 (REALPART_EXPR, subtype, arg1));
6345 imag1 = fold (build1 (IMAGPART_EXPR, subtype, arg1));
6347 return fold (build ((code == EQ_EXPR ? TRUTH_ANDIF_EXPR
6350 fold (build (code, type, real0, real1)),
6351 fold (build (code, type, imag0, imag1))));
6354 /* Optimize comparisons of strlen vs zero to a compare of the
6355 first character of the string vs zero. To wit,
6356 strlen(ptr) == 0 => *ptr == 0
6357 strlen(ptr) != 0 => *ptr != 0
6358 Other cases should reduce to one of these two (or a constant)
6359 due to the return value of strlen being unsigned. */
6360 if ((code == EQ_EXPR || code == NE_EXPR)
6361 && integer_zerop (arg1)
6362 && TREE_CODE (arg0) == CALL_EXPR
6363 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ADDR_EXPR)
6365 tree fndecl = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
6368 if (TREE_CODE (fndecl) == FUNCTION_DECL
6369 && DECL_BUILT_IN (fndecl)
6370 && DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_MD
6371 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
6372 && (arglist = TREE_OPERAND (arg0, 1))
6373 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE
6374 && ! TREE_CHAIN (arglist))
6375 return fold (build (code, type,
6376 build1 (INDIRECT_REF, char_type_node,
6377 TREE_VALUE(arglist)),
6378 integer_zero_node));
6381 /* From here on, the only cases we handle are when the result is
6382 known to be a constant.
6384 To compute GT, swap the arguments and do LT.
6385 To compute GE, do LT and invert the result.
6386 To compute LE, swap the arguments, do LT and invert the result.
6387 To compute NE, do EQ and invert the result.
6389 Therefore, the code below must handle only EQ and LT. */
6391 if (code == LE_EXPR || code == GT_EXPR)
6393 tem = arg0, arg0 = arg1, arg1 = tem;
6394 code = swap_tree_comparison (code);
6397 /* Note that it is safe to invert for real values here because we
6398 will check below in the one case that it matters. */
6402 if (code == NE_EXPR || code == GE_EXPR)
6405 code = invert_tree_comparison (code);
6408 /* Compute a result for LT or EQ if args permit;
6409 otherwise return T. */
6410 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
6412 if (code == EQ_EXPR)
6413 t1 = build_int_2 (tree_int_cst_equal (arg0, arg1), 0);
6415 t1 = build_int_2 ((TREE_UNSIGNED (TREE_TYPE (arg0))
6416 ? INT_CST_LT_UNSIGNED (arg0, arg1)
6417 : INT_CST_LT (arg0, arg1)),
6421 #if 0 /* This is no longer useful, but breaks some real code. */
6422 /* Assume a nonexplicit constant cannot equal an explicit one,
6423 since such code would be undefined anyway.
6424 Exception: on sysvr4, using #pragma weak,
6425 a label can come out as 0. */
6426 else if (TREE_CODE (arg1) == INTEGER_CST
6427 && !integer_zerop (arg1)
6428 && TREE_CONSTANT (arg0)
6429 && TREE_CODE (arg0) == ADDR_EXPR
6431 t1 = build_int_2 (0, 0);
6433 /* Two real constants can be compared explicitly. */
6434 else if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
6436 /* If either operand is a NaN, the result is false with two
6437 exceptions: First, an NE_EXPR is true on NaNs, but that case
6438 is already handled correctly since we will be inverting the
6439 result for NE_EXPR. Second, if we had inverted a LE_EXPR
6440 or a GE_EXPR into a LT_EXPR, we must return true so that it
6441 will be inverted into false. */
6443 if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
6444 || REAL_VALUE_ISNAN (TREE_REAL_CST (arg1)))
6445 t1 = build_int_2 (invert && code == LT_EXPR, 0);
6447 else if (code == EQ_EXPR)
6448 t1 = build_int_2 (REAL_VALUES_EQUAL (TREE_REAL_CST (arg0),
6449 TREE_REAL_CST (arg1)),
6452 t1 = build_int_2 (REAL_VALUES_LESS (TREE_REAL_CST (arg0),
6453 TREE_REAL_CST (arg1)),
6457 if (t1 == NULL_TREE)
6461 TREE_INT_CST_LOW (t1) ^= 1;
6463 TREE_TYPE (t1) = type;
6464 if (TREE_CODE (type) == BOOLEAN_TYPE)
6465 return truthvalue_conversion (t1);
6469 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
6470 so all simple results must be passed through pedantic_non_lvalue. */
6471 if (TREE_CODE (arg0) == INTEGER_CST)
6472 return pedantic_non_lvalue
6473 (TREE_OPERAND (t, (integer_zerop (arg0) ? 2 : 1)));
6474 else if (operand_equal_p (arg1, TREE_OPERAND (expr, 2), 0))
6475 return pedantic_omit_one_operand (type, arg1, arg0);
6477 /* If the second operand is zero, invert the comparison and swap
6478 the second and third operands. Likewise if the second operand
6479 is constant and the third is not or if the third operand is
6480 equivalent to the first operand of the comparison. */
6482 if (integer_zerop (arg1)
6483 || (TREE_CONSTANT (arg1) && ! TREE_CONSTANT (TREE_OPERAND (t, 2)))
6484 || (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
6485 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
6486 TREE_OPERAND (t, 2),
6487 TREE_OPERAND (arg0, 1))))
6489 /* See if this can be inverted. If it can't, possibly because
6490 it was a floating-point inequality comparison, don't do
6492 tem = invert_truthvalue (arg0);
6494 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
6496 t = build (code, type, tem,
6497 TREE_OPERAND (t, 2), TREE_OPERAND (t, 1));
6499 /* arg1 should be the first argument of the new T. */
6500 arg1 = TREE_OPERAND (t, 1);
6505 /* If we have A op B ? A : C, we may be able to convert this to a
6506 simpler expression, depending on the operation and the values
6507 of B and C. IEEE floating point prevents this though,
6508 because A or B might be -0.0 or a NaN. */
6510 if (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
6511 && (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
6512 || ! FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 0)))
6513 || flag_unsafe_math_optimizations)
6514 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
6515 arg1, TREE_OPERAND (arg0, 1)))
6517 tree arg2 = TREE_OPERAND (t, 2);
6518 enum tree_code comp_code = TREE_CODE (arg0);
6522 /* If we have A op 0 ? A : -A, this is A, -A, abs (A), or -abs (A),
6523 depending on the comparison operation. */
6524 if ((FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 1)))
6525 ? real_zerop (TREE_OPERAND (arg0, 1))
6526 : integer_zerop (TREE_OPERAND (arg0, 1)))
6527 && TREE_CODE (arg2) == NEGATE_EXPR
6528 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
6536 (convert (TREE_TYPE (TREE_OPERAND (t, 1)),
6540 return pedantic_non_lvalue (convert (type, arg1));
6543 if (TREE_UNSIGNED (TREE_TYPE (arg1)))
6544 arg1 = convert (signed_type (TREE_TYPE (arg1)), arg1);
6545 return pedantic_non_lvalue
6546 (convert (type, fold (build1 (ABS_EXPR,
6547 TREE_TYPE (arg1), arg1))));
6550 if (TREE_UNSIGNED (TREE_TYPE (arg1)))
6551 arg1 = convert (signed_type (TREE_TYPE (arg1)), arg1);
6552 return pedantic_non_lvalue
6553 (negate_expr (convert (type,
6554 fold (build1 (ABS_EXPR,
6561 /* If this is A != 0 ? A : 0, this is simply A. For ==, it is
6564 if (integer_zerop (TREE_OPERAND (arg0, 1)) && integer_zerop (arg2))
6566 if (comp_code == NE_EXPR)
6567 return pedantic_non_lvalue (convert (type, arg1));
6568 else if (comp_code == EQ_EXPR)
6569 return pedantic_non_lvalue (convert (type, integer_zero_node));
6572 /* If this is A op B ? A : B, this is either A, B, min (A, B),
6573 or max (A, B), depending on the operation. */
6575 if (operand_equal_for_comparison_p (TREE_OPERAND (arg0, 1),
6576 arg2, TREE_OPERAND (arg0, 0)))
6578 tree comp_op0 = TREE_OPERAND (arg0, 0);
6579 tree comp_op1 = TREE_OPERAND (arg0, 1);
6580 tree comp_type = TREE_TYPE (comp_op0);
6582 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
6583 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
6589 return pedantic_non_lvalue (convert (type, arg2));
6591 return pedantic_non_lvalue (convert (type, arg1));
6594 /* In C++ a ?: expression can be an lvalue, so put the
6595 operand which will be used if they are equal first
6596 so that we can convert this back to the
6597 corresponding COND_EXPR. */
6598 return pedantic_non_lvalue
6599 (convert (type, fold (build (MIN_EXPR, comp_type,
6600 (comp_code == LE_EXPR
6601 ? comp_op0 : comp_op1),
6602 (comp_code == LE_EXPR
6603 ? comp_op1 : comp_op0)))));
6607 return pedantic_non_lvalue
6608 (convert (type, fold (build (MAX_EXPR, comp_type,
6609 (comp_code == GE_EXPR
6610 ? comp_op0 : comp_op1),
6611 (comp_code == GE_EXPR
6612 ? comp_op1 : comp_op0)))));
6619 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
6620 we might still be able to simplify this. For example,
6621 if C1 is one less or one more than C2, this might have started
6622 out as a MIN or MAX and been transformed by this function.
6623 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
6625 if (INTEGRAL_TYPE_P (type)
6626 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
6627 && TREE_CODE (arg2) == INTEGER_CST)
6631 /* We can replace A with C1 in this case. */
6632 arg1 = convert (type, TREE_OPERAND (arg0, 1));
6633 t = build (code, type, TREE_OPERAND (t, 0), arg1,
6634 TREE_OPERAND (t, 2));
6638 /* If C1 is C2 + 1, this is min(A, C2). */
6639 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type), 1)
6640 && operand_equal_p (TREE_OPERAND (arg0, 1),
6641 const_binop (PLUS_EXPR, arg2,
6642 integer_one_node, 0), 1))
6643 return pedantic_non_lvalue
6644 (fold (build (MIN_EXPR, type, arg1, arg2)));
6648 /* If C1 is C2 - 1, this is min(A, C2). */
6649 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type), 1)
6650 && operand_equal_p (TREE_OPERAND (arg0, 1),
6651 const_binop (MINUS_EXPR, arg2,
6652 integer_one_node, 0), 1))
6653 return pedantic_non_lvalue
6654 (fold (build (MIN_EXPR, type, arg1, arg2)));
6658 /* If C1 is C2 - 1, this is max(A, C2). */
6659 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type), 1)
6660 && operand_equal_p (TREE_OPERAND (arg0, 1),
6661 const_binop (MINUS_EXPR, arg2,
6662 integer_one_node, 0), 1))
6663 return pedantic_non_lvalue
6664 (fold (build (MAX_EXPR, type, arg1, arg2)));
6668 /* If C1 is C2 + 1, this is max(A, C2). */
6669 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type), 1)
6670 && operand_equal_p (TREE_OPERAND (arg0, 1),
6671 const_binop (PLUS_EXPR, arg2,
6672 integer_one_node, 0), 1))
6673 return pedantic_non_lvalue
6674 (fold (build (MAX_EXPR, type, arg1, arg2)));
6683 /* If the second operand is simpler than the third, swap them
6684 since that produces better jump optimization results. */
6685 if ((TREE_CONSTANT (arg1) || DECL_P (arg1)
6686 || TREE_CODE (arg1) == SAVE_EXPR)
6687 && ! (TREE_CONSTANT (TREE_OPERAND (t, 2))
6688 || DECL_P (TREE_OPERAND (t, 2))
6689 || TREE_CODE (TREE_OPERAND (t, 2)) == SAVE_EXPR))
6691 /* See if this can be inverted. If it can't, possibly because
6692 it was a floating-point inequality comparison, don't do
6694 tem = invert_truthvalue (arg0);
6696 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
6698 t = build (code, type, tem,
6699 TREE_OPERAND (t, 2), TREE_OPERAND (t, 1));
6701 /* arg1 should be the first argument of the new T. */
6702 arg1 = TREE_OPERAND (t, 1);
6707 /* Convert A ? 1 : 0 to simply A. */
6708 if (integer_onep (TREE_OPERAND (t, 1))
6709 && integer_zerop (TREE_OPERAND (t, 2))
6710 /* If we try to convert TREE_OPERAND (t, 0) to our type, the
6711 call to fold will try to move the conversion inside
6712 a COND, which will recurse. In that case, the COND_EXPR
6713 is probably the best choice, so leave it alone. */
6714 && type == TREE_TYPE (arg0))
6715 return pedantic_non_lvalue (arg0);
6717 /* Look for expressions of the form A & 2 ? 2 : 0. The result of this
6718 operation is simply A & 2. */
6720 if (integer_zerop (TREE_OPERAND (t, 2))
6721 && TREE_CODE (arg0) == NE_EXPR
6722 && integer_zerop (TREE_OPERAND (arg0, 1))
6723 && integer_pow2p (arg1)
6724 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
6725 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
6727 return pedantic_non_lvalue (convert (type, TREE_OPERAND (arg0, 0)));
6732 /* When pedantic, a compound expression can be neither an lvalue
6733 nor an integer constant expression. */
6734 if (TREE_SIDE_EFFECTS (arg0) || pedantic)
6736 /* Don't let (0, 0) be null pointer constant. */
6737 if (integer_zerop (arg1))
6738 return build1 (NOP_EXPR, type, arg1);
6739 return convert (type, arg1);
6743 return build_complex (type, arg0, arg1);
6747 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
6749 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
6750 return omit_one_operand (type, TREE_OPERAND (arg0, 0),
6751 TREE_OPERAND (arg0, 1));
6752 else if (TREE_CODE (arg0) == COMPLEX_CST)
6753 return TREE_REALPART (arg0);
6754 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
6755 return fold (build (TREE_CODE (arg0), type,
6756 fold (build1 (REALPART_EXPR, type,
6757 TREE_OPERAND (arg0, 0))),
6758 fold (build1 (REALPART_EXPR,
6759 type, TREE_OPERAND (arg0, 1)))));
6763 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
6764 return convert (type, integer_zero_node);
6765 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
6766 return omit_one_operand (type, TREE_OPERAND (arg0, 1),
6767 TREE_OPERAND (arg0, 0));
6768 else if (TREE_CODE (arg0) == COMPLEX_CST)
6769 return TREE_IMAGPART (arg0);
6770 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
6771 return fold (build (TREE_CODE (arg0), type,
6772 fold (build1 (IMAGPART_EXPR, type,
6773 TREE_OPERAND (arg0, 0))),
6774 fold (build1 (IMAGPART_EXPR, type,
6775 TREE_OPERAND (arg0, 1)))));
6778 /* Pull arithmetic ops out of the CLEANUP_POINT_EXPR where
6780 case CLEANUP_POINT_EXPR:
6781 if (! has_cleanups (arg0))
6782 return TREE_OPERAND (t, 0);
6785 enum tree_code code0 = TREE_CODE (arg0);
6786 int kind0 = TREE_CODE_CLASS (code0);
6787 tree arg00 = TREE_OPERAND (arg0, 0);
6790 if (kind0 == '1' || code0 == TRUTH_NOT_EXPR)
6791 return fold (build1 (code0, type,
6792 fold (build1 (CLEANUP_POINT_EXPR,
6793 TREE_TYPE (arg00), arg00))));
6795 if (kind0 == '<' || kind0 == '2'
6796 || code0 == TRUTH_ANDIF_EXPR || code0 == TRUTH_ORIF_EXPR
6797 || code0 == TRUTH_AND_EXPR || code0 == TRUTH_OR_EXPR
6798 || code0 == TRUTH_XOR_EXPR)
6800 arg01 = TREE_OPERAND (arg0, 1);
6802 if (TREE_CONSTANT (arg00)
6803 || ((code0 == TRUTH_ANDIF_EXPR || code0 == TRUTH_ORIF_EXPR)
6804 && ! has_cleanups (arg00)))
6805 return fold (build (code0, type, arg00,
6806 fold (build1 (CLEANUP_POINT_EXPR,
6807 TREE_TYPE (arg01), arg01))));
6809 if (TREE_CONSTANT (arg01))
6810 return fold (build (code0, type,
6811 fold (build1 (CLEANUP_POINT_EXPR,
6812 TREE_TYPE (arg00), arg00)),
6820 /* Check for a built-in function. */
6821 if (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR
6822 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (expr, 0), 0))
6824 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (expr, 0), 0)))
6826 tree tmp = fold_builtin (expr);
6834 } /* switch (code) */
6837 /* Determine if first argument is a multiple of second argument. Return 0 if
6838 it is not, or we cannot easily determine it to be.
6840 An example of the sort of thing we care about (at this point; this routine
6841 could surely be made more general, and expanded to do what the *_DIV_EXPR's
6842 fold cases do now) is discovering that
6844 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
6850 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
6852 This code also handles discovering that
6854 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
6856 is a multiple of 8 so we don't have to worry about dealing with a
6859 Note that we *look* inside a SAVE_EXPR only to determine how it was
6860 calculated; it is not safe for fold to do much of anything else with the
6861 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
6862 at run time. For example, the latter example above *cannot* be implemented
6863 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
6864 evaluation time of the original SAVE_EXPR is not necessarily the same at
6865 the time the new expression is evaluated. The only optimization of this
6866 sort that would be valid is changing
6868 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
6872 SAVE_EXPR (I) * SAVE_EXPR (J)
6874 (where the same SAVE_EXPR (J) is used in the original and the
6875 transformed version). */
/* NOTE(review): elided fragment -- the `static int' header, the K&R
   parameter declarations, several switch case labels, braces, and the
   final arguments of the last const_binop call fell outside the
   extracted lines.  Comments below are hedged accordingly.  */
6878 multiple_of_p (type, top, bottom)
/* Anything is trivially a multiple of itself.  */
6883 if (operand_equal_p (top, bottom, 0))
/* Only plain integer types are analyzed; everything else gives up
   (returns 0 -- the return itself is in an elided line).  */
6886 if (TREE_CODE (type) != INTEGER_TYPE)
6889 switch (TREE_CODE (top))
/* For this (elided) case label, ONE operand being a multiple of
   BOTTOM suffices -- presumably a product node; TODO confirm the
   label against the full source.  */
6892 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
6893 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
/* For this (elided) case label, BOTH operands must be multiples --
   presumably a sum/difference node; TODO confirm the label.  */
6897 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
6898 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
/* Shift-like case: only a constant shift count can be analyzed.
   Rewrites the shift as an equivalent power-of-two constant and
   recurses on it.  */
6901 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
6905 op1 = TREE_OPERAND (top, 1);
6906 /* const_binop may not detect overflow correctly,
6907 so check for it explicitly here. */
/* Require the count to fit strictly below the precision of
   size_one_node's type (high word zero), so the 1 << count below
   cannot shift out of range.  */
6908 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
6909 > TREE_INT_CST_LOW (op1)
6910 && TREE_INT_CST_HIGH (op1) == 0
6911 && 0 != (t1 = convert (type,
6912 const_binop (LSHIFT_EXPR, size_one_node,
6914 && ! TREE_OVERFLOW (t1))
/* T1 is now the constant 1 << op1; TOP is a multiple of BOTTOM
   iff that constant is.  */
6915 return multiple_of_p (type, t1, bottom);
/* Conversion case (label elided).  */
6920 /* Can't handle conversions from non-integral or wider integral type. */
6921 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
6922 || (TYPE_PRECISION (type)
6923 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
6926 /* .. fall through ... */
/* Look through the wrapper node and test its operand.  */
6929 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
/* Constant case (label elided): both ends must be INTEGER_CSTs, and
   for unsigned types negative constants are rejected rather than
   reinterpreted.  */
6932 if (TREE_CODE (bottom) != INTEGER_CST
6933 || (TREE_UNSIGNED (type)
6934 && (tree_int_cst_sgn (top) < 0
6935 || tree_int_cst_sgn (bottom) < 0)))
/* TOP is a multiple of BOTTOM iff TOP % BOTTOM == 0 (the remaining
   const_binop arguments are in elided lines).  */
6937 return integer_zerop (const_binop (TRUNC_MOD_EXPR,
6945 /* Return true if `t' is known to be non-negative. */
/* NOTE(review): elided fragment -- the return-type line, parameter
   declaration, several case labels, and the closing `return 0;' of
   this function fell outside the extracted lines.  Where a case label
   is missing, the comment is hedged.  */
6948 tree_expr_nonnegative_p (t)
6951 switch (TREE_CODE (t))
/* Integer constant (label elided): nonnegative iff its sign is.  */
6957 return tree_int_cst_sgn (t) >= 0;
6958 case TRUNC_DIV_EXPR:
6960 case FLOOR_DIV_EXPR:
6961 case ROUND_DIV_EXPR:
/* A quotient is known nonnegative only when both dividend and
   divisor are.  */
6962 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
6963 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
6964 case TRUNC_MOD_EXPR:
6966 case FLOOR_MOD_EXPR:
6967 case ROUND_MOD_EXPR:
/* Only the dividend's sign is examined for these remainder codes.  */
6968 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
/* Conditional-style node (label elided): both selectable arms,
   operands 1 and 2, must be nonnegative.  */
6970 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1))
6971 && tree_expr_nonnegative_p (TREE_OPERAND (t, 2));
/* Label elided: result comes from operand 1 -- presumably a
   COMPOUND_EXPR-like node; TODO confirm against the full source.  */
6973 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
/* Label elided: both operands must be nonnegative (a min-like
   node would need this); TODO confirm.  */
6975 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
6976 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
/* Label elided: either operand nonnegative suffices (a max-like
   node would have this property); TODO confirm.  */
6978 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
6979 || tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
/* Two more elided labels whose result is taken from operand 1.  */
6981 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
6983 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
/* Elided label: wrapper node, look through to operand 0.  */
6985 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
6986 case NON_LVALUE_EXPR:
/* NON_LVALUE_EXPR is transparent for sign purposes.  */
6987 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
/* RTL-valued node (label elided): defer to the RTL-level test.  */
6989 return rtl_expr_nonnegative_p (RTL_EXPR_RTL (t));
/* Default: any truth-valued code is 0 or 1, hence nonnegative.  */
6992 if (truth_value_p (TREE_CODE (t)))
6993 /* Truth values evaluate to 0 or 1, which is nonnegative. */
6996 /* We don't know sign of `t', so be conservative and return false. */
7001 /* Return true if `r' is known to be non-negative.
7002 Only handles constants at the moment. */
7005 rtl_expr_nonnegative_p (r)
7008 switch (GET_CODE (r))
7011 return INTVAL (r) >= 0;
7014 if (GET_MODE (r) == VOIDmode)
7015 return CONST_DOUBLE_HIGH (r) >= 0;
7023 units = CONST_VECTOR_NUNITS (r);
7025 for (i = 0; i < units; ++i)
7027 elt = CONST_VECTOR_ELT (r, i);
7028 if (!rtl_expr_nonnegative_p (elt))
7037 /* These are always nonnegative. */