1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987, 88, 92, 93, 94, 1995 Free Software Foundation, Inc.
4 This file is part of GNU CC.
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA. */
20 /*@@ This file should be rewritten to use an arbitrary precision
21 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
22 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
23 @@ The routines that translate from the ap rep should
24 @@ warn if precision et al. is lost.
25 @@ This would also make life easier when this technology is used
26 @@ for cross-compilers. */
29 /* The entry points in this file are fold, size_int and size_binop.
31 fold takes a tree as argument and returns a simplified tree.
33 size_binop takes a tree code for an arithmetic operation
34 and two operands that are trees, and produces a tree for the
35 result, assuming the type comes from `sizetype'.
37 size_int takes an integer value, and creates a tree constant
38 with type from `sizetype'. */
/* Forward declarations for the static helpers defined later in this file.
   PROTO is the usual GNU compatibility macro: it expands to an ANSI
   prototype on ANSI compilers and to an empty K&R parameter list otherwise.
   NOTE(review): several prototypes in this listing are visibly truncated
   (e.g. div_and_round_double, optimize_bit_field_compare and
   decode_field_reference end without a closing parenthesis); the dropped
   continuation lines should be restored from the original file.  */
46 /* Handle floating overflow for `const_binop'. */
47 static jmp_buf float_error;
49 static void encode PROTO((HOST_WIDE_INT *, HOST_WIDE_INT, HOST_WIDE_INT));
50 static void decode PROTO((HOST_WIDE_INT *, HOST_WIDE_INT *, HOST_WIDE_INT *));
51 int div_and_round_double PROTO((enum tree_code, int, HOST_WIDE_INT,
52 HOST_WIDE_INT, HOST_WIDE_INT,
53 HOST_WIDE_INT, HOST_WIDE_INT *,
54 HOST_WIDE_INT *, HOST_WIDE_INT *,
56 static int split_tree PROTO((tree, enum tree_code, tree *, tree *, int *));
57 static tree const_binop PROTO((enum tree_code, tree, tree, int));
58 static tree fold_convert PROTO((tree, tree));
59 static enum tree_code invert_tree_comparison PROTO((enum tree_code));
60 static enum tree_code swap_tree_comparison PROTO((enum tree_code));
61 static int truth_value_p PROTO((enum tree_code));
62 static int operand_equal_for_comparison_p PROTO((tree, tree, tree));
63 static int twoval_comparison_p PROTO((tree, tree *, tree *, int *));
64 static tree eval_subst PROTO((tree, tree, tree, tree, tree));
65 static tree omit_one_operand PROTO((tree, tree, tree));
66 static tree pedantic_omit_one_operand PROTO((tree, tree, tree));
67 static tree distribute_bit_expr PROTO((enum tree_code, tree, tree, tree));
68 static tree make_bit_field_ref PROTO((tree, tree, int, int, int));
69 static tree optimize_bit_field_compare PROTO((enum tree_code, tree,
71 static tree decode_field_reference PROTO((tree, int *, int *,
72 enum machine_mode *, int *,
74 static int all_ones_mask_p PROTO((tree, int));
75 static int simple_operand_p PROTO((tree));
76 static tree range_test PROTO((enum tree_code, tree, enum tree_code,
77 enum tree_code, tree, tree, tree));
78 static tree unextend PROTO((tree, int, int));
79 static tree fold_truthop PROTO((enum tree_code, tree, tree, tree));
80 static tree strip_compound_expr PROTO((tree, tree));
/* Helper macros for the doubleword (two HOST_WIDE_INT) constant-folding
   arithmetic implemented below.  */
86 /* Yield nonzero if a signed left shift of A by B bits overflows. */
87 #define left_shift_overflows(a, b) ((a) != ((a) << (b)) >> (b))
89 /* Suppose A1 + B1 = SUM1, using 2's complement arithmetic ignoring overflow.
90 Suppose A, B and SUM have the same respective signs as A1, B1, and SUM1.
91 Then this yields nonzero if overflow occurred during the addition.
92 Overflow occurs if A and B have the same sign, but A and SUM differ in sign.
93 Use `^' to test whether signs differ, and `< 0' to isolate the sign. */
94 #define overflow_sum_sign(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
96 /* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
97 We do that by representing the two-word integer in 4 words, with only
98 HOST_BITS_PER_WIDE_INT/2 bits stored in each word, as a positive number. */
/* NOTE(review): the `#define LOWPART(x) \' header line is missing from
   this listing; the next line is its continuation — the mask that keeps
   the low HOST_BITS_PER_WIDE_INT/2 bits of X.  */
101 ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT/2)) - 1))
102 #define HIGHPART(x) \
103 ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT/2)
104 #define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT/2)
106 /* Unpack a two-word integer into 4 words.
107 LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
108 WORDS points to the array of HOST_WIDE_INTs. */
/* Old-style (K&R) definition.  NOTE(review): the `static void' line and
   the function's braces are missing from this listing.  */
111 encode (words, low, hi)
112 HOST_WIDE_INT *words;
113 HOST_WIDE_INT low, hi;
/* Each half-word becomes one non-negative base-BASE digit, least
   significant first, so WORDS is a little-endian 4-digit number.  */
115 words[0] = LOWPART (low);
116 words[1] = HIGHPART (low);
117 words[2] = LOWPART (hi);
118 words[3] = HIGHPART (hi);
121 /* Pack an array of 4 words into a two-word integer.
122 WORDS points to the array of words.
123 The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces. */
/* Inverse of encode: recombine each pair of base-BASE digits into one
   HOST_WIDE_INT.  NOTE(review): the `static void' line and the braces
   are missing from this listing.  */
126 decode (words, low, hi)
127 HOST_WIDE_INT *words;
128 HOST_WIDE_INT *low, *hi;
130 *low = words[0] | words[1] * BASE;
131 *hi = words[2] | words[3] * BASE;
134 /* Make the integer constant T valid for its type
135 by setting to 0 or 1 all the bits in the constant
136 that don't belong in the type.
137 Yield 1 if a signed overflow occurs, 0 otherwise.
138 If OVERFLOW is nonzero, a signed overflow has already occurred
139 in calculating T, so propagate it.
141 Make the real constant T valid for its type by calling CHECK_FLOAT_VALUE,
/* NOTE(review): many lines of this function (return type, braces, several
   returns, the `prec' declaration) are missing from this listing.  */
145 force_fit_type (t, overflow)
149 HOST_WIDE_INT low, high;
152 if (TREE_CODE (t) == REAL_CST)
154 #ifdef CHECK_FLOAT_VALUE
155 CHECK_FLOAT_VALUE (TYPE_MODE (TREE_TYPE (t)), TREE_REAL_CST (t),
161 else if (TREE_CODE (t) != INTEGER_CST)
/* Remember the original words so the overflow test at the end can
   detect whether masking/sign-extension changed the value.  */
164 low = TREE_INT_CST_LOW (t);
165 high = TREE_INT_CST_HIGH (t);
167 if (TREE_CODE (TREE_TYPE (t)) == POINTER_TYPE)
170 prec = TYPE_PRECISION (TREE_TYPE (t));
172 /* First clear all bits that are beyond the type's precision. */
174 if (prec == 2 * HOST_BITS_PER_WIDE_INT)
176 else if (prec > HOST_BITS_PER_WIDE_INT)
178 TREE_INT_CST_HIGH (t)
179 &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
184 TREE_INT_CST_HIGH (t) = 0;
184 if (prec < HOST_BITS_PER_WIDE_INT)
185 TREE_INT_CST_LOW (t) &= ~((HOST_WIDE_INT) (-1) << prec);
188 /* Unsigned types do not suffer sign extension or overflow. */
189 if (TREE_UNSIGNED (TREE_TYPE (t)))
192 /* If the value's sign bit is set, extend the sign. */
193 if (prec != 2 * HOST_BITS_PER_WIDE_INT
194 && (prec > HOST_BITS_PER_WIDE_INT
195 ? (TREE_INT_CST_HIGH (t)
196 & ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)))
197 : TREE_INT_CST_LOW (t) & ((HOST_WIDE_INT) 1 << (prec - 1))))
199 /* Value is negative:
200 set to 1 all the bits that are outside this type's precision. */
201 if (prec > HOST_BITS_PER_WIDE_INT)
203 TREE_INT_CST_HIGH (t)
204 |= ((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
208 TREE_INT_CST_HIGH (t) = -1;
209 if (prec < HOST_BITS_PER_WIDE_INT)
210 TREE_INT_CST_LOW (t) |= ((HOST_WIDE_INT) (-1) << prec);
214 /* Yield nonzero if signed overflow occurred. */
216 ((overflow | (low ^ TREE_INT_CST_LOW (t)) | (high ^ TREE_INT_CST_HIGH (t)))
220 /* Add two doubleword integers with doubleword result.
221 Each argument is given as two `HOST_WIDE_INT' pieces.
222 One argument is L1 and H1; the other, L2 and H2.
223 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
/* NOTE(review): the declaration of the locals `l' and `h', the line
   computing `l = l1 + l2', and the stores into *LV/*HV are missing from
   this listing.  The `< l1' comparison below detects carry out of the
   low word (unsigned wrap-around).  */
226 add_double (l1, h1, l2, h2, lv, hv)
227 HOST_WIDE_INT l1, h1, l2, h2;
228 HOST_WIDE_INT *lv, *hv;
233 h = h1 + h2 + ((unsigned HOST_WIDE_INT) l < l1);
/* Signed-overflow indication, per overflow_sum_sign above.  */
237 return overflow_sum_sign (h1, h2, h);
240 /* Negate a doubleword integer with doubleword result.
241 Return nonzero if the operation overflows, assuming it's signed.
242 The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
243 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
/* NOTE(review): the body lines computing *LV/*HV are missing from this
   listing; only the overflow test survives.  The test reports overflow
   when both the result high word and H1 have the sign bit set, which
   presumably corresponds to negating the most negative value — confirm
   against the original file.  */
246 neg_double (l1, h1, lv, hv)
247 HOST_WIDE_INT l1, h1;
248 HOST_WIDE_INT *lv, *hv;
254 return (*hv & h1) < 0;
264 /* Multiply two doubleword integers with doubleword result.
265 Return nonzero if the operation overflows, assuming it's signed.
266 Each argument is given as two `HOST_WIDE_INT' pieces.
267 One argument is L1 and H1; the other, L2 and H2.
268 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
/* Schoolbook multiplication on the 4-digit base-BASE representation
   produced by encode; the 8-digit product lands in `prod'.
   NOTE(review): loop braces and the computation of `k' are missing from
   this listing.  */
271 mul_double (l1, h1, l2, h2, lv, hv)
272 HOST_WIDE_INT l1, h1, l2, h2;
273 HOST_WIDE_INT *lv, *hv;
275 HOST_WIDE_INT arg1[4];
276 HOST_WIDE_INT arg2[4];
277 HOST_WIDE_INT prod[4 * 2];
278 register unsigned HOST_WIDE_INT carry;
279 register int i, j, k;
280 HOST_WIDE_INT toplow, tophigh, neglow, neghigh;
282 encode (arg1, l1, h1);
283 encode (arg2, l2, h2);
285 bzero ((char *) prod, sizeof prod);
287 for (i = 0; i < 4; i++)
290 for (j = 0; j < 4; j++)
293 /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000. */
294 carry += arg1[i] * arg2[j];
295 /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF. */
297 prod[k] = LOWPART (carry);
298 carry = HIGHPART (carry);
303 decode (prod, lv, hv); /* This ignores prod[4] through prod[4*2-1] */
305 /* Check for overflow by calculating the top half of the answer in full;
306 it should agree with the low half's sign bit. */
307 decode (prod+4, &toplow, &tophigh);
/* Correct the unsigned top half for negative operands: each negative
   factor contributes -other_factor to the true signed top half.
   NOTE(review): the `if (h1 < 0)' / `if (h2 < 0)' guard lines are
   missing from this listing.  */
310 neg_double (l2, h2, &neglow, &neghigh);
311 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
315 neg_double (l1, h1, &neglow, &neghigh);
316 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
/* No overflow iff the top half is the sign-extension of the low half:
   all zeros for a non-negative result, all ones for a negative one.  */
318 return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
321 /* Shift the doubleword integer in L1, H1 left by COUNT places
322 keeping only PREC bits of result.
323 Shift right if COUNT is negative.
324 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
325 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
/* NOTE(review): the `void' line, the braces, the declarations of PREC
   and ARITH, and the `*lv = 0;' branch are missing from this listing.  */
328 lshift_double (l1, h1, count, prec, lv, hv, arith)
329 HOST_WIDE_INT l1, h1, count;
331 HOST_WIDE_INT *lv, *hv;
/* A negative count means a right shift.  */
336 rshift_double (l1, h1, - count, prec, lv, hv, arith);
/* Reduce the shift count modulo the precision, as rshift_double does.
   FIX(review): this line used `& prec', which is not a modulo (e.g.
   with prec == 64, any count below 64 would be zeroed); use `%' to
   match rshift_double.  */
341 count = (unsigned HOST_WIDE_INT) count % prec;
343 if (count >= HOST_BITS_PER_WIDE_INT)
/* The whole low word shifts into the high word.  */
345 *hv = (unsigned HOST_WIDE_INT) l1 << count - HOST_BITS_PER_WIDE_INT;
/* The two-step `>> (w - count - 1) >> 1' avoids an undefined shift by
   the full word width when count == 0.  */
350 *hv = (((unsigned HOST_WIDE_INT) h1 << count)
351 | ((unsigned HOST_WIDE_INT) l1 >> HOST_BITS_PER_WIDE_INT - count - 1 >> 1));
352 *lv = (unsigned HOST_WIDE_INT) l1 << count;
356 /* Shift the doubleword integer in L1, H1 right by COUNT places
357 keeping only PREC bits of result. COUNT must be positive.
358 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
359 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
/* NOTE(review): the `void' line, braces, the PREC/ARITH declarations and
   the tail of the signmask initializer are missing from this listing.  */
362 rshift_double (l1, h1, count, prec, lv, hv, arith)
363 HOST_WIDE_INT l1, h1, count;
365 HOST_WIDE_INT *lv, *hv;
/* signmask is all ones when doing an arithmetic shift of a negative
   value (replicates the sign bit), otherwise zero.  */
368 unsigned HOST_WIDE_INT signmask;
370 ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
374 count = (unsigned HOST_WIDE_INT) count % prec;
376 if (count >= HOST_BITS_PER_WIDE_INT)
/* The split `<< (2w - count - 1) << 1' / `<< (w - count - 1) << 1'
   idioms below avoid undefined shifts by a full word width.  */
379 *lv = ((signmask << 2 * HOST_BITS_PER_WIDE_INT - count - 1 << 1)
380 | ((unsigned HOST_WIDE_INT) h1 >> count - HOST_BITS_PER_WIDE_INT));
384 *lv = (((unsigned HOST_WIDE_INT) l1 >> count)
385 | ((unsigned HOST_WIDE_INT) h1 << HOST_BITS_PER_WIDE_INT - count - 1 << 1));
386 *hv = ((signmask << HOST_BITS_PER_WIDE_INT - count)
387 | ((unsigned HOST_WIDE_INT) h1 >> count));
391 /* Rotate the doubleword integer in L1, H1 left by COUNT places
392 keeping only PREC bits of result.
393 Rotate right if COUNT is negative.
394 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
/* A rotate is the OR of a left shift by COUNT and a right shift by
   PREC - COUNT.  NOTE(review): the `void' line, braces, the negative-
   count branch and the final ORs into *LV/*HV are missing from this
   listing.  */
397 lrotate_double (l1, h1, count, prec, lv, hv)
398 HOST_WIDE_INT l1, h1, count;
400 HOST_WIDE_INT *lv, *hv;
402 HOST_WIDE_INT s1l, s1h, s2l, s2h;
408 lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
409 rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
414 /* Rotate the doubleword integer in L1, H1 right by COUNT places
415 keeping only PREC bits of result. COUNT must be positive.
416 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
/* Mirror image of lrotate_double: right shift by COUNT OR'd with a left
   shift by PREC - COUNT.  (The original comment said "left"; the
   function name and the rshift-first body show this is the right
   rotate.)  NOTE(review): the `void' line, braces and the final ORs
   into *LV/*HV are missing from this listing.  */
419 rrotate_double (l1, h1, count, prec, lv, hv)
420 HOST_WIDE_INT l1, h1, count;
422 HOST_WIDE_INT *lv, *hv;
424 HOST_WIDE_INT s1l, s1h, s2l, s2h;
430 rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
431 lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
436 /* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
437 for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
438 CODE is a tree code for a kind of division, one of
439 TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
441 It controls how the quotient is rounded to an integer.
442 Return nonzero if the operation overflows.
443 UNS nonzero says do unsigned division. */
/* NOTE(review): many structural lines of this function (the `int' return
   line, braces, the switch header, `break's, abort on zero divide, the
   quo_neg computation) are missing from this listing; restore them from
   the original file.  The algorithm is Knuth's Algorithm D on 4 base-BASE
   digits, preceded by fast paths for single-precision and small-divisor
   cases.  */
446 div_and_round_double (code, uns,
447 lnum_orig, hnum_orig, lden_orig, hden_orig,
448 lquo, hquo, lrem, hrem)
451 HOST_WIDE_INT lnum_orig, hnum_orig; /* num == numerator == dividend */
452 HOST_WIDE_INT lden_orig, hden_orig; /* den == denominator == divisor */
453 HOST_WIDE_INT *lquo, *hquo, *lrem, *hrem;
456 HOST_WIDE_INT num[4 + 1]; /* extra element for scaling. */
457 HOST_WIDE_INT den[4], quo[4];
459 unsigned HOST_WIDE_INT work;
460 register int carry = 0;
461 HOST_WIDE_INT lnum = lnum_orig;
462 HOST_WIDE_INT hnum = hnum_orig;
463 HOST_WIDE_INT lden = lden_orig;
464 HOST_WIDE_INT hden = hden_orig;
467 if ((hden == 0) && (lden == 0))
470 /* calculate quotient sign and convert operands to unsigned. */
476 /* (minimum integer) / (-1) is the only overflow case. */
477 if (neg_double (lnum, hnum, &lnum, &hnum) && (lden & hden) == -1)
483 neg_double (lden, hden, &lden, &hden);
487 if (hnum == 0 && hden == 0)
488 { /* single precision */
490 /* This unsigned division rounds toward zero. */
491 *lquo = lnum / (unsigned HOST_WIDE_INT) lden;
496 { /* trivial case: dividend < divisor */
497 /* hden != 0 already checked. */
504 bzero ((char *) quo, sizeof quo);
506 bzero ((char *) num, sizeof num); /* to zero 9th element */
507 bzero ((char *) den, sizeof den);
509 encode (num, lnum, hnum);
510 encode (den, lden, hden);
512 /* Special code for when the divisor < BASE. */
513 if (hden == 0 && lden < BASE)
515 /* hnum != 0 already checked. */
516 for (i = 4 - 1; i >= 0; i--)
518 work = num[i] + carry * BASE;
519 quo[i] = work / (unsigned HOST_WIDE_INT) lden;
520 carry = work % (unsigned HOST_WIDE_INT) lden;
525 /* Full double precision division,
526 with thanks to Don Knuth's "Seminumerical Algorithms". */
527 int quo_est, scale, num_hi_sig, den_hi_sig;
529 /* Find the highest non-zero divisor digit. */
530 for (i = 4 - 1; ; i--)
536 /* Insure that the first digit of the divisor is at least BASE/2.
537 This is required by the quotient digit estimation algorithm. */
539 scale = BASE / (den[den_hi_sig] + 1);
540 if (scale > 1) { /* scale divisor and dividend */
542 for (i = 0; i <= 4 - 1; i++) {
543 work = (num[i] * scale) + carry;
544 num[i] = LOWPART (work);
545 carry = HIGHPART (work);
548 for (i = 0; i <= 4 - 1; i++) {
549 work = (den[i] * scale) + carry;
550 den[i] = LOWPART (work);
551 carry = HIGHPART (work);
552 if (den[i] != 0) den_hi_sig = i;
559 for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--) {
560 /* guess the next quotient digit, quo_est, by dividing the first
561 two remaining dividend digits by the high order quotient digit.
562 quo_est is never low and is at most 2 high. */
563 unsigned HOST_WIDE_INT tmp;
565 num_hi_sig = i + den_hi_sig + 1;
566 work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
567 if (num[num_hi_sig] != den[den_hi_sig])
568 quo_est = work / den[den_hi_sig];
572 /* refine quo_est so it's usually correct, and at most one high. */
573 tmp = work - quo_est * den[den_hi_sig];
575 && den[den_hi_sig - 1] * quo_est > (tmp * BASE + num[num_hi_sig - 2]))
578 /* Try QUO_EST as the quotient digit, by multiplying the
579 divisor by QUO_EST and subtracting from the remaining dividend.
580 Keep in mind that QUO_EST is the I - 1st digit. */
583 for (j = 0; j <= den_hi_sig; j++)
585 work = quo_est * den[j] + carry;
586 carry = HIGHPART (work);
587 work = num[i + j] - LOWPART (work);
588 num[i + j] = LOWPART (work);
589 carry += HIGHPART (work) != 0;
592 /* if quo_est was high by one, then num[i] went negative and
593 we need to correct things. */
595 if (num[num_hi_sig] < carry)
598 carry = 0; /* add divisor back in */
599 for (j = 0; j <= den_hi_sig; j++)
601 work = num[i + j] + den[j] + carry;
602 carry = HIGHPART (work);
603 num[i + j] = LOWPART (work);
605 num [num_hi_sig] += carry;
608 /* store the quotient digit. */
613 decode (quo, lquo, hquo);
616 /* if result is negative, make it so. */
618 neg_double (*lquo, *hquo, lquo, hquo);
620 /* compute trial remainder: rem = num - (quo * den) */
621 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
622 neg_double (*lrem, *hrem, lrem, hrem);
623 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
628 case TRUNC_MOD_EXPR: /* round toward zero */
629 case EXACT_DIV_EXPR: /* for this one, it shouldn't matter */
633 case FLOOR_MOD_EXPR: /* round toward negative infinity */
634 if (quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio < 0 && rem != 0 */
637 add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
640 else return overflow;
644 case CEIL_MOD_EXPR: /* round toward positive infinity */
645 if (!quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio > 0 && rem != 0 */
647 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
650 else return overflow;
654 case ROUND_MOD_EXPR: /* round to closest integer */
656 HOST_WIDE_INT labs_rem = *lrem, habs_rem = *hrem;
657 HOST_WIDE_INT labs_den = lden, habs_den = hden, ltwice, htwice;
659 /* get absolute values */
660 if (*hrem < 0) neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
661 if (hden < 0) neg_double (lden, hden, &labs_den, &habs_den);
663 /* if (2 * abs (lrem) >= abs (lden)) */
664 mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
665 labs_rem, habs_rem, &ltwice, &htwice);
666 if (((unsigned HOST_WIDE_INT) habs_den
667 < (unsigned HOST_WIDE_INT) htwice)
668 || (((unsigned HOST_WIDE_INT) habs_den
669 == (unsigned HOST_WIDE_INT) htwice)
670 && ((unsigned HOST_WIDE_INT) labs_den
671 < (unsigned HOST_WIDE_INT) ltwice)))
675 add_double (*lquo, *hquo,
676 (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
679 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
682 else return overflow;
690 /* compute true remainder: rem = num - (quo * den) */
691 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
692 neg_double (*lrem, *hrem, lrem, hrem);
693 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
697 #ifndef REAL_ARITHMETIC
698 /* Effectively truncate a real value to represent the nearest possible value
699 in a narrower mode. The result is actually represented in the same data
700 type as the argument, but its value is usually different.
702 A trap may occur during the FP operations and it is the responsibility
703 of the calling function to have a handler established. */
/* Thin wrapper over the target macro REAL_VALUE_TRUNCATE.
   NOTE(review): the return-type line, the ARG declaration and the braces
   are missing from this listing.  */
706 real_value_truncate (mode, arg)
707 enum machine_mode mode;
710 return REAL_VALUE_TRUNCATE (mode, arg);
/* Predicates over the host `double' bit pattern, used only when the
   target uses IEEE format.  Each inspects the IEEE-754 double fields
   through a union of two bitfield layouts (big- and little-endian) and
   picks the layout by testing the sign field of a known value.
   NOTE(review): the function headers (names, parameters), the union
   declarations, the `sign'/`mantissa2' fields and most braces are
   missing from this listing — presumably these are the target_isinf /
   target_isnan / target_negative family; confirm against the original
   file.  Exponent 2047 is the all-ones IEEE double exponent, marking
   infinity (zero mantissa) or NaN (nonzero mantissa).  */
713 #if TARGET_FLOAT_FORMAT == IEEE_FLOAT_FORMAT
715 /* Check for infinity in an IEEE double precision number. */
721 /* The IEEE 64-bit double format. */
726 unsigned exponent : 11;
727 unsigned mantissa1 : 20;
732 unsigned mantissa1 : 20;
733 unsigned exponent : 11;
739 if (u.big_endian.sign == 1)
742 return (u.big_endian.exponent == 2047
743 && u.big_endian.mantissa1 == 0
744 && u.big_endian.mantissa2 == 0);
749 return (u.little_endian.exponent == 2047
750 && u.little_endian.mantissa1 == 0
751 && u.little_endian.mantissa2 == 0);
755 /* Check whether an IEEE double precision number is a NaN. */
761 /* The IEEE 64-bit double format. */
766 unsigned exponent : 11;
767 unsigned mantissa1 : 20;
772 unsigned mantissa1 : 20;
773 unsigned exponent : 11;
779 if (u.big_endian.sign == 1)
782 return (u.big_endian.exponent == 2047
783 && (u.big_endian.mantissa1 != 0
784 || u.big_endian.mantissa2 != 0));
789 return (u.little_endian.exponent == 2047
790 && (u.little_endian.mantissa1 != 0
791 || u.little_endian.mantissa2 != 0));
795 /* Check for a negative IEEE double precision number. */
801 /* The IEEE 64-bit double format. */
806 unsigned exponent : 11;
807 unsigned mantissa1 : 20;
812 unsigned mantissa1 : 20;
813 unsigned exponent : 11;
819 if (u.big_endian.sign == 1)
822 return u.big_endian.sign;
827 return u.little_endian.sign;
830 #else /* Target not IEEE */
832 /* Let's assume other float formats don't have infinity.
833 (This can be overridden by redefining REAL_VALUE_ISINF.) */
841 /* Let's assume other float formats don't have NaNs.
842 (This can be overridden by redefining REAL_VALUE_ISNAN.) */
850 /* Let's assume other float formats don't have minus zero.
851 (This can be overridden by redefining REAL_VALUE_NEGATIVE.) */
858 #endif /* Target not IEEE */
859 #endif /* no REAL_ARITHMETIC */
861 /* Split a tree IN into a constant and a variable part
862 that could be combined with CODE to make IN.
863 CODE must be a commutative arithmetic operation.
864 Store the constant part into *CONP and the variable in &VARP.
865 Return 1 if this was done; zero means the tree IN did not decompose
868 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.
869 Therefore, we must tell the caller whether the variable part
870 was subtracted. We do this by storing 1 or -1 into *VARSIGNP.
871 The value stored is the coefficient for the variable term.
872 The constant term we return should always be added;
873 we negate it if necessary. */
/* NOTE(review): the `static int' line, the parameter declarations,
   braces and several `return' statements are missing from this
   listing.  */
876 split_tree (in, code, varp, conp, varsignp)
882 register tree outtype = TREE_TYPE (in);
886 /* Strip any conversions that don't change the machine mode. */
887 while ((TREE_CODE (in) == NOP_EXPR
888 || TREE_CODE (in) == CONVERT_EXPR)
889 && (TYPE_MODE (TREE_TYPE (in))
890 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (in, 0)))))
891 in = TREE_OPERAND (in, 0);
893 if (TREE_CODE (in) == code
894 || (! FLOAT_TYPE_P (TREE_TYPE (in))
895 /* We can associate addition and subtraction together
896 (even though the C standard doesn't say so)
897 for integers because the value is not affected.
898 For reals, the value might be affected, so we can't. */
899 && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
900 || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
/* Case 1: the first operand is a literal integer constant.  Note this
   local `code' deliberately shadows the parameter.  */
902 enum tree_code code = TREE_CODE (TREE_OPERAND (in, 0));
903 if (code == INTEGER_CST)
905 *conp = TREE_OPERAND (in, 0);
906 *varp = TREE_OPERAND (in, 1);
907 if (TYPE_MODE (TREE_TYPE (*varp)) != TYPE_MODE (outtype)
908 && TREE_TYPE (*varp) != outtype)
909 *varp = convert (outtype, *varp);
910 *varsignp = (TREE_CODE (in) == MINUS_EXPR) ? -1 : 1;
/* Case 2: the second operand is the constant part.  */
913 if (TREE_CONSTANT (TREE_OPERAND (in, 1)))
915 *conp = TREE_OPERAND (in, 1);
916 *varp = TREE_OPERAND (in, 0);
918 if (TYPE_MODE (TREE_TYPE (*varp)) != TYPE_MODE (outtype)
919 && TREE_TYPE (*varp) != outtype)
920 *varp = convert (outtype, *varp);
921 if (TREE_CODE (in) == MINUS_EXPR)
923 /* If operation is subtraction and constant is second,
924 must negate it to get an additive constant.
925 And this cannot be done unless it is a manifest constant.
926 It could also be the address of a static variable.
927 We cannot negate that, so give up. */
928 if (TREE_CODE (*conp) == INTEGER_CST)
929 /* Subtracting from integer_zero_node loses for long long. */
930 *conp = fold (build1 (NEGATE_EXPR, TREE_TYPE (*conp), *conp));
/* Case 3: the first operand is the (non-literal) constant part.  */
936 if (TREE_CONSTANT (TREE_OPERAND (in, 0)))
938 *conp = TREE_OPERAND (in, 0);
939 *varp = TREE_OPERAND (in, 1);
940 if (TYPE_MODE (TREE_TYPE (*varp)) != TYPE_MODE (outtype)
941 && TREE_TYPE (*varp) != outtype)
942 *varp = convert (outtype, *varp);
943 *varsignp = (TREE_CODE (in) == MINUS_EXPR) ? -1 : 1;
950 /* Combine two constants NUM and ARG2 under operation CODE
951 to produce a new constant.
952 We assume ARG1 and ARG2 have the same data type,
953 or at least are the same kind of constant and the same machine mode.
955 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
/* Dispatches on the kind of constant: INTEGER_CST, REAL_CST, COMPLEX_CST.
   NOTE(review): this listing drops most structural lines — the `static
   tree' line, braces, every `case' label of the integer switch, `break's
   and several `return t;' statements.  The surviving lines are annotated
   below assuming the usual case labels; confirm against the original
   file.  */
958 const_binop (code, arg1, arg2, notrunc)
960 register tree arg1, arg2;
963 if (TREE_CODE (arg1) == INTEGER_CST)
965 register HOST_WIDE_INT int1l = TREE_INT_CST_LOW (arg1);
966 register HOST_WIDE_INT int1h = TREE_INT_CST_HIGH (arg1);
967 HOST_WIDE_INT int2l = TREE_INT_CST_LOW (arg2);
968 HOST_WIDE_INT int2h = TREE_INT_CST_HIGH (arg2);
969 HOST_WIDE_INT low, hi;
970 HOST_WIDE_INT garbagel, garbageh;
972 int uns = TREE_UNSIGNED (TREE_TYPE (arg1));
/* Bitwise operations work independently on each word.  */
978 t = build_int_2 (int1l | int2l, int1h | int2h);
982 t = build_int_2 (int1l ^ int2l, int1h ^ int2h);
986 t = build_int_2 (int1l & int2l, int1h & int2h);
990 t = build_int_2 (int1l & ~int2l, int1h & ~int2h);
996 /* It's unclear from the C standard whether shifts can overflow.
997 The following code ignores overflow; perhaps a C standard
998 interpretation ruling is needed. */
999 lshift_double (int1l, int1h, int2l,
1000 TYPE_PRECISION (TREE_TYPE (arg1)),
1003 t = build_int_2 (low, hi);
1004 TREE_TYPE (t) = TREE_TYPE (arg1);
1006 force_fit_type (t, 0);
1007 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
1008 TREE_CONSTANT_OVERFLOW (t)
1009 = TREE_CONSTANT_OVERFLOW (arg1) | TREE_CONSTANT_OVERFLOW (arg2);
1015 lrotate_double (int1l, int1h, int2l,
1016 TYPE_PRECISION (TREE_TYPE (arg1)),
1018 t = build_int_2 (low, hi);
/* Addition fast paths when one high word is zero: add the low words
   and detect carry by unsigned wrap-around.  */
1025 if ((unsigned HOST_WIDE_INT) int2l < int1l)
1028 overflow = int2h < hi;
1030 t = build_int_2 (int2l, int2h);
1036 if ((unsigned HOST_WIDE_INT) int1l < int2l)
1039 overflow = int1h < hi;
1041 t = build_int_2 (int1l, int1h);
1044 overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
1045 t = build_int_2 (low, hi);
/* Subtraction: x - 0 shortcut, else add the negation.  */
1049 if (int2h == 0 && int2l == 0)
1051 t = build_int_2 (int1l, int1h);
1054 neg_double (int2l, int2h, &low, &hi);
1055 add_double (int1l, int1h, low, hi, &low, &hi);
1056 overflow = overflow_sum_sign (hi, int2h, int1h);
1057 t = build_int_2 (low, hi);
1061 overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
1062 t = build_int_2 (low, hi);
1065 case TRUNC_DIV_EXPR:
1066 case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
1067 case EXACT_DIV_EXPR:
1068 /* This is a shortcut for a common special case.
1069 It reduces the number of tree nodes generated
1071 if (int2h == 0 && int2l > 0
1072 && TREE_TYPE (arg1) == sizetype
1073 && int1h == 0 && int1l >= 0)
1075 if (code == CEIL_DIV_EXPR)
1077 return size_int (int1l / int2l);
1079 case ROUND_DIV_EXPR:
/* Division shortcuts: x / 1, x / x, 0 / x.  */
1080 if (int2h == 0 && int2l == 1)
1082 t = build_int_2 (int1l, int1h);
1085 if (int1l == int2l && int1h == int2h)
1087 if ((int1l | int1h) == 0)
1089 t = build_int_2 (1, 0);
1092 overflow = div_and_round_double (code, uns,
1093 int1l, int1h, int2l, int2h,
1094 &low, &hi, &garbagel, &garbageh);
1095 t = build_int_2 (low, hi);
1098 case TRUNC_MOD_EXPR: case ROUND_MOD_EXPR:
1099 case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
/* For MOD the remainder outputs are wanted; the quotient is
   discarded into the garbage variables.  */
1100 overflow = div_and_round_double (code, uns,
1101 int1l, int1h, int2l, int2h,
1102 &garbagel, &garbageh, &low, &hi);
1103 t = build_int_2 (low, hi);
/* MIN/MAX: `low' records whether arg1 < arg2, compared unsigned or
   signed as appropriate.  */
1110 low = (((unsigned HOST_WIDE_INT) int1h
1111 < (unsigned HOST_WIDE_INT) int2h)
1112 || (((unsigned HOST_WIDE_INT) int1h
1113 == (unsigned HOST_WIDE_INT) int2h)
1114 && ((unsigned HOST_WIDE_INT) int1l
1115 < (unsigned HOST_WIDE_INT) int2l)));
1119 low = ((int1h < int2h)
1120 || ((int1h == int2h)
1121 && ((unsigned HOST_WIDE_INT) int1l
1122 < (unsigned HOST_WIDE_INT) int2l)));
1124 if (low == (code == MIN_EXPR))
1125 t = build_int_2 (int1l, int1h);
1127 t = build_int_2 (int2l, int2h);
/* Common exit for the integer cases: set the type and fold overflow
   flags from the operands into the result.  */
1134 TREE_TYPE (t) = TREE_TYPE (arg1);
1136 = ((notrunc ? !uns && overflow : force_fit_type (t, overflow && !uns))
1137 | TREE_OVERFLOW (arg1)
1138 | TREE_OVERFLOW (arg2));
1139 TREE_CONSTANT_OVERFLOW (t) = (TREE_OVERFLOW (t)
1140 | TREE_CONSTANT_OVERFLOW (arg1)
1141 | TREE_CONSTANT_OVERFLOW (arg2));
1144 #if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
1145 if (TREE_CODE (arg1) == REAL_CST)
1150 REAL_VALUE_TYPE value;
1153 d1 = TREE_REAL_CST (arg1);
1154 d2 = TREE_REAL_CST (arg2);
1156 /* If either operand is a NaN, just return it. Otherwise, set up
1157 for floating-point trap; we return an overflow. */
1158 if (REAL_VALUE_ISNAN (d1))
1160 else if (REAL_VALUE_ISNAN (d2))
1162 else if (setjmp (float_error))
/* A trap fired during the arithmetic: hand back a copy of arg1
   (presumably flagged as overflowed on the dropped lines).  */
1164 t = copy_node (arg1);
1169 set_float_handler (float_error);
1171 #ifdef REAL_ARITHMETIC
1172 REAL_ARITHMETIC (value, code, d1, d2);
1189 #ifndef REAL_INFINITY
1198 value = MIN (d1, d2);
1202 value = MAX (d1, d2);
1208 #endif /* no REAL_ARITHMETIC */
1209 t = build_real (TREE_TYPE (arg1),
1210 real_value_truncate (TYPE_MODE (TREE_TYPE (arg1)), value));
1212 set_float_handler (NULL_PTR);
1215 = (force_fit_type (t, overflow)
1216 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
1217 TREE_CONSTANT_OVERFLOW (t)
1219 | TREE_CONSTANT_OVERFLOW (arg1)
1220 | TREE_CONSTANT_OVERFLOW (arg2);
1223 #endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
1224 if (TREE_CODE (arg1) == COMPLEX_CST)
1226 register tree r1 = TREE_REALPART (arg1);
1227 register tree i1 = TREE_IMAGPART (arg1);
1228 register tree r2 = TREE_REALPART (arg2);
1229 register tree i2 = TREE_IMAGPART (arg2);
/* Componentwise complex arithmetic, recursing on the parts.  */
1235 t = build_complex (const_binop (PLUS_EXPR, r1, r2, notrunc),
1236 const_binop (PLUS_EXPR, i1, i2, notrunc));
1240 t = build_complex (const_binop (MINUS_EXPR, r1, r2, notrunc),
1241 const_binop (MINUS_EXPR, i1, i2, notrunc));
/* (r1 + i1*i)(r2 + i2*i) = (r1r2 - i1i2) + (r1i2 + i1r2)i.  */
1245 t = build_complex (const_binop (MINUS_EXPR,
1246 const_binop (MULT_EXPR,
1248 const_binop (MULT_EXPR,
1251 const_binop (PLUS_EXPR,
1252 const_binop (MULT_EXPR,
1254 const_binop (MULT_EXPR,
/* Complex division: multiply by the conjugate and divide each part
   by |denominator|^2.  */
1261 register tree magsquared
1262 = const_binop (PLUS_EXPR,
1263 const_binop (MULT_EXPR, r2, r2, notrunc),
1264 const_binop (MULT_EXPR, i2, i2, notrunc),
1268 (const_binop (INTEGRAL_TYPE_P (TREE_TYPE (r1))
1269 ? TRUNC_DIV_EXPR : RDIV_EXPR,
1270 const_binop (PLUS_EXPR,
1271 const_binop (MULT_EXPR, r1, r2,
1273 const_binop (MULT_EXPR, i1, i2,
1276 magsquared, notrunc),
1277 const_binop (INTEGRAL_TYPE_P (TREE_TYPE (r1))
1278 ? TRUNC_DIV_EXPR : RDIV_EXPR,
1279 const_binop (MINUS_EXPR,
1280 const_binop (MULT_EXPR, i1, r2,
1282 const_binop (MULT_EXPR, r1, i2,
1285 magsquared, notrunc));
1292 TREE_TYPE (t) = TREE_TYPE (arg1);
1298 /* Return an INTEGER_CST with value V and type from `sizetype'. */
/* Small values are memoized in a permanent-allocation table so repeated
   requests share one node.  NOTE(review): the `tree size_int (number)'
   header line, the local `t' declaration, braces, the pop of the
   obstack state and the `return t;' lines are missing from this
   listing.  */
1302 unsigned HOST_WIDE_INT number;
1305 /* Type-size nodes already made for small sizes. */
1306 static tree size_table[2*HOST_BITS_PER_WIDE_INT + 1];
1308 if (number < 2*HOST_BITS_PER_WIDE_INT + 1
1309 && size_table[number] != 0)
1310 return size_table[number];
1311 if (number < 2*HOST_BITS_PER_WIDE_INT + 1)
1313 push_obstacks_nochange ();
1314 /* Make this a permanent node. */
1315 end_temporary_allocation ();
1316 t = build_int_2 (number, 0);
1317 TREE_TYPE (t) = sizetype;
1318 size_table[number] = t;
/* Large values are not cached; build a fresh node each time.  */
1323 t = build_int_2 (number, 0);
1324 TREE_TYPE (t) = sizetype;
1329 /* Combine operands OP1 and OP2 with arithmetic operation CODE.
1330 CODE is a tree code. Data type is taken from `sizetype',
1331 If the operands are constant, so is the result. */
/* NOTE(review): the `tree' return line, the arg0/arg1 declarations,
   braces and the identity-case `return' statements (returning the
   other operand for x+0, x-0, 1*x) are missing from this listing.  */
1334 size_binop (code, arg0, arg1)
1335 enum tree_code code;
1338 /* Handle the special case of two integer constants faster. */
1339 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
1341 /* And some specific cases even faster than that. */
1342 if (code == PLUS_EXPR
1343 && TREE_INT_CST_LOW (arg0) == 0
1344 && TREE_INT_CST_HIGH (arg0) == 0)
1346 if (code == MINUS_EXPR
1347 && TREE_INT_CST_LOW (arg1) == 0
1348 && TREE_INT_CST_HIGH (arg1) == 0)
1350 if (code == MULT_EXPR
1351 && TREE_INT_CST_LOW (arg0) == 1
1352 && TREE_INT_CST_HIGH (arg0) == 0)
1354 /* Handle general case of two integer constants. */
1355 return const_binop (code, arg0, arg1, 1);
1358 if (arg0 == error_mark_node || arg1 == error_mark_node)
1359 return error_mark_node;
/* Non-constant operands: build the expression and let fold simplify.  */
1361 return fold (build (code, sizetype, arg0, arg1));
1364 /* Given T, a tree representing type conversion of ARG1, a constant,
1365 return a constant tree representing the result of conversion. */
1368 fold_convert (t, arg1)
1372 register tree type = TREE_TYPE (t);
/* Case 1: target type is a pointer or integral type.  */
1375 if (TREE_CODE (type) == POINTER_TYPE || INTEGRAL_TYPE_P (type))
1377 if (TREE_CODE (arg1) == INTEGER_CST)
1379 /* If we would build a constant wider than GCC supports,
1380 leave the conversion unfolded. */
1381 if (TYPE_PRECISION (type) > 2 * HOST_BITS_PER_WIDE_INT)
1384 /* Given an integer constant, make new constant with new type,
1385 appropriately sign-extended or truncated. */
1386 t = build_int_2 (TREE_INT_CST_LOW (arg1),
1387 TREE_INT_CST_HIGH (arg1));
1388 TREE_TYPE (t) = type;
1389 /* Indicate an overflow if (1) ARG1 already overflowed,
1390 or (2) force_fit_type indicates an overflow.
1391 Tell force_fit_type that an overflow has already occurred
1392 if ARG1 is a too-large unsigned value and T is signed. */
1394 = (TREE_OVERFLOW (arg1)
1395 | force_fit_type (t,
1396 (TREE_INT_CST_HIGH (arg1) < 0
1397 & (TREE_UNSIGNED (type)
1398 < TREE_UNSIGNED (TREE_TYPE (arg1))))));
1399 TREE_CONSTANT_OVERFLOW (t)
1400 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
/* Real constant -> integer type, only when host arithmetic on
   REAL_VALUE_TYPE is usable.  */
1402 #if !defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
1403 else if (TREE_CODE (arg1) == REAL_CST)
1405 /* Don't initialize these, use assignments.
1406 Initialized local aggregates don't work on old compilers. */
1411 x = TREE_REAL_CST (arg1);
1412 l = real_value_from_int_cst (TYPE_MIN_VALUE (type));
1413 u = real_value_from_int_cst (TYPE_MAX_VALUE (type));
1414 /* See if X will be in range after truncation towards 0.
1415 To compensate for truncation, move the bounds away from 0,
1416 but reject if X exactly equals the adjusted bounds. */
1417 #ifdef REAL_ARITHMETIC
1418 REAL_ARITHMETIC (l, MINUS_EXPR, l, dconst1);
1419 REAL_ARITHMETIC (u, PLUS_EXPR, u, dconst1);
1424 /* If X is a NaN, use zero instead and show we have an overflow.
1425 Otherwise, range check. */
1426 if (REAL_VALUE_ISNAN (x))
1427 overflow = 1, x = dconst0;
1428 else if (! (REAL_VALUES_LESS (l, x) && REAL_VALUES_LESS (x, u)))
/* No REAL_ARITHMETIC: split X into half-words using host floating
   point so each piece fits in a HOST_WIDE_INT.  */
1431 #ifndef REAL_ARITHMETIC
1433 HOST_WIDE_INT low, high;
1434 HOST_WIDE_INT half_word
1435 = (HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2);
1440 high = (HOST_WIDE_INT) (x / half_word / half_word);
1441 x -= (REAL_VALUE_TYPE) high * half_word * half_word;
1442 if (x >= (REAL_VALUE_TYPE) half_word * half_word / 2)
1444 low = x - (REAL_VALUE_TYPE) half_word * half_word / 2;
1445 low |= (HOST_WIDE_INT) -1 << (HOST_BITS_PER_WIDE_INT - 1);
1448 low = (HOST_WIDE_INT) x;
/* Negative input: negate the double-word result.  */
1449 if (TREE_REAL_CST (arg1) < 0)
1450 neg_double (low, high, &low, &high);
1451 t = build_int_2 (low, high);
1455 HOST_WIDE_INT low, high;
1456 REAL_VALUE_TO_INT (&low, &high, x);
1457 t = build_int_2 (low, high);
1460 TREE_TYPE (t) = type;
1462 = TREE_OVERFLOW (arg1) | force_fit_type (t, overflow);
1463 TREE_CONSTANT_OVERFLOW (t)
1464 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
1466 #endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
1467 TREE_TYPE (t) = type;
/* Case 2: target type is a floating-point type.  */
1469 else if (TREE_CODE (type) == REAL_TYPE)
1471 #if !defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
1472 if (TREE_CODE (arg1) == INTEGER_CST)
1473 return build_real_from_int_cst (type, arg1);
1474 #endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
1475 if (TREE_CODE (arg1) == REAL_CST)
/* A NaN source, or a floating overflow trapped via setjmp on
   float_error, leaves the conversion as a plain copy of ARG1.  */
1477 if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1)))
1479 else if (setjmp (float_error))
1482 t = copy_node (arg1);
1485 set_float_handler (float_error);
1487 t = build_real (type, real_value_truncate (TYPE_MODE (type),
1488 TREE_REAL_CST (arg1)));
1489 set_float_handler (NULL_PTR);
1493 = TREE_OVERFLOW (arg1) | force_fit_type (t, overflow);
1494 TREE_CONSTANT_OVERFLOW (t)
1495 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
1499 TREE_CONSTANT (t) = 1;
1503 /* Return an expr equal to X but certainly not valid as an lvalue.
1504 Also make sure it is not valid as an null pointer constant. */
1512 /* These things are certainly not lvalues. */
1513 if (TREE_CODE (x) == NON_LVALUE_EXPR
1514 || TREE_CODE (x) == INTEGER_CST
1515 || TREE_CODE (x) == REAL_CST
1516 || TREE_CODE (x) == STRING_CST
1517 || TREE_CODE (x) == ADDR_EXPR)
1519 if (TREE_CODE (x) == INTEGER_CST && integer_zerop (x))
1521 /* Use NOP_EXPR instead of NON_LVALUE_EXPR
1522 so convert_for_assignment won't strip it.
1523 This is so this 0 won't be treated as a null pointer constant. */
1524 result = build1 (NOP_EXPR, TREE_TYPE (x), x);
1525 TREE_CONSTANT (result) = TREE_CONSTANT (x);
/* General case: wrap X so it cannot be used as an lvalue, while
   preserving its constancy flag.  */
1531 result = build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
1532 TREE_CONSTANT (result) = TREE_CONSTANT (x);
1536 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
1537 Zero means allow extended lvalues. */
1539 int pedantic_lvalues;
1541 /* When pedantic, return an expr equal to X but certainly not valid as a
1542 pedantic lvalue. Otherwise, return X. */
1545 pedantic_non_lvalue (x)
1548 if (pedantic_lvalues)
1549 return non_lvalue (x);
/* Not pedantic: X itself is acceptable.
   NOTE(review): the trailing `return x;` appears to be missing from
   this extraction -- confirm against the full source.  */
1554 /* Given a tree comparison code, return the code that is the logical inverse
1555 of the given code. It is not safe to do this for floating-point
1556 comparisons, except for NE_EXPR and EQ_EXPR. */
1558 static enum tree_code
1559 invert_tree_comparison (code)
1560 enum tree_code code;
/* NOTE(review): the entire function body is missing from this
   extraction -- presumably a switch mapping each comparison code to
   its inverse; confirm against the full source.  */
1581 /* Similar, but return the comparison that results if the operands are
1582 swapped. This is safe for floating-point. */
1584 static enum tree_code
1585 swap_tree_comparison (code)
1586 enum tree_code code;
/* NOTE(review): the entire function body is missing from this
   extraction -- confirm against the full source.  */
1606 /* Return nonzero if CODE is a tree code that represents a truth value. */
1609 truth_value_p (code)
1610 enum tree_code code;
/* True for any comparison ('<' class) and for the logical operators.  */
1612 return (TREE_CODE_CLASS (code) == '<'
1613 || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
1614 || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
1615 || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
1618 /* Return nonzero if two operands are necessarily equal.
1619 If ONLY_CONST is non-zero, only return non-zero for constants.
1620 This function tests whether the operands are indistinguishable;
1621 it does not test whether they are equal using C's == operation.
1622 The distinction is important for IEEE floating point, because
1623 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
1624 (2) two NaNs may be indistinguishable, but NaN!=NaN. */
1627 operand_equal_p (arg0, arg1, only_const)
1631 /* If both types don't have the same signedness, then we can't consider
1632 them equal. We must check this before the STRIP_NOPS calls
1633 because they may change the signedness of the arguments. */
1634 if (TREE_UNSIGNED (TREE_TYPE (arg0)) != TREE_UNSIGNED (TREE_TYPE (arg1)))
1640 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
1641 We don't care about side effects in that case because the SAVE_EXPR
1642 takes care of that for us. */
1643 if (TREE_CODE (arg0) == SAVE_EXPR && arg0 == arg1)
1644 return ! only_const;
/* Side effects mean the operands could evaluate differently.  */
1646 if (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1))
/* Two ADDR_EXPRs of the identical object are equal.  */
1649 if (TREE_CODE (arg0) == TREE_CODE (arg1)
1650 && TREE_CODE (arg0) == ADDR_EXPR
1651 && TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0))
/* Integer constants compare by both double-word halves.  */
1654 if (TREE_CODE (arg0) == TREE_CODE (arg1)
1655 && TREE_CODE (arg0) == INTEGER_CST
1656 && TREE_INT_CST_LOW (arg0) == TREE_INT_CST_LOW (arg1)
1657 && TREE_INT_CST_HIGH (arg0) == TREE_INT_CST_HIGH (arg1))
1660 /* Detect when real constants are equal. */
1661 if (TREE_CODE (arg0) == TREE_CODE (arg1)
1662 && TREE_CODE (arg0) == REAL_CST)
/* Bitwise comparison distinguishes -0.0 from 0.0 and treats
   bit-identical NaNs as equal, per the head comment.  */
1663 return !bcmp ((char *) &TREE_REAL_CST (arg0),
1664 (char *) &TREE_REAL_CST (arg1),
1665 sizeof (REAL_VALUE_TYPE));
/* Past this point we compare structurally, so codes must match.  */
1673 if (TREE_CODE (arg0) != TREE_CODE (arg1))
1675 /* This is needed for conversions and for COMPONENT_REF.
1676 Might as well play it safe and always test this. */
1677 if (TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
1680 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
1683 /* Two conversions are equal only if signedness and modes match. */
1684 if ((TREE_CODE (arg0) == NOP_EXPR || TREE_CODE (arg0) == CONVERT_EXPR)
1685 && (TREE_UNSIGNED (TREE_TYPE (arg0))
1686 != TREE_UNSIGNED (TREE_TYPE (arg1))))
1689 return operand_equal_p (TREE_OPERAND (arg0, 0),
1690 TREE_OPERAND (arg1, 0), 0);
/* Binary expressions: both operands must match pairwise.  */
1694 return (operand_equal_p (TREE_OPERAND (arg0, 0),
1695 TREE_OPERAND (arg1, 0), 0)
1696 && operand_equal_p (TREE_OPERAND (arg0, 1),
1697 TREE_OPERAND (arg1, 1), 0));
/* Reference class: dispatch on the specific reference code.  */
1700 switch (TREE_CODE (arg0))
1703 return operand_equal_p (TREE_OPERAND (arg0, 0),
1704 TREE_OPERAND (arg1, 0), 0);
1708 return (operand_equal_p (TREE_OPERAND (arg0, 0),
1709 TREE_OPERAND (arg1, 0), 0)
1710 && operand_equal_p (TREE_OPERAND (arg0, 1),
1711 TREE_OPERAND (arg1, 1), 0));
/* Three-operand references compare all three operands.  */
1714 return (operand_equal_p (TREE_OPERAND (arg0, 0),
1715 TREE_OPERAND (arg1, 0), 0)
1716 && operand_equal_p (TREE_OPERAND (arg0, 1),
1717 TREE_OPERAND (arg1, 1), 0)
1718 && operand_equal_p (TREE_OPERAND (arg0, 2),
1719 TREE_OPERAND (arg1, 2), 0));
1727 /* Similar to operand_equal_p, but see if ARG0 might have been made by
1728 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
1730 When in doubt, return 0. */
1733 operand_equal_for_comparison_p (arg0, arg1, other)
1737 int unsignedp1, unsignedpo;
1738 tree primarg1, primother;
1739 unsigned correct_width;
/* Exact match needs no shortening analysis.  */
1741 if (operand_equal_p (arg0, arg1, 0))
/* Shortening only applies to integral operands.  */
1744 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
1745 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
1748 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
1749 actual comparison operand, ARG0.
1751 First throw away any conversions to wider types
1752 already present in the operands. */
1754 primarg1 = get_narrower (arg1, &unsignedp1);
1755 primother = get_narrower (other, &unsignedpo);
1757 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
/* Both narrowed operands must agree in signedness and be strictly
   narrower than ARG1's type for shortening to have happened.  */
1758 if (unsignedp1 == unsignedpo
1759 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
1760 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
1762 tree type = TREE_TYPE (arg0);
1764 /* Make sure shorter operand is extended the right way
1765 to match the longer operand. */
1766 primarg1 = convert (signed_or_unsigned_type (unsignedp1,
1767 TREE_TYPE (primarg1)),
1770 if (operand_equal_p (arg0, convert (type, primarg1), 0))
1777 /* See if ARG is an expression that is either a comparison or is performing
1778 arithmetic on comparisons. The comparisons must only be comparing
1779 two different values, which will be stored in *CVAL1 and *CVAL2; if
1780 they are non-zero it means that some operands have already been found.
1781 No variables may be used anywhere else in the expression except in the
1782 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
1783 the expression and save_expr needs to be called with CVAL1 and CVAL2.
1785 If this is true, return 1. Otherwise, return zero. */
1788 twoval_comparison_p (arg, cval1, cval2, save_p)
1790 tree *cval1, *cval2;
1793 enum tree_code code = TREE_CODE (arg);
1794 char class = TREE_CODE_CLASS (code);
1796 /* We can handle some of the 'e' cases here. */
1797 if (class == 'e' && code == TRUTH_NOT_EXPR)
1799 else if (class == 'e'
1800 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
1801 || code == COMPOUND_EXPR))
1804 /* ??? Disable this since the SAVE_EXPR might already be in use outside
1805 the expression. There may be no way to make this work, but it needs
1806 to be looked at again for 2.6. */
1808 else if (class == 'e' && code == SAVE_EXPR && SAVE_EXPR_RTL (arg) == 0)
1810 /* If we've already found a CVAL1 or CVAL2, this expression is
1811 too complex to handle. */
1812 if (*cval1 || *cval2)
/* Unary case: recurse into the single operand.  */
1823 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
/* Binary case: both operands must qualify.  */
1826 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
1827 && twoval_comparison_p (TREE_OPERAND (arg, 1),
1828 cval1, cval2, save_p))
/* Expression case: only COND_EXPR is handled, all three operands
   must qualify.  */
1834 if (code == COND_EXPR)
1835 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
1836 cval1, cval2, save_p)
1837 && twoval_comparison_p (TREE_OPERAND (arg, 1),
1838 cval1, cval2, save_p)
1839 && twoval_comparison_p (TREE_OPERAND (arg, 2),
1840 cval1, cval2, save_p));
1844 /* First see if we can handle the first operand, then the second. For
1845 the second operand, we know *CVAL1 can't be zero. It must be that
1846 one side of the comparison is each of the values; test for the
1847 case where this isn't true by failing if the two operands
1850 if (operand_equal_p (TREE_OPERAND (arg, 0),
1851 TREE_OPERAND (arg, 1), 0))
/* Record operand 0 as CVAL1 or CVAL2, or match an existing value.  */
1855 *cval1 = TREE_OPERAND (arg, 0);
1856 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
1858 else if (*cval2 == 0)
1859 *cval2 = TREE_OPERAND (arg, 0);
1860 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
/* Likewise for operand 1; *CVAL1 is known non-zero here.  */
1865 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
1867 else if (*cval2 == 0)
1868 *cval2 = TREE_OPERAND (arg, 1);
1869 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
1880 /* ARG is a tree that is known to contain just arithmetic operations and
1881 comparisons. Evaluate the operations in the tree substituting NEW0 for
1882 any occurrence of OLD0 as an operand of a comparison and likewise for
1886 eval_subst (arg, old0, new0, old1, new1)
1888 tree old0, new0, old1, new1;
1890 tree type = TREE_TYPE (arg);
1891 enum tree_code code = TREE_CODE (arg);
1892 char class = TREE_CODE_CLASS (code);
1894 /* We can handle some of the 'e' cases here. */
1895 if (class == 'e' && code == TRUTH_NOT_EXPR)
1897 else if (class == 'e'
1898 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
/* Unary case: rebuild with the substituted operand and re-fold.  */
1904 return fold (build1 (code, type,
1905 eval_subst (TREE_OPERAND (arg, 0),
1906 old0, new0, old1, new1)));
/* Binary case: substitute in both operands.  */
1909 return fold (build (code, type,
1910 eval_subst (TREE_OPERAND (arg, 0),
1911 old0, new0, old1, new1),
1912 eval_subst (TREE_OPERAND (arg, 1),
1913 old0, new0, old1, new1)));
/* COMPOUND_EXPR keeps only the second operand's value; SAVE_EXPR-like
   wrappers recurse into the relevant operand.  */
1919 return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);
1922 return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);
/* Ternary case (COND_EXPR): substitute in all three operands.  */
1925 return fold (build (code, type,
1926 eval_subst (TREE_OPERAND (arg, 0),
1927 old0, new0, old1, new1),
1928 eval_subst (TREE_OPERAND (arg, 1),
1929 old0, new0, old1, new1),
1930 eval_subst (TREE_OPERAND (arg, 2),
1931 old0, new0, old1, new1)));
/* Comparison case: replace operands that match OLD0 or OLD1.  */
1936 tree arg0 = TREE_OPERAND (arg, 0);
1937 tree arg1 = TREE_OPERAND (arg, 1);
1939 /* We need to check both for exact equality and tree equality. The
1940 former will be true if the operand has a side-effect. In that
1941 case, we know the operand occurred exactly once. */
1943 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
1945 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
1948 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
1950 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
1953 return fold (build (code, type, arg0, arg1));
1960 /* Return a tree for the case when the result of an expression is RESULT
1961 converted to TYPE and OMITTED was previously an operand of the expression
1962 but is now not needed (e.g., we folded OMITTED * 0).
1964 If OMITTED has side effects, we must evaluate it. Otherwise, just do
1965 the conversion of RESULT to TYPE. */
1968 omit_one_operand (type, result, omitted)
1969 tree type, result, omitted;
1971 tree t = convert (type, result);
/* Keep OMITTED for its side effects, discarding its value.  */
1973 if (TREE_SIDE_EFFECTS (omitted))
1974 return build (COMPOUND_EXPR, type, omitted, t);
1976 return non_lvalue (t);
1979 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
1982 pedantic_omit_one_operand (type, result, omitted)
1983 tree type, result, omitted;
1985 tree t = convert (type, result);
/* Keep OMITTED for its side effects, discarding its value.  */
1987 if (TREE_SIDE_EFFECTS (omitted))
1988 return build (COMPOUND_EXPR, type, omitted, t);
1990 return pedantic_non_lvalue (t);
1995 /* Return a simplified tree node for the truth-negation of ARG. This
1996 never alters ARG itself. We assume that ARG is an operation that
1997 returns a truth value (0 or 1). */
2000 invert_truthvalue (arg)
2003 tree type = TREE_TYPE (arg);
2004 enum tree_code code = TREE_CODE (arg);
2006 if (code == ERROR_MARK)
2009 /* If this is a comparison, we can simply invert it, except for
2010 floating-point non-equality comparisons, in which case we just
2011 enclose a TRUTH_NOT_EXPR around what we have. */
2013 if (TREE_CODE_CLASS (code) == '<')
/* Inverting e.g. < to >= is unsafe for IEEE floats because of NaNs;
   only NE/EQ invert safely.  */
2015 if (FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (arg, 0)))
2016 && code != NE_EXPR && code != EQ_EXPR)
2017 return build1 (TRUTH_NOT_EXPR, type, arg);
2019 return build (invert_tree_comparison (code), type,
2020 TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
/* Constant: negate it directly (nonzero -> 0, zero -> 1).  */
2026 return convert (type, build_int_2 (TREE_INT_CST_LOW (arg) == 0
2027 && TREE_INT_CST_HIGH (arg) == 0, 0));
/* De Morgan: !(a & b) == !a | !b, and dually for OR.  */
2029 case TRUTH_AND_EXPR:
2030 return build (TRUTH_OR_EXPR, type,
2031 invert_truthvalue (TREE_OPERAND (arg, 0)),
2032 invert_truthvalue (TREE_OPERAND (arg, 1)));
2035 return build (TRUTH_AND_EXPR, type,
2036 invert_truthvalue (TREE_OPERAND (arg, 0)),
2037 invert_truthvalue (TREE_OPERAND (arg, 1)));
2039 case TRUTH_XOR_EXPR:
2040 /* Here we can invert either operand. We invert the first operand
2041 unless the second operand is a TRUTH_NOT_EXPR in which case our
2042 result is the XOR of the first operand with the inside of the
2043 negation of the second operand. */
2045 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
2046 return build (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
2047 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
2049 return build (TRUTH_XOR_EXPR, type,
2050 invert_truthvalue (TREE_OPERAND (arg, 0)),
2051 TREE_OPERAND (arg, 1));
/* De Morgan for the short-circuit forms preserves evaluation order.  */
2053 case TRUTH_ANDIF_EXPR:
2054 return build (TRUTH_ORIF_EXPR, type,
2055 invert_truthvalue (TREE_OPERAND (arg, 0)),
2056 invert_truthvalue (TREE_OPERAND (arg, 1)));
2058 case TRUTH_ORIF_EXPR:
2059 return build (TRUTH_ANDIF_EXPR, type,
2060 invert_truthvalue (TREE_OPERAND (arg, 0)),
2061 invert_truthvalue (TREE_OPERAND (arg, 1)));
/* Double negation cancels.  */
2063 case TRUTH_NOT_EXPR:
2064 return TREE_OPERAND (arg, 0);
/* COND_EXPR: keep the condition, invert both arms.  */
2067 return build (COND_EXPR, type, TREE_OPERAND (arg, 0),
2068 invert_truthvalue (TREE_OPERAND (arg, 1)),
2069 invert_truthvalue (TREE_OPERAND (arg, 2)));
/* COMPOUND_EXPR: only the second operand carries the truth value.  */
2072 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
2073 invert_truthvalue (TREE_OPERAND (arg, 1)));
2075 case NON_LVALUE_EXPR:
2076 return invert_truthvalue (TREE_OPERAND (arg, 0));
/* Conversions: push the negation inside the conversion.  */
2081 return build1 (TREE_CODE (arg), type,
2082 invert_truthvalue (TREE_OPERAND (arg, 0)));
/* BIT_AND_EXPR with 1: (x & 1) == 0 is the inversion.  */
2085 if (!integer_onep (TREE_OPERAND (arg, 1)))
2087 return build (EQ_EXPR, type, arg, convert (type, integer_zero_node));
2090 return build1 (TRUTH_NOT_EXPR, type, arg);
/* Anything else must already be boolean-typed to invert safely.  */
2092 if (TREE_CODE (TREE_TYPE (arg)) != BOOLEAN_TYPE)
2094 return build1 (TRUTH_NOT_EXPR, type, arg);
2097 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
2098 operands are another bit-wise operation with a common input. If so,
2099 distribute the bit operations to save an operation and possibly two if
2100 constants are involved. For example, convert
2101 (A | B) & (A | C) into A | (B & C)
2102 Further simplification will occur if B and C are constants.
2104 If this optimization cannot be done, 0 will be returned. */
2107 distribute_bit_expr (code, type, arg0, arg1)
2108 enum tree_code code;
/* Both operands must be the same bitwise op (AND or IOR) and differ
   from CODE itself, or distribution does not apply.  */
2115 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2116 || TREE_CODE (arg0) == code
2117 || (TREE_CODE (arg0) != BIT_AND_EXPR
2118 && TREE_CODE (arg0) != BIT_IOR_EXPR))
/* Find the shared operand; the four cases cover every position the
   common term can occupy in the two subexpressions.  */
2121 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
2123 common = TREE_OPERAND (arg0, 0);
2124 left = TREE_OPERAND (arg0, 1);
2125 right = TREE_OPERAND (arg1, 1);
2127 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
2129 common = TREE_OPERAND (arg0, 0);
2130 left = TREE_OPERAND (arg0, 1);
2131 right = TREE_OPERAND (arg1, 0);
2133 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
2135 common = TREE_OPERAND (arg0, 1);
2136 left = TREE_OPERAND (arg0, 0);
2137 right = TREE_OPERAND (arg1, 1);
2139 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
2141 common = TREE_OPERAND (arg0, 1);
2142 left = TREE_OPERAND (arg0, 0);
2143 right = TREE_OPERAND (arg1, 0);
/* Rebuild as COMMON op (LEFT code RIGHT) and let fold simplify.  */
2148 return fold (build (TREE_CODE (arg0), type, common,
2149 fold (build (code, type, left, right))));
2152 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
2153 starting at BITPOS. The field is unsigned if UNSIGNEDP is non-zero. */
2156 make_bit_field_ref (inner, type, bitsize, bitpos, unsignedp)
2159 int bitsize, bitpos;
2162 tree result = build (BIT_FIELD_REF, type, inner,
2163 size_int (bitsize), size_int (bitpos));
2165 TREE_UNSIGNED (result) = unsignedp;
2170 /* Optimize a bit-field compare.
2172 There are two cases: First is a compare against a constant and the
2173 second is a comparison of two items where the fields are at the same
2174 bit position relative to the start of a chunk (byte, halfword, word)
2175 large enough to contain it. In these cases we can avoid the shift
2176 implicit in bitfield extractions.
2178 For constants, we emit a compare of the shifted constant with the
2179 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
2180 compared. For two fields at the same position, we do the ANDs with the
2181 similar mask and compare the result of the ANDs.
2183 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
2184 COMPARE_TYPE is the type of the comparison, and LHS and RHS
2185 are the left and right operands of the comparison, respectively.
2187 If the optimization described above can be done, we return the resulting
2188 tree. Otherwise we return zero. */
2191 optimize_bit_field_compare (code, compare_type, lhs, rhs)
2192 enum tree_code code;
2196 int lbitpos, lbitsize, rbitpos, rbitsize;
2197 int lnbitpos, lnbitsize, rnbitpos, rnbitsize;
2198 tree type = TREE_TYPE (lhs);
2199 tree signed_type, unsigned_type;
2200 int const_p = TREE_CODE (rhs) == INTEGER_CST;
2201 enum machine_mode lmode, rmode, lnmode, rnmode;
2202 int lunsignedp, runsignedp;
2203 int lvolatilep = 0, rvolatilep = 0;
2204 tree linner, rinner;
2208 /* Get all the information about the extractions being done. If the bit size
2209 if the same as the size of the underlying object, we aren't doing an
2210 extraction at all and so can do nothing. */
2211 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
2212 &lunsignedp, &lvolatilep);
2213 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
2219 /* If this is not a constant, we can only do something if bit positions,
2220 sizes, and signedness are the same. */
2221 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset,
2222 &rmode, &runsignedp, &rvolatilep);
2224 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
2225 || lunsignedp != runsignedp || offset != 0)
2229 /* See if we can find a mode to refer to this field. We should be able to,
2230 but fail if we can't. */
2231 lnmode = get_best_mode (lbitsize, lbitpos,
2232 TYPE_ALIGN (TREE_TYPE (linner)), word_mode,
2234 if (lnmode == VOIDmode)
2237 /* Set signed and unsigned types of the precision of this mode for the
2239 signed_type = type_for_mode (lnmode, 0);
2240 unsigned_type = type_for_mode (lnmode, 1);
/* Same mode search for the right-hand field (non-constant case).  */
2244 rnmode = get_best_mode (rbitsize, rbitpos,
2245 TYPE_ALIGN (TREE_TYPE (rinner)), word_mode,
2247 if (rnmode == VOIDmode)
2251 /* Compute the bit position and size for the new reference and our offset
2252 within it. If the new reference is the same size as the original, we
2253 won't optimize anything, so return zero. */
2254 lnbitsize = GET_MODE_BITSIZE (lnmode);
2255 lnbitpos = lbitpos & ~ (lnbitsize - 1);
2256 lbitpos -= lnbitpos;
2257 if (lnbitsize == lbitsize)
2262 rnbitsize = GET_MODE_BITSIZE (rnmode);
2263 rnbitpos = rbitpos & ~ (rnbitsize - 1);
2264 rbitpos -= rnbitpos;
2265 if (rnbitsize == rbitsize)
/* Bit positions count from the MSB on big-endian targets.  */
2269 if (BYTES_BIG_ENDIAN)
2270 lbitpos = lnbitsize - lbitsize - lbitpos;
2272 /* Make the mask to be used against the extracted field. */
2273 mask = build_int_2 (~0, ~0);
2274 TREE_TYPE (mask) = unsigned_type;
2275 force_fit_type (mask, 0);
2276 mask = convert (unsigned_type, mask);
/* Shift left then right to leave exactly LBITSIZE ones positioned at
   LBITPOS within the containing unit.  */
2277 mask = const_binop (LSHIFT_EXPR, mask, size_int (lnbitsize - lbitsize), 0);
2278 mask = const_binop (RSHIFT_EXPR, mask,
2279 size_int (lnbitsize - lbitsize - lbitpos), 0);
2282 /* If not comparing with constant, just rework the comparison
2284 return build (code, compare_type,
2285 build (BIT_AND_EXPR, unsigned_type,
2286 make_bit_field_ref (linner, unsigned_type,
2287 lnbitsize, lnbitpos, 1),
2289 build (BIT_AND_EXPR, unsigned_type,
2290 make_bit_field_ref (rinner, unsigned_type,
2291 rnbitsize, rnbitpos, 1),
2294 /* Otherwise, we are handling the constant case. See if the constant is too
2295 big for the field. Warn and return a tree of for 0 (false) if so. We do
2296 this not only for its own sake, but to avoid having to test for this
2297 error case below. If we didn't, we might generate wrong code.
2299 For unsigned fields, the constant shifted right by the field length should
2300 be all zero. For signed fields, the high-order bits should agree with
2305 if (! integer_zerop (const_binop (RSHIFT_EXPR,
2306 convert (unsigned_type, rhs),
2307 size_int (lbitsize), 0)))
2309 warning ("comparison is always %s due to width of bitfield",
2310 code == NE_EXPR ? "one" : "zero");
2311 return convert (compare_type,
2313 ? integer_one_node : integer_zero_node));
/* Signed field: the bits above the sign bit must all agree with it.  */
2318 tree tem = const_binop (RSHIFT_EXPR, convert (signed_type, rhs),
2319 size_int (lbitsize - 1), 0);
2320 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
2322 warning ("comparison is always %s due to width of bitfield",
2323 code == NE_EXPR ? "one" : "zero");
2324 return convert (compare_type,
2326 ? integer_one_node : integer_zero_node));
2330 /* Single-bit compares should always be against zero. */
2331 if (lbitsize == 1 && ! integer_zerop (rhs))
2333 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
2334 rhs = convert (type, integer_zero_node);
2337 /* Make a new bitfield reference, shift the constant over the
2338 appropriate number of bits and mask it with the computed mask
2339 (in case this was a signed field). If we changed it, make a new one. */
2340 lhs = make_bit_field_ref (linner, unsigned_type, lnbitsize, lnbitpos, 1);
/* Preserve volatility of the original access on the new reference.  */
2343 TREE_SIDE_EFFECTS (lhs) = 1;
2344 TREE_THIS_VOLATILE (lhs) = 1;
2347 rhs = fold (const_binop (BIT_AND_EXPR,
2348 const_binop (LSHIFT_EXPR,
2349 convert (unsigned_type, rhs),
2350 size_int (lbitpos), 0),
2353 return build (code, compare_type,
2354 build (BIT_AND_EXPR, unsigned_type, lhs, mask),
2358 /* Subroutine for fold_truthop: decode a field reference.
2360 If EXP is a comparison reference, we return the innermost reference.
2362 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
2363 set to the starting bit number.
2365 If the innermost field can be completely contained in a mode-sized
2366 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
2368 *PVOLATILEP is set to 1 if the any expression encountered is volatile;
2369 otherwise it is not changed.
2371 *PUNSIGNEDP is set to the signedness of the field.
2373 *PMASK is set to the mask used. This is either contained in a
2374 BIT_AND_EXPR or derived from the width of the field.
2376 Return 0 if this is not a component reference or is one that we can't
2377 do anything with. */
2380 decode_field_reference (exp, pbitsize, pbitpos, pmode, punsignedp,
2383 int *pbitsize, *pbitpos;
2384 enum machine_mode *pmode;
2385 int *punsignedp, *pvolatilep;
2389 tree mask, inner, offset;
2393 /* All the optimizations using this function assume integer fields.
2394 There are problems with FP fields since the type_for_size call
2395 below can fail for, e.g., XFmode. */
2396 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
/* Peel off an explicit mask (EXP & CST) and remember the constant.  */
2401 if (TREE_CODE (exp) == BIT_AND_EXPR)
2403 and_mask = TREE_OPERAND (exp, 1);
2404 exp = TREE_OPERAND (exp, 0);
2405 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
2406 if (TREE_CODE (and_mask) != INTEGER_CST)
/* Locate the underlying object and the field's position within it.  */
2411 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
2412 punsignedp, pvolatilep);
2413 if ((inner == exp && and_mask == 0)
2414 || *pbitsize < 0 || offset != 0)
2417 /* Compute the mask to access the bitfield. */
2418 unsigned_type = type_for_size (*pbitsize, 1);
2419 precision = TYPE_PRECISION (unsigned_type);
/* All-ones value, then shift left and right to keep *PBITSIZE
   low-order one bits.  */
2421 mask = build_int_2 (~0, ~0);
2422 TREE_TYPE (mask) = unsigned_type;
2423 force_fit_type (mask, 0);
2424 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
2425 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
2427 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
2429 mask = fold (build (BIT_AND_EXPR, unsigned_type,
2430 convert (unsigned_type, and_mask), mask));
2436 /* Return non-zero if MASK represents a mask of SIZE ones in the low-order
2440 all_ones_mask_p (mask, size)
2444 tree type = TREE_TYPE (mask);
2445 int precision = TYPE_PRECISION (type);
/* Build an all-ones constant of the same width, then shift out all
   but SIZE low-order bits and compare with MASK.  */
2448 tmask = build_int_2 (~0, ~0);
2449 TREE_TYPE (tmask) = signed_type (type);
2450 force_fit_type (tmask, 0);
2452 tree_int_cst_equal (mask,
2453 const_binop (RSHIFT_EXPR,
2454 const_binop (LSHIFT_EXPR, tmask,
2455 size_int (precision - size),
2457 size_int (precision - size), 0));
2460 /* Subroutine for fold_truthop: determine if an operand is simple enough
2461 to be evaluated unconditionally. */
2464 simple_operand_p (exp)
2467 /* Strip any conversions that don't change the machine mode. */
2468 while ((TREE_CODE (exp) == NOP_EXPR
2469 || TREE_CODE (exp) == CONVERT_EXPR)
2470 && (TYPE_MODE (TREE_TYPE (exp))
2471 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
2472 exp = TREE_OPERAND (exp, 0);
/* Constants ('c' class) are always simple; declarations ('d' class)
   are simple only when evaluating them cannot trap or be costly.  */
2474 return (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c'
2475 || (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd'
2476 && ! TREE_ADDRESSABLE (exp)
2477 && ! TREE_THIS_VOLATILE (exp)
2478 && ! DECL_NONLOCAL (exp)
2479 /* Don't regard global variables as simple. They may be
2480 allocated in ways unknown to the compiler (shared memory,
2481 #pragma weak, etc). */
2482 && ! TREE_PUBLIC (exp)
2483 && ! DECL_EXTERNAL (exp)
2484 /* Loading a static variable is unduly expensive, but global
2485 registers aren't expensive. */
2486 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
2489 /* Subroutine for fold_truthop: try to optimize a range test.
2491 For example, "i >= 2 && i <= 9" can be done as "(unsigned) (i - 2) <= 7".
2493 JCODE is the logical combination of the two terms. It is TRUTH_AND_EXPR
2494 (representing TRUTH_ANDIF_EXPR and TRUTH_AND_EXPR) or TRUTH_OR_EXPR
2495 (representing TRUTH_ORIF_EXPR and TRUTH_OR_EXPR). TYPE is the type of
2498 VAR is the value being tested. LO_CODE and HI_CODE are the comparison
2499 operators comparing VAR to LO_CST and HI_CST. LO_CST is known to be no
2500 larger than HI_CST (they may be equal).
2502 We return the simplified tree or 0 if no optimization is possible. */
2505 range_test (jcode, type, lo_code, hi_code, var, lo_cst, hi_cst)
2506 enum tree_code jcode, lo_code, hi_code;
2507 tree type, var, lo_cst, hi_cst;
2510 enum tree_code rcode;
2512 /* See if this is a range test and normalize the constant terms. */
2514 if (jcode == TRUTH_AND_EXPR)
2519 /* See if we have VAR != CST && VAR != CST+1. */
2520 if (! (hi_code == NE_EXPR
2521 && TREE_INT_CST_LOW (hi_cst) - TREE_INT_CST_LOW (lo_cst) == 1
2522 && tree_int_cst_equal (integer_one_node,
2523 const_binop (MINUS_EXPR,
2524 hi_cst, lo_cst, 0))))
/* Normalize < / > bounds to the inclusive <= / >= forms.  */
2532 if (hi_code == LT_EXPR)
2533 hi_cst = const_binop (MINUS_EXPR, hi_cst, integer_one_node, 0);
2534 else if (hi_code != LE_EXPR)
2537 if (lo_code == GT_EXPR)
2538 lo_cst = const_binop (PLUS_EXPR, lo_cst, integer_one_node, 0);
2540 /* We now have VAR >= LO_CST && VAR <= HI_CST. */
2553 /* See if we have VAR == CST || VAR == CST+1. */
2554 if (! (hi_code == EQ_EXPR
2555 && TREE_INT_CST_LOW (hi_cst) - TREE_INT_CST_LOW (lo_cst) == 1
2556 && tree_int_cst_equal (integer_one_node,
2557 const_binop (MINUS_EXPR,
2558 hi_cst, lo_cst, 0))))
/* Normalize >= / <= bounds to the exclusive > / < forms.  */
2566 if (hi_code == GE_EXPR)
2567 hi_cst = const_binop (MINUS_EXPR, hi_cst, integer_one_node, 0);
2568 else if (hi_code != GT_EXPR)
2571 if (lo_code == LE_EXPR)
2572 lo_cst = const_binop (PLUS_EXPR, lo_cst, integer_one_node, 0);
2574 /* We now have VAR < LO_CST || VAR > HI_CST. */
2583 /* When normalizing, it is possible to both increment the smaller constant
2584 and decrement the larger constant. See if they are still ordered. */
2585 if (tree_int_cst_lt (hi_cst, lo_cst))
2588 /* Fail if VAR isn't an integer. */
2589 utype = TREE_TYPE (var);
2590 if (! INTEGRAL_TYPE_P (utype))
2593 /* The range test is invalid if subtracting the two constants results
2594 in overflow. This can happen in traditional mode. */
2595 if (! int_fits_type_p (hi_cst, TREE_TYPE (var))
2596 || ! int_fits_type_p (lo_cst, TREE_TYPE (var)))
/* Do the subtraction in the unsigned variant of VAR's type so that
   the single unsigned compare covers both ends of the range.  */
2599 if (! TREE_UNSIGNED (utype))
2601 utype = unsigned_type (utype);
2602 var = convert (utype, var);
2603 lo_cst = convert (utype, lo_cst);
2604 hi_cst = convert (utype, hi_cst);
2607 return fold (convert (type,
2608 build (rcode, utype,
2609 build (MINUS_EXPR, utype, var, lo_cst),
2610 const_binop (MINUS_EXPR, hi_cst, lo_cst, 0))));
2613 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
2614 bit value. Arrange things so the extra bits will be set to zero if and
2615 only if C is signed-extended to its full width. */
/* NOTE(review): lossy numbered listing -- the return type line, parameter
   declarations, `temp` declaration, the early-return body, and the tail of
   the comment at 2634 fall in the numbering gaps.  Code tokens preserved
   verbatim; only comments added/corrected.  */
/* unextend: see the header comment above -- adjust constant C, viewed as a
   P-bit value, so that its bits above bit P-1 are zero exactly when C is
   the sign extension of its low P bits.  UNSIGNEDP nonzero means no
   adjustment is needed.  */
2618 unextend (c, p, unsignedp)
2623 tree type = TREE_TYPE (c);
2624 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
/* Full-width or unsigned values need no adjustment (the returned value
   on this path is in the missing lines -- presumably just C).  */
2627 if (p == modesize || unsignedp)
/* Work in the signed variant of the type so the RSHIFT below is an
   arithmetic (sign-propagating) shift.  */
2630 if (TREE_UNSIGNED (type))
2631 c = convert (signed_type (type), c);
2633 /* We work by getting just the sign bit into the low-order bit, then
2634 into the high-order bit, then sign-extended. We then XOR that value
/* temp = ((c >> (p-1)) & 1) << (modesize-1) >> (modesize-p-1):
   isolates bit P-1 of C and smears it across bits P-1..modesize-1.  */
2636 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
2637 temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
2638 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
2639 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
/* XOR clears the high bits exactly when they already matched the sign
   of the P-bit field.  */
2640 return convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
2643 /* Find ways of folding logical expressions of LHS and RHS:
2644 Try to merge two comparisons to the same innermost item.
2645 Look for range tests like "ch >= '0' && ch <= '9'".
2646 Look for combinations of simple terms on machines with expensive branches
2647 and evaluate the RHS unconditionally.
2649 For example, if we have p->a == 2 && p->b == 4 and we can make an
2650 object large enough to span both A and B, we can do this with a comparison
2651 against the object ANDed with the a mask.
2653 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
2654 operations to do this with one comparison.
2656 We check for both normal comparisons and the BIT_AND_EXPRs made by this
2657 function and the one above.
2659 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
2660 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
2662 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
2665 We return the simplified tree or 0 if no optimization is possible. */
/* NOTE(review): lossy numbered listing -- the `static tree` return type,
   braces, `return 0;` failure paths, several declarations (`result`,
   `volatilep`, `type`), and a number of condition tails fall in the
   numbering gaps.  Code tokens preserved verbatim; only comments
   added/corrected.  */
/* fold_truthop: see the header comment above -- merge the two comparisons
   LHS and RHS, combined by CODE with result type TRUTH_TYPE, into a single
   comparison when both test bit-fields of the same object.  Returns the
   simplified tree or 0.  */
2668 fold_truthop (code, truth_type, lhs, rhs)
2669 enum tree_code code;
2670 tree truth_type, lhs, rhs;
2672 /* If this is the "or" of two comparisons, we can do something if
2673 the comparisons are NE_EXPR. If this is the "and", we can do something
2674 if the comparisons are EQ_EXPR. I.e.,
2675 (a->b == 2 && a->c == 4) can become (a->new == NEW).
2677 WANTED_CODE is this operation code. For single bit fields, we can
2678 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
2679 comparison for one-bit fields. */
/* Naming scheme: l/r = left/right comparison; second letter l/r = that
   comparison's own left/right operand.  So ll_* describes the lhs of LHS,
   rr_* the rhs of RHS, etc.  */
2681 enum tree_code wanted_code;
2682 enum tree_code lcode, rcode;
2683 tree ll_arg, lr_arg, rl_arg, rr_arg;
2684 tree ll_inner, lr_inner, rl_inner, rr_inner;
2685 int ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
2686 int rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
2687 int xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
2688 int lnbitsize, lnbitpos, rnbitsize, rnbitpos;
2689 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
2690 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
2691 enum machine_mode lnmode, rnmode;
2692 tree ll_mask, lr_mask, rl_mask, rr_mask;
2693 tree l_const, r_const;
2695 int first_bit, end_bit;
2698 /* Start by getting the comparison codes and seeing if this looks like
2699 a range test. Fail if anything is volatile. If one operand is a
2700 BIT_AND_EXPR with the constant one, treat it as if it were surrounded
2703 if (TREE_SIDE_EFFECTS (lhs)
2704 || TREE_SIDE_EFFECTS (rhs))
2707 lcode = TREE_CODE (lhs);
2708 rcode = TREE_CODE (rhs);
/* (X & 1) is treated as (X & 1) != 0 so it participates like a comparison.  */
2710 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
2711 lcode = NE_EXPR, lhs = build (NE_EXPR, truth_type, lhs, integer_zero_node);
2713 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
2714 rcode = NE_EXPR, rhs = build (NE_EXPR, truth_type, rhs, integer_zero_node);
/* Both operands must be comparisons ('<' is the tree-code class tag).  */
2716 if (TREE_CODE_CLASS (lcode) != '<'
2717 || TREE_CODE_CLASS (rcode) != '<')
/* Fold the short-circuit codes into their unconditional counterparts.  */
2720 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
2721 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
2723 ll_arg = TREE_OPERAND (lhs, 0);
2724 lr_arg = TREE_OPERAND (lhs, 1);
2725 rl_arg = TREE_OPERAND (rhs, 0);
2726 rr_arg = TREE_OPERAND (rhs, 1);
/* Same variable compared against two constants: hand off to range_test,
   passing the smaller constant as LO_CST as that function requires.  */
2728 if (TREE_CODE (lr_arg) == INTEGER_CST
2729 && TREE_CODE (rr_arg) == INTEGER_CST
2730 && operand_equal_p (ll_arg, rl_arg, 0))
2732 if (tree_int_cst_lt (lr_arg, rr_arg))
2733 result = range_test (code, truth_type, lcode, rcode,
2734 ll_arg, lr_arg, rr_arg);
2736 result = range_test (code, truth_type, rcode, lcode,
2737 ll_arg, rr_arg, lr_arg);
2739 /* If this isn't a range test, it also isn't a comparison that
2740 can be merged. However, it wins to evaluate the RHS unconditionally
2741 on machines with expensive branches. */
2743 if (result == 0 && BRANCH_COST >= 2)
2745 if (TREE_CODE (ll_arg) != VAR_DECL
2746 && TREE_CODE (ll_arg) != PARM_DECL)
2748 /* Avoid evaluating the variable part twice. */
2749 ll_arg = save_expr (ll_arg);
2750 lhs = build (lcode, TREE_TYPE (lhs), ll_arg, lr_arg);
2751 rhs = build (rcode, TREE_TYPE (rhs), ll_arg, rr_arg);
2753 return build (code, truth_type, lhs, rhs);
2758 /* If the RHS can be evaluated unconditionally and its operands are
2759 simple, it wins to evaluate the RHS unconditionally on machines
2760 with expensive branches. In this case, this isn't a comparison
2761 that can be merged. */
2763 /* @@ I'm not sure it wins on the m88110 to do this if the comparisons
2764 are with zero (tmw). */
2766 if (BRANCH_COST >= 2
2767 && INTEGRAL_TYPE_P (TREE_TYPE (rhs))
2768 && simple_operand_p (rl_arg)
2769 && simple_operand_p (rr_arg))
2770 return build (code, truth_type, lhs, rhs);
2772 /* See if the comparisons can be merged. Then get all the parameters for
/* Only equality/inequality comparisons can be merged via masking.  */
2775 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
2776 || (rcode != EQ_EXPR && rcode != NE_EXPR))
/* Decompose each operand into (inner object, bit position/size, mode,
   signedness, mask); `volatilep` accumulates across all four calls.  */
2780 ll_inner = decode_field_reference (ll_arg,
2781 &ll_bitsize, &ll_bitpos, &ll_mode,
2782 &ll_unsignedp, &volatilep, &ll_mask);
2783 lr_inner = decode_field_reference (lr_arg,
2784 &lr_bitsize, &lr_bitpos, &lr_mode,
2785 &lr_unsignedp, &volatilep, &lr_mask);
2786 rl_inner = decode_field_reference (rl_arg,
2787 &rl_bitsize, &rl_bitpos, &rl_mode,
2788 &rl_unsignedp, &volatilep, &rl_mask);
2789 rr_inner = decode_field_reference (rr_arg,
2790 &rr_bitsize, &rr_bitpos, &rr_mode,
2791 &rr_unsignedp, &volatilep, &rr_mask);
2793 /* It must be true that the inner operation on the lhs of each
2794 comparison must be the same if we are to be able to do anything.
2795 Then see if we have constants. If not, the same must be true for
2797 if (volatilep || ll_inner == 0 || rl_inner == 0
2798 || ! operand_equal_p (ll_inner, rl_inner, 0))
2801 if (TREE_CODE (lr_arg) == INTEGER_CST
2802 && TREE_CODE (rr_arg) == INTEGER_CST)
2803 l_const = lr_arg, r_const = rr_arg;
2804 else if (lr_inner == 0 || rr_inner == 0
2805 || ! operand_equal_p (lr_inner, rr_inner, 0))
2808 l_const = r_const = 0;
2810 /* If either comparison code is not correct for our logical operation,
2811 fail. However, we can convert a one-bit comparison against zero into
2812 the opposite comparison against that bit being set in the field. */
2814 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
2815 if (lcode != wanted_code)
/* (bit == 0) can be rewritten as the opposite test against the bit's
   mask; the rewrite itself is in the missing lines.  */
2817 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
2823 if (rcode != wanted_code)
2825 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
2831 /* See if we can find a mode that contains both fields being compared on
2832 the left. If we can't, fail. Otherwise, update all constants and masks
2833 to be relative to a field of that size. */
2834 first_bit = MIN (ll_bitpos, rl_bitpos);
2835 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
2836 lnmode = get_best_mode (end_bit - first_bit, first_bit,
2837 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
2839 if (lnmode == VOIDmode)
2842 lnbitsize = GET_MODE_BITSIZE (lnmode);
/* Align the wider field's start down to a multiple of its size
   (lnbitsize is a power of two for a machine mode).  */
2843 lnbitpos = first_bit & ~ (lnbitsize - 1);
2844 type = type_for_size (lnbitsize, 1);
/* x*_bitpos = each field's offset within the new wider field.  */
2845 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
2847 if (BYTES_BIG_ENDIAN)
2849 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
2850 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
/* Shift the per-field masks into position within the wider field.  */
2853 ll_mask = const_binop (LSHIFT_EXPR, convert (type, ll_mask),
2854 size_int (xll_bitpos), 0);
2855 rl_mask = const_binop (LSHIFT_EXPR, convert (type, rl_mask),
2856 size_int (xrl_bitpos), 0);
/* Constant case: position each constant and check that it fits its
   field's mask; bits outside the mask mean the comparison is constant.  */
2860 l_const = convert (type, unextend (l_const, ll_bitsize, ll_unsignedp));
2861 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
2862 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
2863 fold (build1 (BIT_NOT_EXPR,
2867 warning ("comparison is always %s",
2868 wanted_code == NE_EXPR ? "one" : "zero");
2870 return convert (truth_type,
2871 wanted_code == NE_EXPR
2872 ? integer_one_node : integer_zero_node);
2877 r_const = convert (type, unextend (r_const, rl_bitsize, rl_unsignedp));
2878 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
2879 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
2880 fold (build1 (BIT_NOT_EXPR,
2884 warning ("comparison is always %s",
2885 wanted_code == NE_EXPR ? "one" : "zero");
2887 return convert (truth_type,
2888 wanted_code == NE_EXPR
2889 ? integer_one_node : integer_zero_node);
2893 /* If the right sides are not constant, do the same for it. Also,
2894 disallow this optimization if a size or signedness mismatch occurs
2895 between the left and right sides. */
2898 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
2899 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
2900 /* Make sure the two fields on the right
2901 correspond to the left without being swapped. */
2902 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
/* Mirror of the lnmode computation above, for the right-hand fields.  */
2905 first_bit = MIN (lr_bitpos, rr_bitpos);
2906 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
2907 rnmode = get_best_mode (end_bit - first_bit, first_bit,
2908 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
2910 if (rnmode == VOIDmode)
2913 rnbitsize = GET_MODE_BITSIZE (rnmode);
2914 rnbitpos = first_bit & ~ (rnbitsize - 1);
2915 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
2917 if (BYTES_BIG_ENDIAN)
2919 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
2920 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
2923 lr_mask = const_binop (LSHIFT_EXPR, convert (type, lr_mask),
2924 size_int (xlr_bitpos), 0);
2925 rr_mask = const_binop (LSHIFT_EXPR, convert (type, rr_mask),
2926 size_int (xrr_bitpos), 0);
2928 /* Make a mask that corresponds to both fields being compared.
2929 Do this for both items being compared. If the masks agree,
2930 we can do this by masking both and comparing the masked
2932 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
2933 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
2934 if (operand_equal_p (ll_mask, lr_mask, 0) && lnbitsize == rnbitsize)
2936 lhs = make_bit_field_ref (ll_inner, type, lnbitsize, lnbitpos,
2937 ll_unsignedp || rl_unsignedp)
2938 rhs = make_bit_field_ref (lr_inner, type, rnbitsize, rnbitpos,
2939 lr_unsignedp || rr_unsignedp)
/* Skip the AND when the mask already covers the whole field.  */
2940 if (! all_ones_mask_p (ll_mask, lnbitsize))
2942 lhs = build (BIT_AND_EXPR, type, lhs, ll_mask);
2943 rhs = build (BIT_AND_EXPR, type, rhs, ll_mask);
2945 return build (wanted_code, truth_type, lhs, rhs);
2948 /* There is still another way we can do something: If both pairs of
2949 fields being compared are adjacent, we may be able to make a wider
2950 field containing them both. */
2951 if ((ll_bitsize + ll_bitpos == rl_bitpos
2952 && lr_bitsize + lr_bitpos == rr_bitpos)
2953 || (ll_bitpos == rl_bitpos + rl_bitsize
2954 && lr_bitpos == rr_bitpos + rr_bitsize))
2955 return build (wanted_code, truth_type,
2956 make_bit_field_ref (ll_inner, type,
2957 ll_bitsize + rl_bitsize,
2958 MIN (ll_bitpos, rl_bitpos),
2960 make_bit_field_ref (lr_inner, type,
2961 lr_bitsize + rr_bitsize,
2962 MIN (lr_bitpos, rr_bitpos),
2968 /* Handle the case of comparisons with constants. If there is something in
2969 common between the masks, those bits of the constants must be the same.
2970 If not, the condition is always false. Test for this to avoid generating
2971 incorrect code below. */
2972 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
2973 if (! integer_zerop (result)
2974 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
2975 const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
2977 if (wanted_code == NE_EXPR)
2979 warning ("`or' of unmatched not-equal tests is always 1");
2980 return convert (truth_type, integer_one_node);
2984 warning ("`and' of mutually exclusive equal-tests is always zero");
2985 return convert (truth_type, integer_zero_node);
2989 /* Construct the expression we will return. First get the component
2990 reference we will make. Unless the mask is all ones the width of
2991 that field, perform the mask operation. Then compare with the
2993 result = make_bit_field_ref (ll_inner, type, lnbitsize, lnbitpos,
2994 ll_unsignedp || rl_unsignedp)
2996 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
2997 if (! all_ones_mask_p (ll_mask, lnbitsize))
2998 result = build (BIT_AND_EXPR, type, result, ll_mask);
3000 return build (wanted_code, truth_type, result,
3001 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
3004 /* If T contains a COMPOUND_EXPR which was inserted merely to evaluate
3005 S, a SAVE_EXPR, return the expression actually being evaluated. Note
3006 that we may sometimes modify the tree. */
/* NOTE(review): lossy numbered listing -- the `static tree` return type,
   parameter declarations, braces, and the function's final statement
   (presumably `return t;`, at a line number past 3035) are missing from
   this view.  Code tokens preserved verbatim; only comments added.  */
/* strip_compound_expr: see the header comment above -- remove from T a
   COMPOUND_EXPR whose first operand is a conversion of the SAVE_EXPR S,
   recursing into COND_EXPRs, unary ('1'), binary ('2'), and comparison
   ('<') operators.  May rewrite T's operands in place.  */
3009 strip_compound_expr (t, s)
3013 tree type = TREE_TYPE (t);
3014 enum tree_code code = TREE_CODE (t);
3016 /* See if this is the COMPOUND_EXPR we want to eliminate. */
/* Matches (CONVERT_EXPR (s), x) and returns just x.  */
3017 if (code == COMPOUND_EXPR && TREE_CODE (TREE_OPERAND (t, 0)) == CONVERT_EXPR
3018 && TREE_OPERAND (TREE_OPERAND (t, 0), 0) == s)
3019 return TREE_OPERAND (t, 1);
3021 /* See if this is a COND_EXPR or a simple arithmetic operator. We
3022 don't bother handling any other types. */
3023 else if (code == COND_EXPR)
3025 TREE_OPERAND (t, 0) = strip_compound_expr (TREE_OPERAND (t, 0), s);
3026 TREE_OPERAND (t, 1) = strip_compound_expr (TREE_OPERAND (t, 1), s);
3027 TREE_OPERAND (t, 2) = strip_compound_expr (TREE_OPERAND (t, 2), s);
3029 else if (TREE_CODE_CLASS (code) == '1')
3030 TREE_OPERAND (t, 0) = strip_compound_expr (TREE_OPERAND (t, 0), s);
3031 else if (TREE_CODE_CLASS (code) == '<'
3032 || TREE_CODE_CLASS (code) == '2')
3034 TREE_OPERAND (t, 0) = strip_compound_expr (TREE_OPERAND (t, 0), s);
3035 TREE_OPERAND (t, 1) = strip_compound_expr (TREE_OPERAND (t, 1), s);
3041 /* Perform constant folding and related simplification of EXPR.
3042 The related simplifications include x*1 => x, x*0 => 0, etc.,
3043 and application of the associative law.
3044 NOP_EXPR conversions may be removed freely (as long as we
3045 are careful not to change the C type of the overall expression)
3046 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
3047 but we can constant-fold them if they have constant operands. */
3053 register tree t = expr;
3054 tree t1 = NULL_TREE;
3056 tree type = TREE_TYPE (expr);
3057 register tree arg0, arg1;
3058 register enum tree_code code = TREE_CODE (t);
3062 /* WINS will be nonzero when the switch is done
3063 if all operands are constant. */
3067 /* Don't try to process an RTL_EXPR since its operands aren't trees. */
3068 if (code == RTL_EXPR)
3071 /* Return right away if already constant. */
3072 if (TREE_CONSTANT (t))
3074 if (code == CONST_DECL)
3075 return DECL_INITIAL (t);
3079 kind = TREE_CODE_CLASS (code);
3080 if (code == NOP_EXPR || code == FLOAT_EXPR || code == CONVERT_EXPR)
3084 /* Special case for conversion ops that can have fixed point args. */
3085 arg0 = TREE_OPERAND (t, 0);
3087 /* Don't use STRIP_NOPS, because signedness of argument type matters. */
3089 STRIP_TYPE_NOPS (arg0);
3091 if (arg0 != 0 && TREE_CODE (arg0) == COMPLEX_CST)
3092 subop = TREE_REALPART (arg0);
3096 if (subop != 0 && TREE_CODE (subop) != INTEGER_CST
3097 #if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
3098 && TREE_CODE (subop) != REAL_CST
3099 #endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
3101 /* Note that TREE_CONSTANT isn't enough:
3102 static var addresses are constant but we can't
3103 do arithmetic on them. */
3106 else if (kind == 'e' || kind == '<'
3107 || kind == '1' || kind == '2' || kind == 'r')
3109 register int len = tree_code_length[(int) code];
3111 for (i = 0; i < len; i++)
3113 tree op = TREE_OPERAND (t, i);
3117 continue; /* Valid for CALL_EXPR, at least. */
3119 if (kind == '<' || code == RSHIFT_EXPR)
3121 /* Signedness matters here. Perhaps we can refine this
3123 STRIP_TYPE_NOPS (op);
3127 /* Strip any conversions that don't change the mode. */
3131 if (TREE_CODE (op) == COMPLEX_CST)
3132 subop = TREE_REALPART (op);
3136 if (TREE_CODE (subop) != INTEGER_CST
3137 #if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
3138 && TREE_CODE (subop) != REAL_CST
3139 #endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
3141 /* Note that TREE_CONSTANT isn't enough:
3142 static var addresses are constant but we can't
3143 do arithmetic on them. */
3153 /* If this is a commutative operation, and ARG0 is a constant, move it
3154 to ARG1 to reduce the number of tests below. */
3155 if ((code == PLUS_EXPR || code == MULT_EXPR || code == MIN_EXPR
3156 || code == MAX_EXPR || code == BIT_IOR_EXPR || code == BIT_XOR_EXPR
3157 || code == BIT_AND_EXPR)
3158 && (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST))
3160 tem = arg0; arg0 = arg1; arg1 = tem;
3162 tem = TREE_OPERAND (t, 0); TREE_OPERAND (t, 0) = TREE_OPERAND (t, 1);
3163 TREE_OPERAND (t, 1) = tem;
3166 /* Now WINS is set as described above,
3167 ARG0 is the first operand of EXPR,
3168 and ARG1 is the second operand (if it has more than one operand).
3170 First check for cases where an arithmetic operation is applied to a
3171 compound, conditional, or comparison operation. Push the arithmetic
3172 operation inside the compound or conditional to see if any folding
3173 can then be done. Convert comparison to conditional for this purpose.
3174 The also optimizes non-constant cases that used to be done in
3177 Before we do that, see if this is a BIT_AND_EXPR or a BIT_OR_EXPR,
3178 one of the operands is a comparison and the other is a comparison, a
3179 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
3180 code below would make the expression more complex. Change it to a
3181 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
3182 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
3184 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
3185 || code == EQ_EXPR || code == NE_EXPR)
3186 && ((truth_value_p (TREE_CODE (arg0))
3187 && (truth_value_p (TREE_CODE (arg1))
3188 || (TREE_CODE (arg1) == BIT_AND_EXPR
3189 && integer_onep (TREE_OPERAND (arg1, 1)))))
3190 || (truth_value_p (TREE_CODE (arg1))
3191 && (truth_value_p (TREE_CODE (arg0))
3192 || (TREE_CODE (arg0) == BIT_AND_EXPR
3193 && integer_onep (TREE_OPERAND (arg0, 1)))))))
3195 t = fold (build (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
3196 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
3200 if (code == EQ_EXPR)
3201 t = invert_truthvalue (t);
3206 if (TREE_CODE_CLASS (code) == '1')
3208 if (TREE_CODE (arg0) == COMPOUND_EXPR)
3209 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
3210 fold (build1 (code, type, TREE_OPERAND (arg0, 1))));
3211 else if (TREE_CODE (arg0) == COND_EXPR)
3213 t = fold (build (COND_EXPR, type, TREE_OPERAND (arg0, 0),
3214 fold (build1 (code, type, TREE_OPERAND (arg0, 1))),
3215 fold (build1 (code, type, TREE_OPERAND (arg0, 2)))));
3217 /* If this was a conversion, and all we did was to move into
3218 inside the COND_EXPR, bring it back out. But leave it if
3219 it is a conversion from integer to integer and the
3220 result precision is no wider than a word since such a
3221 conversion is cheap and may be optimized away by combine,
3222 while it couldn't if it were outside the COND_EXPR. Then return
3223 so we don't get into an infinite recursion loop taking the
3224 conversion out and then back in. */
3226 if ((code == NOP_EXPR || code == CONVERT_EXPR
3227 || code == NON_LVALUE_EXPR)
3228 && TREE_CODE (t) == COND_EXPR
3229 && TREE_CODE (TREE_OPERAND (t, 1)) == code
3230 && TREE_CODE (TREE_OPERAND (t, 2)) == code
3231 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0))
3232 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 2), 0)))
3233 && ! (INTEGRAL_TYPE_P (TREE_TYPE (t))
3234 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0)))
3235 && TYPE_PRECISION (TREE_TYPE (t)) <= BITS_PER_WORD))
3236 t = build1 (code, type,
3238 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0)),
3239 TREE_OPERAND (t, 0),
3240 TREE_OPERAND (TREE_OPERAND (t, 1), 0),
3241 TREE_OPERAND (TREE_OPERAND (t, 2), 0)));
3244 else if (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<')
3245 return fold (build (COND_EXPR, type, arg0,
3246 fold (build1 (code, type, integer_one_node)),
3247 fold (build1 (code, type, integer_zero_node))));
3249 else if (TREE_CODE_CLASS (code) == '2'
3250 || TREE_CODE_CLASS (code) == '<')
3252 if (TREE_CODE (arg1) == COMPOUND_EXPR)
3253 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
3254 fold (build (code, type,
3255 arg0, TREE_OPERAND (arg1, 1))));
3256 else if (TREE_CODE (arg1) == COND_EXPR
3257 || TREE_CODE_CLASS (TREE_CODE (arg1)) == '<')
3259 tree test, true_value, false_value;
3261 if (TREE_CODE (arg1) == COND_EXPR)
3263 test = TREE_OPERAND (arg1, 0);
3264 true_value = TREE_OPERAND (arg1, 1);
3265 false_value = TREE_OPERAND (arg1, 2);
3270 true_value = integer_one_node;
3271 false_value = integer_zero_node;
3274 /* If ARG0 is complex we want to make sure we only evaluate
3275 it once. Though this is only required if it is volatile, it
3276 might be more efficient even if it is not. However, if we
3277 succeed in folding one part to a constant, we do not need
3278 to make this SAVE_EXPR. Since we do this optimization
3279 primarily to see if we do end up with constant and this
3280 SAVE_EXPR interfers with later optimizations, suppressing
3281 it when we can is important. */
3283 if (TREE_CODE (arg0) != SAVE_EXPR
3284 && ((TREE_CODE (arg0) != VAR_DECL
3285 && TREE_CODE (arg0) != PARM_DECL)
3286 || TREE_SIDE_EFFECTS (arg0)))
3288 tree lhs = fold (build (code, type, arg0, true_value));
3289 tree rhs = fold (build (code, type, arg0, false_value));
3291 if (TREE_CONSTANT (lhs) || TREE_CONSTANT (rhs))
3292 return fold (build (COND_EXPR, type, test, lhs, rhs));
3294 arg0 = save_expr (arg0);
3297 test = fold (build (COND_EXPR, type, test,
3298 fold (build (code, type, arg0, true_value)),
3299 fold (build (code, type, arg0, false_value))));
3300 if (TREE_CODE (arg0) == SAVE_EXPR)
3301 return build (COMPOUND_EXPR, type,
3302 convert (void_type_node, arg0),
3303 strip_compound_expr (test, arg0));
3305 return convert (type, test);
3308 else if (TREE_CODE (arg0) == COMPOUND_EXPR)
3309 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
3310 fold (build (code, type, TREE_OPERAND (arg0, 1), arg1)));
3311 else if (TREE_CODE (arg0) == COND_EXPR
3312 || TREE_CODE_CLASS (TREE_CODE (arg0)) == '<')
3314 tree test, true_value, false_value;
3316 if (TREE_CODE (arg0) == COND_EXPR)
3318 test = TREE_OPERAND (arg0, 0);
3319 true_value = TREE_OPERAND (arg0, 1);
3320 false_value = TREE_OPERAND (arg0, 2);
3325 true_value = integer_one_node;
3326 false_value = integer_zero_node;
3329 if (TREE_CODE (arg1) != SAVE_EXPR
3330 && ((TREE_CODE (arg1) != VAR_DECL
3331 && TREE_CODE (arg1) != PARM_DECL)
3332 || TREE_SIDE_EFFECTS (arg1)))
3334 tree lhs = fold (build (code, type, true_value, arg1));
3335 tree rhs = fold (build (code, type, false_value, arg1));
3337 if (TREE_CONSTANT (lhs) || TREE_CONSTANT (rhs)
3338 || TREE_CONSTANT (arg1))
3339 return fold (build (COND_EXPR, type, test, lhs, rhs));
3341 arg1 = save_expr (arg1);
3344 test = fold (build (COND_EXPR, type, test,
3345 fold (build (code, type, true_value, arg1)),
3346 fold (build (code, type, false_value, arg1))));
3347 if (TREE_CODE (arg1) == SAVE_EXPR)
3348 return build (COMPOUND_EXPR, type,
3349 convert (void_type_node, arg1),
3350 strip_compound_expr (test, arg1));
3352 return convert (type, test);
3355 else if (TREE_CODE_CLASS (code) == '<'
3356 && TREE_CODE (arg0) == COMPOUND_EXPR)
3357 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
3358 fold (build (code, type, TREE_OPERAND (arg0, 1), arg1)));
3359 else if (TREE_CODE_CLASS (code) == '<'
3360 && TREE_CODE (arg1) == COMPOUND_EXPR)
3361 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
3362 fold (build (code, type, arg0, TREE_OPERAND (arg1, 1))));
3374 return fold (DECL_INITIAL (t));
3379 case FIX_TRUNC_EXPR:
3380 /* Other kinds of FIX are not handled properly by fold_convert. */
3382 if (TREE_TYPE (TREE_OPERAND (t, 0)) == TREE_TYPE (t))
3383 return TREE_OPERAND (t, 0);
3385 /* Handle cases of two conversions in a row. */
3386 if (TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
3387 || TREE_CODE (TREE_OPERAND (t, 0)) == CONVERT_EXPR)
3389 tree inside_type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
3390 tree inter_type = TREE_TYPE (TREE_OPERAND (t, 0));
3391 tree final_type = TREE_TYPE (t);
3392 int inside_int = INTEGRAL_TYPE_P (inside_type);
3393 int inside_ptr = POINTER_TYPE_P (inside_type);
3394 int inside_float = FLOAT_TYPE_P (inside_type);
3395 int inside_prec = TYPE_PRECISION (inside_type);
3396 int inside_unsignedp = TREE_UNSIGNED (inside_type);
3397 int inter_int = INTEGRAL_TYPE_P (inter_type);
3398 int inter_ptr = POINTER_TYPE_P (inter_type);
3399 int inter_float = FLOAT_TYPE_P (inter_type);
3400 int inter_prec = TYPE_PRECISION (inter_type);
3401 int inter_unsignedp = TREE_UNSIGNED (inter_type);
3402 int final_int = INTEGRAL_TYPE_P (final_type);
3403 int final_ptr = POINTER_TYPE_P (final_type);
3404 int final_float = FLOAT_TYPE_P (final_type);
3405 int final_prec = TYPE_PRECISION (final_type);
3406 int final_unsignedp = TREE_UNSIGNED (final_type);
3408 /* In addition to the cases of two conversions in a row
3409 handled below, if we are converting something to its own
3410 type via an object of identical or wider precision, neither
3411 conversion is needed. */
3412 if (inside_type == final_type
3413 && ((inter_int && final_int) || (inter_float && final_float))
3414 && inter_prec >= final_prec)
3415 return TREE_OPERAND (TREE_OPERAND (t, 0), 0);
3417 /* Likewise, if the intermediate and final types are either both
3418 float or both integer, we don't need the middle conversion if
3419 it is wider than the final type and doesn't change the signedness
3420 (for integers). Avoid this if the final type is a pointer
3421 since then we sometimes need the inner conversion. */
3422 if ((((inter_int || inter_ptr) && (inside_int || inside_ptr))
3423 || (inter_float && inside_float))
3424 && inter_prec >= inside_prec
3425 && (inter_float || inter_unsignedp == inside_unsignedp)
3427 return convert (final_type, TREE_OPERAND (TREE_OPERAND (t, 0), 0));
3429 /* Two conversions in a row are not needed unless:
3430 - some conversion is floating-point (overstrict for now), or
3431 - the intermediate type is narrower than both initial and
3433 - the intermediate type and innermost type differ in signedness,
3434 and the outermost type is wider than the intermediate, or
3435 - the initial type is a pointer type and the precisions of the
3436 intermediate and final types differ, or
3437 - the final type is a pointer type and the precisions of the
3438 initial and intermediate types differ. */
3439 if (! inside_float && ! inter_float && ! final_float
3440 && (inter_prec > inside_prec || inter_prec > final_prec)
3441 && ! (inside_int && inter_int
3442 && inter_unsignedp != inside_unsignedp
3443 && inter_prec < final_prec)
3444 && ((inter_unsignedp && inter_prec > inside_prec)
3445 == (final_unsignedp && final_prec > inter_prec))
3446 && ! (inside_ptr && inter_prec != final_prec)
3447 && ! (final_ptr && inside_prec != inter_prec))
3448 return convert (final_type, TREE_OPERAND (TREE_OPERAND (t, 0), 0));
3451 if (TREE_CODE (TREE_OPERAND (t, 0)) == MODIFY_EXPR
3452 && TREE_CONSTANT (TREE_OPERAND (TREE_OPERAND (t, 0), 1))
3453 /* Detect assigning a bitfield. */
3454 && !(TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 0)) == COMPONENT_REF
3455 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t, 0), 0), 1))))
3457 /* Don't leave an assignment inside a conversion
3458 unless assigning a bitfield. */
3459 tree prev = TREE_OPERAND (t, 0);
3460 TREE_OPERAND (t, 0) = TREE_OPERAND (prev, 1);
3461 /* First do the assignment, then return converted constant. */
3462 t = build (COMPOUND_EXPR, TREE_TYPE (t), prev, fold (t));
3468 TREE_CONSTANT (t) = TREE_CONSTANT (arg0);
3471 return fold_convert (t, arg0);
3473 #if 0 /* This loses on &"foo"[0]. */
3478 /* Fold an expression like: "foo"[2] */
3479 if (TREE_CODE (arg0) == STRING_CST
3480 && TREE_CODE (arg1) == INTEGER_CST
3481 && !TREE_INT_CST_HIGH (arg1)
3482 && (i = TREE_INT_CST_LOW (arg1)) < TREE_STRING_LENGTH (arg0))
3484 t = build_int_2 (TREE_STRING_POINTER (arg0)[i], 0);
3485 TREE_TYPE (t) = TREE_TYPE (TREE_TYPE (arg0));
3486 force_fit_type (t, 0);
3493 if (TREE_CODE (arg0) == CONSTRUCTOR)
3495 tree m = purpose_member (arg1, CONSTRUCTOR_ELTS (arg0));
3502 TREE_CONSTANT (t) = wins;
3508 if (TREE_CODE (arg0) == INTEGER_CST)
3510 HOST_WIDE_INT low, high;
3511 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
3512 TREE_INT_CST_HIGH (arg0),
3514 t = build_int_2 (low, high);
3515 TREE_TYPE (t) = type;
3517 = (TREE_OVERFLOW (arg0)
3518 | force_fit_type (t, overflow));
3519 TREE_CONSTANT_OVERFLOW (t)
3520 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg0);
3522 else if (TREE_CODE (arg0) == REAL_CST)
3523 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
3524 TREE_TYPE (t) = type;
3526 else if (TREE_CODE (arg0) == NEGATE_EXPR)
3527 return TREE_OPERAND (arg0, 0);
3529 /* Convert - (a - b) to (b - a) for non-floating-point. */
3530 else if (TREE_CODE (arg0) == MINUS_EXPR && ! FLOAT_TYPE_P (type))
3531 return build (MINUS_EXPR, type, TREE_OPERAND (arg0, 1),
3532 TREE_OPERAND (arg0, 0));
3539 if (TREE_CODE (arg0) == INTEGER_CST)
3541 if (! TREE_UNSIGNED (type)
3542 && TREE_INT_CST_HIGH (arg0) < 0)
3544 HOST_WIDE_INT low, high;
3545 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
3546 TREE_INT_CST_HIGH (arg0),
3548 t = build_int_2 (low, high);
3549 TREE_TYPE (t) = type;
3551 = (TREE_OVERFLOW (arg0)
3552 | force_fit_type (t, overflow));
3553 TREE_CONSTANT_OVERFLOW (t)
3554 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg0);
3557 else if (TREE_CODE (arg0) == REAL_CST)
3559 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
3560 t = build_real (type,
3561 REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
3563 TREE_TYPE (t) = type;
3565 else if (TREE_CODE (arg0) == ABS_EXPR || TREE_CODE (arg0) == NEGATE_EXPR)
3566 return build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0));
3570 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
3572 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
3573 return build (COMPLEX_EXPR, TREE_TYPE (arg0),
3574 TREE_OPERAND (arg0, 0),
3575 fold (build1 (NEGATE_EXPR,
3576 TREE_TYPE (TREE_TYPE (arg0)),
3577 TREE_OPERAND (arg0, 1))));
3578 else if (TREE_CODE (arg0) == COMPLEX_CST)
3579 return build_complex (TREE_OPERAND (arg0, 0),
3580 fold (build1 (NEGATE_EXPR,
3581 TREE_TYPE (TREE_TYPE (arg0)),
3582 TREE_OPERAND (arg0, 1))));
3583 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
3584 return fold (build (TREE_CODE (arg0), type,
3585 fold (build1 (CONJ_EXPR, type,
3586 TREE_OPERAND (arg0, 0))),
3587 fold (build1 (CONJ_EXPR,
3588 type, TREE_OPERAND (arg0, 1)))));
3589 else if (TREE_CODE (arg0) == CONJ_EXPR)
3590 return TREE_OPERAND (arg0, 0);
3596 if (TREE_CODE (arg0) == INTEGER_CST)
3597 t = build_int_2 (~ TREE_INT_CST_LOW (arg0),
3598 ~ TREE_INT_CST_HIGH (arg0));
3599 TREE_TYPE (t) = type;
3600 force_fit_type (t, 0);
3601 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg0);
3602 TREE_CONSTANT_OVERFLOW (t) = TREE_CONSTANT_OVERFLOW (arg0);
3604 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
3605 return TREE_OPERAND (arg0, 0);
3609 /* A + (-B) -> A - B */
3610 if (TREE_CODE (arg1) == NEGATE_EXPR)
3611 return fold (build (MINUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
3612 else if (! FLOAT_TYPE_P (type))
3614 if (integer_zerop (arg1))
3615 return non_lvalue (convert (type, arg0));
3617 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
3618 with a constant, and the two constants have no bits in common,
3619 we should treat this as a BIT_IOR_EXPR since this may produce more
3621 if (TREE_CODE (arg0) == BIT_AND_EXPR
3622 && TREE_CODE (arg1) == BIT_AND_EXPR
3623 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
3624 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
3625 && integer_zerop (const_binop (BIT_AND_EXPR,
3626 TREE_OPERAND (arg0, 1),
3627 TREE_OPERAND (arg1, 1), 0)))
3629 code = BIT_IOR_EXPR;
3633 /* (A * C) + (B * C) -> (A+B) * C. Since we are most concerned
3634 about the case where C is a constant, just try one of the
3635 four possibilities. */
3637 if (TREE_CODE (arg0) == MULT_EXPR && TREE_CODE (arg1) == MULT_EXPR
3638 && operand_equal_p (TREE_OPERAND (arg0, 1),
3639 TREE_OPERAND (arg1, 1), 0))
3640 return fold (build (MULT_EXPR, type,
3641 fold (build (PLUS_EXPR, type,
3642 TREE_OPERAND (arg0, 0),
3643 TREE_OPERAND (arg1, 0))),
3644 TREE_OPERAND (arg0, 1)));
3646 /* In IEEE floating point, x+0 may not equal x. */
3647 else if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
3649 && real_zerop (arg1))
3650 return non_lvalue (convert (type, arg0));
3652 /* In most languages, can't associate operations on floats
3653 through parentheses. Rather than remember where the parentheses
3654 were, we don't associate floats at all. It shouldn't matter much.
3655 However, associating multiplications is only very slightly
3656 inaccurate, so do that if -ffast-math is specified. */
3657 if (FLOAT_TYPE_P (type)
3658 && ! (flag_fast_math && code == MULT_EXPR))
3661 /* The varsign == -1 cases happen only for addition and subtraction.
3662 It says that the arg that was split was really CON minus VAR.
3663 The rest of the code applies to all associative operations. */
3669 if (split_tree (arg0, code, &var, &con, &varsign))
3673 /* EXPR is (CON-VAR) +- ARG1. */
3674 /* If it is + and VAR==ARG1, return just CONST. */
3675 if (code == PLUS_EXPR && operand_equal_p (var, arg1, 0))
3676 return convert (TREE_TYPE (t), con);
3678 /* If ARG0 is a constant, don't change things around;
3679 instead keep all the constant computations together. */
3681 if (TREE_CONSTANT (arg0))
3684 /* Otherwise return (CON +- ARG1) - VAR. */
3685 t = build (MINUS_EXPR, type,
3686 fold (build (code, type, con, arg1)), var);
3690 /* EXPR is (VAR+CON) +- ARG1. */
3691 /* If it is - and VAR==ARG1, return just CONST. */
3692 if (code == MINUS_EXPR && operand_equal_p (var, arg1, 0))
3693 return convert (TREE_TYPE (t), con);
3695 /* If ARG0 is a constant, don't change things around;
3696 instead keep all the constant computations together. */
3698 if (TREE_CONSTANT (arg0))
3701 /* Otherwise return VAR +- (ARG1 +- CON). */
3702 tem = fold (build (code, type, arg1, con));
3703 t = build (code, type, var, tem);
3705 if (integer_zerop (tem)
3706 && (code == PLUS_EXPR || code == MINUS_EXPR))
3707 return convert (type, var);
3708 /* If we have x +/- (c - d) [c an explicit integer]
3709 change it to x -/+ (d - c) since if d is relocatable
3710 then the latter can be a single immediate insn
3711 and the former cannot. */
3712 if (TREE_CODE (tem) == MINUS_EXPR
3713 && TREE_CODE (TREE_OPERAND (tem, 0)) == INTEGER_CST)
3715 tree tem1 = TREE_OPERAND (tem, 1);
3716 TREE_OPERAND (tem, 1) = TREE_OPERAND (tem, 0);
3717 TREE_OPERAND (tem, 0) = tem1;
3719 (code == PLUS_EXPR ? MINUS_EXPR : PLUS_EXPR));
3725 if (split_tree (arg1, code, &var, &con, &varsign))
3727 if (TREE_CONSTANT (arg1))
3732 (code == PLUS_EXPR ? MINUS_EXPR : PLUS_EXPR));
3734 /* EXPR is ARG0 +- (CON +- VAR). */
3735 if (TREE_CODE (t) == MINUS_EXPR
3736 && operand_equal_p (var, arg0, 0))
3738 /* If VAR and ARG0 cancel, return just CON or -CON. */
3739 if (code == PLUS_EXPR)
3740 return convert (TREE_TYPE (t), con);
3741 return fold (build1 (NEGATE_EXPR, TREE_TYPE (t),
3742 convert (TREE_TYPE (t), con)));
3745 t = build (TREE_CODE (t), type,
3746 fold (build (code, TREE_TYPE (t), arg0, con)), var);
3748 if (integer_zerop (TREE_OPERAND (t, 0))
3749 && TREE_CODE (t) == PLUS_EXPR)
3750 return convert (TREE_TYPE (t), var);
3755 #if defined (REAL_IS_NOT_DOUBLE) && ! defined (REAL_ARITHMETIC)
3756 if (TREE_CODE (arg1) == REAL_CST)
3758 #endif /* REAL_IS_NOT_DOUBLE, and no REAL_ARITHMETIC */
3760 t1 = const_binop (code, arg0, arg1, 0);
3761 if (t1 != NULL_TREE)
3763 /* The return value should always have
3764 the same type as the original expression. */
3765 TREE_TYPE (t1) = TREE_TYPE (t);
3771 if (! FLOAT_TYPE_P (type))
3773 if (! wins && integer_zerop (arg0))
3774 return build1 (NEGATE_EXPR, type, arg1);
3775 if (integer_zerop (arg1))
3776 return non_lvalue (convert (type, arg0));
3778 /* (A * C) - (B * C) -> (A-B) * C. Since we are most concerned
3779 about the case where C is a constant, just try one of the
3780 four possibilities. */
3782 if (TREE_CODE (arg0) == MULT_EXPR && TREE_CODE (arg1) == MULT_EXPR
3783 && operand_equal_p (TREE_OPERAND (arg0, 1),
3784 TREE_OPERAND (arg1, 1), 0))
3785 return fold (build (MULT_EXPR, type,
3786 fold (build (MINUS_EXPR, type,
3787 TREE_OPERAND (arg0, 0),
3788 TREE_OPERAND (arg1, 0))),
3789 TREE_OPERAND (arg0, 1)));
3791 /* Convert A - (-B) to A + B. */
3792 else if (TREE_CODE (arg1) == NEGATE_EXPR)
3793 return fold (build (PLUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
3795 else if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
3798 /* Except with IEEE floating point, 0-x equals -x. */
3799 if (! wins && real_zerop (arg0))
3800 return build1 (NEGATE_EXPR, type, arg1);
3801 /* Except with IEEE floating point, x-0 equals x. */
3802 if (real_zerop (arg1))
3803 return non_lvalue (convert (type, arg0));
3806 /* Fold &x - &x. This can happen from &x.foo - &x.
3807 This is unsafe for certain floats even in non-IEEE formats.
3808 In IEEE, it is unsafe because it does wrong for NaNs.
3809 Also note that operand_equal_p is always false if an operand
3812 if ((! FLOAT_TYPE_P (type) || flag_fast_math)
3813 && operand_equal_p (arg0, arg1, 0))
3814 return convert (type, integer_zero_node);
3819 if (! FLOAT_TYPE_P (type))
3821 if (integer_zerop (arg1))
3822 return omit_one_operand (type, arg1, arg0);
3823 if (integer_onep (arg1))
3824 return non_lvalue (convert (type, arg0));
3826 /* ((A / C) * C) is A if the division is an
3827 EXACT_DIV_EXPR. Since C is normally a constant,
3828 just check for one of the four possibilities. */
3830 if (TREE_CODE (arg0) == EXACT_DIV_EXPR
3831 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
3832 return TREE_OPERAND (arg0, 0);
3834 /* (a * (1 << b)) is (a << b) */
3835 if (TREE_CODE (arg1) == LSHIFT_EXPR
3836 && integer_onep (TREE_OPERAND (arg1, 0)))
3837 return fold (build (LSHIFT_EXPR, type, arg0,
3838 TREE_OPERAND (arg1, 1)));
3839 if (TREE_CODE (arg0) == LSHIFT_EXPR
3840 && integer_onep (TREE_OPERAND (arg0, 0)))
3841 return fold (build (LSHIFT_EXPR, type, arg1,
3842 TREE_OPERAND (arg0, 1)));
3846 /* x*0 is 0, except for IEEE floating point. */
3847 if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
3849 && real_zerop (arg1))
3850 return omit_one_operand (type, arg1, arg0);
3851 /* In IEEE floating point, x*1 is not equivalent to x for snans.
3852 However, ANSI says we can drop signals,
3853 so we can do this anyway. */
3854 if (real_onep (arg1))
3855 return non_lvalue (convert (type, arg0));
3857 if (! wins && real_twop (arg1))
3859 tree arg = save_expr (arg0);
3860 return build (PLUS_EXPR, type, arg, arg);
3867 if (integer_all_onesp (arg1))
3868 return omit_one_operand (type, arg1, arg0);
3869 if (integer_zerop (arg1))
3870 return non_lvalue (convert (type, arg0));
3871 t1 = distribute_bit_expr (code, type, arg0, arg1);
3872 if (t1 != NULL_TREE)
3875 /* (a << C1) | (a >> C2) if A is unsigned and C1+C2 is the size of A
3876 is a rotate of A by C1 bits. */
3878 if ((TREE_CODE (arg0) == RSHIFT_EXPR
3879 || TREE_CODE (arg0) == LSHIFT_EXPR)
3880 && (TREE_CODE (arg1) == RSHIFT_EXPR
3881 || TREE_CODE (arg1) == LSHIFT_EXPR)
3882 && TREE_CODE (arg0) != TREE_CODE (arg1)
3883 && operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1,0), 0)
3884 && TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0)))
3885 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
3886 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
3887 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
3888 && TREE_INT_CST_HIGH (TREE_OPERAND (arg1, 1)) == 0
3889 && ((TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
3890 + TREE_INT_CST_LOW (TREE_OPERAND (arg1, 1)))
3891 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
3892 return build (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
3893 TREE_CODE (arg0) == LSHIFT_EXPR
3894 ? TREE_OPERAND (arg0, 1) : TREE_OPERAND (arg1, 1));
3899 if (integer_zerop (arg1))
3900 return non_lvalue (convert (type, arg0));
3901 if (integer_all_onesp (arg1))
3902 return fold (build1 (BIT_NOT_EXPR, type, arg0));
3907 if (integer_all_onesp (arg1))
3908 return non_lvalue (convert (type, arg0));
3909 if (integer_zerop (arg1))
3910 return omit_one_operand (type, arg1, arg0);
3911 t1 = distribute_bit_expr (code, type, arg0, arg1);
3912 if (t1 != NULL_TREE)
3914 /* Simplify ((int)c & 0x377) into (int)c, if c is unsigned char. */
3915 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == NOP_EXPR
3916 && TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0))))
3918 int prec = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0)));
3919 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
3920 && (~TREE_INT_CST_LOW (arg0)
3921 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
3922 return build1 (NOP_EXPR, type, TREE_OPERAND (arg1, 0));
3924 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
3925 && TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
3927 int prec = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
3928 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
3929 && (~TREE_INT_CST_LOW (arg1)
3930 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
3931 return build1 (NOP_EXPR, type, TREE_OPERAND (arg0, 0));
3935 case BIT_ANDTC_EXPR:
3936 if (integer_all_onesp (arg0))
3937 return non_lvalue (convert (type, arg1));
3938 if (integer_zerop (arg0))
3939 return omit_one_operand (type, arg0, arg1);
3940 if (TREE_CODE (arg1) == INTEGER_CST)
3942 arg1 = fold (build1 (BIT_NOT_EXPR, type, arg1));
3943 code = BIT_AND_EXPR;
3949 /* In most cases, do nothing with a divide by zero. */
3950 #if !defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
3951 #ifndef REAL_INFINITY
3952 if (TREE_CODE (arg1) == REAL_CST && real_zerop (arg1))
3955 #endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
3957 /* In IEEE floating point, x/1 is not equivalent to x for snans.
3958 However, ANSI says we can drop signals, so we can do this anyway. */
3959 if (real_onep (arg1))
3960 return non_lvalue (convert (type, arg0));
3962 /* If ARG1 is a constant, we can convert this to a multiply by the
3963 reciprocal. This does not have the same rounding properties,
3964 so only do this if -ffast-math. We can actually always safely
3965 do it if ARG1 is a power of two, but it's hard to tell if it is
3966 or not in a portable manner. */
3967 if (TREE_CODE (arg1) == REAL_CST && flag_fast_math
3968 && 0 != (tem = const_binop (code, build_real (type, dconst1),
3970 return fold (build (MULT_EXPR, type, arg0, tem));
3974 case TRUNC_DIV_EXPR:
3975 case ROUND_DIV_EXPR:
3976 case FLOOR_DIV_EXPR:
3978 case EXACT_DIV_EXPR:
3979 if (integer_onep (arg1))
3980 return non_lvalue (convert (type, arg0));
3981 if (integer_zerop (arg1))
3984 /* If we have ((a / C1) / C2) where both division are the same type, try
3985 to simplify. First see if C1 * C2 overflows or not. */
3986 if (TREE_CODE (arg0) == code && TREE_CODE (arg1) == INTEGER_CST
3987 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
3991 new_divisor = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 1), arg1, 0);
3992 tem = const_binop (FLOOR_DIV_EXPR, new_divisor, arg1, 0);
3994 if (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) == TREE_INT_CST_LOW (tem)
3995 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == TREE_INT_CST_HIGH (tem))
3997 /* If no overflow, divide by C1*C2. */
3998 return fold (build (code, type, TREE_OPERAND (arg0, 0), new_divisor));
4002 /* Look for ((a * C1) / C3) or (((a * C1) + C2) / C3),
4003 where C1 % C3 == 0 or C3 % C1 == 0. We can simplify these
4004 expressions, which often appear in the offsets or sizes of
4005 objects with a varying size. Only deal with positive divisors
4006 and multiplicands. If C2 is negative, we must have C2 % C3 == 0.
4008 Look for NOPs and SAVE_EXPRs inside. */
4010 if (TREE_CODE (arg1) == INTEGER_CST
4011 && tree_int_cst_sgn (arg1) >= 0)
4013 int have_save_expr = 0;
4014 tree c2 = integer_zero_node;
4017 if (TREE_CODE (xarg0) == SAVE_EXPR)
4018 have_save_expr = 1, xarg0 = TREE_OPERAND (xarg0, 0);
4022 if (TREE_CODE (xarg0) == PLUS_EXPR
4023 && TREE_CODE (TREE_OPERAND (xarg0, 1)) == INTEGER_CST)
4024 c2 = TREE_OPERAND (xarg0, 1), xarg0 = TREE_OPERAND (xarg0, 0);
4025 else if (TREE_CODE (xarg0) == MINUS_EXPR
4026 && TREE_CODE (TREE_OPERAND (xarg0, 1)) == INTEGER_CST
4027 /* If we are doing this computation unsigned, the negate
4029 && ! TREE_UNSIGNED (type))
4031 c2 = fold (build1 (NEGATE_EXPR, type, TREE_OPERAND (xarg0, 1)));
4032 xarg0 = TREE_OPERAND (xarg0, 0);
4035 if (TREE_CODE (xarg0) == SAVE_EXPR)
4036 have_save_expr = 1, xarg0 = TREE_OPERAND (xarg0, 0);
4040 if (TREE_CODE (xarg0) == MULT_EXPR
4041 && TREE_CODE (TREE_OPERAND (xarg0, 1)) == INTEGER_CST
4042 && tree_int_cst_sgn (TREE_OPERAND (xarg0, 1)) >= 0
4043 && (integer_zerop (const_binop (TRUNC_MOD_EXPR,
4044 TREE_OPERAND (xarg0, 1), arg1, 1))
4045 || integer_zerop (const_binop (TRUNC_MOD_EXPR, arg1,
4046 TREE_OPERAND (xarg0, 1), 1)))
4047 && (tree_int_cst_sgn (c2) >= 0
4048 || integer_zerop (const_binop (TRUNC_MOD_EXPR, c2,
4051 tree outer_div = integer_one_node;
4052 tree c1 = TREE_OPERAND (xarg0, 1);
4055 /* If C3 > C1, set them equal and do a divide by
4056 C3/C1 at the end of the operation. */
4057 if (tree_int_cst_lt (c1, c3))
4058 outer_div = const_binop (code, c3, c1, 0), c3 = c1;
4060 /* The result is A * (C1/C3) + (C2/C3). */
4061 t = fold (build (PLUS_EXPR, type,
4062 fold (build (MULT_EXPR, type,
4063 TREE_OPERAND (xarg0, 0),
4064 const_binop (code, c1, c3, 1))),
4065 const_binop (code, c2, c3, 1)));
4067 if (! integer_onep (outer_div))
4068 t = fold (build (code, type, t, convert (type, outer_div)));
4080 case FLOOR_MOD_EXPR:
4081 case ROUND_MOD_EXPR:
4082 case TRUNC_MOD_EXPR:
4083 if (integer_onep (arg1))
4084 return omit_one_operand (type, integer_zero_node, arg0);
4085 if (integer_zerop (arg1))
4088 /* Look for ((a * C1) % C3) or (((a * C1) + C2) % C3),
4089 where C1 % C3 == 0. Handle similarly to the division case,
4090 but don't bother with SAVE_EXPRs. */
4092 if (TREE_CODE (arg1) == INTEGER_CST
4093 && ! integer_zerop (arg1))
4095 tree c2 = integer_zero_node;
4098 if (TREE_CODE (xarg0) == PLUS_EXPR
4099 && TREE_CODE (TREE_OPERAND (xarg0, 1)) == INTEGER_CST)
4100 c2 = TREE_OPERAND (xarg0, 1), xarg0 = TREE_OPERAND (xarg0, 0);
4101 else if (TREE_CODE (xarg0) == MINUS_EXPR
4102 && TREE_CODE (TREE_OPERAND (xarg0, 1)) == INTEGER_CST
4103 && ! TREE_UNSIGNED (type))
4105 c2 = fold (build1 (NEGATE_EXPR, type, TREE_OPERAND (xarg0, 1)));
4106 xarg0 = TREE_OPERAND (xarg0, 0);
4111 if (TREE_CODE (xarg0) == MULT_EXPR
4112 && TREE_CODE (TREE_OPERAND (xarg0, 1)) == INTEGER_CST
4113 && integer_zerop (const_binop (TRUNC_MOD_EXPR,
4114 TREE_OPERAND (xarg0, 1),
4116 && tree_int_cst_sgn (c2) >= 0)
4117 /* The result is (C2%C3). */
4118 return omit_one_operand (type, const_binop (code, c2, arg1, 1),
4119 TREE_OPERAND (xarg0, 0));
4128 if (integer_zerop (arg1))
4129 return non_lvalue (convert (type, arg0));
4130 /* Since negative shift count is not well-defined,
4131 don't try to compute it in the compiler. */
4132 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
4134 /* Rewrite an LROTATE_EXPR by a constant into an
4135 RROTATE_EXPR by a new constant. */
4136 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
4138 TREE_SET_CODE (t, RROTATE_EXPR);
4139 code = RROTATE_EXPR;
4140 TREE_OPERAND (t, 1) = arg1
4143 convert (TREE_TYPE (arg1),
4144 build_int_2 (GET_MODE_BITSIZE (TYPE_MODE (type)), 0)),
4146 if (tree_int_cst_sgn (arg1) < 0)
4150 /* If we have a rotate of a bit operation with the rotate count and
4151 the second operand of the bit operation both constant,
4152 permute the two operations. */
4153 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
4154 && (TREE_CODE (arg0) == BIT_AND_EXPR
4155 || TREE_CODE (arg0) == BIT_ANDTC_EXPR
4156 || TREE_CODE (arg0) == BIT_IOR_EXPR
4157 || TREE_CODE (arg0) == BIT_XOR_EXPR)
4158 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
4159 return fold (build (TREE_CODE (arg0), type,
4160 fold (build (code, type,
4161 TREE_OPERAND (arg0, 0), arg1)),
4162 fold (build (code, type,
4163 TREE_OPERAND (arg0, 1), arg1))));
4165 /* Two consecutive rotates adding up to the width of the mode can
4167 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
4168 && TREE_CODE (arg0) == RROTATE_EXPR
4169 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
4170 && TREE_INT_CST_HIGH (arg1) == 0
4171 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
4172 && ((TREE_INT_CST_LOW (arg1)
4173 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
4174 == GET_MODE_BITSIZE (TYPE_MODE (type))))
4175 return TREE_OPERAND (arg0, 0);
4180 if (operand_equal_p (arg0, arg1, 0))
4182 if (INTEGRAL_TYPE_P (type)
4183 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), 1))
4184 return omit_one_operand (type, arg1, arg0);
4188 if (operand_equal_p (arg0, arg1, 0))
4190 if (INTEGRAL_TYPE_P (type)
4191 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), 1))
4192 return omit_one_operand (type, arg1, arg0);
4195 case TRUTH_NOT_EXPR:
4196 /* Note that the operand of this must be an int
4197 and its values must be 0 or 1.
4198 ("true" is a fixed value perhaps depending on the language,
4199 but we don't handle values other than 1 correctly yet.) */
4200 return invert_truthvalue (arg0);
4202 case TRUTH_ANDIF_EXPR:
4203 /* Note that the operands of this must be ints
4204 and their values must be 0 or 1.
4205 ("true" is a fixed value perhaps depending on the language.) */
4206 /* If first arg is constant zero, return it. */
4207 if (integer_zerop (arg0))
4209 case TRUTH_AND_EXPR:
4210 /* If either arg is constant true, drop it. */
4211 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
4212 return non_lvalue (arg1);
4213 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
4214 return non_lvalue (arg0);
4215 /* If second arg is constant zero, result is zero, but first arg
4216 must be evaluated. */
4217 if (integer_zerop (arg1))
4218 return omit_one_operand (type, arg1, arg0);
4221 /* We only do these simplifications if we are optimizing. */
4225 /* Check for things like (A || B) && (A || C). We can convert this
4226 to A || (B && C). Note that either operator can be any of the four
4227 truth and/or operations and the transformation will still be
4228 valid. Also note that we only care about order for the
4229 ANDIF and ORIF operators. */
4230 if (TREE_CODE (arg0) == TREE_CODE (arg1)
4231 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
4232 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
4233 || TREE_CODE (arg0) == TRUTH_AND_EXPR
4234 || TREE_CODE (arg0) == TRUTH_OR_EXPR))
4236 tree a00 = TREE_OPERAND (arg0, 0);
4237 tree a01 = TREE_OPERAND (arg0, 1);
4238 tree a10 = TREE_OPERAND (arg1, 0);
4239 tree a11 = TREE_OPERAND (arg1, 1);
4240 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
4241 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
4242 && (code == TRUTH_AND_EXPR
4243 || code == TRUTH_OR_EXPR));
4245 if (operand_equal_p (a00, a10, 0))
4246 return fold (build (TREE_CODE (arg0), type, a00,
4247 fold (build (code, type, a01, a11))));
4248 else if (commutative && operand_equal_p (a00, a11, 0))
4249 return fold (build (TREE_CODE (arg0), type, a00,
4250 fold (build (code, type, a01, a10))));
4251 else if (commutative && operand_equal_p (a01, a10, 0))
4252 return fold (build (TREE_CODE (arg0), type, a01,
4253 fold (build (code, type, a00, a11))));
4255 /* This case is tricky because we must either have commutative
4256 operators or else A10 must not have side-effects. */
4258 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
4259 && operand_equal_p (a01, a11, 0))
4260 return fold (build (TREE_CODE (arg0), type,
4261 fold (build (code, type, a00, a10)),
4265 /* Check for the possibility of merging component references. If our
4266 lhs is another similar operation, try to merge its rhs with our
4267 rhs. Then try to merge our lhs and rhs. */
4268 if (TREE_CODE (arg0) == code
4269 && 0 != (tem = fold_truthop (code, type,
4270 TREE_OPERAND (arg0, 1), arg1)))
4271 return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));
4273 if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
4278 case TRUTH_ORIF_EXPR:
4279 /* Note that the operands of this must be ints
4280 and their values must be 0 or true.
4281 ("true" is a fixed value perhaps depending on the language.) */
4282 /* If first arg is constant true, return it. */
4283 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
4286 /* If either arg is constant zero, drop it. */
4287 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
4288 return non_lvalue (arg1);
4289 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1))
4290 return non_lvalue (arg0);
4291 /* If second arg is constant true, result is true, but we must
4292 evaluate first arg. */
4293 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
4294 return omit_one_operand (type, arg1, arg0);
4297 case TRUTH_XOR_EXPR:
4298 /* If either arg is constant zero, drop it. */
4299 if (integer_zerop (arg0))
4300 return non_lvalue (arg1);
4301 if (integer_zerop (arg1))
4302 return non_lvalue (arg0);
4303 /* If either arg is constant true, this is a logical inversion. */
4304 if (integer_onep (arg0))
4305 return non_lvalue (invert_truthvalue (arg1));
4306 if (integer_onep (arg1))
4307 return non_lvalue (invert_truthvalue (arg0));
4316 /* If one arg is a constant integer, put it last. */
4317 if (TREE_CODE (arg0) == INTEGER_CST
4318 && TREE_CODE (arg1) != INTEGER_CST)
4320 TREE_OPERAND (t, 0) = arg1;
4321 TREE_OPERAND (t, 1) = arg0;
4322 arg0 = TREE_OPERAND (t, 0);
4323 arg1 = TREE_OPERAND (t, 1);
4324 code = swap_tree_comparison (code);
4325 TREE_SET_CODE (t, code);
4328 /* Convert foo++ == CONST into ++foo == CONST + INCR.
4329 First, see if one arg is constant; find the constant arg
4330 and the other one. */
4332 tree constop = 0, varop;
4333 int constopnum = -1;
4335 if (TREE_CONSTANT (arg1))
4336 constopnum = 1, constop = arg1, varop = arg0;
4337 if (TREE_CONSTANT (arg0))
4338 constopnum = 0, constop = arg0, varop = arg1;
4340 if (constop && TREE_CODE (varop) == POSTINCREMENT_EXPR)
4342 /* This optimization is invalid for ordered comparisons
4343 if CONST+INCR overflows or if foo+incr might overflow.
4344 This optimization is invalid for floating point due to rounding.
4345 For pointer types we assume overflow doesn't happen. */
4346 if (TREE_CODE (TREE_TYPE (varop)) == POINTER_TYPE
4347 || (! FLOAT_TYPE_P (TREE_TYPE (varop))
4348 && (code == EQ_EXPR || code == NE_EXPR)))
4351 = fold (build (PLUS_EXPR, TREE_TYPE (varop),
4352 constop, TREE_OPERAND (varop, 1)));
4353 TREE_SET_CODE (varop, PREINCREMENT_EXPR);
4355 t = build (code, type, TREE_OPERAND (t, 0),
4356 TREE_OPERAND (t, 1));
4357 TREE_OPERAND (t, constopnum) = newconst;
4361 else if (constop && TREE_CODE (varop) == POSTDECREMENT_EXPR)
4363 if (TREE_CODE (TREE_TYPE (varop)) == POINTER_TYPE
4364 || (! FLOAT_TYPE_P (TREE_TYPE (varop))
4365 && (code == EQ_EXPR || code == NE_EXPR)))
4368 = fold (build (MINUS_EXPR, TREE_TYPE (varop),
4369 constop, TREE_OPERAND (varop, 1)));
4370 TREE_SET_CODE (varop, PREDECREMENT_EXPR);
4371 t = build (code, type, TREE_OPERAND (t, 0),
4372 TREE_OPERAND (t, 1));
4373 TREE_OPERAND (t, constopnum) = newconst;
4379 /* Change X >= CST to X > (CST - 1) if CST is positive. */
4380 if (TREE_CODE (arg1) == INTEGER_CST
4381 && TREE_CODE (arg0) != INTEGER_CST
4382 && tree_int_cst_sgn (arg1) > 0)
4384 switch (TREE_CODE (t))
4388 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
4389 t = build (code, type, TREE_OPERAND (t, 0), arg1);
4394 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
4395 t = build (code, type, TREE_OPERAND (t, 0), arg1);
4400 /* If this is an EQ or NE comparison with zero and ARG0 is
4401 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
4402 two operations, but the latter can be done in one less insn
4403 on machines that have only two-operand insns or on which a
4404 constant cannot be the first operand. */
4405 if (integer_zerop (arg1) && (code == EQ_EXPR || code == NE_EXPR)
4406 && TREE_CODE (arg0) == BIT_AND_EXPR)
4408 if (TREE_CODE (TREE_OPERAND (arg0, 0)) == LSHIFT_EXPR
4409 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 0), 0)))
4411 fold (build (code, type,
4412 build (BIT_AND_EXPR, TREE_TYPE (arg0),
4414 TREE_TYPE (TREE_OPERAND (arg0, 0)),
4415 TREE_OPERAND (arg0, 1),
4416 TREE_OPERAND (TREE_OPERAND (arg0, 0), 1)),
4417 convert (TREE_TYPE (arg0),
4420 else if (TREE_CODE (TREE_OPERAND (arg0, 1)) == LSHIFT_EXPR
4421 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 1), 0)))
4423 fold (build (code, type,
4424 build (BIT_AND_EXPR, TREE_TYPE (arg0),
4426 TREE_TYPE (TREE_OPERAND (arg0, 1)),
4427 TREE_OPERAND (arg0, 0),
4428 TREE_OPERAND (TREE_OPERAND (arg0, 1), 1)),
4429 convert (TREE_TYPE (arg0),
4434 /* If this is an NE or EQ comparison of zero against the result of a
4435 signed MOD operation whose second operand is a power of 2, make
4436 the MOD operation unsigned since it is simpler and equivalent. */
4437 if ((code == NE_EXPR || code == EQ_EXPR)
4438 && integer_zerop (arg1)
4439 && ! TREE_UNSIGNED (TREE_TYPE (arg0))
4440 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
4441 || TREE_CODE (arg0) == CEIL_MOD_EXPR
4442 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
4443 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
4444 && integer_pow2p (TREE_OPERAND (arg0, 1)))
4446 tree newtype = unsigned_type (TREE_TYPE (arg0));
4447 tree newmod = build (TREE_CODE (arg0), newtype,
4448 convert (newtype, TREE_OPERAND (arg0, 0)),
4449 convert (newtype, TREE_OPERAND (arg0, 1)));
4451 return build (code, type, newmod, convert (newtype, arg1));
4454 /* If this is an NE comparison of zero with an AND of one, remove the
4455 comparison since the AND will give the correct value. */
4456 if (code == NE_EXPR && integer_zerop (arg1)
4457 && TREE_CODE (arg0) == BIT_AND_EXPR
4458 && integer_onep (TREE_OPERAND (arg0, 1)))
4459 return convert (type, arg0);
4461 /* If we have (A & C) == C where C is a power of 2, convert this into
4462 (A & C) != 0. Similarly for NE_EXPR. */
4463 if ((code == EQ_EXPR || code == NE_EXPR)
4464 && TREE_CODE (arg0) == BIT_AND_EXPR
4465 && integer_pow2p (TREE_OPERAND (arg0, 1))
4466 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
4467 return build (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
4468 arg0, integer_zero_node);
4470 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
4471 and similarly for >= into !=. */
4472 if ((code == LT_EXPR || code == GE_EXPR)
4473 && TREE_UNSIGNED (TREE_TYPE (arg0))
4474 && TREE_CODE (arg1) == LSHIFT_EXPR
4475 && integer_onep (TREE_OPERAND (arg1, 0)))
4476 return build (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
4477 build (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
4478 TREE_OPERAND (arg1, 1)),
4479 convert (TREE_TYPE (arg0), integer_zero_node));
4481 else if ((code == LT_EXPR || code == GE_EXPR)
4482 && TREE_UNSIGNED (TREE_TYPE (arg0))
4483 && (TREE_CODE (arg1) == NOP_EXPR
4484 || TREE_CODE (arg1) == CONVERT_EXPR)
4485 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
4486 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
4488 build (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
4489 convert (TREE_TYPE (arg0),
4490 build (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
4491 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1))),
4492 convert (TREE_TYPE (arg0), integer_zero_node));
4494 /* Simplify comparison of something with itself. (For IEEE
4495 floating-point, we can only do some of these simplifications.) */
4496 if (operand_equal_p (arg0, arg1, 0))
4503 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
4505 t = build_int_2 (1, 0);
4506 TREE_TYPE (t) = type;
4510 TREE_SET_CODE (t, code);
4514 /* For NE, we can only do this simplification if integer. */
4515 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
4517 /* ... fall through ... */
4520 t = build_int_2 (0, 0);
4521 TREE_TYPE (t) = type;
4526 /* An unsigned comparison against 0 can be simplified. */
4527 if (integer_zerop (arg1)
4528 && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
4529 || TREE_CODE (TREE_TYPE (arg1)) == POINTER_TYPE)
4530 && TREE_UNSIGNED (TREE_TYPE (arg1)))
4532 switch (TREE_CODE (t))
4536 TREE_SET_CODE (t, NE_EXPR);
4540 TREE_SET_CODE (t, EQ_EXPR);
4543 return omit_one_operand (type,
4544 convert (type, integer_one_node),
4547 return omit_one_operand (type,
4548 convert (type, integer_zero_node),
4553 /* If we are comparing an expression that just has comparisons
4554 of two integer values, arithmetic expressions of those comparisons,
4555 and constants, we can simplify it. There are only three cases
4556 to check: the two values can either be equal, the first can be
4557 greater, or the second can be greater. Fold the expression for
4558 those three values. Since each value must be 0 or 1, we have
4559 eight possibilities, each of which corresponds to the constant 0
4560 or 1 or one of the six possible comparisons.
4562 This handles common cases like (a > b) == 0 but also handles
4563 expressions like ((x > y) - (y > x)) > 0, which supposedly
4564 occur in macroized code. */
4566 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
4568 tree cval1 = 0, cval2 = 0;
4571 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
4572 /* Don't handle degenerate cases here; they should already
4573 have been handled anyway. */
4574 && cval1 != 0 && cval2 != 0
4575 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
4576 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
4577 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
4578 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
4579 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
4581 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
4582 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
4584 /* We can't just pass T to eval_subst in case cval1 or cval2
4585 was the same as ARG1. */
4588 = fold (build (code, type,
4589 eval_subst (arg0, cval1, maxval, cval2, minval),
4592 = fold (build (code, type,
4593 eval_subst (arg0, cval1, maxval, cval2, maxval),
4596 = fold (build (code, type,
4597 eval_subst (arg0, cval1, minval, cval2, maxval),
4600 /* All three of these results should be 0 or 1. Confirm they
4601 are. Then use those values to select the proper code
4604 if ((integer_zerop (high_result)
4605 || integer_onep (high_result))
4606 && (integer_zerop (equal_result)
4607 || integer_onep (equal_result))
4608 && (integer_zerop (low_result)
4609 || integer_onep (low_result)))
4611 /* Make a 3-bit mask with the high-order bit being the
4612 value for `>', the next for '=', and the low for '<'. */
4613 switch ((integer_onep (high_result) * 4)
4614 + (integer_onep (equal_result) * 2)
4615 + integer_onep (low_result))
4619 return omit_one_operand (type, integer_zero_node, arg0);
4640 return omit_one_operand (type, integer_one_node, arg0);
4643 t = build (code, type, cval1, cval2);
4645 return save_expr (t);
4652 /* If this is a comparison of a field, we may be able to simplify it. */
4653 if ((TREE_CODE (arg0) == COMPONENT_REF
4654 || TREE_CODE (arg0) == BIT_FIELD_REF)
4655 && (code == EQ_EXPR || code == NE_EXPR)
4656 /* Handle the constant case even without -O
4657 to make sure the warnings are given. */
4658 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
4660 t1 = optimize_bit_field_compare (code, type, arg0, arg1);
4664 /* If this is a comparison of complex values and either or both
4665 sizes are a COMPLEX_EXPR, it is best to split up the comparisons
4666 and join them with a TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR. This
4667 may prevent needless evaluations. */
4668 if ((code == EQ_EXPR || code == NE_EXPR)
4669 && TREE_CODE (TREE_TYPE (arg0)) == COMPLEX_TYPE
4670 && (TREE_CODE (arg0) == COMPLEX_EXPR
4671 || TREE_CODE (arg1) == COMPLEX_EXPR))
4673 tree subtype = TREE_TYPE (TREE_TYPE (arg0));
4674 tree real0 = fold (build1 (REALPART_EXPR, subtype, arg0));
4675 tree imag0 = fold (build1 (IMAGPART_EXPR, subtype, arg0));
4676 tree real1 = fold (build1 (REALPART_EXPR, subtype, arg1));
4677 tree imag1 = fold (build1 (IMAGPART_EXPR, subtype, arg1));
4679 return fold (build ((code == EQ_EXPR ? TRUTH_ANDIF_EXPR
4682 fold (build (code, type, real0, real1)),
4683 fold (build (code, type, imag0, imag1))));
4686 /* From here on, the only cases we handle are when the result is
4687 known to be a constant.
4689 To compute GT, swap the arguments and do LT.
4690 To compute GE, do LT and invert the result.
4691 To compute LE, swap the arguments, do LT and invert the result.
4692 To compute NE, do EQ and invert the result.
4694 Therefore, the code below must handle only EQ and LT. */
4696 if (code == LE_EXPR || code == GT_EXPR)
4698 tem = arg0, arg0 = arg1, arg1 = tem;
4699 code = swap_tree_comparison (code);
4702 /* Note that it is safe to invert for real values here because we
4703 will check below in the one case that it matters. */
4706 if (code == NE_EXPR || code == GE_EXPR)
4709 code = invert_tree_comparison (code);
4712 /* Compute a result for LT or EQ if args permit;
4713 otherwise return T. */
4714 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
4716 if (code == EQ_EXPR)
4717 t1 = build_int_2 ((TREE_INT_CST_LOW (arg0)
4718 == TREE_INT_CST_LOW (arg1))
4719 && (TREE_INT_CST_HIGH (arg0)
4720 == TREE_INT_CST_HIGH (arg1)),
4723 t1 = build_int_2 ((TREE_UNSIGNED (TREE_TYPE (arg0))
4724 ? INT_CST_LT_UNSIGNED (arg0, arg1)
4725 : INT_CST_LT (arg0, arg1)),
4729 /* Assume a nonexplicit constant cannot equal an explicit one,
4730 since such code would be undefined anyway.
4731 Exception: on sysvr4, using #pragma weak,
4732 a label can come out as 0. */
4733 else if (TREE_CODE (arg1) == INTEGER_CST
4734 && !integer_zerop (arg1)
4735 && TREE_CONSTANT (arg0)
4736 && TREE_CODE (arg0) == ADDR_EXPR
4738 t1 = build_int_2 (0, 0);
4740 /* Two real constants can be compared explicitly. */
4741 else if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
4743 /* If either operand is a NaN, the result is false with two
4744 exceptions: First, an NE_EXPR is true on NaNs, but that case
4745 is already handled correctly since we will be inverting the
4746 result for NE_EXPR. Second, if we had inverted a LE_EXPR
4747 or a GE_EXPR into a LT_EXPR, we must return true so that it
4748 will be inverted into false. */
4750 if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
4751 || REAL_VALUE_ISNAN (TREE_REAL_CST (arg1)))
4752 t1 = build_int_2 (invert && code == LT_EXPR, 0);
4754 else if (code == EQ_EXPR)
4755 t1 = build_int_2 (REAL_VALUES_EQUAL (TREE_REAL_CST (arg0),
4756 TREE_REAL_CST (arg1)),
4759 t1 = build_int_2 (REAL_VALUES_LESS (TREE_REAL_CST (arg0),
4760 TREE_REAL_CST (arg1)),
4764 if (t1 == NULL_TREE)
4768 TREE_INT_CST_LOW (t1) ^= 1;
4770 TREE_TYPE (t1) = type;
4774 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
4775 so all simple results must be passed through pedantic_non_lvalue. */
4776 if (TREE_CODE (arg0) == INTEGER_CST)
4777 return pedantic_non_lvalue
4778 (TREE_OPERAND (t, (integer_zerop (arg0) ? 2 : 1)));
4779 else if (operand_equal_p (arg1, TREE_OPERAND (expr, 2), 0))
4780 return pedantic_omit_one_operand (type, arg1, arg0);
4782 /* If the second operand is zero, invert the comparison and swap
4783 the second and third operands. Likewise if the second operand
4784 is constant and the third is not or if the third operand is
4785 equivalent to the first operand of the comparison. */
4787 if (integer_zerop (arg1)
4788 || (TREE_CONSTANT (arg1) && ! TREE_CONSTANT (TREE_OPERAND (t, 2)))
4789 || (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
4790 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
4791 TREE_OPERAND (t, 2),
4792 TREE_OPERAND (arg0, 1))))
4794 /* See if this can be inverted. If it can't, possibly because
4795 it was a floating-point inequality comparison, don't do
4797 tem = invert_truthvalue (arg0);
4799 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
4801 t = build (code, type, tem,
4802 TREE_OPERAND (t, 2), TREE_OPERAND (t, 1));
4804 arg1 = TREE_OPERAND (t, 2);
4809 /* If we have A op B ? A : C, we may be able to convert this to a
4810 simpler expression, depending on the operation and the values
4811 of B and C. IEEE floating point prevents this though,
4812 because A or B might be -0.0 or a NaN. */
4814 if (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
4815 && (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
4816 || ! FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 0)))
4818 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
4819 arg1, TREE_OPERAND (arg0, 1)))
4821 tree arg2 = TREE_OPERAND (t, 2);
4822 enum tree_code comp_code = TREE_CODE (arg0);
4826 /* If we have A op 0 ? A : -A, this is A, -A, abs (A), or abs (-A),
4827 depending on the comparison operation. */
4828 if ((FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 1)))
4829 ? real_zerop (TREE_OPERAND (arg0, 1))
4830 : integer_zerop (TREE_OPERAND (arg0, 1)))
4831 && TREE_CODE (arg2) == NEGATE_EXPR
4832 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4836 return pedantic_non_lvalue
4837 (fold (build1 (NEGATE_EXPR, type, arg1)));
4839 return pedantic_non_lvalue (convert (type, arg1));
4842 return pedantic_non_lvalue
4843 (fold (build1 (ABS_EXPR, type, arg1)));
4846 return pedantic_non_lvalue
4847 (fold (build1 (NEGATE_EXPR, type,
4848 fold (build1 (ABS_EXPR, type, arg1)))));
4851 /* If this is A != 0 ? A : 0, this is simply A. For ==, it is
4854 if (integer_zerop (TREE_OPERAND (arg0, 1)) && integer_zerop (arg2))
4856 if (comp_code == NE_EXPR)
4857 return pedantic_non_lvalue (convert (type, arg1));
4858 else if (comp_code == EQ_EXPR)
4859 return pedantic_non_lvalue (convert (type, integer_zero_node));
4862 /* If this is A op B ? A : B, this is either A, B, min (A, B),
4863 or max (A, B), depending on the operation. */
4865 if (operand_equal_for_comparison_p (TREE_OPERAND (arg0, 1),
4866 arg2, TREE_OPERAND (arg0, 0)))
4868 tree comp_op0 = TREE_OPERAND (arg0, 0);
4869 tree comp_op1 = TREE_OPERAND (arg0, 1);
4870 tree comp_type = TREE_TYPE (comp_op0);
4875 return pedantic_non_lvalue (convert (type, arg2));
4877 return pedantic_non_lvalue (convert (type, arg1));
4880 return pedantic_non_lvalue
4881 (convert (type, (fold (build (MIN_EXPR, comp_type,
4882 comp_op0, comp_op1)))));
4885 return pedantic_non_lvalue
4886 (convert (type, fold (build (MAX_EXPR, comp_type,
4887 comp_op0, comp_op1))));
4891 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4892 we might still be able to simplify this. For example,
4893 if C1 is one less or one more than C2, this might have started
4894 out as a MIN or MAX and been transformed by this function.
4895 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
4897 if (INTEGRAL_TYPE_P (type)
4898 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
4899 && TREE_CODE (arg2) == INTEGER_CST)
4903 /* We can replace A with C1 in this case. */
4904 arg1 = convert (type, TREE_OPERAND (arg0, 1));
4905 t = build (code, type, TREE_OPERAND (t, 0), arg1,
4906 TREE_OPERAND (t, 2));
4910 /* If C1 is C2 + 1, this is min(A, C2). */
4911 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type), 1)
4912 && operand_equal_p (TREE_OPERAND (arg0, 1),
4913 const_binop (PLUS_EXPR, arg2,
4914 integer_one_node, 0), 1))
4915 return pedantic_non_lvalue
4916 (fold (build (MIN_EXPR, type, arg1, arg2)));
4920 /* If C1 is C2 - 1, this is min(A, C2). */
4921 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type), 1)
4922 && operand_equal_p (TREE_OPERAND (arg0, 1),
4923 const_binop (MINUS_EXPR, arg2,
4924 integer_one_node, 0), 1))
4925 return pedantic_non_lvalue
4926 (fold (build (MIN_EXPR, type, arg1, arg2)));
4930 /* If C1 is C2 - 1, this is max(A, C2). */
4931 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type), 1)
4932 && operand_equal_p (TREE_OPERAND (arg0, 1),
4933 const_binop (MINUS_EXPR, arg2,
4934 integer_one_node, 0), 1))
4935 return pedantic_non_lvalue
4936 (fold (build (MAX_EXPR, type, arg1, arg2)));
4940 /* If C1 is C2 + 1, this is max(A, C2). */
4941 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type), 1)
4942 && operand_equal_p (TREE_OPERAND (arg0, 1),
4943 const_binop (PLUS_EXPR, arg2,
4944 integer_one_node, 0), 1))
4945 return pedantic_non_lvalue
4946 (fold (build (MAX_EXPR, type, arg1, arg2)));
4951 /* If the second operand is simpler than the third, swap them
4952 since that produces better jump optimization results. */
4953 if ((TREE_CONSTANT (arg1) || TREE_CODE_CLASS (TREE_CODE (arg1)) == 'd'
4954 || TREE_CODE (arg1) == SAVE_EXPR)
4955 && ! (TREE_CONSTANT (TREE_OPERAND (t, 2))
4956 || TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (t, 2))) == 'd'
4957 || TREE_CODE (TREE_OPERAND (t, 2)) == SAVE_EXPR))
4959 /* See if this can be inverted. If it can't, possibly because
4960 it was a floating-point inequality comparison, don't do
4962 tem = invert_truthvalue (arg0);
4964 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
4966 t = build (code, type, tem,
4967 TREE_OPERAND (t, 2), TREE_OPERAND (t, 1));
4969 arg1 = TREE_OPERAND (t, 2);
4974 /* Convert A ? 1 : 0 to simply A. */
4975 if (integer_onep (TREE_OPERAND (t, 1))
4976 && integer_zerop (TREE_OPERAND (t, 2))
4977 /* If we try to convert TREE_OPERAND (t, 0) to our type, the
4978 call to fold will try to move the conversion inside
4979 a COND, which will recurse. In that case, the COND_EXPR
4980 is probably the best choice, so leave it alone. */
4981 && type == TREE_TYPE (arg0))
4982 return pedantic_non_lvalue (arg0);
4984 /* Look for expressions of the form A & 2 ? 2 : 0. The result of this
4985 operation is simply A & 2. */
4987 if (integer_zerop (TREE_OPERAND (t, 2))
4988 && TREE_CODE (arg0) == NE_EXPR
4989 && integer_zerop (TREE_OPERAND (arg0, 1))
4990 && integer_pow2p (arg1)
4991 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
4992 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
4994 return pedantic_non_lvalue (convert (type, TREE_OPERAND (arg0, 0)));
4999 /* When pedantic, a compound expression can be neither an lvalue
5000 nor an integer constant expression. */
5001 if (TREE_SIDE_EFFECTS (arg0) || pedantic)
5003 /* Don't let (0, 0) be null pointer constant. */
5004 if (integer_zerop (arg1))
5005 return non_lvalue (arg1);
5010 return build_complex (arg0, arg1);
5014 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
5016 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
5017 return omit_one_operand (type, TREE_OPERAND (arg0, 0),
5018 TREE_OPERAND (arg0, 1));
5019 else if (TREE_CODE (arg0) == COMPLEX_CST)
5020 return TREE_REALPART (arg0);
5021 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
5022 return fold (build (TREE_CODE (arg0), type,
5023 fold (build1 (REALPART_EXPR, type,
5024 TREE_OPERAND (arg0, 0))),
5025 fold (build1 (REALPART_EXPR,
5026 type, TREE_OPERAND (arg0, 1)))));
5030 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
5031 return convert (type, integer_zero_node);
5032 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
5033 return omit_one_operand (type, TREE_OPERAND (arg0, 1),
5034 TREE_OPERAND (arg0, 0));
5035 else if (TREE_CODE (arg0) == COMPLEX_CST)
5036 return TREE_IMAGPART (arg0);
5037 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
5038 return fold (build (TREE_CODE (arg0), type,
5039 fold (build1 (IMAGPART_EXPR, type,
5040 TREE_OPERAND (arg0, 0))),
5041 fold (build1 (IMAGPART_EXPR, type,
5042 TREE_OPERAND (arg0, 1)))));
5045 /* Pull arithmetic ops out of the CLEANUP_POINT_EXPR where
5047 case CLEANUP_POINT_EXPR:
5048 if (! TREE_SIDE_EFFECTS (arg0))
5049 return convert (type, arg0);
5052 enum tree_code code0 = TREE_CODE (arg0);
5053 int kind0 = TREE_CODE_CLASS (code0);
5054 tree arg00 = TREE_OPERAND (arg0, 0);
5057 if (kind0 == '1' || code0 == TRUTH_NOT_EXPR)
5058 return fold (build1 (code0, type,
5059 fold (build1 (CLEANUP_POINT_EXPR,
5060 TREE_TYPE (arg00), arg00))));
5062 if (kind0 == '<' || kind0 == '2'
5063 || code0 == TRUTH_ANDIF_EXPR || code0 == TRUTH_ORIF_EXPR
5064 || code0 == TRUTH_AND_EXPR || code0 == TRUTH_OR_EXPR
5065 || code0 == TRUTH_XOR_EXPR)
5067 arg01 = TREE_OPERAND (arg0, 1);
5069 if (! TREE_SIDE_EFFECTS (arg00))
5070 return fold (build (code0, type, arg00,
5071 fold (build1 (CLEANUP_POINT_EXPR,
5072 TREE_TYPE (arg01), arg01))));
5074 if (! TREE_SIDE_EFFECTS (arg01))
5075 return fold (build (code0, type,
5076 fold (build1 (CLEANUP_POINT_EXPR,
5077 TREE_TYPE (arg00), arg00)),
5086 } /* switch (code) */