1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987, 88, 92, 93, 94, 95, 1996 Free Software Foundation, Inc.
4 This file is part of GNU CC.
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
21 /*@@ This file should be rewritten to use an arbitrary precision
22 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
23 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
24 @@ The routines that translate from the ap rep should
25 @@ warn if precision etc. is lost.
26 @@ This would also make life easier when this technology is used
27 @@ for cross-compilers. */
30 /* The entry points in this file are fold, size_int and size_binop.
32 fold takes a tree as argument and returns a simplified tree.
34 size_binop takes a tree code for an arithmetic operation
35 and two operands that are trees, and produces a tree for the
36 result, assuming the type comes from `sizetype'.
38 size_int takes an integer value, and creates a tree constant
39 with type from `sizetype'. */
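/* For illustration (a sketch; `x' stands for some already-built tree):
   fold (build (PLUS_EXPR, integer_type_node, x, integer_zero_node))
   simplifies to x, and size_binop (PLUS_EXPR, size_int (2), size_int (3))
   yields an INTEGER_CST of value 5 with type `sizetype'.  */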
47 /* Handle floating overflow for `const_binop'. */
48 static jmp_buf float_error;
50 static void encode PROTO((HOST_WIDE_INT *, HOST_WIDE_INT, HOST_WIDE_INT));
51 static void decode PROTO((HOST_WIDE_INT *, HOST_WIDE_INT *, HOST_WIDE_INT *));
52 int div_and_round_double PROTO((enum tree_code, int, HOST_WIDE_INT,
53 HOST_WIDE_INT, HOST_WIDE_INT,
54 HOST_WIDE_INT, HOST_WIDE_INT *,
55 HOST_WIDE_INT *, HOST_WIDE_INT *,
57 static int split_tree PROTO((tree, enum tree_code, tree *, tree *, int *));
58 static tree const_binop PROTO((enum tree_code, tree, tree, int));
59 static tree fold_convert PROTO((tree, tree));
60 static enum tree_code invert_tree_comparison PROTO((enum tree_code));
61 static enum tree_code swap_tree_comparison PROTO((enum tree_code));
62 static int truth_value_p PROTO((enum tree_code));
63 static int operand_equal_for_comparison_p PROTO((tree, tree, tree));
64 static int twoval_comparison_p PROTO((tree, tree *, tree *, int *));
65 static tree eval_subst PROTO((tree, tree, tree, tree, tree));
66 static tree omit_one_operand PROTO((tree, tree, tree));
67 static tree pedantic_omit_one_operand PROTO((tree, tree, tree));
68 static tree distribute_bit_expr PROTO((enum tree_code, tree, tree, tree));
69 static tree make_bit_field_ref PROTO((tree, tree, int, int, int));
70 static tree optimize_bit_field_compare PROTO((enum tree_code, tree,
72 static tree decode_field_reference PROTO((tree, int *, int *,
73 enum machine_mode *, int *,
74 int *, tree *, tree *));
75 static int all_ones_mask_p PROTO((tree, int));
76 static int simple_operand_p PROTO((tree));
77 static tree range_test PROTO((enum tree_code, tree, enum tree_code,
78 enum tree_code, tree, tree, tree));
79 static tree unextend PROTO((tree, int, int, tree));
80 static tree fold_truthop PROTO((enum tree_code, tree, tree, tree));
81 static tree strip_compound_expr PROTO((tree, tree));
87 /* Suppose A1 + B1 = SUM1, using 2's complement arithmetic ignoring overflow.
88 Suppose A, B and SUM have the same respective signs as A1, B1, and SUM1.
89 Then this yields nonzero if overflow occurred during the addition.
90 Overflow occurs if A and B have the same sign, but A and SUM differ in sign.
91 Use `^' to test whether signs differ, and `< 0' to isolate the sign. */
92 #define overflow_sum_sign(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
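/* For illustration, a minimal sketch of the macro at work, assuming a
   32-bit 2's complement `int' (demonstration only, not part of GNU CC):  */
#if 0
static int
overflow_sum_sign_example ()
{
  int a = 0x7fffffff, b = 1;
  int sum = a + b;		/* wraps to the minimum integer */

  /* a and b agree in sign but sum differs, so ~(a ^ b) & (a ^ sum)
     has the sign bit set and the macro yields 1.  For 1 + -1, sum
     is 0, the addends' signs differ, and the macro yields 0.  */
  return overflow_sum_sign (a, b, sum);
}
#endif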
94 /* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
95 We do that by representing the two-word integer in 4 words, with only
96 HOST_BITS_PER_WIDE_INT/2 bits stored in each word, as a positive number. */
99 ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT/2)) - 1))
100 #define HIGHPART(x) \
101 ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT/2)
102 #define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT/2)
104 /* Unpack a two-word integer into 4 words.
105 LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
106 WORDS points to the array of HOST_WIDE_INTs. */
109 encode (words, low, hi)
110 HOST_WIDE_INT *words;
111 HOST_WIDE_INT low, hi;
113 words[0] = LOWPART (low);
114 words[1] = HIGHPART (low);
115 words[2] = LOWPART (hi);
116 words[3] = HIGHPART (hi);
119 /* Pack an array of 4 words into a two-word integer.
120 WORDS points to the array of words.
121 The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces. */
124 decode (words, low, hi)
125 HOST_WIDE_INT *words;
126 HOST_WIDE_INT *low, *hi;
128 *low = words[0] | words[1] * BASE;
129 *hi = words[2] | words[3] * BASE;
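/* For illustration, encode and decode are inverses.  A sketch assuming
   a 32-bit HOST_WIDE_INT (demonstration only, not part of GNU CC):  */
#if 0
static void
encode_decode_example ()
{
  HOST_WIDE_INT words[4], low, hi;

  encode (words, 0x12345678, 0xabc);
  /* words[] now holds 0x5678, 0x1234, 0x0abc, 0x0000: four positive
     halfwords of HOST_BITS_PER_WIDE_INT/2 bits each.  */
  decode (words, &low, &hi);
  /* low == 0x12345678 and hi == 0xabc again.  */
}
#endif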
132 /* Make the integer constant T valid for its type
133 by setting to 0 or 1 all the bits in the constant
134 that don't belong in the type.
135 Yield 1 if a signed overflow occurs, 0 otherwise.
136 If OVERFLOW is nonzero, a signed overflow has already occurred
137 in calculating T, so propagate it.
139 Make the real constant T valid for its type by calling CHECK_FLOAT_VALUE,
143 force_fit_type (t, overflow)
147 HOST_WIDE_INT low, high;
150 if (TREE_CODE (t) == REAL_CST)
152 #ifdef CHECK_FLOAT_VALUE
153 CHECK_FLOAT_VALUE (TYPE_MODE (TREE_TYPE (t)), TREE_REAL_CST (t),
159 else if (TREE_CODE (t) != INTEGER_CST)
162 low = TREE_INT_CST_LOW (t);
163 high = TREE_INT_CST_HIGH (t);
165 if (TREE_CODE (TREE_TYPE (t)) == POINTER_TYPE)
168 prec = TYPE_PRECISION (TREE_TYPE (t));
170 /* First clear all bits that are beyond the type's precision. */
172 if (prec == 2 * HOST_BITS_PER_WIDE_INT)
174 else if (prec > HOST_BITS_PER_WIDE_INT)
176 TREE_INT_CST_HIGH (t)
177 &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
181 TREE_INT_CST_HIGH (t) = 0;
182 if (prec < HOST_BITS_PER_WIDE_INT)
183 TREE_INT_CST_LOW (t) &= ~((HOST_WIDE_INT) (-1) << prec);
186 /* Unsigned types do not suffer sign extension or overflow. */
187 if (TREE_UNSIGNED (TREE_TYPE (t)))
190 /* If the value's sign bit is set, extend the sign. */
191 if (prec != 2 * HOST_BITS_PER_WIDE_INT
192 && (prec > HOST_BITS_PER_WIDE_INT
193 ? (TREE_INT_CST_HIGH (t)
194 & ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)))
195 : TREE_INT_CST_LOW (t) & ((HOST_WIDE_INT) 1 << (prec - 1))))
197 /* Value is negative:
198 set to 1 all the bits that are outside this type's precision. */
199 if (prec > HOST_BITS_PER_WIDE_INT)
201 TREE_INT_CST_HIGH (t)
202 |= ((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
206 TREE_INT_CST_HIGH (t) = -1;
207 if (prec < HOST_BITS_PER_WIDE_INT)
208 TREE_INT_CST_LOW (t) |= ((HOST_WIDE_INT) (-1) << prec);
212 /* Yield nonzero if signed overflow occurred. */
214 ((overflow | (low ^ TREE_INT_CST_LOW (t)) | (high ^ TREE_INT_CST_HIGH (t)))
218 /* Add two doubleword integers with doubleword result.
219 Each argument is given as two `HOST_WIDE_INT' pieces.
220 One argument is L1 and H1; the other, L2 and H2.
221 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
224 add_double (l1, h1, l2, h2, lv, hv)
225 HOST_WIDE_INT l1, h1, l2, h2;
226 HOST_WIDE_INT *lv, *hv;
231 h = h1 + h2 + ((unsigned HOST_WIDE_INT) l < l1);
235 return overflow_sum_sign (h1, h2, h);
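/* Usage sketch (demonstration only): the doubleword -1 is the pair
   (low == -1, high == -1); adding 1 to it carries across the words:  */
#if 0
static void
add_double_example ()
{
  HOST_WIDE_INT lv, hv;

  if (add_double ((HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
		  (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0, &lv, &hv))
    abort ();		/* not reached: lv == 0, hv == 0, no overflow */
}
#endif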
238 /* Negate a doubleword integer with doubleword result.
239 Return nonzero if the operation overflows, assuming it's signed.
240 The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
241 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
244 neg_double (l1, h1, lv, hv)
245 HOST_WIDE_INT l1, h1;
246 HOST_WIDE_INT *lv, *hv;
252 return (*hv & h1) < 0;
262 /* Multiply two doubleword integers with doubleword result.
263 Return nonzero if the operation overflows, assuming it's signed.
264 Each argument is given as two `HOST_WIDE_INT' pieces.
265 One argument is L1 and H1; the other, L2 and H2.
266 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
269 mul_double (l1, h1, l2, h2, lv, hv)
270 HOST_WIDE_INT l1, h1, l2, h2;
271 HOST_WIDE_INT *lv, *hv;
273 HOST_WIDE_INT arg1[4];
274 HOST_WIDE_INT arg2[4];
275 HOST_WIDE_INT prod[4 * 2];
276 register unsigned HOST_WIDE_INT carry;
277 register int i, j, k;
278 HOST_WIDE_INT toplow, tophigh, neglow, neghigh;
280 encode (arg1, l1, h1);
281 encode (arg2, l2, h2);
283 bzero ((char *) prod, sizeof prod);
285 for (i = 0; i < 4; i++)
288 for (j = 0; j < 4; j++)
291 /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000. */
292 carry += arg1[i] * arg2[j];
293 /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF. */
295 prod[k] = LOWPART (carry);
296 carry = HIGHPART (carry);
301 decode (prod, lv, hv); /* This ignores prod[4] through prod[4*2-1] */
303 /* Check for overflow by calculating the top half of the answer in full;
304 it should agree with the low half's sign bit. */
305 decode (prod+4, &toplow, &tophigh);
308 neg_double (l2, h2, &neglow, &neghigh);
309 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
313 neg_double (l1, h1, &neglow, &neghigh);
314 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
316 return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
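/* For illustration (assuming a 32-bit HOST_WIDE_INT; demonstration only):
   squaring the doubleword 2**32, i.e. the pair (0, 1), gives 2**64,
   which cannot fit in the 64-bit doubleword.  The upper half decoded
   above comes out nonzero while *hv is 0, so overflow is reported:  */
#if 0
static void
mul_double_example ()
{
  HOST_WIDE_INT lv, hv;

  if (! mul_double ((HOST_WIDE_INT) 0, (HOST_WIDE_INT) 1,
		    (HOST_WIDE_INT) 0, (HOST_WIDE_INT) 1, &lv, &hv))
    abort ();		/* not reached: 2**64 overflows the doubleword */
}
#endif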
319 /* Shift the doubleword integer in L1, H1 left by COUNT places
320 keeping only PREC bits of result.
321 Shift right if COUNT is negative.
322 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
323 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
326 lshift_double (l1, h1, count, prec, lv, hv, arith)
327 HOST_WIDE_INT l1, h1, count;
329 HOST_WIDE_INT *lv, *hv;
334 rshift_double (l1, h1, - count, prec, lv, hv, arith);
338 #ifdef SHIFT_COUNT_TRUNCATED
339 if (SHIFT_COUNT_TRUNCATED)
343 if (count >= HOST_BITS_PER_WIDE_INT)
345 *hv = (unsigned HOST_WIDE_INT) l1 << count - HOST_BITS_PER_WIDE_INT;
350 *hv = (((unsigned HOST_WIDE_INT) h1 << count)
351 | ((unsigned HOST_WIDE_INT) l1 >> HOST_BITS_PER_WIDE_INT - count - 1 >> 1));
352 *lv = (unsigned HOST_WIDE_INT) l1 << count;
356 /* Shift the doubleword integer in L1, H1 right by COUNT places
357 keeping only PREC bits of result. COUNT must be positive.
358 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
359 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
362 rshift_double (l1, h1, count, prec, lv, hv, arith)
363 HOST_WIDE_INT l1, h1, count;
365 HOST_WIDE_INT *lv, *hv;
368 unsigned HOST_WIDE_INT signmask;
370 ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
373 #ifdef SHIFT_COUNT_TRUNCATED
374 if (SHIFT_COUNT_TRUNCATED)
378 if (count >= HOST_BITS_PER_WIDE_INT)
381 *lv = ((signmask << 2 * HOST_BITS_PER_WIDE_INT - count - 1 << 1)
382 | ((unsigned HOST_WIDE_INT) h1 >> count - HOST_BITS_PER_WIDE_INT));
386 *lv = (((unsigned HOST_WIDE_INT) l1 >> count)
387 | ((unsigned HOST_WIDE_INT) h1 << HOST_BITS_PER_WIDE_INT - count - 1 << 1));
388 *hv = ((signmask << HOST_BITS_PER_WIDE_INT - count)
389 | ((unsigned HOST_WIDE_INT) h1 >> count));
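/* For illustration (assuming a 32-bit HOST_WIDE_INT; demonstration only):
   shifting the doubleword -8 right by one bit:  */
#if 0
static void
rshift_double_example ()
{
  HOST_WIDE_INT lv, hv;

  rshift_double ((HOST_WIDE_INT) -8, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) 1,
		 2 * HOST_BITS_PER_WIDE_INT, &lv, &hv, 1);
  /* Arithmetic shift: SIGNMASK is all ones, so lv == -4, hv == -1,
     i.e. -8 >> 1 == -4.  With ARITH == 0 the vacated bit would be
     zero-filled and hv would come out 0x7fffffff instead.  */
}
#endif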
393 /* Rotate the doubleword integer in L1, H1 left by COUNT places
394 keeping only PREC bits of result.
395 Rotate right if COUNT is negative.
396 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
399 lrotate_double (l1, h1, count, prec, lv, hv)
400 HOST_WIDE_INT l1, h1, count;
402 HOST_WIDE_INT *lv, *hv;
404 HOST_WIDE_INT s1l, s1h, s2l, s2h;
410 lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
411 rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
416 /* Rotate the doubleword integer in L1, H1 right by COUNT places
417 keeping only PREC bits of result. COUNT must be positive.
418 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
421 rrotate_double (l1, h1, count, prec, lv, hv)
422 HOST_WIDE_INT l1, h1, count;
424 HOST_WIDE_INT *lv, *hv;
426 HOST_WIDE_INT s1l, s1h, s2l, s2h;
432 rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
433 lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
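/* For illustration, a rotate is built from the two opposite shifts,
   just as for a single word (sketch; rotr32 is hypothetical and not
   part of GNU CC):  */
#if 0
static unsigned long
rotr32 (x, n)
     unsigned long x;
     int n;			/* 0 < n < 32 */
{
  return ((x >> n) | (x << (32 - n))) & 0xffffffff;
}
#endif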
438 /* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
439 for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
440 CODE is a tree code for a kind of division, one of
441 TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
443 It controls how the quotient is rounded to an integer.
444 Return nonzero if the operation overflows.
445 UNS nonzero says do unsigned division. */
448 div_and_round_double (code, uns,
449 lnum_orig, hnum_orig, lden_orig, hden_orig,
450 lquo, hquo, lrem, hrem)
453 HOST_WIDE_INT lnum_orig, hnum_orig; /* num == numerator == dividend */
454 HOST_WIDE_INT lden_orig, hden_orig; /* den == denominator == divisor */
455 HOST_WIDE_INT *lquo, *hquo, *lrem, *hrem;
458 HOST_WIDE_INT num[4 + 1]; /* extra element for scaling. */
459 HOST_WIDE_INT den[4], quo[4];
461 unsigned HOST_WIDE_INT work;
462 register unsigned HOST_WIDE_INT carry = 0;
463 HOST_WIDE_INT lnum = lnum_orig;
464 HOST_WIDE_INT hnum = hnum_orig;
465 HOST_WIDE_INT lden = lden_orig;
466 HOST_WIDE_INT hden = hden_orig;
469 if ((hden == 0) && (lden == 0))
472 /* calculate quotient sign and convert operands to unsigned. */
478 /* (minimum integer) / (-1) is the only overflow case. */
479 if (neg_double (lnum, hnum, &lnum, &hnum) && (lden & hden) == -1)
485 neg_double (lden, hden, &lden, &hden);
489 if (hnum == 0 && hden == 0)
490 { /* single precision */
492 /* This unsigned division rounds toward zero. */
493 *lquo = lnum / (unsigned HOST_WIDE_INT) lden;
498 { /* trivial case: dividend < divisor */
499 /* hden != 0 already checked. */
506 bzero ((char *) quo, sizeof quo);
508 bzero ((char *) num, sizeof num); /* to zero the extra 5th element */
509 bzero ((char *) den, sizeof den);
511 encode (num, lnum, hnum);
512 encode (den, lden, hden);
514 /* Special code for when the divisor < BASE. */
515 if (hden == 0 && lden < BASE)
517 /* hnum != 0 already checked. */
518 for (i = 4 - 1; i >= 0; i--)
520 work = num[i] + carry * BASE;
521 quo[i] = work / (unsigned HOST_WIDE_INT) lden;
522 carry = work % (unsigned HOST_WIDE_INT) lden;
527 /* Full double precision division,
528 with thanks to Don Knuth's "Seminumerical Algorithms". */
529 int num_hi_sig, den_hi_sig;
530 unsigned HOST_WIDE_INT quo_est, scale;
532 /* Find the highest non-zero divisor digit. */
533 for (i = 4 - 1; ; i--)
539 /* Ensure that the first digit of the divisor is at least BASE/2.
540 This is required by the quotient digit estimation algorithm. */
542 scale = BASE / (den[den_hi_sig] + 1);
543 if (scale > 1) { /* scale divisor and dividend */
545 for (i = 0; i <= 4 - 1; i++) {
546 work = (num[i] * scale) + carry;
547 num[i] = LOWPART (work);
548 carry = HIGHPART (work);
551 for (i = 0; i <= 4 - 1; i++) {
552 work = (den[i] * scale) + carry;
553 den[i] = LOWPART (work);
554 carry = HIGHPART (work);
555 if (den[i] != 0) den_hi_sig = i;
562 for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--) {
563 /* guess the next quotient digit, quo_est, by dividing the first
564 two remaining dividend digits by the high order divisor digit.
565 quo_est is never low and is at most 2 high. */
566 unsigned HOST_WIDE_INT tmp;
568 num_hi_sig = i + den_hi_sig + 1;
569 work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
570 if (num[num_hi_sig] != den[den_hi_sig])
571 quo_est = work / den[den_hi_sig];
575 /* refine quo_est so it's usually correct, and at most one high. */
576 tmp = work - quo_est * den[den_hi_sig];
578 && den[den_hi_sig - 1] * quo_est > (tmp * BASE + num[num_hi_sig - 2]))
581 /* Try QUO_EST as the quotient digit, by multiplying the
582 divisor by QUO_EST and subtracting from the remaining dividend.
583 Keep in mind that QUO_EST is the I - 1st digit. */
586 for (j = 0; j <= den_hi_sig; j++)
588 work = quo_est * den[j] + carry;
589 carry = HIGHPART (work);
590 work = num[i + j] - LOWPART (work);
591 num[i + j] = LOWPART (work);
592 carry += HIGHPART (work) != 0;
595 /* if quo_est was high by one, then num[i] went negative and
596 we need to correct things. */
598 if (num[num_hi_sig] < carry)
601 carry = 0; /* add divisor back in */
602 for (j = 0; j <= den_hi_sig; j++)
604 work = num[i + j] + den[j] + carry;
605 carry = HIGHPART (work);
606 num[i + j] = LOWPART (work);
608 num [num_hi_sig] += carry;
611 /* store the quotient digit. */
616 decode (quo, lquo, hquo);
619 /* if result is negative, make it so. */
621 neg_double (*lquo, *hquo, lquo, hquo);
623 /* compute trial remainder: rem = num - (quo * den) */
624 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
625 neg_double (*lrem, *hrem, lrem, hrem);
626 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
631 case TRUNC_MOD_EXPR: /* round toward zero */
632 case EXACT_DIV_EXPR: /* for this one, it shouldn't matter */
636 case FLOOR_MOD_EXPR: /* round toward negative infinity */
637 if (quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio < 0 && rem != 0 */
640 add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
643 else return overflow;
647 case CEIL_MOD_EXPR: /* round toward positive infinity */
648 if (!quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio > 0 && rem != 0 */
650 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
653 else return overflow;
657 case ROUND_MOD_EXPR: /* round to closest integer */
659 HOST_WIDE_INT labs_rem = *lrem, habs_rem = *hrem;
660 HOST_WIDE_INT labs_den = lden, habs_den = hden, ltwice, htwice;
662 /* get absolute values */
663 if (*hrem < 0) neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
664 if (hden < 0) neg_double (lden, hden, &labs_den, &habs_den);
666 /* if (2 * abs (lrem) >= abs (lden)) */
667 mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
668 labs_rem, habs_rem, <wice, &htwice);
669 if (((unsigned HOST_WIDE_INT) habs_den
670 < (unsigned HOST_WIDE_INT) htwice)
671 || (((unsigned HOST_WIDE_INT) habs_den
672 == (unsigned HOST_WIDE_INT) htwice)
673 && ((unsigned HOST_WIDE_INT) labs_den
674 < (unsigned HOST_WIDE_INT) ltwice)))
678 add_double (*lquo, *hquo,
679 (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
682 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
685 else return overflow;
693 /* compute true remainder: rem = num - (quo * den) */
694 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
695 neg_double (*lrem, *hrem, lrem, hrem);
696 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
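/* For illustration, how the rounding modes differ on num == -7,
   den == 2, shown as (quotient, remainder), keeping
   num == quo * den + rem in every case (sketch):

	TRUNC_DIV_EXPR	(-3, -1)	toward zero
	FLOOR_DIV_EXPR	(-4,  1)	toward negative infinity
	CEIL_DIV_EXPR	(-3, -1)	toward positive infinity
	ROUND_DIV_EXPR	(-4,  1)	to closest; the 2*rem >= den
					test above breaks the .5 tie
					away from zero.  */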
700 #ifndef REAL_ARITHMETIC
701 /* Effectively truncate a real value to represent the nearest possible value
702 in a narrower mode. The result is actually represented in the same data
703 type as the argument, but its value is usually different.
705 A trap may occur during the FP operations and it is the responsibility
706 of the calling function to have a handler established. */
709 real_value_truncate (mode, arg)
710 enum machine_mode mode;
713 return REAL_VALUE_TRUNCATE (mode, arg);
716 #if TARGET_FLOAT_FORMAT == IEEE_FLOAT_FORMAT
718 /* Check for infinity in an IEEE double precision number. */
724 /* The IEEE 64-bit double format. */
729 unsigned exponent : 11;
730 unsigned mantissa1 : 20;
735 unsigned mantissa1 : 20;
736 unsigned exponent : 11;
742 if (u.big_endian.sign == 1)
745 return (u.big_endian.exponent == 2047
746 && u.big_endian.mantissa1 == 0
747 && u.big_endian.mantissa2 == 0);
752 return (u.little_endian.exponent == 2047
753 && u.little_endian.mantissa1 == 0
754 && u.little_endian.mantissa2 == 0);
758 /* Check whether an IEEE double precision number is a NaN. */
764 /* The IEEE 64-bit double format. */
769 unsigned exponent : 11;
770 unsigned mantissa1 : 20;
775 unsigned mantissa1 : 20;
776 unsigned exponent : 11;
782 if (u.big_endian.sign == 1)
785 return (u.big_endian.exponent == 2047
786 && (u.big_endian.mantissa1 != 0
787 || u.big_endian.mantissa2 != 0));
792 return (u.little_endian.exponent == 2047
793 && (u.little_endian.mantissa1 != 0
794 || u.little_endian.mantissa2 != 0));
798 /* Check for a negative IEEE double precision number. */
804 /* The IEEE 64-bit double format. */
809 unsigned exponent : 11;
810 unsigned mantissa1 : 20;
815 unsigned mantissa1 : 20;
816 unsigned exponent : 11;
822 if (u.big_endian.sign == 1)
825 return u.big_endian.sign;
830 return u.little_endian.sign;
833 #else /* Target not IEEE */
835 /* Let's assume other float formats don't have infinity.
836 (This can be overridden by redefining REAL_VALUE_ISINF.) */
844 /* Let's assume other float formats don't have NaNs.
845 (This can be overridden by redefining REAL_VALUE_ISNAN.) */
853 /* Let's assume other float formats don't have minus zero.
854 (This can be overridden by redefining REAL_VALUE_NEGATIVE.) */
861 #endif /* Target not IEEE */
862 #endif /* no REAL_ARITHMETIC */
864 /* Split a tree IN into a constant and a variable part
865 that could be combined with CODE to make IN.
866 CODE must be a commutative arithmetic operation.
867 Store the constant part into *CONP and the variable part into *VARP.
868 Return 1 if this was done; zero means the tree IN did not decompose
871 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.
872 Therefore, we must tell the caller whether the variable part
873 was subtracted. We do this by storing 1 or -1 into *VARSIGNP.
874 The value stored is the coefficient for the variable term.
875 The constant term we return should always be added;
876 we negate it if necessary. */
879 split_tree (in, code, varp, conp, varsignp)
885 register tree outtype = TREE_TYPE (in);
889 /* Strip any conversions that don't change the machine mode. */
890 while ((TREE_CODE (in) == NOP_EXPR
891 || TREE_CODE (in) == CONVERT_EXPR)
892 && (TYPE_MODE (TREE_TYPE (in))
893 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (in, 0)))))
894 in = TREE_OPERAND (in, 0);
896 if (TREE_CODE (in) == code
897 || (! FLOAT_TYPE_P (TREE_TYPE (in))
898 /* We can associate addition and subtraction together
899 (even though the C standard doesn't say so)
900 for integers because the value is not affected.
901 For reals, the value might be affected, so we can't. */
902 && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
903 || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
905 enum tree_code code = TREE_CODE (TREE_OPERAND (in, 0));
906 if (code == INTEGER_CST)
908 *conp = TREE_OPERAND (in, 0);
909 *varp = TREE_OPERAND (in, 1);
910 if (TYPE_MODE (TREE_TYPE (*varp)) != TYPE_MODE (outtype)
911 && TREE_TYPE (*varp) != outtype)
912 *varp = convert (outtype, *varp);
913 *varsignp = (TREE_CODE (in) == MINUS_EXPR) ? -1 : 1;
916 if (TREE_CONSTANT (TREE_OPERAND (in, 1)))
918 *conp = TREE_OPERAND (in, 1);
919 *varp = TREE_OPERAND (in, 0);
921 if (TYPE_MODE (TREE_TYPE (*varp)) != TYPE_MODE (outtype)
922 && TREE_TYPE (*varp) != outtype)
923 *varp = convert (outtype, *varp);
924 if (TREE_CODE (in) == MINUS_EXPR)
926 /* If operation is subtraction and constant is second,
927 must negate it to get an additive constant.
928 And this cannot be done unless it is a manifest constant.
929 It could also be the address of a static variable.
930 We cannot negate that, so give up. */
931 if (TREE_CODE (*conp) == INTEGER_CST)
932 /* Subtracting from integer_zero_node loses for long long. */
933 *conp = fold (build1 (NEGATE_EXPR, TREE_TYPE (*conp), *conp));
939 if (TREE_CONSTANT (TREE_OPERAND (in, 0)))
941 *conp = TREE_OPERAND (in, 0);
942 *varp = TREE_OPERAND (in, 1);
943 if (TYPE_MODE (TREE_TYPE (*varp)) != TYPE_MODE (outtype)
944 && TREE_TYPE (*varp) != outtype)
945 *varp = convert (outtype, *varp);
946 *varsignp = (TREE_CODE (in) == MINUS_EXPR) ? -1 : 1;
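/* For illustration (sketch): with CODE == PLUS_EXPR, splitting the
   tree for `x - 5' stores `x' in *VARP, the already-negated constant
   -5 in *CONP, and 1 in *VARSIGNP; splitting `5 - x' stores 5 in
   *CONP, `x' in *VARP, and -1 in *VARSIGNP, recording that the
   variable part is subtracted.  */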
953 /* Combine two constants ARG1 and ARG2 under operation CODE
954 to produce a new constant.
955 We assume ARG1 and ARG2 have the same data type,
956 or at least are the same kind of constant and the same machine mode.
958 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
961 const_binop (code, arg1, arg2, notrunc)
963 register tree arg1, arg2;
966 STRIP_NOPS (arg1); STRIP_NOPS (arg2);
968 if (TREE_CODE (arg1) == INTEGER_CST)
970 register HOST_WIDE_INT int1l = TREE_INT_CST_LOW (arg1);
971 register HOST_WIDE_INT int1h = TREE_INT_CST_HIGH (arg1);
972 HOST_WIDE_INT int2l = TREE_INT_CST_LOW (arg2);
973 HOST_WIDE_INT int2h = TREE_INT_CST_HIGH (arg2);
974 HOST_WIDE_INT low, hi;
975 HOST_WIDE_INT garbagel, garbageh;
977 int uns = TREE_UNSIGNED (TREE_TYPE (arg1));
983 t = build_int_2 (int1l | int2l, int1h | int2h);
987 t = build_int_2 (int1l ^ int2l, int1h ^ int2h);
991 t = build_int_2 (int1l & int2l, int1h & int2h);
995 t = build_int_2 (int1l & ~int2l, int1h & ~int2h);
1001 /* It's unclear from the C standard whether shifts can overflow.
1002 The following code ignores overflow; perhaps a C standard
1003 interpretation ruling is needed. */
1004 lshift_double (int1l, int1h, int2l,
1005 TYPE_PRECISION (TREE_TYPE (arg1)),
1008 t = build_int_2 (low, hi);
1009 TREE_TYPE (t) = TREE_TYPE (arg1);
1011 force_fit_type (t, 0);
1012 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
1013 TREE_CONSTANT_OVERFLOW (t)
1014 = TREE_CONSTANT_OVERFLOW (arg1) | TREE_CONSTANT_OVERFLOW (arg2);
1020 lrotate_double (int1l, int1h, int2l,
1021 TYPE_PRECISION (TREE_TYPE (arg1)),
1023 t = build_int_2 (low, hi);
1030 if ((unsigned HOST_WIDE_INT) int2l < int1l)
1033 overflow = int2h < hi;
1035 t = build_int_2 (int2l, int2h);
1041 if ((unsigned HOST_WIDE_INT) int1l < int2l)
1044 overflow = int1h < hi;
1046 t = build_int_2 (int1l, int1h);
1049 overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
1050 t = build_int_2 (low, hi);
1054 if (int2h == 0 && int2l == 0)
1056 t = build_int_2 (int1l, int1h);
1059 neg_double (int2l, int2h, &low, &hi);
1060 add_double (int1l, int1h, low, hi, &low, &hi);
1061 overflow = overflow_sum_sign (hi, int2h, int1h);
1062 t = build_int_2 (low, hi);
1066 overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
1067 t = build_int_2 (low, hi);
1070 case TRUNC_DIV_EXPR:
1071 case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
1072 case EXACT_DIV_EXPR:
1073 /* This is a shortcut for a common special case.
1074 It reduces the number of tree nodes generated
1076 if (int2h == 0 && int2l > 0
1077 && TREE_TYPE (arg1) == sizetype
1078 && int1h == 0 && int1l >= 0)
1080 if (code == CEIL_DIV_EXPR)
1082 return size_int (int1l / int2l);
1084 case ROUND_DIV_EXPR:
1085 if (int2h == 0 && int2l == 1)
1087 t = build_int_2 (int1l, int1h);
1090 if (int1l == int2l && int1h == int2h)
1092 if ((int1l | int1h) == 0)
1094 t = build_int_2 (1, 0);
1097 overflow = div_and_round_double (code, uns,
1098 int1l, int1h, int2l, int2h,
1099 &low, &hi, &garbagel, &garbageh);
1100 t = build_int_2 (low, hi);
1103 case TRUNC_MOD_EXPR: case ROUND_MOD_EXPR:
1104 case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
1105 overflow = div_and_round_double (code, uns,
1106 int1l, int1h, int2l, int2h,
1107 &garbagel, &garbageh, &low, &hi);
1108 t = build_int_2 (low, hi);
1115 low = (((unsigned HOST_WIDE_INT) int1h
1116 < (unsigned HOST_WIDE_INT) int2h)
1117 || (((unsigned HOST_WIDE_INT) int1h
1118 == (unsigned HOST_WIDE_INT) int2h)
1119 && ((unsigned HOST_WIDE_INT) int1l
1120 < (unsigned HOST_WIDE_INT) int2l)));
1124 low = ((int1h < int2h)
1125 || ((int1h == int2h)
1126 && ((unsigned HOST_WIDE_INT) int1l
1127 < (unsigned HOST_WIDE_INT) int2l)));
1129 if (low == (code == MIN_EXPR))
1130 t = build_int_2 (int1l, int1h);
1132 t = build_int_2 (int2l, int2h);
1139 TREE_TYPE (t) = TREE_TYPE (arg1);
1141 = ((notrunc ? !uns && overflow : force_fit_type (t, overflow && !uns))
1142 | TREE_OVERFLOW (arg1)
1143 | TREE_OVERFLOW (arg2));
1144 TREE_CONSTANT_OVERFLOW (t) = (TREE_OVERFLOW (t)
1145 | TREE_CONSTANT_OVERFLOW (arg1)
1146 | TREE_CONSTANT_OVERFLOW (arg2));
1149 #if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
1150 if (TREE_CODE (arg1) == REAL_CST)
1155 REAL_VALUE_TYPE value;
1158 d1 = TREE_REAL_CST (arg1);
1159 d2 = TREE_REAL_CST (arg2);
1161 /* If either operand is a NaN, just return it. Otherwise, set up
1162 for floating-point trap; we return an overflow. */
1163 if (REAL_VALUE_ISNAN (d1))
1165 else if (REAL_VALUE_ISNAN (d2))
1167 else if (setjmp (float_error))
1169 t = copy_node (arg1);
1174 set_float_handler (float_error);
1176 #ifdef REAL_ARITHMETIC
1177 REAL_ARITHMETIC (value, code, d1, d2);
1194 #ifndef REAL_INFINITY
1203 value = MIN (d1, d2);
1207 value = MAX (d1, d2);
1213 #endif /* no REAL_ARITHMETIC */
1214 t = build_real (TREE_TYPE (arg1),
1215 real_value_truncate (TYPE_MODE (TREE_TYPE (arg1)), value));
1217 set_float_handler (NULL_PTR);
1220 = (force_fit_type (t, overflow)
1221 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
1222 TREE_CONSTANT_OVERFLOW (t)
1224 | TREE_CONSTANT_OVERFLOW (arg1)
1225 | TREE_CONSTANT_OVERFLOW (arg2);
1228 #endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
1229 if (TREE_CODE (arg1) == COMPLEX_CST)
1231 register tree type = TREE_TYPE (arg1);
1232 register tree r1 = TREE_REALPART (arg1);
1233 register tree i1 = TREE_IMAGPART (arg1);
1234 register tree r2 = TREE_REALPART (arg2);
1235 register tree i2 = TREE_IMAGPART (arg2);
1241 t = build_complex (type,
1242 const_binop (PLUS_EXPR, r1, r2, notrunc),
1243 const_binop (PLUS_EXPR, i1, i2, notrunc));
1247 t = build_complex (type,
1248 const_binop (MINUS_EXPR, r1, r2, notrunc),
1249 const_binop (MINUS_EXPR, i1, i2, notrunc));
1253 t = build_complex (type,
1254 const_binop (MINUS_EXPR,
1255 const_binop (MULT_EXPR,
1257 const_binop (MULT_EXPR,
1260 const_binop (PLUS_EXPR,
1261 const_binop (MULT_EXPR,
1263 const_binop (MULT_EXPR,
1270 register tree magsquared
1271 = const_binop (PLUS_EXPR,
1272 const_binop (MULT_EXPR, r2, r2, notrunc),
1273 const_binop (MULT_EXPR, i2, i2, notrunc),
1276 t = build_complex (type,
1278 (INTEGRAL_TYPE_P (TREE_TYPE (r1))
1279 ? TRUNC_DIV_EXPR : RDIV_EXPR,
1280 const_binop (PLUS_EXPR,
1281 const_binop (MULT_EXPR, r1, r2,
1283 const_binop (MULT_EXPR, i1, i2,
1286 magsquared, notrunc),
1288 (INTEGRAL_TYPE_P (TREE_TYPE (r1))
1289 ? TRUNC_DIV_EXPR : RDIV_EXPR,
1290 const_binop (MINUS_EXPR,
1291 const_binop (MULT_EXPR, i1, r2,
1293 const_binop (MULT_EXPR, r1, i2,
1296 magsquared, notrunc));
1308 /* Return an INTEGER_CST with value V and type from `sizetype'. */
1312 unsigned HOST_WIDE_INT number;
1315 /* Type-size nodes already made for small sizes. */
1316 static tree size_table[2*HOST_BITS_PER_WIDE_INT + 1];
1318 if (number < 2*HOST_BITS_PER_WIDE_INT + 1
1319 && size_table[number] != 0)
1320 return size_table[number];
1321 if (number < 2*HOST_BITS_PER_WIDE_INT + 1)
1323 push_obstacks_nochange ();
1324 /* Make this a permanent node. */
1325 end_temporary_allocation ();
1326 t = build_int_2 (number, 0);
1327 TREE_TYPE (t) = sizetype;
1328 size_table[number] = t;
1333 t = build_int_2 (number, 0);
1334 TREE_TYPE (t) = sizetype;
1339 /* Combine operands ARG0 and ARG1 with arithmetic operation CODE.
1340 CODE is a tree code. Data type is taken from `sizetype'.
1341 If the operands are constant, so is the result. */
1344 size_binop (code, arg0, arg1)
1345 enum tree_code code;
1348 /* Handle the special case of two integer constants faster. */
1349 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
1351 /* And some specific cases even faster than that. */
1352 if (code == PLUS_EXPR
1353 && TREE_INT_CST_LOW (arg0) == 0
1354 && TREE_INT_CST_HIGH (arg0) == 0)
1356 if (code == MINUS_EXPR
1357 && TREE_INT_CST_LOW (arg1) == 0
1358 && TREE_INT_CST_HIGH (arg1) == 0)
1360 if (code == MULT_EXPR
1361 && TREE_INT_CST_LOW (arg0) == 1
1362 && TREE_INT_CST_HIGH (arg0) == 0)
1364 /* Handle general case of two integer constants. */
1365 return const_binop (code, arg0, arg1, 0);
1368 if (arg0 == error_mark_node || arg1 == error_mark_node)
1369 return error_mark_node;
1371 return fold (build (code, sizetype, arg0, arg1));
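/* Usage sketch (demonstration only, not part of GNU CC): both entry
   points yield trees of type `sizetype', and constant operands fold
   at once:  */
#if 0
static tree
size_example ()
{
  tree four = size_int (4);	/* cached, permanent INTEGER_CST */
  return size_binop (MULT_EXPR, four, size_int (3));	/* INTEGER_CST 12 */
}
#endif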
1374 /* Given T, a tree representing type conversion of ARG1, a constant,
1375 return a constant tree representing the result of conversion. */
1378 fold_convert (t, arg1)
1382 register tree type = TREE_TYPE (t);
1385 if (TREE_CODE (type) == POINTER_TYPE || INTEGRAL_TYPE_P (type))
1387 if (TREE_CODE (arg1) == INTEGER_CST)
1389 /* If we would build a constant wider than GCC supports,
1390 leave the conversion unfolded. */
1391 if (TYPE_PRECISION (type) > 2 * HOST_BITS_PER_WIDE_INT)
1394 /* Given an integer constant, make new constant with new type,
1395 appropriately sign-extended or truncated. */
1396 t = build_int_2 (TREE_INT_CST_LOW (arg1),
1397 TREE_INT_CST_HIGH (arg1));
1398 TREE_TYPE (t) = type;
1399 /* Indicate an overflow if (1) ARG1 already overflowed,
1400 or (2) force_fit_type indicates an overflow.
1401 Tell force_fit_type that an overflow has already occurred
1402 if ARG1 is a too-large unsigned value and T is signed. */
1404 = (TREE_OVERFLOW (arg1)
1405 | force_fit_type (t,
1406 (TREE_INT_CST_HIGH (arg1) < 0
1407 & (TREE_UNSIGNED (type)
1408 < TREE_UNSIGNED (TREE_TYPE (arg1))))));
1409 TREE_CONSTANT_OVERFLOW (t)
1410 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
1412 #if !defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
1413 else if (TREE_CODE (arg1) == REAL_CST)
1415 /* Don't initialize these, use assignments.
1416 Initialized local aggregates don't work on old compilers. */
1420 tree type1 = TREE_TYPE (arg1);
1422 x = TREE_REAL_CST (arg1);
1423 l = real_value_from_int_cst (type1, TYPE_MIN_VALUE (type));
1424 u = real_value_from_int_cst (type1, TYPE_MAX_VALUE (type));
1425 /* See if X will be in range after truncation towards 0.
1426 To compensate for truncation, move the bounds away from 0,
1427 but reject if X exactly equals the adjusted bounds. */
1428 #ifdef REAL_ARITHMETIC
1429 REAL_ARITHMETIC (l, MINUS_EXPR, l, dconst1);
1430 REAL_ARITHMETIC (u, PLUS_EXPR, u, dconst1);
1435 /* If X is a NaN, use zero instead and show we have an overflow.
1436 Otherwise, range check. */
1437 if (REAL_VALUE_ISNAN (x))
1438 overflow = 1, x = dconst0;
1439 else if (! (REAL_VALUES_LESS (l, x) && REAL_VALUES_LESS (x, u)))
1442 #ifndef REAL_ARITHMETIC
1444 HOST_WIDE_INT low, high;
1445 HOST_WIDE_INT half_word
1446 = (HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2);
1451 high = (HOST_WIDE_INT) (x / half_word / half_word);
1452 x -= (REAL_VALUE_TYPE) high * half_word * half_word;
1453 if (x >= (REAL_VALUE_TYPE) half_word * half_word / 2)
1455 low = x - (REAL_VALUE_TYPE) half_word * half_word / 2;
1456 low |= (HOST_WIDE_INT) -1 << (HOST_BITS_PER_WIDE_INT - 1);
1459 low = (HOST_WIDE_INT) x;
1460 if (TREE_REAL_CST (arg1) < 0)
1461 neg_double (low, high, &low, &high);
1462 t = build_int_2 (low, high);
1466 HOST_WIDE_INT low, high;
1467 REAL_VALUE_TO_INT (&low, &high, x);
1468 t = build_int_2 (low, high);
1471 TREE_TYPE (t) = type;
1473 = TREE_OVERFLOW (arg1) | force_fit_type (t, overflow);
1474 TREE_CONSTANT_OVERFLOW (t)
1475 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
1477 #endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
1478 TREE_TYPE (t) = type;
1480 else if (TREE_CODE (type) == REAL_TYPE)
1482 #if !defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
1483 if (TREE_CODE (arg1) == INTEGER_CST)
1484 return build_real_from_int_cst (type, arg1);
1485 #endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
1486 if (TREE_CODE (arg1) == REAL_CST)
1488 if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1)))
1491 TREE_TYPE (arg1) = type;
1494 else if (setjmp (float_error))
1497 t = copy_node (arg1);
1500 set_float_handler (float_error);
1502 t = build_real (type, real_value_truncate (TYPE_MODE (type),
1503 TREE_REAL_CST (arg1)));
1504 set_float_handler (NULL_PTR);
1508 = TREE_OVERFLOW (arg1) | force_fit_type (t, overflow);
1509 TREE_CONSTANT_OVERFLOW (t)
1510 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
1514 TREE_CONSTANT (t) = 1;
1518 /* Return an expr equal to X but certainly not valid as an lvalue.
1519 Also make sure it is not valid as a null pointer constant.
1527 /* These things are certainly not lvalues. */
1528 if (TREE_CODE (x) == NON_LVALUE_EXPR
1529 || TREE_CODE (x) == INTEGER_CST
1530 || TREE_CODE (x) == REAL_CST
1531 || TREE_CODE (x) == STRING_CST
1532 || TREE_CODE (x) == ADDR_EXPR)
1534 if (TREE_CODE (x) == INTEGER_CST && integer_zerop (x))
1536 /* Use NOP_EXPR instead of NON_LVALUE_EXPR
1537 so convert_for_assignment won't strip it.
1538 This is so this 0 won't be treated as a null pointer constant. */
1539 result = build1 (NOP_EXPR, TREE_TYPE (x), x);
1540 TREE_CONSTANT (result) = TREE_CONSTANT (x);
1546 result = build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
1547 TREE_CONSTANT (result) = TREE_CONSTANT (x);
1551 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
1552 Zero means allow extended lvalues. */
1554 int pedantic_lvalues;
1556 /* When pedantic, return an expr equal to X but certainly not valid as a
1557 pedantic lvalue. Otherwise, return X. */
1560 pedantic_non_lvalue (x)
1563 if (pedantic_lvalues)
1564 return non_lvalue (x);
1569 /* Given a tree comparison code, return the code that is the logical inverse
1570 of the given code. It is not safe to do this for floating-point
1571 comparisons, except for NE_EXPR and EQ_EXPR. */
1573 static enum tree_code
1574 invert_tree_comparison (code)
1575 enum tree_code code;
1596 /* Similar, but return the comparison that results if the operands are
1597 swapped. This is safe for floating-point. */
1599 static enum tree_code
1600 swap_tree_comparison (code)
1601 enum tree_code code;
1621 /* Return nonzero if CODE is a tree code that represents a truth value. */
1624 truth_value_p (code)
1625 enum tree_code code;
1627 return (TREE_CODE_CLASS (code) == '<'
1628 || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
1629 || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
1630 || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
1633 /* Return nonzero if two operands are necessarily equal.
1634 If ONLY_CONST is non-zero, only return non-zero for constants.
1635 This function tests whether the operands are indistinguishable;
1636 it does not test whether they are equal using C's == operation.
1637 The distinction is important for IEEE floating point, because
1638 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
1639 (2) two NaNs may be indistinguishable, but NaN!=NaN. */
1642 operand_equal_p (arg0, arg1, only_const)
1646 /* If the two types don't have the same signedness, we can't consider
1647 them equal. We must check this before the STRIP_NOPS calls
1648 because they may change the signedness of the arguments. */
1649 if (TREE_UNSIGNED (TREE_TYPE (arg0)) != TREE_UNSIGNED (TREE_TYPE (arg1)))
1655 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
1656 We don't care about side effects in that case because the SAVE_EXPR
1657 takes care of that for us. */
1658 if (TREE_CODE (arg0) == SAVE_EXPR && arg0 == arg1)
1659 return ! only_const;
1661 if (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1))
1664 if (TREE_CODE (arg0) == TREE_CODE (arg1)
1665 && TREE_CODE (arg0) == ADDR_EXPR
1666 && TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0))
1669 if (TREE_CODE (arg0) == TREE_CODE (arg1)
1670 && TREE_CODE (arg0) == INTEGER_CST
1671 && TREE_INT_CST_LOW (arg0) == TREE_INT_CST_LOW (arg1)
1672 && TREE_INT_CST_HIGH (arg0) == TREE_INT_CST_HIGH (arg1))
1675 /* Detect when real constants are equal. */
1676 if (TREE_CODE (arg0) == TREE_CODE (arg1)
1677 && TREE_CODE (arg0) == REAL_CST)
1678 return !bcmp ((char *) &TREE_REAL_CST (arg0),
1679 (char *) &TREE_REAL_CST (arg1),
1680 sizeof (REAL_VALUE_TYPE));
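/* A bitwise comparison is used above because value equality is the
   wrong test here: under IEEE arithmetic -0.0 == 0.0 is true even
   though the two constants behave differently (e.g. 1.0/-0.0 is
   -Inf but 1.0/0.0 is +Inf), and a NaN never compares equal, even
   to an identically-represented NaN.  */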
1688 if (TREE_CODE (arg0) != TREE_CODE (arg1))
1690 /* This is needed for conversions and for COMPONENT_REF.
1691 Might as well play it safe and always test this. */
1692 if (TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
1695 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
1698 /* Two conversions are equal only if signedness and modes match. */
1699 if ((TREE_CODE (arg0) == NOP_EXPR || TREE_CODE (arg0) == CONVERT_EXPR)
1700 && (TREE_UNSIGNED (TREE_TYPE (arg0))
1701 != TREE_UNSIGNED (TREE_TYPE (arg1))))
1704 return operand_equal_p (TREE_OPERAND (arg0, 0),
1705 TREE_OPERAND (arg1, 0), 0);
1709 return (operand_equal_p (TREE_OPERAND (arg0, 0),
1710 TREE_OPERAND (arg1, 0), 0)
1711 && operand_equal_p (TREE_OPERAND (arg0, 1),
1712 TREE_OPERAND (arg1, 1), 0));
1715 switch (TREE_CODE (arg0))
1718 return operand_equal_p (TREE_OPERAND (arg0, 0),
1719 TREE_OPERAND (arg1, 0), 0);
1723 return (operand_equal_p (TREE_OPERAND (arg0, 0),
1724 TREE_OPERAND (arg1, 0), 0)
1725 && operand_equal_p (TREE_OPERAND (arg0, 1),
1726 TREE_OPERAND (arg1, 1), 0));
1729 return (operand_equal_p (TREE_OPERAND (arg0, 0),
1730 TREE_OPERAND (arg1, 0), 0)
1731 && operand_equal_p (TREE_OPERAND (arg0, 1),
1732 TREE_OPERAND (arg1, 1), 0)
1733 && operand_equal_p (TREE_OPERAND (arg0, 2),
1734 TREE_OPERAND (arg1, 2), 0));
1742 /* Similar to operand_equal_p, but see if ARG0 might have been made by
1743 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
1745 When in doubt, return 0. */
1748 operand_equal_for_comparison_p (arg0, arg1, other)
1752 int unsignedp1, unsignedpo;
1753 tree primarg1, primother;
1754 unsigned correct_width;
1756 if (operand_equal_p (arg0, arg1, 0))
1759 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
1760 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
1763 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
1764 actual comparison operand, ARG0.
1766 First throw away any conversions to wider types
1767 already present in the operands. */
1769 primarg1 = get_narrower (arg1, &unsignedp1);
1770 primother = get_narrower (other, &unsignedpo);
1772 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
1773 if (unsignedp1 == unsignedpo
1774 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
1775 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
1777 tree type = TREE_TYPE (arg0);
1779 /* Make sure shorter operand is extended the right way
1780 to match the longer operand. */
1781 primarg1 = convert (signed_or_unsigned_type (unsignedp1,
1782 TREE_TYPE (primarg1)),
1785 if (operand_equal_p (arg0, convert (type, primarg1), 0))
1792 /* See if ARG is an expression that is either a comparison or is performing
1793 arithmetic on comparisons. The comparisons must only be comparing
1794 two different values, which will be stored in *CVAL1 and *CVAL2; if
1795 they are non-zero it means that some operands have already been found.
1796 No variables may be used anywhere else in the expression except in the
1797 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
1798 the expression and save_expr needs to be called with CVAL1 and CVAL2.
1800 If this is true, return 1. Otherwise, return zero. */
1803 twoval_comparison_p (arg, cval1, cval2, save_p)
1805 tree *cval1, *cval2;
1808 enum tree_code code = TREE_CODE (arg);
1809 char class = TREE_CODE_CLASS (code);
1811 /* We can handle some of the 'e' cases here. */
1812 if (class == 'e' && code == TRUTH_NOT_EXPR)
1814 else if (class == 'e'
1815 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
1816 || code == COMPOUND_EXPR))
1819 /* ??? Disable this since the SAVE_EXPR might already be in use outside
1820 the expression. There may be no way to make this work, but it needs
1821 to be looked at again for 2.6. */
1823 else if (class == 'e' && code == SAVE_EXPR && SAVE_EXPR_RTL (arg) == 0)
1825 /* If we've already found a CVAL1 or CVAL2, this expression is
1826 too complex to handle. */
1827 if (*cval1 || *cval2)
1838 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
1841 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
1842 && twoval_comparison_p (TREE_OPERAND (arg, 1),
1843 cval1, cval2, save_p));
1849 if (code == COND_EXPR)
1850 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
1851 cval1, cval2, save_p)
1852 && twoval_comparison_p (TREE_OPERAND (arg, 1),
1853 cval1, cval2, save_p)
1854 && twoval_comparison_p (TREE_OPERAND (arg, 2),
1855 cval1, cval2, save_p));
1859 /* First see if we can handle the first operand, then the second. For
1860 the second operand, we know *CVAL1 can't be zero. It must be that
1861 one side of the comparison is each of the values; test for the
1862 case where this isn't true by failing if the two operands
1865 if (operand_equal_p (TREE_OPERAND (arg, 0),
1866 TREE_OPERAND (arg, 1), 0))
1870 *cval1 = TREE_OPERAND (arg, 0);
1871 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
1873 else if (*cval2 == 0)
1874 *cval2 = TREE_OPERAND (arg, 0);
1875 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
1880 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
1882 else if (*cval2 == 0)
1883 *cval2 = TREE_OPERAND (arg, 1);
1884 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
1895 /* ARG is a tree that is known to contain just arithmetic operations and
1896 comparisons. Evaluate the operations in the tree substituting NEW0 for
1897 any occurrence of OLD0 as an operand of a comparison and likewise for
1901 eval_subst (arg, old0, new0, old1, new1)
1903 tree old0, new0, old1, new1;
1905 tree type = TREE_TYPE (arg);
1906 enum tree_code code = TREE_CODE (arg);
1907 char class = TREE_CODE_CLASS (code);
1909 /* We can handle some of the 'e' cases here. */
1910 if (class == 'e' && code == TRUTH_NOT_EXPR)
1912 else if (class == 'e'
1913 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
1919 return fold (build1 (code, type,
1920 eval_subst (TREE_OPERAND (arg, 0),
1921 old0, new0, old1, new1)));
1924 return fold (build (code, type,
1925 eval_subst (TREE_OPERAND (arg, 0),
1926 old0, new0, old1, new1),
1927 eval_subst (TREE_OPERAND (arg, 1),
1928 old0, new0, old1, new1)));
1934 return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);
1937 return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);
1940 return fold (build (code, type,
1941 eval_subst (TREE_OPERAND (arg, 0),
1942 old0, new0, old1, new1),
1943 eval_subst (TREE_OPERAND (arg, 1),
1944 old0, new0, old1, new1),
1945 eval_subst (TREE_OPERAND (arg, 2),
1946 old0, new0, old1, new1)));
1951 tree arg0 = TREE_OPERAND (arg, 0);
1952 tree arg1 = TREE_OPERAND (arg, 1);
1954 /* We need to check both for exact equality and tree equality. The
1955 former will be true if the operand has a side-effect. In that
1956 case, we know the operand occurred exactly once. */
1958 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
1960 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
1963 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
1965 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
1968 return fold (build (code, type, arg0, arg1));
1975 /* Return a tree for the case when the result of an expression is RESULT
1976 converted to TYPE and OMITTED was previously an operand of the expression
1977 but is now not needed (e.g., we folded OMITTED * 0).
1979 If OMITTED has side effects, we must evaluate it. Otherwise, just do
1980 the conversion of RESULT to TYPE. */
1983 omit_one_operand (type, result, omitted)
1984 tree type, result, omitted;
1986 tree t = convert (type, result);
1988 if (TREE_SIDE_EFFECTS (omitted))
1989 return build (COMPOUND_EXPR, type, omitted, t);
1991 return non_lvalue (t);
1994 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
1997 pedantic_omit_one_operand (type, result, omitted)
1998 tree type, result, omitted;
2000 tree t = convert (type, result);
2002 if (TREE_SIDE_EFFECTS (omitted))
2003 return build (COMPOUND_EXPR, type, omitted, t);
2005 return pedantic_non_lvalue (t);
2010 /* Return a simplified tree node for the truth-negation of ARG. This
2011 never alters ARG itself. We assume that ARG is an operation that
2012 returns a truth value (0 or 1). */
2015 invert_truthvalue (arg)
2018 tree type = TREE_TYPE (arg);
2019 enum tree_code code = TREE_CODE (arg);
2021 if (code == ERROR_MARK)
2024 /* If this is a comparison, we can simply invert it, except for
2025 floating-point non-equality comparisons, in which case we just
2026 enclose a TRUTH_NOT_EXPR around what we have. */
2028 if (TREE_CODE_CLASS (code) == '<')
2030 if (FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (arg, 0)))
2031 && code != NE_EXPR && code != EQ_EXPR)
2032 return build1 (TRUTH_NOT_EXPR, type, arg);
2034 return build (invert_tree_comparison (code), type,
2035 TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
2041 return convert (type, build_int_2 (TREE_INT_CST_LOW (arg) == 0
2042 && TREE_INT_CST_HIGH (arg) == 0, 0));
2044 case TRUTH_AND_EXPR:
2045 return build (TRUTH_OR_EXPR, type,
2046 invert_truthvalue (TREE_OPERAND (arg, 0)),
2047 invert_truthvalue (TREE_OPERAND (arg, 1)));
2050 return build (TRUTH_AND_EXPR, type,
2051 invert_truthvalue (TREE_OPERAND (arg, 0)),
2052 invert_truthvalue (TREE_OPERAND (arg, 1)));
2054 case TRUTH_XOR_EXPR:
2055 /* Here we can invert either operand. We invert the first operand
2056 unless the second operand is a TRUTH_NOT_EXPR in which case our
2057 result is the XOR of the first operand with the inside of the
2058 negation of the second operand. */
2060 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
2061 return build (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
2062 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
2064 return build (TRUTH_XOR_EXPR, type,
2065 invert_truthvalue (TREE_OPERAND (arg, 0)),
2066 TREE_OPERAND (arg, 1));
2068 case TRUTH_ANDIF_EXPR:
2069 return build (TRUTH_ORIF_EXPR, type,
2070 invert_truthvalue (TREE_OPERAND (arg, 0)),
2071 invert_truthvalue (TREE_OPERAND (arg, 1)));
2073 case TRUTH_ORIF_EXPR:
2074 return build (TRUTH_ANDIF_EXPR, type,
2075 invert_truthvalue (TREE_OPERAND (arg, 0)),
2076 invert_truthvalue (TREE_OPERAND (arg, 1)));
2078 case TRUTH_NOT_EXPR:
2079 return TREE_OPERAND (arg, 0);
2082 return build (COND_EXPR, type, TREE_OPERAND (arg, 0),
2083 invert_truthvalue (TREE_OPERAND (arg, 1)),
2084 invert_truthvalue (TREE_OPERAND (arg, 2)));
2087 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
2088 invert_truthvalue (TREE_OPERAND (arg, 1)));
2090 case NON_LVALUE_EXPR:
2091 return invert_truthvalue (TREE_OPERAND (arg, 0));
2096 return build1 (TREE_CODE (arg), type,
2097 invert_truthvalue (TREE_OPERAND (arg, 0)));
2100 if (!integer_onep (TREE_OPERAND (arg, 1)))
2102 return build (EQ_EXPR, type, arg, convert (type, integer_zero_node));
2105 return build1 (TRUTH_NOT_EXPR, type, arg);
2107 case CLEANUP_POINT_EXPR:
2108 return build1 (CLEANUP_POINT_EXPR, type,
2109 invert_truthvalue (TREE_OPERAND (arg, 0)));
2111 if (TREE_CODE (TREE_TYPE (arg)) != BOOLEAN_TYPE)
2113 return build1 (TRUTH_NOT_EXPR, type, arg);
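/* For illustration, the transformations above are De Morgan's laws
   and their relatives, applied recursively (sketch):

	!(a && b)	-->  !a || !b
	!(a || b)	-->  !a && !b
	!(a ? b : c)	-->  a ? !b : !c
	!!a		-->  a

   ARG itself is never modified; new nodes are built instead.  */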
2116 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
2117 operands are another bit-wise operation with a common input. If so,
2118 distribute the bit operations to save an operation and possibly two if
2119 constants are involved. For example, convert
2120 (A | B) & (A | C) into A | (B & C)
2121 Further simplification will occur if B and C are constants.
2123 If this optimization cannot be done, 0 will be returned. */
2126 distribute_bit_expr (code, type, arg0, arg1)
2127 enum tree_code code;
2134 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2135 || TREE_CODE (arg0) == code
2136 || (TREE_CODE (arg0) != BIT_AND_EXPR
2137 && TREE_CODE (arg0) != BIT_IOR_EXPR))
2140 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
2142 common = TREE_OPERAND (arg0, 0);
2143 left = TREE_OPERAND (arg0, 1);
2144 right = TREE_OPERAND (arg1, 1);
2146 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
2148 common = TREE_OPERAND (arg0, 0);
2149 left = TREE_OPERAND (arg0, 1);
2150 right = TREE_OPERAND (arg1, 0);
2152 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
2154 common = TREE_OPERAND (arg0, 1);
2155 left = TREE_OPERAND (arg0, 0);
2156 right = TREE_OPERAND (arg1, 1);
2158 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
2160 common = TREE_OPERAND (arg0, 1);
2161 left = TREE_OPERAND (arg0, 0);
2162 right = TREE_OPERAND (arg1, 0);
2167 return fold (build (TREE_CODE (arg0), type, common,
2168 fold (build (code, type, left, right))));
2171 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
2172 starting at BITPOS. The field is unsigned if UNSIGNEDP is non-zero. */
2175 make_bit_field_ref (inner, type, bitsize, bitpos, unsignedp)
2178 int bitsize, bitpos;
2181 tree result = build (BIT_FIELD_REF, type, inner,
2182 size_int (bitsize), size_int (bitpos));
2184 TREE_UNSIGNED (result) = unsignedp;
2189 /* Optimize a bit-field compare.
2191 There are two cases: First is a compare against a constant and the
2192 second is a comparison of two items where the fields are at the same
2193 bit position relative to the start of a chunk (byte, halfword, word)
2194 large enough to contain it. In these cases we can avoid the shift
2195 implicit in bitfield extractions.
2197 For constants, we emit a compare of the shifted constant with the
2198 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
2199 compared. For two fields at the same position, we do the ANDs with the
2200 similar mask and compare the result of the ANDs.
2202 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
2203 COMPARE_TYPE is the type of the comparison, and LHS and RHS
2204 are the left and right operands of the comparison, respectively.
2206 If the optimization described above can be done, we return the resulting
2207 tree. Otherwise we return zero. */
2210 optimize_bit_field_compare (code, compare_type, lhs, rhs)
2211 enum tree_code code;
2215 int lbitpos, lbitsize, rbitpos, rbitsize;
2216 int lnbitpos, lnbitsize, rnbitpos, rnbitsize;
2217 tree type = TREE_TYPE (lhs);
2218 tree signed_type, unsigned_type;
2219 int const_p = TREE_CODE (rhs) == INTEGER_CST;
2220 enum machine_mode lmode, rmode, lnmode, rnmode;
2221 int lunsignedp, runsignedp;
2222 int lvolatilep = 0, rvolatilep = 0;
2223 tree linner, rinner;
2227 /* Get all the information about the extractions being done. If the bit size
2228 is the same as the size of the underlying object, we aren't doing an
2229 extraction at all and so can do nothing. */
2230 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
2231 &lunsignedp, &lvolatilep);
2232 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
2238 /* If this is not a constant, we can only do something if bit positions,
2239 sizes, and signedness are the same. */
2240 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset,
2241 &rmode, &runsignedp, &rvolatilep);
2243 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
2244 || lunsignedp != runsignedp || offset != 0)
2248 /* See if we can find a mode to refer to this field. We should be able to,
2249 but fail if we can't. */
2250 lnmode = get_best_mode (lbitsize, lbitpos,
2251 TYPE_ALIGN (TREE_TYPE (linner)), word_mode,
2253 if (lnmode == VOIDmode)
2256 /* Set signed and unsigned types of the precision of this mode for the
2258 signed_type = type_for_mode (lnmode, 0);
2259 unsigned_type = type_for_mode (lnmode, 1);
2263 rnmode = get_best_mode (rbitsize, rbitpos,
2264 TYPE_ALIGN (TREE_TYPE (rinner)), word_mode,
2266 if (rnmode == VOIDmode)
2270 /* Compute the bit position and size for the new reference and our offset
2271 within it. If the new reference is the same size as the original, we
2272 won't optimize anything, so return zero. */
2273 lnbitsize = GET_MODE_BITSIZE (lnmode);
2274 lnbitpos = lbitpos & ~ (lnbitsize - 1);
2275 lbitpos -= lnbitpos;
2276 if (lnbitsize == lbitsize)
2281 rnbitsize = GET_MODE_BITSIZE (rnmode);
2282 rnbitpos = rbitpos & ~ (rnbitsize - 1);
2283 rbitpos -= rnbitpos;
2284 if (rnbitsize == rbitsize)
2288 if (BYTES_BIG_ENDIAN)
2289 lbitpos = lnbitsize - lbitsize - lbitpos;
2291 /* Make the mask to be used against the extracted field. */
2292 mask = build_int_2 (~0, ~0);
2293 TREE_TYPE (mask) = unsigned_type;
2294 force_fit_type (mask, 0);
2295 mask = convert (unsigned_type, mask);
2296 mask = const_binop (LSHIFT_EXPR, mask, size_int (lnbitsize - lbitsize), 0);
2297 mask = const_binop (RSHIFT_EXPR, mask,
2298 size_int (lnbitsize - lbitsize - lbitpos), 0);
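/* For illustration (sketch): for a 3-bit field at bit 4 of a 32-bit
   word (lbitsize == 3, lbitpos == 4, lnbitsize == 32), the all-ones
   mask is shifted left by 29 to 0xe0000000, then right by 25; the
   right shift is logical because the type is unsigned, leaving
   0x00000070, which covers exactly the field.  */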
2301 /* If not comparing with constant, just rework the comparison
2303 return build (code, compare_type,
2304 build (BIT_AND_EXPR, unsigned_type,
2305 make_bit_field_ref (linner, unsigned_type,
2306 lnbitsize, lnbitpos, 1),
2308 build (BIT_AND_EXPR, unsigned_type,
2309 make_bit_field_ref (rinner, unsigned_type,
2310 rnbitsize, rnbitpos, 1),
2313 /* Otherwise, we are handling the constant case. See if the constant is too
2314 big for the field. Warn and return a tree for 0 (false) if so. We do
2315 this not only for its own sake, but to avoid having to test for this
2316 error case below. If we didn't, we might generate wrong code.
2318 For unsigned fields, the constant shifted right by the field length should
2319 be all zero. For signed fields, the high-order bits should agree with
2320 the sign bit. */
2324 if (! integer_zerop (const_binop (RSHIFT_EXPR,
2325 convert (unsigned_type, rhs),
2326 size_int (lbitsize), 0)))
2328 warning ("comparison is always %s due to width of bitfield",
2329 code == NE_EXPR ? "one" : "zero");
2330 return convert (compare_type,
2331 (code == NE_EXPR
2332 ? integer_one_node : integer_zero_node));
2337 tree tem = const_binop (RSHIFT_EXPR, convert (signed_type, rhs),
2338 size_int (lbitsize - 1), 0);
2339 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
2341 warning ("comparison is always %s due to width of bitfield",
2342 code == NE_EXPR ? "one" : "zero");
2343 return convert (compare_type,
2344 (code == NE_EXPR
2345 ? integer_one_node : integer_zero_node));
2349 /* Single-bit compares should always be against zero. */
2350 if (lbitsize == 1 && ! integer_zerop (rhs))
2352 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
2353 rhs = convert (type, integer_zero_node);
2356 /* Make a new bitfield reference, shift the constant over the
2357 appropriate number of bits and mask it with the computed mask
2358 (in case this was a signed field). If we changed it, make a new one. */
2359 lhs = make_bit_field_ref (linner, unsigned_type, lnbitsize, lnbitpos, 1);
2360 if (lvolatilep)
2361 {
2362 TREE_SIDE_EFFECTS (lhs) = 1;
2363 TREE_THIS_VOLATILE (lhs) = 1;
2364 }
2366 rhs = fold (const_binop (BIT_AND_EXPR,
2367 const_binop (LSHIFT_EXPR,
2368 convert (unsigned_type, rhs),
2369 size_int (lbitpos), 0),
2370 mask, 0));
2372 return build (code, compare_type,
2373 build (BIT_AND_EXPR, unsigned_type, lhs, mask),
2374 rhs);
2375 }
2377 /* Subroutine for fold_truthop: decode a field reference.
2379 If EXP is a comparison reference, we return the innermost reference.
2381 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
2382 set to the starting bit number.
2384 If the innermost field can be completely contained in a mode-sized
2385 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
2387 *PVOLATILEP is set to 1 if any expression encountered is volatile;
2388 otherwise it is not changed.
2390 *PUNSIGNEDP is set to the signedness of the field.
2392 *PMASK is set to the mask used. This is either contained in a
2393 BIT_AND_EXPR or derived from the width of the field.
2395 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
2397 Return 0 if this is not a component reference or is one that we can't
2398 do anything with. */
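/* As a made-up example: for EXP == "q.f & 3", where f is an unsigned
   8-bit field of q, the innermost reference to q is returned,
   *PBITSIZE is 8, *PAND_MASK is 3, and *PMASK is 3 (the 8-bit field
   mask 0xff merged with the and-mask 3).  */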
2401 decode_field_reference (exp, pbitsize, pbitpos, pmode, punsignedp,
2402 pvolatilep, pmask, pand_mask)
2404 int *pbitsize, *pbitpos;
2405 enum machine_mode *pmode;
2406 int *punsignedp, *pvolatilep;
2411 tree mask, inner, offset;
2415 /* All the optimizations using this function assume integer fields.
2416 There are problems with FP fields since the type_for_size call
2417 below can fail for, e.g., XFmode. */
2418 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
2423 if (TREE_CODE (exp) == BIT_AND_EXPR)
2425 and_mask = TREE_OPERAND (exp, 1);
2426 exp = TREE_OPERAND (exp, 0);
2427 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
2428 if (TREE_CODE (and_mask) != INTEGER_CST)
2433 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
2434 punsignedp, pvolatilep);
2435 if ((inner == exp && and_mask == 0)
2436 || *pbitsize < 0 || offset != 0)
2439 /* Compute the mask to access the bitfield. */
2440 unsigned_type = type_for_size (*pbitsize, 1);
2441 precision = TYPE_PRECISION (unsigned_type);
2443 mask = build_int_2 (~0, ~0);
2444 TREE_TYPE (mask) = unsigned_type;
2445 force_fit_type (mask, 0);
2446 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
2447 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
2449 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
2450 if (and_mask != 0)
2451 mask = fold (build (BIT_AND_EXPR, unsigned_type,
2452 convert (unsigned_type, and_mask), mask));
2454 *pmask = mask;
2455 *pand_mask = and_mask;
2456 return inner;
2457 }
2459 /* Return non-zero if MASK represents a mask of SIZE ones in the low-order
2460 bits. */
2463 all_ones_mask_p (mask, size)
2467 tree type = TREE_TYPE (mask);
2468 int precision = TYPE_PRECISION (type);
2471 tmask = build_int_2 (~0, ~0);
2472 TREE_TYPE (tmask) = signed_type (type);
2473 force_fit_type (tmask, 0);
2474 return
2475 tree_int_cst_equal (mask,
2476 const_binop (RSHIFT_EXPR,
2477 const_binop (LSHIFT_EXPR, tmask,
2478 size_int (precision - size),
2479 0),
2480 size_int (precision - size), 0));
2481 }
2483 /* Subroutine for fold_truthop: determine if an operand is simple enough
2484 to be evaluated unconditionally. */
2487 simple_operand_p (exp)
2490 /* Strip any conversions that don't change the machine mode. */
2491 while ((TREE_CODE (exp) == NOP_EXPR
2492 || TREE_CODE (exp) == CONVERT_EXPR)
2493 && (TYPE_MODE (TREE_TYPE (exp))
2494 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
2495 exp = TREE_OPERAND (exp, 0);
2497 return (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c'
2498 || (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd'
2499 && ! TREE_ADDRESSABLE (exp)
2500 && ! TREE_THIS_VOLATILE (exp)
2501 && ! DECL_NONLOCAL (exp)
2502 /* Don't regard global variables as simple. They may be
2503 allocated in ways unknown to the compiler (shared memory,
2504 #pragma weak, etc). */
2505 && ! TREE_PUBLIC (exp)
2506 && ! DECL_EXTERNAL (exp)
2507 /* Loading a static variable is unduly expensive, but global
2508 registers aren't expensive. */
2509 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
2512 /* Subroutine for fold_truthop: try to optimize a range test.
2514 For example, "i >= 2 && i <= 9" can be done as "(unsigned) (i - 2) <= 7".
2516 JCODE is the logical combination of the two terms. It is TRUTH_AND_EXPR
2517 (representing TRUTH_ANDIF_EXPR and TRUTH_AND_EXPR) or TRUTH_OR_EXPR
2518 (representing TRUTH_ORIF_EXPR and TRUTH_OR_EXPR). TYPE is the type of
2519 the result.
2521 VAR is the value being tested. LO_CODE and HI_CODE are the comparison
2522 operators comparing VAR to LO_CST and HI_CST. LO_CST is known to be no
2523 larger than HI_CST (they may be equal).
2525 We return the simplified tree or 0 if no optimization is possible. */
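/* Worked example: for "i >= 2 && i <= 9" we are called with
   JCODE == TRUTH_AND_EXPR, LO_CODE == GE_EXPR, HI_CODE == LE_EXPR,
   LO_CST == 2 and HI_CST == 9, and produce "(unsigned) (i - 2) <= 7";
   any i < 2 wraps around to a large unsigned value and fails the test.  */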
2528 range_test (jcode, type, lo_code, hi_code, var, lo_cst, hi_cst)
2529 enum tree_code jcode, lo_code, hi_code;
2530 tree type, var, lo_cst, hi_cst;
2533 enum tree_code rcode;
2535 /* See if this is a range test and normalize the constant terms. */
2537 if (jcode == TRUTH_AND_EXPR)
2542 /* See if we have VAR != CST && VAR != CST+1. */
2543 if (! (hi_code == NE_EXPR
2544 && TREE_INT_CST_LOW (hi_cst) - TREE_INT_CST_LOW (lo_cst) == 1
2545 && tree_int_cst_equal (integer_one_node,
2546 const_binop (MINUS_EXPR,
2547 hi_cst, lo_cst, 0))))
2555 if (hi_code == LT_EXPR)
2556 hi_cst = const_binop (MINUS_EXPR, hi_cst, integer_one_node, 0);
2557 else if (hi_code != LE_EXPR)
2560 if (lo_code == GT_EXPR)
2561 lo_cst = const_binop (PLUS_EXPR, lo_cst, integer_one_node, 0);
2563 /* We now have VAR >= LO_CST && VAR <= HI_CST. */
2576 /* See if we have VAR == CST || VAR == CST+1. */
2577 if (! (hi_code == EQ_EXPR
2578 && TREE_INT_CST_LOW (hi_cst) - TREE_INT_CST_LOW (lo_cst) == 1
2579 && tree_int_cst_equal (integer_one_node,
2580 const_binop (MINUS_EXPR,
2581 hi_cst, lo_cst, 0))))
2589 if (hi_code == GE_EXPR)
2590 hi_cst = const_binop (MINUS_EXPR, hi_cst, integer_one_node, 0);
2591 else if (hi_code != GT_EXPR)
2594 if (lo_code == LE_EXPR)
2595 lo_cst = const_binop (PLUS_EXPR, lo_cst, integer_one_node, 0);
2597 /* We now have VAR < LO_CST || VAR > HI_CST. */
2606 /* When normalizing, it is possible to both increment the smaller constant
2607 and decrement the larger constant. See if they are still ordered. */
2608 if (tree_int_cst_lt (hi_cst, lo_cst))
2611 /* Fail if VAR isn't an integer. */
2612 utype = TREE_TYPE (var);
2613 if (! INTEGRAL_TYPE_P (utype))
2616 /* The range test is invalid if subtracting the two constants results
2617 in overflow. This can happen in traditional mode. */
2618 if (! int_fits_type_p (hi_cst, TREE_TYPE (var))
2619 || ! int_fits_type_p (lo_cst, TREE_TYPE (var)))
2622 if (! TREE_UNSIGNED (utype))
2624 utype = unsigned_type (utype);
2625 return fold (convert (type,
2626 build (rcode, utype,
2627 convert (utype,
2628 build (MINUS_EXPR, TREE_TYPE (var),
2629 var, lo_cst)),
2631 const_binop (MINUS_EXPR, hi_cst,
2632 lo_cst, 0))));
2633 }
2635 return fold (convert (type,
2636 build (rcode, utype,
2637 build (MINUS_EXPR, utype, var, lo_cst),
2638 const_binop (MINUS_EXPR, hi_cst,
2639 lo_cst, 0))));
2640 }
2642 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
2643 bit value. Arrange things so the extra bits will be set to zero if and
2644 only if C is sign-extended to its full width. If MASK is nonzero,
2645 it is an INTEGER_CST that should be AND'ed with the extra bits. */
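/* Numeric sketch (assuming a 32-bit mode): with P == 8 and
   C == 0xffffff80 (-128 sign-extended), TEMP below is 0xffffff00, so
   C ^ TEMP == 0x00000080 and the extra bits are zero; with the
   non-sign-extended C == 0x00000080, C ^ TEMP == 0xffffff80 instead,
   leaving the extra bits set.  */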
2648 unextend (c, p, unsignedp, mask)
2654 tree type = TREE_TYPE (c);
2655 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
2658 if (p == modesize || unsignedp)
2661 if (TREE_UNSIGNED (type))
2662 c = convert (signed_type (type), c);
2664 /* We work by getting just the sign bit into the low-order bit, then
2665 into the high-order bit, then sign-extend. We then XOR that value
2666 with C. */
2667 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
2668 temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
2669 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
2670 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
2671 if (mask != 0)
2672 temp = const_binop (BIT_AND_EXPR, temp, convert (TREE_TYPE (c), mask), 0);
2674 return convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
2677 /* Find ways of folding logical expressions of LHS and RHS:
2678 Try to merge two comparisons to the same innermost item.
2679 Look for range tests like "ch >= '0' && ch <= '9'".
2680 Look for combinations of simple terms on machines with expensive branches
2681 and evaluate the RHS unconditionally.
2683 For example, if we have p->a == 2 && p->b == 4 and we can make an
2684 object large enough to span both A and B, we can do this with a comparison
2685 against the object ANDed with a mask.
2687 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
2688 operations to do this with one comparison.
2690 We check for both normal comparisons and the BIT_AND_EXPRs made by this
2691 function and the one above.
2693 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
2694 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
2696 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
2697 two operands.
2699 We return the simplified tree or 0 if no optimization is possible. */
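/* E.g. (assuming a little-endian target and two adjacent unsigned 8-bit
   fields a and b): "p->a == 2 && p->b == 4" may become a single 16-bit
   load compared against the constant (2 | (4 << 8)).  */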
2702 fold_truthop (code, truth_type, lhs, rhs)
2703 enum tree_code code;
2704 tree truth_type, lhs, rhs;
2706 /* If this is the "or" of two comparisons, we can do something if
2707 the comparisons are NE_EXPR. If this is the "and", we can do something
2708 if the comparisons are EQ_EXPR. I.e.,
2709 (a->b == 2 && a->c == 4) can become (a->new == NEW).
2711 WANTED_CODE is this operation code. For single bit fields, we can
2712 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
2713 comparison for one-bit fields. */
2715 enum tree_code wanted_code;
2716 enum tree_code lcode, rcode;
2717 tree ll_arg, lr_arg, rl_arg, rr_arg;
2718 tree ll_inner, lr_inner, rl_inner, rr_inner;
2719 int ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
2720 int rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
2721 int xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
2722 int lnbitsize, lnbitpos, rnbitsize, rnbitpos;
2723 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
2724 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
2725 enum machine_mode lnmode, rnmode;
2726 tree ll_mask, lr_mask, rl_mask, rr_mask;
2727 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
2728 tree l_const, r_const;
2730 int first_bit, end_bit;
2733 /* Start by getting the comparison codes and seeing if this looks like
2734 a range test. Fail if anything is volatile. If one operand is a
2735 BIT_AND_EXPR with the constant one, treat it as if it were surrounded
2736 by a NE_EXPR against zero. */
2738 if (TREE_SIDE_EFFECTS (lhs)
2739 || TREE_SIDE_EFFECTS (rhs))
2742 lcode = TREE_CODE (lhs);
2743 rcode = TREE_CODE (rhs);
2745 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
2746 lcode = NE_EXPR, lhs = build (NE_EXPR, truth_type, lhs, integer_zero_node);
2748 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
2749 rcode = NE_EXPR, rhs = build (NE_EXPR, truth_type, rhs, integer_zero_node);
2751 if (TREE_CODE_CLASS (lcode) != '<'
2752 || TREE_CODE_CLASS (rcode) != '<')
2755 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
2756 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
2758 ll_arg = TREE_OPERAND (lhs, 0);
2759 lr_arg = TREE_OPERAND (lhs, 1);
2760 rl_arg = TREE_OPERAND (rhs, 0);
2761 rr_arg = TREE_OPERAND (rhs, 1);
2763 if (TREE_CODE (lr_arg) == INTEGER_CST
2764 && TREE_CODE (rr_arg) == INTEGER_CST
2765 && operand_equal_p (ll_arg, rl_arg, 0))
2767 if (tree_int_cst_lt (lr_arg, rr_arg))
2768 result = range_test (code, truth_type, lcode, rcode,
2769 ll_arg, lr_arg, rr_arg);
2770 else
2771 result = range_test (code, truth_type, rcode, lcode,
2772 ll_arg, rr_arg, lr_arg);
2774 /* If this isn't a range test, it also isn't a comparison that
2775 can be merged. However, it wins to evaluate the RHS unconditionally
2776 on machines with expensive branches. */
2778 if (result == 0 && BRANCH_COST >= 2)
2780 if (TREE_CODE (ll_arg) != VAR_DECL
2781 && TREE_CODE (ll_arg) != PARM_DECL)
2783 /* Avoid evaluating the variable part twice. */
2784 ll_arg = save_expr (ll_arg);
2785 lhs = build (lcode, TREE_TYPE (lhs), ll_arg, lr_arg);
2786 rhs = build (rcode, TREE_TYPE (rhs), ll_arg, rr_arg);
2788 return build (code, truth_type, lhs, rhs);
2793 /* If the RHS can be evaluated unconditionally and its operands are
2794 simple, it wins to evaluate the RHS unconditionally on machines
2795 with expensive branches. In this case, this isn't a comparison
2796 that can be merged. */
2798 /* @@ I'm not sure it wins on the m88110 to do this if the comparisons
2799 are with zero (tmw). */
2801 if (BRANCH_COST >= 2
2802 && INTEGRAL_TYPE_P (TREE_TYPE (rhs))
2803 && simple_operand_p (rl_arg)
2804 && simple_operand_p (rr_arg))
2805 return build (code, truth_type, lhs, rhs);
2807 /* See if the comparisons can be merged. Then get all the parameters for
2808 each side. */
2810 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
2811 || (rcode != EQ_EXPR && rcode != NE_EXPR))
2815 ll_inner = decode_field_reference (ll_arg,
2816 &ll_bitsize, &ll_bitpos, &ll_mode,
2817 &ll_unsignedp, &volatilep, &ll_mask,
2818 &ll_and_mask);
2819 lr_inner = decode_field_reference (lr_arg,
2820 &lr_bitsize, &lr_bitpos, &lr_mode,
2821 &lr_unsignedp, &volatilep, &lr_mask,
2822 &lr_and_mask);
2823 rl_inner = decode_field_reference (rl_arg,
2824 &rl_bitsize, &rl_bitpos, &rl_mode,
2825 &rl_unsignedp, &volatilep, &rl_mask,
2826 &rl_and_mask);
2827 rr_inner = decode_field_reference (rr_arg,
2828 &rr_bitsize, &rr_bitpos, &rr_mode,
2829 &rr_unsignedp, &volatilep, &rr_mask,
2830 &rr_and_mask);
2832 /* The inner operation on the lhs of each comparison must be the same
2833 if we are to be able to do anything. Then see if we have constants.
2834 If not, the same must be true for the rhs. */
2836 if (volatilep || ll_inner == 0 || rl_inner == 0
2837 || ! operand_equal_p (ll_inner, rl_inner, 0))
2840 if (TREE_CODE (lr_arg) == INTEGER_CST
2841 && TREE_CODE (rr_arg) == INTEGER_CST)
2842 l_const = lr_arg, r_const = rr_arg;
2843 else if (lr_inner == 0 || rr_inner == 0
2844 || ! operand_equal_p (lr_inner, rr_inner, 0))
2847 l_const = r_const = 0;
2849 /* If either comparison code is not correct for our logical operation,
2850 fail. However, we can convert a one-bit comparison against zero into
2851 the opposite comparison against that bit being set in the field. */
2853 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
2854 if (lcode != wanted_code)
2856 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
2862 if (rcode != wanted_code)
2864 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
2870 /* See if we can find a mode that contains both fields being compared on
2871 the left. If we can't, fail. Otherwise, update all constants and masks
2872 to be relative to a field of that size. */
2873 first_bit = MIN (ll_bitpos, rl_bitpos);
2874 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
2875 lnmode = get_best_mode (end_bit - first_bit, first_bit,
2876 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
2878 if (lnmode == VOIDmode)
2881 lnbitsize = GET_MODE_BITSIZE (lnmode);
2882 lnbitpos = first_bit & ~ (lnbitsize - 1);
2883 type = type_for_size (lnbitsize, 1);
2884 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
2886 if (BYTES_BIG_ENDIAN)
2888 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
2889 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
2892 ll_mask = const_binop (LSHIFT_EXPR, convert (type, ll_mask),
2893 size_int (xll_bitpos), 0);
2894 rl_mask = const_binop (LSHIFT_EXPR, convert (type, rl_mask),
2895 size_int (xrl_bitpos), 0);
2899 l_const = convert (type, l_const);
2900 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
2901 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
2902 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
2903 fold (build1 (BIT_NOT_EXPR,
2907 warning ("comparison is always %s",
2908 wanted_code == NE_EXPR ? "one" : "zero");
2910 return convert (truth_type,
2911 wanted_code == NE_EXPR
2912 ? integer_one_node : integer_zero_node);
2917 r_const = convert (type, r_const);
2918 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
2919 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
2920 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
2921 fold (build1 (BIT_NOT_EXPR,
2925 warning ("comparison is always %s",
2926 wanted_code == NE_EXPR ? "one" : "zero");
2928 return convert (truth_type,
2929 wanted_code == NE_EXPR
2930 ? integer_one_node : integer_zero_node);
2934 /* If the right sides are not constant, do the same for them. Also,
2935 disallow this optimization if a size or signedness mismatch occurs
2936 between the left and right sides. */
2939 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
2940 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
2941 /* Make sure the two fields on the right
2942 correspond to the left without being swapped. */
2943 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
2946 first_bit = MIN (lr_bitpos, rr_bitpos);
2947 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
2948 rnmode = get_best_mode (end_bit - first_bit, first_bit,
2949 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
2951 if (rnmode == VOIDmode)
2954 rnbitsize = GET_MODE_BITSIZE (rnmode);
2955 rnbitpos = first_bit & ~ (rnbitsize - 1);
2956 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
2958 if (BYTES_BIG_ENDIAN)
2960 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
2961 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
2964 lr_mask = const_binop (LSHIFT_EXPR, convert (type, lr_mask),
2965 size_int (xlr_bitpos), 0);
2966 rr_mask = const_binop (LSHIFT_EXPR, convert (type, rr_mask),
2967 size_int (xrr_bitpos), 0);
2969 /* Make a mask that corresponds to both fields being compared.
2970 Do this for both items being compared. If the masks agree,
2971 we can do this by masking both and comparing the masked
2972 results. */
2973 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
2974 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
2975 if (operand_equal_p (ll_mask, lr_mask, 0) && lnbitsize == rnbitsize)
2977 lhs = make_bit_field_ref (ll_inner, type, lnbitsize, lnbitpos,
2978 ll_unsignedp || rl_unsignedp);
2979 rhs = make_bit_field_ref (lr_inner, type, rnbitsize, rnbitpos,
2980 lr_unsignedp || rr_unsignedp);
2981 if (! all_ones_mask_p (ll_mask, lnbitsize))
2983 lhs = build (BIT_AND_EXPR, type, lhs, ll_mask);
2984 rhs = build (BIT_AND_EXPR, type, rhs, ll_mask);
2986 return build (wanted_code, truth_type, lhs, rhs);
2989 /* There is still another way we can do something: If both pairs of
2990 fields being compared are adjacent, we may be able to make a wider
2991 field containing them both. */
2992 if ((ll_bitsize + ll_bitpos == rl_bitpos
2993 && lr_bitsize + lr_bitpos == rr_bitpos)
2994 || (ll_bitpos == rl_bitpos + rl_bitsize
2995 && lr_bitpos == rr_bitpos + rr_bitsize))
2996 return build (wanted_code, truth_type,
2997 make_bit_field_ref (ll_inner, type,
2998 ll_bitsize + rl_bitsize,
2999 MIN (ll_bitpos, rl_bitpos),
3001 make_bit_field_ref (lr_inner, type,
3002 lr_bitsize + rr_bitsize,
3003 MIN (lr_bitpos, rr_bitpos),
3009 /* Handle the case of comparisons with constants. If there is something in
3010 common between the masks, those bits of the constants must be the same.
3011 If not, the condition is always false. Test for this to avoid generating
3012 incorrect code below. */
3013 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
3014 if (! integer_zerop (result)
3015 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
3016 const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
3018 if (wanted_code == NE_EXPR)
3020 warning ("`or' of unmatched not-equal tests is always 1");
3021 return convert (truth_type, integer_one_node);
3025 warning ("`and' of mutually exclusive equal-tests is always zero");
3026 return convert (truth_type, integer_zero_node);
3030 /* Construct the expression we will return. First get the component
3031 reference we will make. Unless the mask is all ones the width of
3032 that field, perform the mask operation. Then compare with the
3033 merged constant. */
3034 result = make_bit_field_ref (ll_inner, type, lnbitsize, lnbitpos,
3035 ll_unsignedp || rl_unsignedp);
3037 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
3038 if (! all_ones_mask_p (ll_mask, lnbitsize))
3039 result = build (BIT_AND_EXPR, type, result, ll_mask);
3041 return build (wanted_code, truth_type, result,
3042 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
3045 /* If T contains a COMPOUND_EXPR which was inserted merely to evaluate
3046 S, a SAVE_EXPR, return the expression actually being evaluated. Note
3047 that we may sometimes modify the tree. */
3050 strip_compound_expr (t, s)
3054 tree type = TREE_TYPE (t);
3055 enum tree_code code = TREE_CODE (t);
3057 /* See if this is the COMPOUND_EXPR we want to eliminate. */
3058 if (code == COMPOUND_EXPR && TREE_CODE (TREE_OPERAND (t, 0)) == CONVERT_EXPR
3059 && TREE_OPERAND (TREE_OPERAND (t, 0), 0) == s)
3060 return TREE_OPERAND (t, 1);
3062 /* See if this is a COND_EXPR or a simple arithmetic operator. We
3063 don't bother handling any other types. */
3064 else if (code == COND_EXPR)
3066 TREE_OPERAND (t, 0) = strip_compound_expr (TREE_OPERAND (t, 0), s);
3067 TREE_OPERAND (t, 1) = strip_compound_expr (TREE_OPERAND (t, 1), s);
3068 TREE_OPERAND (t, 2) = strip_compound_expr (TREE_OPERAND (t, 2), s);
3070 else if (TREE_CODE_CLASS (code) == '1')
3071 TREE_OPERAND (t, 0) = strip_compound_expr (TREE_OPERAND (t, 0), s);
3072 else if (TREE_CODE_CLASS (code) == '<'
3073 || TREE_CODE_CLASS (code) == '2')
3075 TREE_OPERAND (t, 0) = strip_compound_expr (TREE_OPERAND (t, 0), s);
3076 TREE_OPERAND (t, 1) = strip_compound_expr (TREE_OPERAND (t, 1), s);
3082 /* Perform constant folding and related simplification of EXPR.
3083 The related simplifications include x*1 => x, x*0 => 0, etc.,
3084 and application of the associative law.
3085 NOP_EXPR conversions may be removed freely (as long as we
3086 are careful not to change the C type of the overall expression)
3087 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
3088 but we can constant-fold them if they have constant operands. */
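/* For example, given constant operands "1 + 2" becomes "3"; given a
   variable x, "x * 1" becomes "x" and "x * 0" becomes "0" (with x still
   evaluated when it has side effects).  The result keeps the type of
   EXPR.  */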
3094 register tree t = expr;
3095 tree t1 = NULL_TREE;
3097 tree type = TREE_TYPE (expr);
3098 register tree arg0, arg1;
3099 register enum tree_code code = TREE_CODE (t);
3103 /* WINS will be nonzero when the switch is done
3104 if all operands are constant. */
3108 /* Don't try to process an RTL_EXPR since its operands aren't trees.
3109 Likewise for a SAVE_EXPR that's already been evaluated. */
3110 if (code == RTL_EXPR || (code == SAVE_EXPR && SAVE_EXPR_RTL (t) != 0))
3111 return t;
3113 /* Return right away if already constant. */
3114 if (TREE_CONSTANT (t))
3116 if (code == CONST_DECL)
3117 return DECL_INITIAL (t);
3121 kind = TREE_CODE_CLASS (code);
3122 if (code == NOP_EXPR || code == FLOAT_EXPR || code == CONVERT_EXPR)
3126 /* Special case for conversion ops that can have fixed point args. */
3127 arg0 = TREE_OPERAND (t, 0);
3129 /* Don't use STRIP_NOPS, because signedness of argument type matters. */
3131 STRIP_TYPE_NOPS (arg0);
3133 if (arg0 != 0 && TREE_CODE (arg0) == COMPLEX_CST)
3134 subop = TREE_REALPART (arg0);
3138 if (subop != 0 && TREE_CODE (subop) != INTEGER_CST
3139 #if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
3140 && TREE_CODE (subop) != REAL_CST
3141 #endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
3143 /* Note that TREE_CONSTANT isn't enough:
3144 static var addresses are constant but we can't
3145 do arithmetic on them. */
3148 else if (kind == 'e' || kind == '<'
3149 || kind == '1' || kind == '2' || kind == 'r')
3151 register int len = tree_code_length[(int) code];
3153 for (i = 0; i < len; i++)
3155 tree op = TREE_OPERAND (t, i);
3159 continue; /* Valid for CALL_EXPR, at least. */
3161 if (kind == '<' || code == RSHIFT_EXPR)
3163 /* Signedness matters here. Perhaps we can refine this
3164 later. */
3165 STRIP_TYPE_NOPS (op);
3169 /* Strip any conversions that don't change the mode. */
3173 if (TREE_CODE (op) == COMPLEX_CST)
3174 subop = TREE_REALPART (op);
3178 if (TREE_CODE (subop) != INTEGER_CST
3179 #if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
3180 && TREE_CODE (subop) != REAL_CST
3181 #endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
3183 /* Note that TREE_CONSTANT isn't enough:
3184 static var addresses are constant but we can't
3185 do arithmetic on them. */
3195 /* If this is a commutative operation, and ARG0 is a constant, move it
3196 to ARG1 to reduce the number of tests below. */
3197 if ((code == PLUS_EXPR || code == MULT_EXPR || code == MIN_EXPR
3198 || code == MAX_EXPR || code == BIT_IOR_EXPR || code == BIT_XOR_EXPR
3199 || code == BIT_AND_EXPR)
3200 && (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST))
3202 tem = arg0; arg0 = arg1; arg1 = tem;
3204 tem = TREE_OPERAND (t, 0); TREE_OPERAND (t, 0) = TREE_OPERAND (t, 1);
3205 TREE_OPERAND (t, 1) = tem;
3208 /* Now WINS is set as described above,
3209 ARG0 is the first operand of EXPR,
3210 and ARG1 is the second operand (if it has more than one operand).
3212 First check for cases where an arithmetic operation is applied to a
3213 compound, conditional, or comparison operation. Push the arithmetic
3214 operation inside the compound or conditional to see if any folding
3215 can then be done. Convert comparison to conditional for this purpose.
3216 This also optimizes non-constant cases that used to be done in
3217 expand_expr.
3219 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR where
3220 one of the operands is a comparison and the other is a comparison, a
3221 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
3222 code below would make the expression more complex. Change it to a
3223 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
3224 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
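/* For instance, "(a < b) & (c < d)" is turned into the TRUTH_AND_EXPR
   "(a < b) && (c < d)", and "(a < b) == (c < d)" into the inversion of
   "(a < b) ^ (c < d)", since each comparison already yields 0 or 1.  */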
3226 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
3227 || code == EQ_EXPR || code == NE_EXPR)
3228 && ((truth_value_p (TREE_CODE (arg0))
3229 && (truth_value_p (TREE_CODE (arg1))
3230 || (TREE_CODE (arg1) == BIT_AND_EXPR
3231 && integer_onep (TREE_OPERAND (arg1, 1)))))
3232 || (truth_value_p (TREE_CODE (arg1))
3233 && (truth_value_p (TREE_CODE (arg0))
3234 || (TREE_CODE (arg0) == BIT_AND_EXPR
3235 && integer_onep (TREE_OPERAND (arg0, 1)))))))
3237 t = fold (build (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
3238 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
3242 if (code == EQ_EXPR)
3243 t = invert_truthvalue (t);
3248 if (TREE_CODE_CLASS (code) == '1')
3250 if (TREE_CODE (arg0) == COMPOUND_EXPR)
3251 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
3252 fold (build1 (code, type, TREE_OPERAND (arg0, 1))));
3253 else if (TREE_CODE (arg0) == COND_EXPR)
3255 t = fold (build (COND_EXPR, type, TREE_OPERAND (arg0, 0),
3256 fold (build1 (code, type, TREE_OPERAND (arg0, 1))),
3257 fold (build1 (code, type, TREE_OPERAND (arg0, 2)))));
3259 /* If this was a conversion, and all we did was to move into
3260 inside the COND_EXPR, bring it back out. But leave it if
3261 it is a conversion from integer to integer and the
3262 result precision is no wider than a word since such a
3263 conversion is cheap and may be optimized away by combine,
3264 while it couldn't if it were outside the COND_EXPR. Then return
3265 so we don't get into an infinite recursion loop taking the
3266 conversion out and then back in. */
3268 if ((code == NOP_EXPR || code == CONVERT_EXPR
3269 || code == NON_LVALUE_EXPR)
3270 && TREE_CODE (t) == COND_EXPR
3271 && TREE_CODE (TREE_OPERAND (t, 1)) == code
3272 && TREE_CODE (TREE_OPERAND (t, 2)) == code
3273 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0))
3274 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 2), 0)))
3275 && ! (INTEGRAL_TYPE_P (TREE_TYPE (t))
3276 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0)))
3277 && TYPE_PRECISION (TREE_TYPE (t)) <= BITS_PER_WORD))
3278 t = build1 (code, type,
3280 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0)),
3281 TREE_OPERAND (t, 0),
3282 TREE_OPERAND (TREE_OPERAND (t, 1), 0),
3283 TREE_OPERAND (TREE_OPERAND (t, 2), 0)));
3286 else if (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<')
3287 return fold (build (COND_EXPR, type, arg0,
3288 fold (build1 (code, type, integer_one_node)),
3289 fold (build1 (code, type, integer_zero_node))));
3291 else if (TREE_CODE_CLASS (code) == '2'
3292 || TREE_CODE_CLASS (code) == '<')
3294 if (TREE_CODE (arg1) == COMPOUND_EXPR)
3295 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
3296 fold (build (code, type,
3297 arg0, TREE_OPERAND (arg1, 1))));
3298 else if (TREE_CODE (arg1) == COND_EXPR
3299 || TREE_CODE_CLASS (TREE_CODE (arg1)) == '<')
3301 tree test, true_value, false_value;
3303 if (TREE_CODE (arg1) == COND_EXPR)
3305 test = TREE_OPERAND (arg1, 0);
3306 true_value = TREE_OPERAND (arg1, 1);
3307 false_value = TREE_OPERAND (arg1, 2);
3311 tree testtype = TREE_TYPE (arg1);
3313 true_value = convert (testtype, integer_one_node);
3314 false_value = convert (testtype, integer_zero_node);
3317 /* If ARG0 is complex we want to make sure we only evaluate
3318 it once. Though this is only required if it is volatile, it
3319 might be more efficient even if it is not. However, if we
3320 succeed in folding one part to a constant, we do not need
3321 to make this SAVE_EXPR. Since we do this optimization
3322 primarily to see if we do end up with constant and this
3323 SAVE_EXPR interferes with later optimizations, suppressing
3324 it when we can is important. */
3326 if (TREE_CODE (arg0) != SAVE_EXPR
3327 && ((TREE_CODE (arg0) != VAR_DECL
3328 && TREE_CODE (arg0) != PARM_DECL)
3329 || TREE_SIDE_EFFECTS (arg0)))
3331 tree lhs = fold (build (code, type, arg0, true_value));
3332 tree rhs = fold (build (code, type, arg0, false_value));
3334 if (TREE_CONSTANT (lhs) || TREE_CONSTANT (rhs))
3335 return fold (build (COND_EXPR, type, test, lhs, rhs));
3337 arg0 = save_expr (arg0);
3340 test = fold (build (COND_EXPR, type, test,
3341 fold (build (code, type, arg0, true_value)),
3342 fold (build (code, type, arg0, false_value))));
3343 if (TREE_CODE (arg0) == SAVE_EXPR)
3344 return build (COMPOUND_EXPR, type,
3345 convert (void_type_node, arg0),
3346 strip_compound_expr (test, arg0));
3348 return convert (type, test);
3351 else if (TREE_CODE (arg0) == COMPOUND_EXPR)
3352 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
3353 fold (build (code, type, TREE_OPERAND (arg0, 1), arg1)));
3354 else if (TREE_CODE (arg0) == COND_EXPR
3355 || TREE_CODE_CLASS (TREE_CODE (arg0)) == '<')
3357 tree test, true_value, false_value;
3359 if (TREE_CODE (arg0) == COND_EXPR)
3361 test = TREE_OPERAND (arg0, 0);
3362 true_value = TREE_OPERAND (arg0, 1);
3363 false_value = TREE_OPERAND (arg0, 2);
3367 tree testtype = TREE_TYPE (arg0);
3369 true_value = convert (testtype, integer_one_node);
3370 false_value = convert (testtype, integer_zero_node);
3373 if (TREE_CODE (arg1) != SAVE_EXPR
3374 && ((TREE_CODE (arg1) != VAR_DECL
3375 && TREE_CODE (arg1) != PARM_DECL)
3376 || TREE_SIDE_EFFECTS (arg1)))
3378 tree lhs = fold (build (code, type, true_value, arg1));
3379 tree rhs = fold (build (code, type, false_value, arg1));
3381 if (TREE_CONSTANT (lhs) || TREE_CONSTANT (rhs)
3382 || TREE_CONSTANT (arg1))
3383 return fold (build (COND_EXPR, type, test, lhs, rhs));
3385 arg1 = save_expr (arg1);
3388 test = fold (build (COND_EXPR, type, test,
3389 fold (build (code, type, true_value, arg1)),
3390 fold (build (code, type, false_value, arg1))));
3391 if (TREE_CODE (arg1) == SAVE_EXPR)
3392 return build (COMPOUND_EXPR, type,
3393 convert (void_type_node, arg1),
3394 strip_compound_expr (test, arg1));
3396 return convert (type, test);
3399 else if (TREE_CODE_CLASS (code) == '<'
3400 && TREE_CODE (arg0) == COMPOUND_EXPR)
3401 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
3402 fold (build (code, type, TREE_OPERAND (arg0, 1), arg1)));
3403 else if (TREE_CODE_CLASS (code) == '<'
3404 && TREE_CODE (arg1) == COMPOUND_EXPR)
3405 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
3406 fold (build (code, type, arg0, TREE_OPERAND (arg1, 1))));
3418 return fold (DECL_INITIAL (t));
3423 case FIX_TRUNC_EXPR:
3424 /* Other kinds of FIX are not handled properly by fold_convert. */
3426 if (TREE_TYPE (TREE_OPERAND (t, 0)) == TREE_TYPE (t))
3427 return TREE_OPERAND (t, 0);
3429 /* Handle cases of two conversions in a row. */
3430 if (TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
3431 || TREE_CODE (TREE_OPERAND (t, 0)) == CONVERT_EXPR)
3433 tree inside_type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
3434 tree inter_type = TREE_TYPE (TREE_OPERAND (t, 0));
3435 tree final_type = TREE_TYPE (t);
3436 int inside_int = INTEGRAL_TYPE_P (inside_type);
3437 int inside_ptr = POINTER_TYPE_P (inside_type);
3438 int inside_float = FLOAT_TYPE_P (inside_type);
3439 int inside_prec = TYPE_PRECISION (inside_type);
3440 int inside_unsignedp = TREE_UNSIGNED (inside_type);
3441 int inter_int = INTEGRAL_TYPE_P (inter_type);
3442 int inter_ptr = POINTER_TYPE_P (inter_type);
3443 int inter_float = FLOAT_TYPE_P (inter_type);
3444 int inter_prec = TYPE_PRECISION (inter_type);
3445 int inter_unsignedp = TREE_UNSIGNED (inter_type);
3446 int final_int = INTEGRAL_TYPE_P (final_type);
3447 int final_ptr = POINTER_TYPE_P (final_type);
3448 int final_float = FLOAT_TYPE_P (final_type);
3449 int final_prec = TYPE_PRECISION (final_type);
3450 int final_unsignedp = TREE_UNSIGNED (final_type);
3452 /* In addition to the cases of two conversions in a row
3453 handled below, if we are converting something to its own
3454 type via an object of identical or wider precision, neither
3455 conversion is needed. */
3456 if (inside_type == final_type
3457 && ((inter_int && final_int) || (inter_float && final_float))
3458 && inter_prec >= final_prec)
3459 return TREE_OPERAND (TREE_OPERAND (t, 0), 0);
3461 /* Likewise, if the intermediate and final types are either both
3462 float or both integer, we don't need the middle conversion if
3463 it is wider than the final type and doesn't change the signedness
3464 (for integers). Avoid this if the final type is a pointer
3465 since then we sometimes need the inner conversion. Likewise if
3466 the outer has a precision not equal to the size of its mode. */
3467 if ((((inter_int || inter_ptr) && (inside_int || inside_ptr))
3468 || (inter_float && inside_float))
3469 && inter_prec >= inside_prec
3470 && (inter_float || inter_unsignedp == inside_unsignedp)
3471 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (final_type))
3472 && TYPE_MODE (final_type) == TYPE_MODE (inter_type))
3474 return convert (final_type, TREE_OPERAND (TREE_OPERAND (t, 0), 0));
3476 /* Two conversions in a row are not needed unless:
3477 - some conversion is floating-point (overstrict for now), or
3478 - the intermediate type is narrower than both initial and
3480 - the intermediate type and innermost type differ in signedness,
3481 and the outermost type is wider than the intermediate, or
3482 - the initial type is a pointer type and the precisions of the
3483 intermediate and final types differ, or
3484 - the final type is a pointer type and the precisions of the
3485 initial and intermediate types differ. */
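/* Concrete cases (with hypothetical type widths): if i is an int and
   long is wider, "(int) (long) i" needs neither conversion and folds
   back to "i"; but "(int) (char) i" must keep the intermediate
   narrowing, since char is narrower than both surrounding types.  */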
3486 if (! inside_float && ! inter_float && ! final_float
3487 && (inter_prec > inside_prec || inter_prec > final_prec)
3488 && ! (inside_int && inter_int
3489 && inter_unsignedp != inside_unsignedp
3490 && inter_prec < final_prec)
3491 && ((inter_unsignedp && inter_prec > inside_prec)
3492 == (final_unsignedp && final_prec > inter_prec))
3493 && ! (inside_ptr && inter_prec != final_prec)
3494 && ! (final_ptr && inside_prec != inter_prec)
3495 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (final_type))
3496 && TYPE_MODE (final_type) == TYPE_MODE (inter_type))
3498 return convert (final_type, TREE_OPERAND (TREE_OPERAND (t, 0), 0));
3501 if (TREE_CODE (TREE_OPERAND (t, 0)) == MODIFY_EXPR
3502 && TREE_CONSTANT (TREE_OPERAND (TREE_OPERAND (t, 0), 1))
3503 /* Detect assigning a bitfield. */
3504 && !(TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 0)) == COMPONENT_REF
3505 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t, 0), 0), 1))))
3507 /* Don't leave an assignment inside a conversion
3508 unless assigning a bitfield. */
3509 tree prev = TREE_OPERAND (t, 0);
3510 TREE_OPERAND (t, 0) = TREE_OPERAND (prev, 1);
3511 /* First do the assignment, then return converted constant. */
3512 t = build (COMPOUND_EXPR, TREE_TYPE (t), prev, fold (t));
3518 TREE_CONSTANT (t) = TREE_CONSTANT (arg0);
3521 return fold_convert (t, arg0);
3523 #if 0 /* This loses on &"foo"[0]. */
3528 /* Fold an expression like: "foo"[2] */
3529 if (TREE_CODE (arg0) == STRING_CST
3530 && TREE_CODE (arg1) == INTEGER_CST
3531 && !TREE_INT_CST_HIGH (arg1)
3532 && (i = TREE_INT_CST_LOW (arg1)) < TREE_STRING_LENGTH (arg0))
3534 t = build_int_2 (TREE_STRING_POINTER (arg0)[i], 0);
3535 TREE_TYPE (t) = TREE_TYPE (TREE_TYPE (arg0));
3536 force_fit_type (t, 0);
3543 if (TREE_CODE (arg0) == CONSTRUCTOR)
3545 tree m = purpose_member (arg1, CONSTRUCTOR_ELTS (arg0));
3552 TREE_CONSTANT (t) = wins;
3558 if (TREE_CODE (arg0) == INTEGER_CST)
3560 HOST_WIDE_INT low, high;
3561 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
3562 TREE_INT_CST_HIGH (arg0),
3563 &low, &high);
3564 t = build_int_2 (low, high);
3565 TREE_TYPE (t) = type;
3567 = (TREE_OVERFLOW (arg0)
3568 | force_fit_type (t, overflow && !TREE_UNSIGNED (type)));
3569 TREE_CONSTANT_OVERFLOW (t)
3570 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg0);
3572 else if (TREE_CODE (arg0) == REAL_CST)
3573 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
3574 TREE_TYPE (t) = type;
3576 else if (TREE_CODE (arg0) == NEGATE_EXPR)
3577 return TREE_OPERAND (arg0, 0);
3579 /* Convert - (a - b) to (b - a) for non-floating-point. */
3580 else if (TREE_CODE (arg0) == MINUS_EXPR && ! FLOAT_TYPE_P (type))
3581 return build (MINUS_EXPR, type, TREE_OPERAND (arg0, 1),
3582 TREE_OPERAND (arg0, 0));
3589 if (TREE_CODE (arg0) == INTEGER_CST)
3591 if (! TREE_UNSIGNED (type)
3592 && TREE_INT_CST_HIGH (arg0) < 0)
3594 HOST_WIDE_INT low, high;
3595 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
3596 TREE_INT_CST_HIGH (arg0),
3597 &low, &high);
3598 t = build_int_2 (low, high);
3599 TREE_TYPE (t) = type;
3601 = (TREE_OVERFLOW (arg0)
3602 | force_fit_type (t, overflow));
3603 TREE_CONSTANT_OVERFLOW (t)
3604 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg0);
3607 else if (TREE_CODE (arg0) == REAL_CST)
3609 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
3610 t = build_real (type,
3611 REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
3613 TREE_TYPE (t) = type;
3615 else if (TREE_CODE (arg0) == ABS_EXPR || TREE_CODE (arg0) == NEGATE_EXPR)
3616 return build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0));
3620 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
3622 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
3623 return build (COMPLEX_EXPR, TREE_TYPE (arg0),
3624 TREE_OPERAND (arg0, 0),
3625 fold (build1 (NEGATE_EXPR,
3626 TREE_TYPE (TREE_TYPE (arg0)),
3627 TREE_OPERAND (arg0, 1))));
3628 else if (TREE_CODE (arg0) == COMPLEX_CST)
3629 return build_complex (type, TREE_OPERAND (arg0, 0),
3630 fold (build1 (NEGATE_EXPR,
3631 TREE_TYPE (TREE_TYPE (arg0)),
3632 TREE_OPERAND (arg0, 1))));
3633 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
3634 return fold (build (TREE_CODE (arg0), type,
3635 fold (build1 (CONJ_EXPR, type,
3636 TREE_OPERAND (arg0, 0))),
3637 fold (build1 (CONJ_EXPR,
3638 type, TREE_OPERAND (arg0, 1)))));
3639 else if (TREE_CODE (arg0) == CONJ_EXPR)
3640 return TREE_OPERAND (arg0, 0);
3646 if (TREE_CODE (arg0) == INTEGER_CST)
3647 t = build_int_2 (~ TREE_INT_CST_LOW (arg0),
3648 ~ TREE_INT_CST_HIGH (arg0));
3649 TREE_TYPE (t) = type;
3650 force_fit_type (t, 0);
3651 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg0);
3652 TREE_CONSTANT_OVERFLOW (t) = TREE_CONSTANT_OVERFLOW (arg0);
3654 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
3655 return TREE_OPERAND (arg0, 0);
3659 /* A + (-B) -> A - B */
3660 if (TREE_CODE (arg1) == NEGATE_EXPR)
3661 return fold (build (MINUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
3662 else if (! FLOAT_TYPE_P (type))
3664 if (integer_zerop (arg1))
3665 return non_lvalue (convert (type, arg0));
3667 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
3668 with a constant, and the two constants have no bits in common,
3669 we should treat this as a BIT_IOR_EXPR since this may produce more
3670 simplifications.
3671 if (TREE_CODE (arg0) == BIT_AND_EXPR
3672 && TREE_CODE (arg1) == BIT_AND_EXPR
3673 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
3674 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
3675 && integer_zerop (const_binop (BIT_AND_EXPR,
3676 TREE_OPERAND (arg0, 1),
3677 TREE_OPERAND (arg1, 1), 0)))
3679 code = BIT_IOR_EXPR;
3683 /* (A * C) + (B * C) -> (A+B) * C. Since we are most concerned
3684 about the case where C is a constant, just try one of the
3685 four possibilities. */
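/* E.g. "x * 3 + y * 3" becomes "(x + y) * 3"; only the case where the
   shared factor is the second operand of each MULT_EXPR is tried
   here.  */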
3687 if (TREE_CODE (arg0) == MULT_EXPR && TREE_CODE (arg1) == MULT_EXPR
3688 && operand_equal_p (TREE_OPERAND (arg0, 1),
3689 TREE_OPERAND (arg1, 1), 0))
3690 return fold (build (MULT_EXPR, type,
3691 fold (build (PLUS_EXPR, type,
3692 TREE_OPERAND (arg0, 0),
3693 TREE_OPERAND (arg1, 0))),
3694 TREE_OPERAND (arg0, 1)));
3696 /* In IEEE floating point, x+0 may not equal x. */
3697 else if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
3699 && real_zerop (arg1))
3700 return non_lvalue (convert (type, arg0));
3702 /* In most languages, can't associate operations on floats
3703 through parentheses. Rather than remember where the parentheses
3704 were, we don't associate floats at all. It shouldn't matter much.
3705 However, associating multiplications is only very slightly
3706 inaccurate, so do that if -ffast-math is specified. */
3707 if (FLOAT_TYPE_P (type)
3708 && ! (flag_fast_math && code == MULT_EXPR))
3711 /* The varsign == -1 cases happen only for addition and subtraction.
3712 It says that the arg that was split was really CON minus VAR.
3713 The rest of the code applies to all associative operations. */
3719 if (split_tree (arg0, code, &var, &con, &varsign))
3723 /* EXPR is (CON-VAR) +- ARG1. */
3724 /* If it is + and VAR==ARG1, return just CONST. */
3725 if (code == PLUS_EXPR && operand_equal_p (var, arg1, 0))
3726 return convert (TREE_TYPE (t), con);
3728 /* If ARG0 is a constant, don't change things around;
3729 instead keep all the constant computations together. */
3731 if (TREE_CONSTANT (arg0))
3734 /* Otherwise return (CON +- ARG1) - VAR. */
3735 t = build (MINUS_EXPR, type,
3736 fold (build (code, type, con, arg1)), var);
3740 /* EXPR is (VAR+CON) +- ARG1. */
3741 /* If it is - and VAR==ARG1, return just CONST. */
3742 if (code == MINUS_EXPR && operand_equal_p (var, arg1, 0))
3743 return convert (TREE_TYPE (t), con);
3745 /* If ARG0 is a constant, don't change things around;
3746 instead keep all the constant computations together. */
3748 if (TREE_CONSTANT (arg0))
3751 /* Otherwise return VAR +- (ARG1 +- CON). */
3752 tem = fold (build (code, type, arg1, con));
3753 t = build (code, type, var, tem);
3755 if (integer_zerop (tem)
3756 && (code == PLUS_EXPR || code == MINUS_EXPR))
3757 return convert (type, var);
3758 /* If we have x +/- (c - d) [c an explicit integer]
3759 change it to x -/+ (d - c) since if d is relocatable
3760 then the latter can be a single immediate insn
3761 and the former cannot. */
3762 if (TREE_CODE (tem) == MINUS_EXPR
3763 && TREE_CODE (TREE_OPERAND (tem, 0)) == INTEGER_CST)
3765 tree tem1 = TREE_OPERAND (tem, 1);
3766 TREE_OPERAND (tem, 1) = TREE_OPERAND (tem, 0);
3767 TREE_OPERAND (tem, 0) = tem1;
3768 TREE_SET_CODE (tem,
3769 (code == PLUS_EXPR ? MINUS_EXPR : PLUS_EXPR));
3775 if (split_tree (arg1, code, &var, &con, &varsign))
3777 if (TREE_CONSTANT (arg1))
3778 break;
3780 if (varsign == -1)
3781 TREE_SET_CODE (t,
3782 (code == PLUS_EXPR ? MINUS_EXPR : PLUS_EXPR));
3784 /* EXPR is ARG0 +- (CON +- VAR). */
3785 if (TREE_CODE (t) == MINUS_EXPR
3786 && operand_equal_p (var, arg0, 0))
3788 /* If VAR and ARG0 cancel, return just CON or -CON. */
3789 if (code == PLUS_EXPR)
3790 return convert (TREE_TYPE (t), con);
3791 return fold (build1 (NEGATE_EXPR, TREE_TYPE (t),
3792 convert (TREE_TYPE (t), con)));
3795 t = build (TREE_CODE (t), type,
3796 fold (build (code, TREE_TYPE (t), arg0, con)), var);
3798 if (integer_zerop (TREE_OPERAND (t, 0))
3799 && TREE_CODE (t) == PLUS_EXPR)
3800 return convert (TREE_TYPE (t), var);
3805 #if defined (REAL_IS_NOT_DOUBLE) && ! defined (REAL_ARITHMETIC)
3806 if (TREE_CODE (arg1) == REAL_CST)
3808 #endif /* REAL_IS_NOT_DOUBLE, and no REAL_ARITHMETIC */
3810 t1 = const_binop (code, arg0, arg1, 0);
3811 if (t1 != NULL_TREE)
3813 /* The return value should always have
3814 the same type as the original expression. */
3815 TREE_TYPE (t1) = TREE_TYPE (t);
3821 if (! FLOAT_TYPE_P (type))
3823 if (! wins && integer_zerop (arg0))
3824 return build1 (NEGATE_EXPR, type, arg1);
3825 if (integer_zerop (arg1))
3826 return non_lvalue (convert (type, arg0));
3828 /* (A * C) - (B * C) -> (A-B) * C. Since we are most concerned
3829 about the case where C is a constant, just try one of the
3830 four possibilities. */
3832 if (TREE_CODE (arg0) == MULT_EXPR && TREE_CODE (arg1) == MULT_EXPR
3833 && operand_equal_p (TREE_OPERAND (arg0, 1),
3834 TREE_OPERAND (arg1, 1), 0))
3835 return fold (build (MULT_EXPR, type,
3836 fold (build (MINUS_EXPR, type,
3837 TREE_OPERAND (arg0, 0),
3838 TREE_OPERAND (arg1, 0))),
3839 TREE_OPERAND (arg0, 1)));
3841 /* Convert A - (-B) to A + B. */
3842 else if (TREE_CODE (arg1) == NEGATE_EXPR)
3843 return fold (build (PLUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
3845 else if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
3848 /* Except with IEEE floating point, 0-x equals -x. */
3849 if (! wins && real_zerop (arg0))
3850 return build1 (NEGATE_EXPR, type, arg1);
3851 /* Except with IEEE floating point, x-0 equals x. */
3852 if (real_zerop (arg1))
3853 return non_lvalue (convert (type, arg0));
3856 /* Fold &x - &x. This can happen from &x.foo - &x.
3857 This is unsafe for certain floats even in non-IEEE formats.
3858 In IEEE, it is unsafe because it does wrong for NaNs.
3859 Also note that operand_equal_p is always false if an operand
3860 is volatile. */
3862 if ((! FLOAT_TYPE_P (type) || flag_fast_math)
3863 && operand_equal_p (arg0, arg1, 0))
3864 return convert (type, integer_zero_node);
3869 if (! FLOAT_TYPE_P (type))
3871 if (integer_zerop (arg1))
3872 return omit_one_operand (type, arg1, arg0);
3873 if (integer_onep (arg1))
3874 return non_lvalue (convert (type, arg0));
3876 /* ((A / C) * C) is A if the division is an
3877 EXACT_DIV_EXPR. Since C is normally a constant,
3878 just check for one of the four possibilities. */
3880 if (TREE_CODE (arg0) == EXACT_DIV_EXPR
3881 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
3882 return TREE_OPERAND (arg0, 0);
3884 /* (a * (1 << b)) is (a << b) */
3885 if (TREE_CODE (arg1) == LSHIFT_EXPR
3886 && integer_onep (TREE_OPERAND (arg1, 0)))
3887 return fold (build (LSHIFT_EXPR, type, arg0,
3888 TREE_OPERAND (arg1, 1)));
3889 if (TREE_CODE (arg0) == LSHIFT_EXPR
3890 && integer_onep (TREE_OPERAND (arg0, 0)))
3891 return fold (build (LSHIFT_EXPR, type, arg1,
3892 TREE_OPERAND (arg0, 1)));
3896 /* x*0 is 0, except for IEEE floating point. */
3897 if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
3899 && real_zerop (arg1))
3900 return omit_one_operand (type, arg1, arg0);
3901 /* In IEEE floating point, x*1 is not equivalent to x for snans.
3902 However, ANSI says we can drop signals,
3903 so we can do this anyway. */
3904 if (real_onep (arg1))
3905 return non_lvalue (convert (type, arg0));
3907 if (! wins && real_twop (arg1))
3909 tree arg = save_expr (arg0);
3910 return build (PLUS_EXPR, type, arg, arg);
3918 register enum tree_code code0, code1;
3920 if (integer_all_onesp (arg1))
3921 return omit_one_operand (type, arg1, arg0);
3922 if (integer_zerop (arg1))
3923 return non_lvalue (convert (type, arg0));
3924 t1 = distribute_bit_expr (code, type, arg0, arg1);
3925 if (t1 != NULL_TREE)
3928 /* (A << C1) | (A >> C2) if A is unsigned and C1+C2 is the size of A
3929 is a rotate of A by C1 bits. */
3930 /* (A << B) | (A >> (Z - B)) if A is unsigned and Z is the size of A
3931 is a rotate of A by B bits. */
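/* E.g. (assuming A is a 32-bit unsigned value): "(A << 3) | (A >> 29)"
   and "(A << n) | (A >> (32 - n))" are each rewritten as a rotate of A.  */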
3933 code0 = TREE_CODE (arg0);
3934 code1 = TREE_CODE (arg1);
3935 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
3936 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
3937 && operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1,0), 0)
3938 && TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
3940 register tree tree01, tree11;
3941 register enum tree_code code01, code11;
3943 tree01 = TREE_OPERAND (arg0, 1);
3944 tree11 = TREE_OPERAND (arg1, 1);
3945 code01 = TREE_CODE (tree01);
3946 code11 = TREE_CODE (tree11);
3947 if (code01 == INTEGER_CST
3948 && code11 == INTEGER_CST
3949 && TREE_INT_CST_HIGH (tree01) == 0
3950 && TREE_INT_CST_HIGH (tree11) == 0
3951 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
3952 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
3953 return build (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
3954 code0 == LSHIFT_EXPR ? tree01 : tree11);
3955 else if (code11 == MINUS_EXPR
3956 && TREE_CODE (TREE_OPERAND (tree11, 0)) == INTEGER_CST
3957 && TREE_INT_CST_HIGH (TREE_OPERAND (tree11, 0)) == 0
3958 && TREE_INT_CST_LOW (TREE_OPERAND (tree11, 0))
3959 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))
3960 && operand_equal_p (tree01, TREE_OPERAND (tree11, 1), 0))
3961 return build (code0 == LSHIFT_EXPR ? LROTATE_EXPR : RROTATE_EXPR,
3962 type, TREE_OPERAND (arg0, 0), tree01);
3963 else if (code01 == MINUS_EXPR
3964 && TREE_CODE (TREE_OPERAND (tree01, 0)) == INTEGER_CST
3965 && TREE_INT_CST_HIGH (TREE_OPERAND (tree01, 0)) == 0
3966 && TREE_INT_CST_LOW (TREE_OPERAND (tree01, 0))
3967 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))
3968 && operand_equal_p (tree11, TREE_OPERAND (tree01, 1), 0))
3969 return build (code0 != LSHIFT_EXPR ? LROTATE_EXPR : RROTATE_EXPR,
3970 type, TREE_OPERAND (arg0, 0), tree11);
3977 if (integer_zerop (arg1))
3978 return non_lvalue (convert (type, arg0));
3979 if (integer_all_onesp (arg1))
3980 return fold (build1 (BIT_NOT_EXPR, type, arg0));
3985 if (integer_all_onesp (arg1))
3986 return non_lvalue (convert (type, arg0));
3987 if (integer_zerop (arg1))
3988 return omit_one_operand (type, arg1, arg0);
3989 t1 = distribute_bit_expr (code, type, arg0, arg1);
3990 if (t1 != NULL_TREE)
3992 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
3993 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == NOP_EXPR
3994 && TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0))))
3996 int prec = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0)));
3997 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
3998 && (~TREE_INT_CST_LOW (arg0)
3999 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
4000 return build1 (NOP_EXPR, type, TREE_OPERAND (arg1, 0));
4002 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
4003 && TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
4005 int prec = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
4006 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
4007 && (~TREE_INT_CST_LOW (arg1)
4008 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
4009 return build1 (NOP_EXPR, type, TREE_OPERAND (arg0, 0));
4013 case BIT_ANDTC_EXPR:
4014 if (integer_all_onesp (arg0))
4015 return non_lvalue (convert (type, arg1));
4016 if (integer_zerop (arg0))
4017 return omit_one_operand (type, arg0, arg1);
4018 if (TREE_CODE (arg1) == INTEGER_CST)
4020 arg1 = fold (build1 (BIT_NOT_EXPR, type, arg1));
4021 code = BIT_AND_EXPR;
4027 /* In most cases, do nothing with a divide by zero. */
4028 #if !defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
4029 #ifndef REAL_INFINITY
4030 if (TREE_CODE (arg1) == REAL_CST && real_zerop (arg1))
4033 #endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
4035 /* In IEEE floating point, x/1 is not equivalent to x for snans.
4036 However, ANSI says we can drop signals, so we can do this anyway. */
4037 if (real_onep (arg1))
4038 return non_lvalue (convert (type, arg0));
4040 /* If ARG1 is a constant, we can convert this to a multiply by the
4041 reciprocal. This does not have the same rounding properties,
4042 so only do this if -ffast-math. We can actually always safely
4043 do it if ARG1 is a power of two, but it's hard to tell if it is
4044 or not in a portable manner. */
4045 if (TREE_CODE (arg1) == REAL_CST && flag_fast_math
4046 && 0 != (tem = const_binop (code, build_real (type, dconst1),
4048 return fold (build (MULT_EXPR, type, arg0, tem));
4052 case TRUNC_DIV_EXPR:
4053 case ROUND_DIV_EXPR:
4054 case FLOOR_DIV_EXPR:
4056 case EXACT_DIV_EXPR:
4057 if (integer_onep (arg1))
4058 return non_lvalue (convert (type, arg0));
4059 if (integer_zerop (arg1))
4062 /* If we have ((a / C1) / C2) where both divisions are the same type, try
4063 to simplify. First see if C1 * C2 overflows or not. */
4064 if (TREE_CODE (arg0) == code && TREE_CODE (arg1) == INTEGER_CST
4065 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
4069 new_divisor = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 1), arg1, 0);
4070 tem = const_binop (FLOOR_DIV_EXPR, new_divisor, arg1, 0);
4072 if (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) == TREE_INT_CST_LOW (tem)
4073 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == TREE_INT_CST_HIGH (tem))
4075 /* If no overflow, divide by C1*C2. */
4076 return fold (build (code, type, TREE_OPERAND (arg0, 0), new_divisor));
4080 /* Look for ((a * C1) / C3) or (((a * C1) + C2) / C3),
4081 where C1 % C3 == 0 or C3 % C1 == 0. We can simplify these
4082 expressions, which often appear in the offsets or sizes of
4083 objects with a varying size. Only deal with positive divisors
4084 and multiplicands. If C2 is negative, we must have C2 % C3 == 0.
4086 Look for NOPs and SAVE_EXPRs inside. */
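/* Worked case (made-up constants): "(a * 8 + 4) / 4" with a
   non-negative "a" becomes "a * 2 + 1", since both 8 and 4 are
   divisible by the divisor 4.  */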
4088 if (TREE_CODE (arg1) == INTEGER_CST
4089 && tree_int_cst_sgn (arg1) >= 0)
4091 int have_save_expr = 0;
4092 tree c2 = integer_zero_node;
4093 tree xarg0 = arg0;
4095 if (TREE_CODE (xarg0) == SAVE_EXPR && SAVE_EXPR_RTL (xarg0) == 0)
4096 have_save_expr = 1, xarg0 = TREE_OPERAND (xarg0, 0);
4100 if (TREE_CODE (xarg0) == PLUS_EXPR
4101 && TREE_CODE (TREE_OPERAND (xarg0, 1)) == INTEGER_CST)
4102 c2 = TREE_OPERAND (xarg0, 1), xarg0 = TREE_OPERAND (xarg0, 0);
4103 else if (TREE_CODE (xarg0) == MINUS_EXPR
4104 && TREE_CODE (TREE_OPERAND (xarg0, 1)) == INTEGER_CST
4105 /* If we are doing this computation unsigned, the negate
4106 is incorrect. */
4107 && ! TREE_UNSIGNED (type))
4109 c2 = fold (build1 (NEGATE_EXPR, type, TREE_OPERAND (xarg0, 1)));
4110 xarg0 = TREE_OPERAND (xarg0, 0);
4113 if (TREE_CODE (xarg0) == SAVE_EXPR && SAVE_EXPR_RTL (xarg0) == 0)
4114 have_save_expr = 1, xarg0 = TREE_OPERAND (xarg0, 0);
4118 if (TREE_CODE (xarg0) == MULT_EXPR
4119 && TREE_CODE (TREE_OPERAND (xarg0, 1)) == INTEGER_CST
4120 && tree_int_cst_sgn (TREE_OPERAND (xarg0, 1)) >= 0
4121 && (integer_zerop (const_binop (TRUNC_MOD_EXPR,
4122 TREE_OPERAND (xarg0, 1), arg1, 1))
4123 || integer_zerop (const_binop (TRUNC_MOD_EXPR, arg1,
4124 TREE_OPERAND (xarg0, 1), 1)))
4125 && (tree_int_cst_sgn (c2) >= 0
4126 || integer_zerop (const_binop (TRUNC_MOD_EXPR, c2,
4129 tree outer_div = integer_one_node;
4130 tree c1 = TREE_OPERAND (xarg0, 1);
4131 tree c3 = arg1;
4133 /* If C3 > C1, set them equal and do a divide by
4134 C3/C1 at the end of the operation. */
4135 if (tree_int_cst_lt (c1, c3))
4136 outer_div = const_binop (code, c3, c1, 0), c3 = c1;
4138 /* The result is A * (C1/C3) + (C2/C3). */
4139 t = fold (build (PLUS_EXPR, type,
4140 fold (build (MULT_EXPR, type,
4141 TREE_OPERAND (xarg0, 0),
4142 const_binop (code, c1, c3, 1))),
4143 const_binop (code, c2, c3, 1)));
4145 if (! integer_onep (outer_div))
4146 t = fold (build (code, type, t, convert (type, outer_div)));
4158 case FLOOR_MOD_EXPR:
4159 case ROUND_MOD_EXPR:
4160 case TRUNC_MOD_EXPR:
4161 if (integer_onep (arg1))
4162 return omit_one_operand (type, integer_zero_node, arg0);
4163 if (integer_zerop (arg1))
4166 /* Look for ((a * C1) % C3) or (((a * C1) + C2) % C3),
4167 where C1 % C3 == 0. Handle similarly to the division case,
4168 but don't bother with SAVE_EXPRs. */
4170 if (TREE_CODE (arg1) == INTEGER_CST
4171 && ! integer_zerop (arg1))
4173 tree c2 = integer_zero_node;
4176 if (TREE_CODE (xarg0) == PLUS_EXPR
4177 && TREE_CODE (TREE_OPERAND (xarg0, 1)) == INTEGER_CST)
4178 c2 = TREE_OPERAND (xarg0, 1), xarg0 = TREE_OPERAND (xarg0, 0);
4179 else if (TREE_CODE (xarg0) == MINUS_EXPR
4180 && TREE_CODE (TREE_OPERAND (xarg0, 1)) == INTEGER_CST
4181 && ! TREE_UNSIGNED (type))
4183 c2 = fold (build1 (NEGATE_EXPR, type, TREE_OPERAND (xarg0, 1)));
4184 xarg0 = TREE_OPERAND (xarg0, 0);
4189 if (TREE_CODE (xarg0) == MULT_EXPR
4190 && TREE_CODE (TREE_OPERAND (xarg0, 1)) == INTEGER_CST
4191 && integer_zerop (const_binop (TRUNC_MOD_EXPR,
4192 TREE_OPERAND (xarg0, 1),
4194 && tree_int_cst_sgn (c2) >= 0)
4195 /* The result is (C2%C3). */
4196 return omit_one_operand (type, const_binop (code, c2, arg1, 1),
4197 TREE_OPERAND (xarg0, 0));
4206 if (integer_zerop (arg1))
4207 return non_lvalue (convert (type, arg0));
4208 /* Since negative shift count is not well-defined,
4209 don't try to compute it in the compiler. */
4210 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
4212 /* Rewrite an LROTATE_EXPR by a constant into an
4213 RROTATE_EXPR by a new constant. */
4214 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
4216 TREE_SET_CODE (t, RROTATE_EXPR);
4217 code = RROTATE_EXPR;
4218 TREE_OPERAND (t, 1) = arg1
4221 convert (TREE_TYPE (arg1),
4222 build_int_2 (GET_MODE_BITSIZE (TYPE_MODE (type)), 0)),
4224 if (tree_int_cst_sgn (arg1) < 0)
4228 /* If we have a rotate of a bit operation with the rotate count and
4229 the second operand of the bit operation both constant,
4230 permute the two operations. */
4231 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
4232 && (TREE_CODE (arg0) == BIT_AND_EXPR
4233 || TREE_CODE (arg0) == BIT_ANDTC_EXPR
4234 || TREE_CODE (arg0) == BIT_IOR_EXPR
4235 || TREE_CODE (arg0) == BIT_XOR_EXPR)
4236 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
4237 return fold (build (TREE_CODE (arg0), type,
4238 fold (build (code, type,
4239 TREE_OPERAND (arg0, 0), arg1)),
4240 fold (build (code, type,
4241 TREE_OPERAND (arg0, 1), arg1))));
4243 /* Two consecutive rotates adding up to the width of the mode can
4245 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
4246 && TREE_CODE (arg0) == RROTATE_EXPR
4247 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
4248 && TREE_INT_CST_HIGH (arg1) == 0
4249 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
4250 && ((TREE_INT_CST_LOW (arg1)
4251 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
4252 == GET_MODE_BITSIZE (TYPE_MODE (type))))
4253 return TREE_OPERAND (arg0, 0);
4258 if (operand_equal_p (arg0, arg1, 0))
4260 if (INTEGRAL_TYPE_P (type)
4261 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), 1))
4262 return omit_one_operand (type, arg1, arg0);
4266 if (operand_equal_p (arg0, arg1, 0))
4268 if (INTEGRAL_TYPE_P (type)
4269 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), 1))
4270 return omit_one_operand (type, arg1, arg0);
4273 case TRUTH_NOT_EXPR:
4274 /* Note that the operand of this must be an int
4275 and its values must be 0 or 1.
4276 ("true" is a fixed value perhaps depending on the language,
4277 but we don't handle values other than 1 correctly yet.) */
4278 tem = invert_truthvalue (arg0);
4279 /* Avoid infinite recursion. */
4280 if (TREE_CODE (tem) == TRUTH_NOT_EXPR)
4282 return convert (type, tem);
4284 case TRUTH_ANDIF_EXPR:
4285 /* Note that the operands of this must be ints
4286 and their values must be 0 or 1.
4287 ("true" is a fixed value perhaps depending on the language.) */
4288 /* If first arg is constant zero, return it. */
4289 if (integer_zerop (arg0))
4291 case TRUTH_AND_EXPR:
4292 /* If either arg is constant true, drop it. */
4293 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
4294 return non_lvalue (arg1);
4295 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
4296 return non_lvalue (arg0);
4297 /* If second arg is constant zero, result is zero, but first arg
4298 must be evaluated. */
4299 if (integer_zerop (arg1))
4300 return omit_one_operand (type, arg1, arg0);
4303 /* We only do these simplifications if we are optimizing. */
4307 /* Check for things like (A || B) && (A || C). We can convert this
4308 to A || (B && C). Note that either operator can be any of the four
4309 truth and/or operations and the transformation will still be
4310 valid. Also note that we only care about order for the
4311 ANDIF and ORIF operators. */
4312 if (TREE_CODE (arg0) == TREE_CODE (arg1)
4313 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
4314 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
4315 || TREE_CODE (arg0) == TRUTH_AND_EXPR
4316 || TREE_CODE (arg0) == TRUTH_OR_EXPR))
4318 tree a00 = TREE_OPERAND (arg0, 0);
4319 tree a01 = TREE_OPERAND (arg0, 1);
4320 tree a10 = TREE_OPERAND (arg1, 0);
4321 tree a11 = TREE_OPERAND (arg1, 1);
4322 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
4323 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
4324 && (code == TRUTH_AND_EXPR
4325 || code == TRUTH_OR_EXPR));
4327 if (operand_equal_p (a00, a10, 0))
4328 return fold (build (TREE_CODE (arg0), type, a00,
4329 fold (build (code, type, a01, a11))));
4330 else if (commutative && operand_equal_p (a00, a11, 0))
4331 return fold (build (TREE_CODE (arg0), type, a00,
4332 fold (build (code, type, a01, a10))));
4333 else if (commutative && operand_equal_p (a01, a10, 0))
4334 return fold (build (TREE_CODE (arg0), type, a01,
4335 fold (build (code, type, a00, a11))));
4337 /* This case if tricky because we must either have commutative
4338 operators or else A10 must not have side-effects. */
4340 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
4341 && operand_equal_p (a01, a11, 0))
4342 return fold (build (TREE_CODE (arg0), type,
4343 fold (build (code, type, a00, a10)),
4347 /* Check for the possibility of merging component references. If our
4348 lhs is another similar operation, try to merge its rhs with our
4349 rhs. Then try to merge our lhs and rhs. */
4350 if (TREE_CODE (arg0) == code
4351 && 0 != (tem = fold_truthop (code, type,
4352 TREE_OPERAND (arg0, 1), arg1)))
4353 return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));
4355 if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
4360 case TRUTH_ORIF_EXPR:
4361 /* Note that the operands of this must be ints
4362 and their values must be 0 or true.
4363 ("true" is a fixed value perhaps depending on the language.) */
4364 /* If first arg is constant true, return it. */
4365 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
4368 /* If either arg is constant zero, drop it. */
4369 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
4370 return non_lvalue (arg1);
4371 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1))
4372 return non_lvalue (arg0);
4373 /* If second arg is constant true, result is true, but we must
4374 evaluate first arg. */
4375 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
4376 return omit_one_operand (type, arg1, arg0);
4379 case TRUTH_XOR_EXPR:
4380 /* If either arg is constant zero, drop it. */
4381 if (integer_zerop (arg0))
4382 return non_lvalue (arg1);
4383 if (integer_zerop (arg1))
4384 return non_lvalue (arg0);
4385 /* If either arg is constant true, this is a logical inversion. */
4386 if (integer_onep (arg0))
4387 return non_lvalue (invert_truthvalue (arg1));
4388 if (integer_onep (arg1))
4389 return non_lvalue (invert_truthvalue (arg0));
4398 /* If one arg is a constant integer, put it last. */
4399 if (TREE_CODE (arg0) == INTEGER_CST
4400 && TREE_CODE (arg1) != INTEGER_CST)
4402 TREE_OPERAND (t, 0) = arg1;
4403 TREE_OPERAND (t, 1) = arg0;
4404 arg0 = TREE_OPERAND (t, 0);
4405 arg1 = TREE_OPERAND (t, 1);
4406 code = swap_tree_comparison (code);
4407 TREE_SET_CODE (t, code);
4410 /* Convert foo++ == CONST into ++foo == CONST + INCR.
4411 First, see if one arg is constant; find the constant arg
4412 and the other one. */
4414 tree constop = 0, varop;
4415 int constopnum = -1;
4417 if (TREE_CONSTANT (arg1))
4418 constopnum = 1, constop = arg1, varop = arg0;
4419 if (TREE_CONSTANT (arg0))
4420 constopnum = 0, constop = arg0, varop = arg1;
4422 if (constop && TREE_CODE (varop) == POSTINCREMENT_EXPR)
4424 /* This optimization is invalid for ordered comparisons
4425 if CONST+INCR overflows or if foo+incr might overflow.
4426 This optimization is invalid for floating point due to rounding.
4427 For pointer types we assume overflow doesn't happen. */
4428 if (TREE_CODE (TREE_TYPE (varop)) == POINTER_TYPE
4429 || (! FLOAT_TYPE_P (TREE_TYPE (varop))
4430 && (code == EQ_EXPR || code == NE_EXPR)))
4433 = fold (build (PLUS_EXPR, TREE_TYPE (varop),
4434 constop, TREE_OPERAND (varop, 1)));
4435 TREE_SET_CODE (varop, PREINCREMENT_EXPR);
4437 /* If VAROP is a reference to a bitfield, we must mask
4438 the constant by the width of the field. */
4439 if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
4440 && DECL_BIT_FIELD(TREE_OPERAND
4441 (TREE_OPERAND (varop, 0), 1)))
4444 = TREE_INT_CST_LOW (DECL_SIZE
4446 (TREE_OPERAND (varop, 0), 1)));
4448 newconst = fold (build (BIT_AND_EXPR,
4449 TREE_TYPE (varop), newconst,
4450 convert (TREE_TYPE (varop),
4451 build_int_2 (size, 0))));
4455 t = build (code, type, TREE_OPERAND (t, 0),
4456 TREE_OPERAND (t, 1));
4457 TREE_OPERAND (t, constopnum) = newconst;
4461 else if (constop && TREE_CODE (varop) == POSTDECREMENT_EXPR)
4463 if (TREE_CODE (TREE_TYPE (varop)) == POINTER_TYPE
4464 || (! FLOAT_TYPE_P (TREE_TYPE (varop))
4465 && (code == EQ_EXPR || code == NE_EXPR)))
4468 = fold (build (MINUS_EXPR, TREE_TYPE (varop),
4469 constop, TREE_OPERAND (varop, 1)));
4470 TREE_SET_CODE (varop, PREDECREMENT_EXPR);
4472 if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
4473 && DECL_BIT_FIELD(TREE_OPERAND
4474 (TREE_OPERAND (varop, 0), 1)))
4477 = TREE_INT_CST_LOW (DECL_SIZE
4479 (TREE_OPERAND (varop, 0), 1)));
4481 newconst = fold (build (BIT_AND_EXPR,
4482 TREE_TYPE (varop), newconst,
4483 convert (TREE_TYPE (varop),
4484 build_int_2 (size, 0))));
4488 t = build (code, type, TREE_OPERAND (t, 0),
4489 TREE_OPERAND (t, 1));
4490 TREE_OPERAND (t, constopnum) = newconst;
4496 /* Change X >= CST to X > (CST - 1) if CST is positive. */
4497 if (TREE_CODE (arg1) == INTEGER_CST
4498 && TREE_CODE (arg0) != INTEGER_CST
4499 && tree_int_cst_sgn (arg1) > 0)
4501 switch (TREE_CODE (t))
4505 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
4506 t = build (code, type, TREE_OPERAND (t, 0), arg1);
4511 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
4512 t = build (code, type, TREE_OPERAND (t, 0), arg1);
4517 /* If this is an EQ or NE comparison with zero and ARG0 is
4518 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
4519 two operations, but the latter can be done in one less insn
4520 one machine that have only two-operand insns or on which a
4521 constant cannot be the first operand. */
4522 if (integer_zerop (arg1) && (code == EQ_EXPR || code == NE_EXPR)
4523 && TREE_CODE (arg0) == BIT_AND_EXPR)
4525 if (TREE_CODE (TREE_OPERAND (arg0, 0)) == LSHIFT_EXPR
4526 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 0), 0)))
4528 fold (build (code, type,
4529 build (BIT_AND_EXPR, TREE_TYPE (arg0),
4531 TREE_TYPE (TREE_OPERAND (arg0, 0)),
4532 TREE_OPERAND (arg0, 1),
4533 TREE_OPERAND (TREE_OPERAND (arg0, 0), 1)),
4534 convert (TREE_TYPE (arg0),
4537 else if (TREE_CODE (TREE_OPERAND (arg0, 1)) == LSHIFT_EXPR
4538 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 1), 0)))
4540 fold (build (code, type,
4541 build (BIT_AND_EXPR, TREE_TYPE (arg0),
4543 TREE_TYPE (TREE_OPERAND (arg0, 1)),
4544 TREE_OPERAND (arg0, 0),
4545 TREE_OPERAND (TREE_OPERAND (arg0, 1), 1)),
4546 convert (TREE_TYPE (arg0),
4551 /* If this is an NE or EQ comparison of zero against the result of a
4552 signed MOD operation whose second operand is a power of 2, make
4553 the MOD operation unsigned since it is simpler and equivalent. */
4554 if ((code == NE_EXPR || code == EQ_EXPR)
4555 && integer_zerop (arg1)
4556 && ! TREE_UNSIGNED (TREE_TYPE (arg0))
4557 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
4558 || TREE_CODE (arg0) == CEIL_MOD_EXPR
4559 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
4560 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
4561 && integer_pow2p (TREE_OPERAND (arg0, 1)))
4563 tree newtype = unsigned_type (TREE_TYPE (arg0));
4564 tree newmod = build (TREE_CODE (arg0), newtype,
4565 convert (newtype, TREE_OPERAND (arg0, 0)),
4566 convert (newtype, TREE_OPERAND (arg0, 1)));
4568 return build (code, type, newmod, convert (newtype, arg1));
4571 /* If this is an NE comparison of zero with an AND of one, remove the
4572 comparison since the AND will give the correct value. */
4573 if (code == NE_EXPR && integer_zerop (arg1)
4574 && TREE_CODE (arg0) == BIT_AND_EXPR
4575 && integer_onep (TREE_OPERAND (arg0, 1)))
4576 return convert (type, arg0);
4578 /* If we have (A & C) == C where C is a power of 2, convert this into
4579 (A & C) != 0. Similarly for NE_EXPR. */
4580 if ((code == EQ_EXPR || code == NE_EXPR)
4581 && TREE_CODE (arg0) == BIT_AND_EXPR
4582 && integer_pow2p (TREE_OPERAND (arg0, 1))
4583 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
4584 return build (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
4585 arg0, integer_zero_node);
4587 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
4588 and similarly for >= into !=. */
4589 if ((code == LT_EXPR || code == GE_EXPR)
4590 && TREE_UNSIGNED (TREE_TYPE (arg0))
4591 && TREE_CODE (arg1) == LSHIFT_EXPR
4592 && integer_onep (TREE_OPERAND (arg1, 0)))
4593 return build (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
4594 build (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
4595 TREE_OPERAND (arg1, 1)),
4596 convert (TREE_TYPE (arg0), integer_zero_node));
4598 else if ((code == LT_EXPR || code == GE_EXPR)
4599 && TREE_UNSIGNED (TREE_TYPE (arg0))
4600 && (TREE_CODE (arg1) == NOP_EXPR
4601 || TREE_CODE (arg1) == CONVERT_EXPR)
4602 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
4603 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
4605 build (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
4606 convert (TREE_TYPE (arg0),
4607 build (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
4608 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1))),
4609 convert (TREE_TYPE (arg0), integer_zero_node));
4611 /* Simplify comparison of something with itself. (For IEEE
4612 floating-point, we can only do some of these simplifications.) */
4613 if (operand_equal_p (arg0, arg1, 0))
4620 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
4622 t = build_int_2 (1, 0);
4623 TREE_TYPE (t) = type;
4627 TREE_SET_CODE (t, code);
4631 /* For NE, we can only do this simplification if integer. */
4632 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
4634 /* ... fall through ... */
4637 t = build_int_2 (0, 0);
4638 TREE_TYPE (t) = type;
4643 /* An unsigned comparison against 0 can be simplified. */
4644 if (integer_zerop (arg1)
4645 && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
4646 || TREE_CODE (TREE_TYPE (arg1)) == POINTER_TYPE)
4647 && TREE_UNSIGNED (TREE_TYPE (arg1)))
4649 switch (TREE_CODE (t))
4653 TREE_SET_CODE (t, NE_EXPR);
4657 TREE_SET_CODE (t, EQ_EXPR);
4660 return omit_one_operand (type,
4661 convert (type, integer_one_node),
4664 return omit_one_operand (type,
4665 convert (type, integer_zero_node),
4670 /* If we are comparing an expression that just has comparisons
4671 of two integer values, arithmetic expressions of those comparisons,
4672 and constants, we can simplify it. There are only three cases
4673 to check: the two values can either be equal, the first can be
4674 greater, or the second can be greater. Fold the expression for
4675 those three values. Since each value must be 0 or 1, we have
4676 eight possibilities, each of which corresponds to the constant 0
4677 or 1 or one of the six possible comparisons.
4679 This handles common cases like (a > b) == 0 but also handles
4680 expressions like ((x > y) - (y > x)) > 0, which supposedly
4681 occur in macroized code. */
4683 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
4685 tree cval1 = 0, cval2 = 0;
4688 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
4689 /* Don't handle degenerate cases here; they should already
4690 have been handled anyway. */
4691 && cval1 != 0 && cval2 != 0
4692 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
4693 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
4694 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
4695 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
4696 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
4698 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
4699 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
4701 /* We can't just pass T to eval_subst in case cval1 or cval2
4702 was the same as ARG1. */
4705 = fold (build (code, type,
4706 eval_subst (arg0, cval1, maxval, cval2, minval),
4709 = fold (build (code, type,
4710 eval_subst (arg0, cval1, maxval, cval2, maxval),
4713 = fold (build (code, type,
4714 eval_subst (arg0, cval1, minval, cval2, maxval),
4717 /* All three of these results should be 0 or 1. Confirm they
4718 are. Then use those values to select the proper code
4721 if ((integer_zerop (high_result)
4722 || integer_onep (high_result))
4723 && (integer_zerop (equal_result)
4724 || integer_onep (equal_result))
4725 && (integer_zerop (low_result)
4726 || integer_onep (low_result)))
4728 /* Make a 3-bit mask with the high-order bit being the
4729 value for `>', the next for '=', and the low for '<'. */
4730 switch ((integer_onep (high_result) * 4)
4731 + (integer_onep (equal_result) * 2)
4732 + integer_onep (low_result))
4736 return omit_one_operand (type, integer_zero_node, arg0);
4757 return omit_one_operand (type, integer_one_node, arg0);
4760 t = build (code, type, cval1, cval2);
4762 return save_expr (t);
4769 /* If this is a comparison of a field, we may be able to simplify it. */
4770 if ((TREE_CODE (arg0) == COMPONENT_REF
4771 || TREE_CODE (arg0) == BIT_FIELD_REF)
4772 && (code == EQ_EXPR || code == NE_EXPR)
4773 /* Handle the constant case even without -O
4774 to make sure the warnings are given. */
4775 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
4777 t1 = optimize_bit_field_compare (code, type, arg0, arg1);
4781 /* If this is a comparison of complex values and either or both
4782 sizes are a COMPLEX_EXPR, it is best to split up the comparisons
4783 and join them with a TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR. This
4784 may prevent needless evaluations. */
4785 if ((code == EQ_EXPR || code == NE_EXPR)
4786 && TREE_CODE (TREE_TYPE (arg0)) == COMPLEX_TYPE
4787 && (TREE_CODE (arg0) == COMPLEX_EXPR
4788 || TREE_CODE (arg1) == COMPLEX_EXPR))
4790 tree subtype = TREE_TYPE (TREE_TYPE (arg0));
4791 tree real0 = fold (build1 (REALPART_EXPR, subtype, arg0));
4792 tree imag0 = fold (build1 (IMAGPART_EXPR, subtype, arg0));
4793 tree real1 = fold (build1 (REALPART_EXPR, subtype, arg1));
4794 tree imag1 = fold (build1 (IMAGPART_EXPR, subtype, arg1));
4796 return fold (build ((code == EQ_EXPR ? TRUTH_ANDIF_EXPR
4799 fold (build (code, type, real0, real1)),
4800 fold (build (code, type, imag0, imag1))));
4803 /* From here on, the only cases we handle are when the result is
4804 known to be a constant.
4806 To compute GT, swap the arguments and do LT.
4807 To compute GE, do LT and invert the result.
4808 To compute LE, swap the arguments, do LT and invert the result.
4809 To compute NE, do EQ and invert the result.
4811 Therefore, the code below must handle only EQ and LT. */
4813 if (code == LE_EXPR || code == GT_EXPR)
4815 tem = arg0, arg0 = arg1, arg1 = tem;
4816 code = swap_tree_comparison (code);
4819 /* Note that it is safe to invert for real values here because we
4820 will check below in the one case that it matters. */
4823 if (code == NE_EXPR || code == GE_EXPR)
4826 code = invert_tree_comparison (code);
4829 /* Compute a result for LT or EQ if args permit;
4830 otherwise return T. */
4831 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
4833 if (code == EQ_EXPR)
4834 t1 = build_int_2 ((TREE_INT_CST_LOW (arg0)
4835 == TREE_INT_CST_LOW (arg1))
4836 && (TREE_INT_CST_HIGH (arg0)
4837 == TREE_INT_CST_HIGH (arg1)),
4840 t1 = build_int_2 ((TREE_UNSIGNED (TREE_TYPE (arg0))
4841 ? INT_CST_LT_UNSIGNED (arg0, arg1)
4842 : INT_CST_LT (arg0, arg1)),
4846 /* Assume a nonexplicit constant cannot equal an explicit one,
4847 since such code would be undefined anyway.
4848 Exception: on sysvr4, using #pragma weak,
4849 a label can come out as 0. */
4850 else if (TREE_CODE (arg1) == INTEGER_CST
4851 && !integer_zerop (arg1)
4852 && TREE_CONSTANT (arg0)
4853 && TREE_CODE (arg0) == ADDR_EXPR
4855 t1 = build_int_2 (0, 0);
4857 /* Two real constants can be compared explicitly. */
4858 else if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
4860 /* If either operand is a NaN, the result is false with two
4861 exceptions: First, an NE_EXPR is true on NaNs, but that case
4862 is already handled correctly since we will be inverting the
4863 result for NE_EXPR. Second, if we had inverted a LE_EXPR
4864 or a GE_EXPR into a LT_EXPR, we must return true so that it
4865 will be inverted into false. */
4867 if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
4868 || REAL_VALUE_ISNAN (TREE_REAL_CST (arg1)))
4869 t1 = build_int_2 (invert && code == LT_EXPR, 0);
4871 else if (code == EQ_EXPR)
4872 t1 = build_int_2 (REAL_VALUES_EQUAL (TREE_REAL_CST (arg0),
4873 TREE_REAL_CST (arg1)),
4876 t1 = build_int_2 (REAL_VALUES_LESS (TREE_REAL_CST (arg0),
4877 TREE_REAL_CST (arg1)),
4881 if (t1 == NULL_TREE)
4885 TREE_INT_CST_LOW (t1) ^= 1;
4887 TREE_TYPE (t1) = type;
4891 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
4892 so all simple results must be passed through pedantic_non_lvalue. */
4893 if (TREE_CODE (arg0) == INTEGER_CST)
4894 return pedantic_non_lvalue
4895 (TREE_OPERAND (t, (integer_zerop (arg0) ? 2 : 1)));
4896 else if (operand_equal_p (arg1, TREE_OPERAND (expr, 2), 0))
4897 return pedantic_omit_one_operand (type, arg1, arg0);
4899 /* If the second operand is zero, invert the comparison and swap
4900 the second and third operands. Likewise if the second operand
4901 is constant and the third is not or if the third operand is
4902 equivalent to the first operand of the comparison. */
4904 if (integer_zerop (arg1)
4905 || (TREE_CONSTANT (arg1) && ! TREE_CONSTANT (TREE_OPERAND (t, 2)))
4906 || (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
4907 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
4908 TREE_OPERAND (t, 2),
4909 TREE_OPERAND (arg0, 1))))
4911 /* See if this can be inverted. If it can't, possibly because
4912 it was a floating-point inequality comparison, don't do
4914 tem = invert_truthvalue (arg0);
4916 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
4918 t = build (code, type, tem,
4919 TREE_OPERAND (t, 2), TREE_OPERAND (t, 1));
4921 arg1 = TREE_OPERAND (t, 2);
4926 /* If we have A op B ? A : C, we may be able to convert this to a
4927 simpler expression, depending on the operation and the values
4928 of B and C. IEEE floating point prevents this though,
4929 because A or B might be -0.0 or a NaN. */
4931 if (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
4932 && (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
4933 || ! FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 0)))
4935 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
4936 arg1, TREE_OPERAND (arg0, 1)))
4938 tree arg2 = TREE_OPERAND (t, 2);
4939 enum tree_code comp_code = TREE_CODE (arg0);
4943 /* If we have A op 0 ? A : -A, this is A, -A, abs (A), or abs (-A),
4944 depending on the comparison operation. */
4945 if ((FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 1)))
4946 ? real_zerop (TREE_OPERAND (arg0, 1))
4947 : integer_zerop (TREE_OPERAND (arg0, 1)))
4948 && TREE_CODE (arg2) == NEGATE_EXPR
4949 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4953 return pedantic_non_lvalue
4954 (fold (build1 (NEGATE_EXPR, type, arg1)));
4956 return pedantic_non_lvalue (convert (type, arg1));
4959 return pedantic_non_lvalue
4960 (convert (type, fold (build1 (ABS_EXPR,
4961 TREE_TYPE (arg1), arg1))));
4964 return pedantic_non_lvalue
4965 (fold (build1 (NEGATE_EXPR, type,
4967 fold (build1 (ABS_EXPR,
4972 /* If this is A != 0 ? A : 0, this is simply A. For ==, it is
4975 if (integer_zerop (TREE_OPERAND (arg0, 1)) && integer_zerop (arg2))
4977 if (comp_code == NE_EXPR)
4978 return pedantic_non_lvalue (convert (type, arg1));
4979 else if (comp_code == EQ_EXPR)
4980 return pedantic_non_lvalue (convert (type, integer_zero_node));
4983 /* If this is A op B ? A : B, this is either A, B, min (A, B),
4984 or max (A, B), depending on the operation. */
4986 if (operand_equal_for_comparison_p (TREE_OPERAND (arg0, 1),
4987 arg2, TREE_OPERAND (arg0, 0)))
4989 tree comp_op0 = TREE_OPERAND (arg0, 0);
4990 tree comp_op1 = TREE_OPERAND (arg0, 1);
4991 tree comp_type = TREE_TYPE (comp_op0);
4996 return pedantic_non_lvalue (convert (type, arg2));
4998 return pedantic_non_lvalue (convert (type, arg1));
5001 return pedantic_non_lvalue
5002 (convert (type, (fold (build (MIN_EXPR, comp_type,
5003 comp_op0, comp_op1)))));
5006 return pedantic_non_lvalue
5007 (convert (type, fold (build (MAX_EXPR, comp_type,
5008 comp_op0, comp_op1))));
5012 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
5013 we might still be able to simplify this. For example,
5014 if C1 is one less or one more than C2, this might have started
5015 out as a MIN or MAX and been transformed by this function.
5016 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
5018 if (INTEGRAL_TYPE_P (type)
5019 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
5020 && TREE_CODE (arg2) == INTEGER_CST)
5024 /* We can replace A with C1 in this case. */
5025 arg1 = convert (type, TREE_OPERAND (arg0, 1));
5026 t = build (code, type, TREE_OPERAND (t, 0), arg1,
5027 TREE_OPERAND (t, 2));
5031 /* If C1 is C2 + 1, this is min(A, C2). */
5032 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type), 1)
5033 && operand_equal_p (TREE_OPERAND (arg0, 1),
5034 const_binop (PLUS_EXPR, arg2,
5035 integer_one_node, 0), 1))
5036 return pedantic_non_lvalue
5037 (fold (build (MIN_EXPR, type, arg1, arg2)));
5041 /* If C1 is C2 - 1, this is min(A, C2). */
5042 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type), 1)
5043 && operand_equal_p (TREE_OPERAND (arg0, 1),
5044 const_binop (MINUS_EXPR, arg2,
5045 integer_one_node, 0), 1))
5046 return pedantic_non_lvalue
5047 (fold (build (MIN_EXPR, type, arg1, arg2)));
5051 /* If C1 is C2 - 1, this is max(A, C2). */
5052 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type), 1)
5053 && operand_equal_p (TREE_OPERAND (arg0, 1),
5054 const_binop (MINUS_EXPR, arg2,
5055 integer_one_node, 0), 1))
5056 return pedantic_non_lvalue
5057 (fold (build (MAX_EXPR, type, arg1, arg2)));
5061 /* If C1 is C2 + 1, this is max(A, C2). */
5062 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type), 1)
5063 && operand_equal_p (TREE_OPERAND (arg0, 1),
5064 const_binop (PLUS_EXPR, arg2,
5065 integer_one_node, 0), 1))
5066 return pedantic_non_lvalue
5067 (fold (build (MAX_EXPR, type, arg1, arg2)));
5072 /* If the second operand is simpler than the third, swap them
5073 since that produces better jump optimization results. */
5074 if ((TREE_CONSTANT (arg1) || TREE_CODE_CLASS (TREE_CODE (arg1)) == 'd'
5075 || TREE_CODE (arg1) == SAVE_EXPR)
5076 && ! (TREE_CONSTANT (TREE_OPERAND (t, 2))
5077 || TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (t, 2))) == 'd'
5078 || TREE_CODE (TREE_OPERAND (t, 2)) == SAVE_EXPR))
5080 /* See if this can be inverted. If it can't, possibly because
5081 it was a floating-point inequality comparison, don't do
5083 tem = invert_truthvalue (arg0);
5085 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
5087 t = build (code, type, tem,
5088 TREE_OPERAND (t, 2), TREE_OPERAND (t, 1));
5090 arg1 = TREE_OPERAND (t, 2);
5095 /* Convert A ? 1 : 0 to simply A. */
5096 if (integer_onep (TREE_OPERAND (t, 1))
5097 && integer_zerop (TREE_OPERAND (t, 2))
5098 /* If we try to convert TREE_OPERAND (t, 0) to our type, the
5099 call to fold will try to move the conversion inside
5100 a COND, which will recurse. In that case, the COND_EXPR
5101 is probably the best choice, so leave it alone. */
5102 && type == TREE_TYPE (arg0))
5103 return pedantic_non_lvalue (arg0);
5105 /* Look for expressions of the form A & 2 ? 2 : 0. The result of this
5106 operation is simply A & 2. */
5108 if (integer_zerop (TREE_OPERAND (t, 2))
5109 && TREE_CODE (arg0) == NE_EXPR
5110 && integer_zerop (TREE_OPERAND (arg0, 1))
5111 && integer_pow2p (arg1)
5112 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
5113 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
5115 return pedantic_non_lvalue (convert (type, TREE_OPERAND (arg0, 0)));
5120 /* When pedantic, a compound expression can be neither an lvalue
5121 nor an integer constant expression. */
5122 if (TREE_SIDE_EFFECTS (arg0) || pedantic)
5124 /* Don't let (0, 0) be null pointer constant. */
5125 if (integer_zerop (arg1))
5126 return non_lvalue (arg1);
5131 return build_complex (type, arg0, arg1);
5135 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
5137 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
5138 return omit_one_operand (type, TREE_OPERAND (arg0, 0),
5139 TREE_OPERAND (arg0, 1));
5140 else if (TREE_CODE (arg0) == COMPLEX_CST)
5141 return TREE_REALPART (arg0);
5142 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
5143 return fold (build (TREE_CODE (arg0), type,
5144 fold (build1 (REALPART_EXPR, type,
5145 TREE_OPERAND (arg0, 0))),
5146 fold (build1 (REALPART_EXPR,
5147 type, TREE_OPERAND (arg0, 1)))));
5151 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
5152 return convert (type, integer_zero_node);
5153 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
5154 return omit_one_operand (type, TREE_OPERAND (arg0, 1),
5155 TREE_OPERAND (arg0, 0));
5156 else if (TREE_CODE (arg0) == COMPLEX_CST)
5157 return TREE_IMAGPART (arg0);
5158 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
5159 return fold (build (TREE_CODE (arg0), type,
5160 fold (build1 (IMAGPART_EXPR, type,
5161 TREE_OPERAND (arg0, 0))),
5162 fold (build1 (IMAGPART_EXPR, type,
5163 TREE_OPERAND (arg0, 1)))));
5166 /* Pull arithmetic ops out of the CLEANUP_POINT_EXPR where
5168 case CLEANUP_POINT_EXPR:
5169 if (! TREE_SIDE_EFFECTS (arg0))
5170 return TREE_OPERAND (t, 0);
5173 enum tree_code code0 = TREE_CODE (arg0);
5174 int kind0 = TREE_CODE_CLASS (code0);
5175 tree arg00 = TREE_OPERAND (arg0, 0);
5178 if (kind0 == '1' || code0 == TRUTH_NOT_EXPR)
5179 return fold (build1 (code0, type,
5180 fold (build1 (CLEANUP_POINT_EXPR,
5181 TREE_TYPE (arg00), arg00))));
5183 if (kind0 == '<' || kind0 == '2'
5184 || code0 == TRUTH_ANDIF_EXPR || code0 == TRUTH_ORIF_EXPR
5185 || code0 == TRUTH_AND_EXPR || code0 == TRUTH_OR_EXPR
5186 || code0 == TRUTH_XOR_EXPR)
5188 arg01 = TREE_OPERAND (arg0, 1);
5190 if (! TREE_SIDE_EFFECTS (arg00))
5191 return fold (build (code0, type, arg00,
5192 fold (build1 (CLEANUP_POINT_EXPR,
5193 TREE_TYPE (arg01), arg01))));
5195 if (! TREE_SIDE_EFFECTS (arg01))
5196 return fold (build (code0, type,
5197 fold (build1 (CLEANUP_POINT_EXPR,
5198 TREE_TYPE (arg00), arg00)),
5207 } /* switch (code) */