1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987, 88, 92, 93, 94, 95, 1996 Free Software Foundation, Inc.
4 This file is part of GNU CC.
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
21 /*@@ This file should be rewritten to use an arbitrary precision
22 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
23 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
24 @@ The routines that translate from the ap rep should
25 @@ warn if precision et. al. is lost.
26 @@ This would also make life easier when this technology is used
27 @@ for cross-compilers. */
30 /* The entry points in this file are fold, size_int and size_binop.
32 fold takes a tree as argument and returns a simplified tree.
34 size_binop takes a tree code for an arithmetic operation
35 and two operands that are trees, and produces a tree for the
36 result, assuming the type comes from `sizetype'.
38 size_int takes an integer value, and creates a tree constant
39 with type from `sizetype'. */
47 /* Handle floating overflow for `const_binop'. */
48 static jmp_buf float_error;
50 static void encode PROTO((HOST_WIDE_INT *, HOST_WIDE_INT, HOST_WIDE_INT));
51 static void decode PROTO((HOST_WIDE_INT *, HOST_WIDE_INT *, HOST_WIDE_INT *));
52 int div_and_round_double PROTO((enum tree_code, int, HOST_WIDE_INT,
53 HOST_WIDE_INT, HOST_WIDE_INT,
54 HOST_WIDE_INT, HOST_WIDE_INT *,
55 HOST_WIDE_INT *, HOST_WIDE_INT *,
57 static int split_tree PROTO((tree, enum tree_code, tree *, tree *, int *));
58 static tree const_binop PROTO((enum tree_code, tree, tree, int));
59 static tree fold_convert PROTO((tree, tree));
60 static enum tree_code invert_tree_comparison PROTO((enum tree_code));
61 static enum tree_code swap_tree_comparison PROTO((enum tree_code));
62 static int truth_value_p PROTO((enum tree_code));
63 static int operand_equal_for_comparison_p PROTO((tree, tree, tree));
64 static int twoval_comparison_p PROTO((tree, tree *, tree *, int *));
65 static tree eval_subst PROTO((tree, tree, tree, tree, tree));
66 static tree omit_one_operand PROTO((tree, tree, tree));
67 static tree pedantic_omit_one_operand PROTO((tree, tree, tree));
68 static tree distribute_bit_expr PROTO((enum tree_code, tree, tree, tree));
69 static tree make_bit_field_ref PROTO((tree, tree, int, int, int));
70 static tree optimize_bit_field_compare PROTO((enum tree_code, tree,
72 static tree decode_field_reference PROTO((tree, int *, int *,
73 enum machine_mode *, int *,
74 int *, tree *, tree *));
75 static int all_ones_mask_p PROTO((tree, int));
76 static int simple_operand_p PROTO((tree));
77 static tree range_test PROTO((enum tree_code, tree, enum tree_code,
78 enum tree_code, tree, tree, tree));
79 static tree unextend PROTO((tree, int, int, tree));
80 static tree fold_truthop PROTO((enum tree_code, tree, tree, tree));
81 static tree strip_compound_expr PROTO((tree, tree));
87 /* Suppose A1 + B1 = SUM1, using 2's complement arithmetic ignoring overflow.
88 Suppose A, B and SUM have the same respective signs as A1, B1, and SUM1.
89 Then this yields nonzero if overflow occurred during the addition.
90 Overflow occurs if A and B have the same sign, but A and SUM differ in sign.
91 Use `^' to test whether signs differ, and `< 0' to isolate the sign. */
92 #define overflow_sum_sign(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
94 /* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
95 We do that by representing the two-word integer in 4 words, with only
96 HOST_BITS_PER_WIDE_INT/2 bits stored in each word, as a positive number. */
/* NOTE(review): the `#define LOWPART(x) \' line itself is missing from this
   excerpt; only its replacement text (the next line) survives.  */
99 ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT/2)) - 1))
100 #define HIGHPART(x) \
101 ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT/2)
/* BASE = 2**(HOST_BITS_PER_WIDE_INT/2): the radix of the 4-digit
   representation used by encode/decode below.  */
102 #define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT/2)
104 /* Unpack a two-word integer into 4 words.
105 LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
106 WORDS points to the array of HOST_WIDE_INTs. */
/* Each output word holds one base-BASE digit (least significant first),
   stored as a nonnegative HOST_WIDE_INT so digit arithmetic never
   overflows or sign-extends.  Inverse of decode below.  */
109 encode (words, low, hi)
110 HOST_WIDE_INT *words;
111 HOST_WIDE_INT low, hi;
113 words[0] = LOWPART (low);
114 words[1] = HIGHPART (low);
115 words[2] = LOWPART (hi);
116 words[3] = HIGHPART (hi);
119 /* Pack an array of 4 words into a two-word integer.
120 WORDS points to the array of words.
121 The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces. */
/* Inverse of encode: recombines the four half-width digits; the `* BASE'
   shifts the high digit of each word back into position.  */
124 decode (words, low, hi)
125 HOST_WIDE_INT *words;
126 HOST_WIDE_INT *low, *hi;
128 *low = words[0] | words[1] * BASE;
129 *hi = words[2] | words[3] * BASE;
132 /* Make the integer constant T valid for its type
133 by setting to 0 or 1 all the bits in the constant
134 that don't belong in the type.
135 Yield 1 if a signed overflow occurs, 0 otherwise.
136 If OVERFLOW is nonzero, a signed overflow has already occurred
137 in calculating T, so propagate it.
139 Make the real constant T valid for its type by calling CHECK_FLOAT_VALUE,
143 force_fit_type (t, overflow)
/* LOW/HIGH remember the original value so the final expression can
   detect whether truncation/sign-extension changed T.  */
147 HOST_WIDE_INT low, high;
/* Real constants: only range-check via CHECK_FLOAT_VALUE if the
   target defines it; no bit twiddling applies.  */
150 if (TREE_CODE (t) == REAL_CST)
152 #ifdef CHECK_FLOAT_VALUE
153 CHECK_FLOAT_VALUE (TYPE_MODE (TREE_TYPE (t)), TREE_REAL_CST (t),
159 else if (TREE_CODE (t) != INTEGER_CST)
162 low = TREE_INT_CST_LOW (t);
163 high = TREE_INT_CST_HIGH (t);
165 if (TREE_CODE (TREE_TYPE (t)) == POINTER_TYPE)
168 prec = TYPE_PRECISION (TREE_TYPE (t));
170 /* First clear all bits that are beyond the type's precision. */
172 if (prec == 2 * HOST_BITS_PER_WIDE_INT)
174 else if (prec > HOST_BITS_PER_WIDE_INT)
176 TREE_INT_CST_HIGH (t)
177 &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
181 TREE_INT_CST_HIGH (t) = 0;
182 if (prec < HOST_BITS_PER_WIDE_INT)
183 TREE_INT_CST_LOW (t) &= ~((HOST_WIDE_INT) (-1) << prec);
186 /* Unsigned types do not suffer sign extension or overflow. */
187 if (TREE_UNSIGNED (TREE_TYPE (t)))
190 /* If the value's sign bit is set, extend the sign. */
191 if (prec != 2 * HOST_BITS_PER_WIDE_INT
192 && (prec > HOST_BITS_PER_WIDE_INT
193 ? (TREE_INT_CST_HIGH (t)
194 & ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)))
195 : TREE_INT_CST_LOW (t) & ((HOST_WIDE_INT) 1 << (prec - 1))))
197 /* Value is negative:
198 set to 1 all the bits that are outside this type's precision. */
199 if (prec > HOST_BITS_PER_WIDE_INT)
201 TREE_INT_CST_HIGH (t)
202 |= ((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
206 TREE_INT_CST_HIGH (t) = -1;
207 if (prec < HOST_BITS_PER_WIDE_INT)
208 TREE_INT_CST_LOW (t) |= ((HOST_WIDE_INT) (-1) << prec);
212 /* Yield nonzero if signed overflow occurred. */
/* Overflow iff the caller said so, or either word changed under
   the masking above (value did not fit the signed type).  */
214 ((overflow | (low ^ TREE_INT_CST_LOW (t)) | (high ^ TREE_INT_CST_HIGH (t)))
218 /* Add two doubleword integers with doubleword result.
219 Each argument is given as two `HOST_WIDE_INT' pieces.
220 One argument is L1 and H1; the other, L2 and H2.
221 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
/* Returns nonzero on signed overflow (via overflow_sum_sign on the
   high words).  */
224 add_double (l1, h1, l2, h2, lv, hv)
225 HOST_WIDE_INT l1, h1, l2, h2;
226 HOST_WIDE_INT *lv, *hv;
/* Unsigned wraparound of the low word (l < l1) is the carry into
   the high word.  */
231 h = h1 + h2 + ((unsigned HOST_WIDE_INT) l < l1);
235 return overflow_sum_sign (h1, h2, h);
238 /* Negate a doubleword integer with doubleword result.
239 Return nonzero if the operation overflows, assuming it's signed.
240 The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
241 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
244 neg_double (l1, h1, lv, hv)
245 HOST_WIDE_INT l1, h1;
246 HOST_WIDE_INT *lv, *hv;
/* Overflow only for the most negative value, whose negation has the
   same sign bit as the input: both *hv and h1 negative.  */
252 return (*hv & h1) < 0;
262 /* Multiply two doubleword integers with doubleword result.
263 Return nonzero if the operation overflows, assuming it's signed.
264 Each argument is given as two `HOST_WIDE_INT' pieces.
265 One argument is L1 and H1; the other, L2 and H2.
266 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
/* Classic schoolbook multiply on the 4-digit base-BASE representation;
   the low 4 digits of the 8-digit product are the result, the high 4
   are used only for the overflow check.  */
269 mul_double (l1, h1, l2, h2, lv, hv)
270 HOST_WIDE_INT l1, h1, l2, h2;
271 HOST_WIDE_INT *lv, *hv;
273 HOST_WIDE_INT arg1[4];
274 HOST_WIDE_INT arg2[4];
275 HOST_WIDE_INT prod[4 * 2];
276 register unsigned HOST_WIDE_INT carry;
277 register int i, j, k;
278 HOST_WIDE_INT toplow, tophigh, neglow, neghigh;
280 encode (arg1, l1, h1);
281 encode (arg2, l2, h2);
283 bzero ((char *) prod, sizeof prod);
285 for (i = 0; i < 4; i++)
288 for (j = 0; j < 4; j++)
291 /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000. */
292 carry += arg1[i] * arg2[j];
293 /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF. */
295 prod[k] = LOWPART (carry);
296 carry = HIGHPART (carry);
301 decode (prod, lv, hv); /* This ignores prod[4] through prod[4*2-1] */
303 /* Check for overflow by calculating the top half of the answer in full;
304 it should agree with the low half's sign bit. */
305 decode (prod+4, &toplow, &tophigh);
/* Correct the unsigned top half for negative operands: subtract the
   other operand once for each negative input (two's-complement fixup).  */
308 neg_double (l2, h2, &neglow, &neghigh);
309 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
313 neg_double (l1, h1, &neglow, &neghigh);
314 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
/* No overflow iff the top half is all copies of the result's sign bit:
   all ones when *hv < 0, all zeros otherwise.  */
316 return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
319 /* Shift the doubleword integer in L1, H1 left by COUNT places
320 keeping only PREC bits of result.
321 Shift right if COUNT is negative.
322 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
323 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
326 lshift_double (l1, h1, count, prec, lv, hv, arith)
327 HOST_WIDE_INT l1, h1, count;
329 HOST_WIDE_INT *lv, *hv;
/* Negative count: delegate to the right-shift routine.  */
334 rshift_double (l1, h1, - count, prec, lv, hv, arith);
338 #ifdef SHIFT_COUNT_TRUNCATED
339 if (SHIFT_COUNT_TRUNCATED)
343 if (count >= HOST_BITS_PER_WIDE_INT)
345 *hv = (unsigned HOST_WIDE_INT) l1 << count - HOST_BITS_PER_WIDE_INT;
/* The two-step `>> (W - count - 1) >> 1' avoids the undefined
   behavior of shifting by a full word width when count == 0.  */
350 *hv = (((unsigned HOST_WIDE_INT) h1 << count)
351 | ((unsigned HOST_WIDE_INT) l1 >> HOST_BITS_PER_WIDE_INT - count - 1 >> 1));
352 *lv = (unsigned HOST_WIDE_INT) l1 << count;
356 /* Shift the doubleword integer in L1, H1 right by COUNT places
357 keeping only PREC bits of result. COUNT must be positive.
358 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
359 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
362 rshift_double (l1, h1, count, prec, lv, hv, arith)
363 HOST_WIDE_INT l1, h1, count;
365 HOST_WIDE_INT *lv, *hv;
/* SIGNMASK is all ones for an arithmetic shift of a negative value,
   zero otherwise; it supplies the bits vacated at the top.  */
368 unsigned HOST_WIDE_INT signmask;
370 ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
373 #ifdef SHIFT_COUNT_TRUNCATED
374 if (SHIFT_COUNT_TRUNCATED)
378 if (count >= HOST_BITS_PER_WIDE_INT)
/* Split shifts (`<< x - 1 << 1') again dodge full-width shifts,
   which are undefined in C.  */
381 *lv = ((signmask << 2 * HOST_BITS_PER_WIDE_INT - count - 1 << 1)
382 | ((unsigned HOST_WIDE_INT) h1 >> count - HOST_BITS_PER_WIDE_INT));
386 *lv = (((unsigned HOST_WIDE_INT) l1 >> count)
387 | ((unsigned HOST_WIDE_INT) h1 << HOST_BITS_PER_WIDE_INT - count - 1 << 1));
388 *hv = ((signmask << HOST_BITS_PER_WIDE_INT - count)
389 | ((unsigned HOST_WIDE_INT) h1 >> count));
393 /* Rotate the doubleword integer in L1, H1 left by COUNT places
394 keeping only PREC bits of result.
395 Rotate right if COUNT is negative.
396 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
/* A rotate is the OR of a left shift by COUNT and a logical right
   shift by PREC - COUNT.  */
399 lrotate_double (l1, h1, count, prec, lv, hv)
400 HOST_WIDE_INT l1, h1, count;
402 HOST_WIDE_INT *lv, *hv;
404 HOST_WIDE_INT s1l, s1h, s2l, s2h;
410 lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
411 rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
416 /* Rotate the doubleword integer in L1, H1 left by COUNT places
417 keeping only PREC bits of result. COUNT must be positive.
418 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
/* NOTE(review): the comment above says "left", but this routine
   right-shifts by COUNT -- it performs a RIGHT rotate; the comment
   presumably should read "right".  */
421 rrotate_double (l1, h1, count, prec, lv, hv)
422 HOST_WIDE_INT l1, h1, count;
424 HOST_WIDE_INT *lv, *hv;
426 HOST_WIDE_INT s1l, s1h, s2l, s2h;
432 rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
433 lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
438 /* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
439 for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
440 CODE is a tree code for a kind of division, one of
441 TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
443 It controls how the quotient is rounded to a integer.
444 Return nonzero if the operation overflows.
445 UNS nonzero says do unsigned division. */
448 div_and_round_double (code, uns,
449 lnum_orig, hnum_orig, lden_orig, hden_orig,
450 lquo, hquo, lrem, hrem)
453 HOST_WIDE_INT lnum_orig, hnum_orig; /* num == numerator == dividend */
454 HOST_WIDE_INT lden_orig, hden_orig; /* den == denominator == divisor */
455 HOST_WIDE_INT *lquo, *hquo, *lrem, *hrem;
458 HOST_WIDE_INT num[4 + 1]; /* extra element for scaling. */
459 HOST_WIDE_INT den[4], quo[4];
461 unsigned HOST_WIDE_INT work;
462 register unsigned HOST_WIDE_INT carry = 0;
463 HOST_WIDE_INT lnum = lnum_orig;
464 HOST_WIDE_INT hnum = hnum_orig;
465 HOST_WIDE_INT lden = lden_orig;
466 HOST_WIDE_INT hden = hden_orig;
469 if ((hden == 0) && (lden == 0))
472 /* calculate quotient sign and convert operands to unsigned. */
478 /* (minimum integer) / (-1) is the only overflow case. */
479 if (neg_double (lnum, hnum, &lnum, &hnum) && (lden & hden) == -1)
485 neg_double (lden, hden, &lden, &hden);
489 if (hnum == 0 && hden == 0)
490 { /* single precision */
492 /* This unsigned division rounds toward zero. */
493 *lquo = lnum / (unsigned HOST_WIDE_INT) lden;
498 { /* trivial case: dividend < divisor */
499 /* hden != 0 already checked. */
506 bzero ((char *) quo, sizeof quo);
508 bzero ((char *) num, sizeof num); /* to zero 9th element */
509 bzero ((char *) den, sizeof den);
511 encode (num, lnum, hnum);
512 encode (den, lden, hden);
514 /* Special code for when the divisor < BASE. */
515 if (hden == 0 && lden < BASE)
517 /* hnum != 0 already checked. */
518 for (i = 4 - 1; i >= 0; i--)
520 work = num[i] + carry * BASE;
521 quo[i] = work / (unsigned HOST_WIDE_INT) lden;
522 carry = work % (unsigned HOST_WIDE_INT) lden;
527 /* Full double precision division,
528 with thanks to Don Knuth's "Seminumerical Algorithms". */
529 int num_hi_sig, den_hi_sig;
530 unsigned HOST_WIDE_INT quo_est, scale;
532 /* Find the highest non-zero divisor digit. */
533 for (i = 4 - 1; ; i--)
539 /* Insure that the first digit of the divisor is at least BASE/2.
540 This is required by the quotient digit estimation algorithm. */
542 scale = BASE / (den[den_hi_sig] + 1);
543 if (scale > 1) { /* scale divisor and dividend */
545 for (i = 0; i <= 4 - 1; i++) {
546 work = (num[i] * scale) + carry;
547 num[i] = LOWPART (work);
548 carry = HIGHPART (work);
551 for (i = 0; i <= 4 - 1; i++) {
552 work = (den[i] * scale) + carry;
553 den[i] = LOWPART (work);
554 carry = HIGHPART (work);
555 if (den[i] != 0) den_hi_sig = i;
562 for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--) {
563 /* guess the next quotient digit, quo_est, by dividing the first
564 two remaining dividend digits by the high order quotient digit.
565 quo_est is never low and is at most 2 high. */
566 unsigned HOST_WIDE_INT tmp;
568 num_hi_sig = i + den_hi_sig + 1;
569 work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
570 if (num[num_hi_sig] != den[den_hi_sig])
571 quo_est = work / den[den_hi_sig];
575 /* refine quo_est so it's usually correct, and at most one high. */
576 tmp = work - quo_est * den[den_hi_sig];
578 && den[den_hi_sig - 1] * quo_est > (tmp * BASE + num[num_hi_sig - 2]))
581 /* Try QUO_EST as the quotient digit, by multiplying the
582 divisor by QUO_EST and subtracting from the remaining dividend.
583 Keep in mind that QUO_EST is the I - 1st digit. */
586 for (j = 0; j <= den_hi_sig; j++)
588 work = quo_est * den[j] + carry;
589 carry = HIGHPART (work);
590 work = num[i + j] - LOWPART (work);
591 num[i + j] = LOWPART (work);
592 carry += HIGHPART (work) != 0;
595 /* if quo_est was high by one, then num[i] went negative and
596 we need to correct things. */
598 if (num[num_hi_sig] < carry)
601 carry = 0; /* add divisor back in */
602 for (j = 0; j <= den_hi_sig; j++)
604 work = num[i + j] + den[j] + carry;
605 carry = HIGHPART (work);
606 num[i + j] = LOWPART (work);
608 num [num_hi_sig] += carry;
611 /* store the quotient digit. */
616 decode (quo, lquo, hquo);
619 /* if result is negative, make it so. */
621 neg_double (*lquo, *hquo, lquo, hquo);
623 /* compute trial remainder: rem = num - (quo * den) */
624 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
625 neg_double (*lrem, *hrem, lrem, hrem);
626 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
631 case TRUNC_MOD_EXPR: /* round toward zero */
632 case EXACT_DIV_EXPR: /* for this one, it shouldn't matter */
636 case FLOOR_MOD_EXPR: /* round toward negative infinity */
637 if (quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio < 0 && rem != 0 */
640 add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
643 else return overflow;
647 case CEIL_MOD_EXPR: /* round toward positive infinity */
648 if (!quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio > 0 && rem != 0 */
650 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
653 else return overflow;
657 case ROUND_MOD_EXPR: /* round to closest integer */
659 HOST_WIDE_INT labs_rem = *lrem, habs_rem = *hrem;
660 HOST_WIDE_INT labs_den = lden, habs_den = hden, ltwice, htwice;
662 /* get absolute values */
663 if (*hrem < 0) neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
664 if (hden < 0) neg_double (lden, hden, &labs_den, &habs_den);
666 /* if (2 * abs (lrem) >= abs (lden)) */
/* FIX(review): `&ltwice' had been mangled to `<wice' (an HTML
   `&lt' artifact); restored to match &htwice and the declarations
   of ltwice/htwice above.  */
667 mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
668 labs_rem, habs_rem, &ltwice, &htwice);
669 if (((unsigned HOST_WIDE_INT) habs_den
670 < (unsigned HOST_WIDE_INT) htwice)
671 || (((unsigned HOST_WIDE_INT) habs_den
672 == (unsigned HOST_WIDE_INT) htwice)
/* FIX(review): `(HOST_WIDE_INT unsigned)' is not valid C; the
   qualifier must precede the type name as in the three sibling
   casts in this comparison chain.  */
673 && ((unsigned HOST_WIDE_INT) labs_den
674 < (unsigned HOST_WIDE_INT) ltwice)))
678 add_double (*lquo, *hquo,
679 (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
682 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
685 else return overflow;
693 /* compute true remainder: rem = num - (quo * den) */
694 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
695 neg_double (*lrem, *hrem, lrem, hrem);
696 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
700 #ifndef REAL_ARITHMETIC
701 /* Effectively truncate a real value to represent the nearest possible value
702 in a narrower mode. The result is actually represented in the same data
703 type as the argument, but its value is usually different.
705 A trap may occur during the FP operations and it is the responsibility
706 of the calling function to have a handler established. */
/* Thin wrapper over the target macro REAL_VALUE_TRUNCATE; compiled only
   when the host does arithmetic in native floats (no REAL_ARITHMETIC).  */
709 real_value_truncate (mode, arg)
710 enum machine_mode mode;
713 return REAL_VALUE_TRUNCATE (mode, arg);
716 #if TARGET_FLOAT_FORMAT == IEEE_FLOAT_FORMAT
718 /* Check for infinity in an IEEE double precision number. */
/* NOTE(review): the function headers in this region are missing from the
   excerpt; each predicate type-puns a double through a union with
   big-endian and little-endian bit-field layouts and selects the layout
   at run time via the sign-bit probe below.  */
724 /* The IEEE 64-bit double format. */
729 unsigned exponent : 11;
730 unsigned mantissa1 : 20;
735 unsigned mantissa1 : 20;
736 unsigned exponent : 11;
/* Endianness probe: decides which union layout matches this host.  */
742 if (u.big_endian.sign == 1)
/* Infinity: maximum exponent (2047) with a zero mantissa.  */
745 return (u.big_endian.exponent == 2047
746 && u.big_endian.mantissa1 == 0
747 && u.big_endian.mantissa2 == 0);
752 return (u.little_endian.exponent == 2047
753 && u.little_endian.mantissa1 == 0
754 && u.little_endian.mantissa2 == 0);
758 /* Check whether an IEEE double precision number is a NaN. */
764 /* The IEEE 64-bit double format. */
769 unsigned exponent : 11;
770 unsigned mantissa1 : 20;
775 unsigned mantissa1 : 20;
776 unsigned exponent : 11;
782 if (u.big_endian.sign == 1)
/* NaN: maximum exponent (2047) with a nonzero mantissa.  */
785 return (u.big_endian.exponent == 2047
786 && (u.big_endian.mantissa1 != 0
787 || u.big_endian.mantissa2 != 0));
792 return (u.little_endian.exponent == 2047
793 && (u.little_endian.mantissa1 != 0
794 || u.little_endian.mantissa2 != 0));
798 /* Check for a negative IEEE double precision number. */
804 /* The IEEE 64-bit double format. */
809 unsigned exponent : 11;
810 unsigned mantissa1 : 20;
815 unsigned mantissa1 : 20;
816 unsigned exponent : 11;
822 if (u.big_endian.sign == 1)
825 return u.big_endian.sign;
830 return u.little_endian.sign;
833 #else /* Target not IEEE */
835 /* Let's assume other float formats don't have infinity.
836 (This can be overridden by redefining REAL_VALUE_ISINF.) */
844 /* Let's assume other float formats don't have NaNs.
845 (This can be overridden by redefining REAL_VALUE_ISNAN.) */
853 /* Let's assume other float formats don't have minus zero.
854 (This can be overridden by redefining REAL_VALUE_NEGATIVE.) */
861 #endif /* Target not IEEE */
862 #endif /* no REAL_ARITHMETIC */
864 /* Split a tree IN into a constant and a variable part
865 that could be combined with CODE to make IN.
866 CODE must be a commutative arithmetic operation.
867 Store the constant part into *CONP and the variable in &VARP.
868 Return 1 if this was done; zero means the tree IN did not decompose
871 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.
872 Therefore, we must tell the caller whether the variable part
873 was subtracted. We do this by storing 1 or -1 into *VARSIGNP.
874 The value stored is the coefficient for the variable term.
875 The constant term we return should always be added;
876 we negate it if necessary. */
879 split_tree (in, code, varp, conp, varsignp)
885 register tree outtype = TREE_TYPE (in);
889 /* Strip any conversions that don't change the machine mode. */
890 while ((TREE_CODE (in) == NOP_EXPR
891 || TREE_CODE (in) == CONVERT_EXPR)
892 && (TYPE_MODE (TREE_TYPE (in))
893 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (in, 0)))))
894 in = TREE_OPERAND (in, 0);
896 if (TREE_CODE (in) == code
897 || (! FLOAT_TYPE_P (TREE_TYPE (in))
898 /* We can associate addition and subtraction together
899 (even though the C standard doesn't say so)
900 for integers because the value is not affected.
901 For reals, the value might be affected, so we can't. */
902 && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
903 || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
/* Case 1: first operand is an integer constant -- it becomes the
   constant part, the second operand the variable part.  */
905 enum tree_code code = TREE_CODE (TREE_OPERAND (in, 0));
906 if (code == INTEGER_CST)
908 *conp = TREE_OPERAND (in, 0);
909 *varp = TREE_OPERAND (in, 1);
910 if (TYPE_MODE (TREE_TYPE (*varp)) != TYPE_MODE (outtype)
911 && TREE_TYPE (*varp) != outtype)
912 *varp = convert (outtype, *varp);
913 *varsignp = (TREE_CODE (in) == MINUS_EXPR) ? -1 : 1;
/* Case 2: second operand is constant; for MINUS the constant must
   be negated so the caller can always add it.  */
916 if (TREE_CONSTANT (TREE_OPERAND (in, 1)))
918 *conp = TREE_OPERAND (in, 1);
919 *varp = TREE_OPERAND (in, 0);
921 if (TYPE_MODE (TREE_TYPE (*varp)) != TYPE_MODE (outtype)
922 && TREE_TYPE (*varp) != outtype)
923 *varp = convert (outtype, *varp);
924 if (TREE_CODE (in) == MINUS_EXPR)
926 /* If operation is subtraction and constant is second,
927 must negate it to get an additive constant.
928 And this cannot be done unless it is a manifest constant.
929 It could also be the address of a static variable.
930 We cannot negate that, so give up. */
931 if (TREE_CODE (*conp) == INTEGER_CST)
932 /* Subtracting from integer_zero_node loses for long long. */
933 *conp = fold (build1 (NEGATE_EXPR, TREE_TYPE (*conp), *conp));
/* Case 3: first operand is a (non-INTEGER_CST) constant.  */
939 if (TREE_CONSTANT (TREE_OPERAND (in, 0)))
941 *conp = TREE_OPERAND (in, 0);
942 *varp = TREE_OPERAND (in, 1);
943 if (TYPE_MODE (TREE_TYPE (*varp)) != TYPE_MODE (outtype)
944 && TREE_TYPE (*varp) != outtype)
945 *varp = convert (outtype, *varp);
946 *varsignp = (TREE_CODE (in) == MINUS_EXPR) ? -1 : 1;
953 /* Combine two constants NUM and ARG2 under operation CODE
954 to produce a new constant.
955 We assume ARG1 and ARG2 have the same data type,
956 or at least are the same kind of constant and the same machine mode.
958 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
961 const_binop (code, arg1, arg2, notrunc)
963 register tree arg1, arg2;
966 STRIP_NOPS (arg1); STRIP_NOPS (arg2);
/* --- Integer constants: dispatch on CODE over the doubleword
   (low, high) pair, using the *_double helpers above. --- */
968 if (TREE_CODE (arg1) == INTEGER_CST)
970 register HOST_WIDE_INT int1l = TREE_INT_CST_LOW (arg1);
971 register HOST_WIDE_INT int1h = TREE_INT_CST_HIGH (arg1);
972 HOST_WIDE_INT int2l = TREE_INT_CST_LOW (arg2);
973 HOST_WIDE_INT int2h = TREE_INT_CST_HIGH (arg2);
974 HOST_WIDE_INT low, hi;
975 HOST_WIDE_INT garbagel, garbageh;
977 int uns = TREE_UNSIGNED (TREE_TYPE (arg1));
983 t = build_int_2 (int1l | int2l, int1h | int2h);
987 t = build_int_2 (int1l ^ int2l, int1h ^ int2h);
991 t = build_int_2 (int1l & int2l, int1h & int2h);
995 t = build_int_2 (int1l & ~int2l, int1h & ~int2h);
1001 /* It's unclear from the C standard whether shifts can overflow.
1002 The following code ignores overflow; perhaps a C standard
1003 interpretation ruling is needed. */
1004 lshift_double (int1l, int1h, int2l,
1005 TYPE_PRECISION (TREE_TYPE (arg1)),
1008 t = build_int_2 (low, hi);
1009 TREE_TYPE (t) = TREE_TYPE (arg1);
1011 force_fit_type (t, 0);
1012 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
1013 TREE_CONSTANT_OVERFLOW (t)
1014 = TREE_CONSTANT_OVERFLOW (arg1) | TREE_CONSTANT_OVERFLOW (arg2);
1020 lrotate_double (int1l, int1h, int2l,
1021 TYPE_PRECISION (TREE_TYPE (arg1)),
1023 t = build_int_2 (low, hi);
/* Addition fast paths: when one high word is zero, add the low
   words directly and carry by hand (unsigned wraparound test).  */
1030 if ((unsigned HOST_WIDE_INT) int2l < int1l)
1033 overflow = int2h < hi;
1035 t = build_int_2 (int2l, int2h);
1041 if ((unsigned HOST_WIDE_INT) int1l < int2l)
1044 overflow = int1h < hi;
1046 t = build_int_2 (int1l, int1h);
1049 overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
1050 t = build_int_2 (low, hi);
/* Subtraction: implemented as addition of the negated operand;
   overflow_sum_sign checks the signs of the effective addition.  */
1054 if (int2h == 0 && int2l == 0)
1056 t = build_int_2 (int1l, int1h);
1059 neg_double (int2l, int2h, &low, &hi);
1060 add_double (int1l, int1h, low, hi, &low, &hi);
1061 overflow = overflow_sum_sign (hi, int2h, int1h);
1062 t = build_int_2 (low, hi);
1066 overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
1067 t = build_int_2 (low, hi);
1070 case TRUNC_DIV_EXPR:
1071 case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
1072 case EXACT_DIV_EXPR:
1073 /* This is a shortcut for a common special case.
1074 It reduces the number of tree nodes generated
1076 if (int2h == 0 && int2l > 0
1077 && TREE_TYPE (arg1) == sizetype
1078 && int1h == 0 && int1l >= 0)
1080 if (code == CEIL_DIV_EXPR)
1082 return size_int (int1l / int2l);
1084 case ROUND_DIV_EXPR:
1085 if (int2h == 0 && int2l == 1)
1087 t = build_int_2 (int1l, int1h);
1090 if (int1l == int2l && int1h == int2h)
1092 if ((int1l | int1h) == 0)
1094 t = build_int_2 (1, 0);
/* General division: quotient kept, remainder discarded.  */
1097 overflow = div_and_round_double (code, uns,
1098 int1l, int1h, int2l, int2h,
1099 &low, &hi, &garbagel, &garbageh);
1100 t = build_int_2 (low, hi);
1103 case TRUNC_MOD_EXPR: case ROUND_MOD_EXPR:
1104 case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
/* Modulus: remainder kept, quotient discarded.  */
1105 overflow = div_and_round_double (code, uns,
1106 int1l, int1h, int2l, int2h,
1107 &garbagel, &garbageh, &low, &hi);
1108 t = build_int_2 (low, hi);
/* MIN/MAX: doubleword compare, unsigned or signed per UNS.  */
1115 low = (((unsigned HOST_WIDE_INT) int1h
1116 < (unsigned HOST_WIDE_INT) int2h)
1117 || (((unsigned HOST_WIDE_INT) int1h
1118 == (unsigned HOST_WIDE_INT) int2h)
1119 && ((unsigned HOST_WIDE_INT) int1l
1120 < (unsigned HOST_WIDE_INT) int2l)));
1124 low = ((int1h < int2h)
1125 || ((int1h == int2h)
1126 && ((unsigned HOST_WIDE_INT) int1l
1127 < (unsigned HOST_WIDE_INT) int2l)));
1129 if (low == (code == MIN_EXPR))
1130 t = build_int_2 (int1l, int1h);
1132 t = build_int_2 (int2l, int2h);
1139 TREE_TYPE (t) = TREE_TYPE (arg1);
/* Propagate overflow flags; NOTRUNC skips force_fit_type.  */
1141 = ((notrunc ? !uns && overflow : force_fit_type (t, overflow && !uns))
1142 | TREE_OVERFLOW (arg1)
1143 | TREE_OVERFLOW (arg2));
1144 TREE_CONSTANT_OVERFLOW (t) = (TREE_OVERFLOW (t)
1145 | TREE_CONSTANT_OVERFLOW (arg1)
1146 | TREE_CONSTANT_OVERFLOW (arg2));
/* --- Real constants: NaNs pass through; other errors are caught
   via the float_error setjmp handler. --- */
1149 #if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
1150 if (TREE_CODE (arg1) == REAL_CST)
1155 REAL_VALUE_TYPE value;
1158 d1 = TREE_REAL_CST (arg1);
1159 d2 = TREE_REAL_CST (arg2);
1161 /* If either operand is a NaN, just return it. Otherwise, set up
1162 for floating-point trap; we return an overflow. */
1163 if (REAL_VALUE_ISNAN (d1))
1165 else if (REAL_VALUE_ISNAN (d2))
1167 else if (setjmp (float_error))
1169 t = copy_node (arg1);
1174 set_float_handler (float_error);
1176 #ifdef REAL_ARITHMETIC
1177 REAL_ARITHMETIC (value, code, d1, d2);
1194 #ifndef REAL_INFINITY
1203 value = MIN (d1, d2);
1207 value = MAX (d1, d2);
1213 #endif /* no REAL_ARITHMETIC */
1214 t = build_real (TREE_TYPE (arg1),
1215 real_value_truncate (TYPE_MODE (TREE_TYPE (arg1)), value));
1217 set_float_handler (NULL_PTR);
1220 = (force_fit_type (t, overflow)
1221 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
1222 TREE_CONSTANT_OVERFLOW (t)
1224 | TREE_CONSTANT_OVERFLOW (arg1)
1225 | TREE_CONSTANT_OVERFLOW (arg2);
1228 #endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
/* --- Complex constants: componentwise recursion on const_binop;
   division uses the (a+bi)/(c+di) = ((ac+bd) + (bc-ad)i)/(c^2+d^2)
   identity. --- */
1229 if (TREE_CODE (arg1) == COMPLEX_CST)
1231 register tree r1 = TREE_REALPART (arg1);
1232 register tree i1 = TREE_IMAGPART (arg1);
1233 register tree r2 = TREE_REALPART (arg2);
1234 register tree i2 = TREE_IMAGPART (arg2);
1240 t = build_complex (const_binop (PLUS_EXPR, r1, r2, notrunc),
1241 const_binop (PLUS_EXPR, i1, i2, notrunc));
1245 t = build_complex (const_binop (MINUS_EXPR, r1, r2, notrunc),
1246 const_binop (MINUS_EXPR, i1, i2, notrunc));
1250 t = build_complex (const_binop (MINUS_EXPR,
1251 const_binop (MULT_EXPR,
1253 const_binop (MULT_EXPR,
1256 const_binop (PLUS_EXPR,
1257 const_binop (MULT_EXPR,
1259 const_binop (MULT_EXPR,
1266 register tree magsquared
1267 = const_binop (PLUS_EXPR,
1268 const_binop (MULT_EXPR, r2, r2, notrunc),
1269 const_binop (MULT_EXPR, i2, i2, notrunc),
1273 (const_binop (INTEGRAL_TYPE_P (TREE_TYPE (r1))
1274 ? TRUNC_DIV_EXPR : RDIV_EXPR,
1275 const_binop (PLUS_EXPR,
1276 const_binop (MULT_EXPR, r1, r2,
1278 const_binop (MULT_EXPR, i1, i2,
1281 magsquared, notrunc),
1282 const_binop (INTEGRAL_TYPE_P (TREE_TYPE (r1))
1283 ? TRUNC_DIV_EXPR : RDIV_EXPR,
1284 const_binop (MINUS_EXPR,
1285 const_binop (MULT_EXPR, i1, r2,
1287 const_binop (MULT_EXPR, r1, i2,
1290 magsquared, notrunc));
1297 TREE_TYPE (t) = TREE_TYPE (arg1);
1303 /* Return an INTEGER_CST with value V and type from `sizetype'. */
/* Small values are cached in SIZE_TABLE as permanent nodes (allocated
   outside temporary obstacks) so repeated size computations share one
   tree node per value.  */
1307 unsigned HOST_WIDE_INT number;
1310 /* Type-size nodes already made for small sizes. */
1311 static tree size_table[2*HOST_BITS_PER_WIDE_INT + 1];
1313 if (number < 2*HOST_BITS_PER_WIDE_INT + 1
1314 && size_table[number] != 0)
1315 return size_table[number];
1316 if (number < 2*HOST_BITS_PER_WIDE_INT + 1)
1318 push_obstacks_nochange ();
1319 /* Make this a permanent node. */
1320 end_temporary_allocation ();
1321 t = build_int_2 (number, 0);
1322 TREE_TYPE (t) = sizetype;
1323 size_table[number] = t;
/* Large values: build an uncached node each time.  */
1328 t = build_int_2 (number, 0);
1329 TREE_TYPE (t) = sizetype;
1334 /* Combine operands OP1 and OP2 with arithmetic operation CODE.
1335 CODE is a tree code. Data type is taken from `sizetype',
1336 If the operands are constant, so is the result. */
1339 size_binop (code, arg0, arg1)
1340 enum tree_code code;
1343 /* Handle the special case of two integer constants faster. */
1344 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
1346 /* And some specific cases even faster than that. */
/* Identity shortcuts: 0 + x, x - 0, 1 * x need no folding at all.  */
1347 if (code == PLUS_EXPR
1348 && TREE_INT_CST_LOW (arg0) == 0
1349 && TREE_INT_CST_HIGH (arg0) == 0)
1351 if (code == MINUS_EXPR
1352 && TREE_INT_CST_LOW (arg1) == 0
1353 && TREE_INT_CST_HIGH (arg1) == 0)
1355 if (code == MULT_EXPR
1356 && TREE_INT_CST_LOW (arg0) == 1
1357 && TREE_INT_CST_HIGH (arg0) == 0)
1359 /* Handle general case of two integer constants. */
1360 return const_binop (code, arg0, arg1, 0);
1363 if (arg0 == error_mark_node || arg1 == error_mark_node)
1364 return error_mark_node;
/* Non-constant operands: build the expression and let fold simplify.  */
1366 return fold (build (code, sizetype, arg0, arg1));
1369 /* Given T, a tree representing type conversion of ARG1, a constant,
1370 return a constant tree representing the result of conversion. */
1373 fold_convert (t, arg1)
1377 register tree type = TREE_TYPE (t);
/* Integer or pointer target: truncate/extend an integer constant, or
   range-check and truncate a real constant.  */
1380 if (TREE_CODE (type) == POINTER_TYPE || INTEGRAL_TYPE_P (type))
1382 if (TREE_CODE (arg1) == INTEGER_CST)
1384 /* If we would build a constant wider than GCC supports,
1385 leave the conversion unfolded. */
1386 if (TYPE_PRECISION (type) > 2 * HOST_BITS_PER_WIDE_INT)
1389 /* Given an integer constant, make new constant with new type,
1390 appropriately sign-extended or truncated. */
1391 t = build_int_2 (TREE_INT_CST_LOW (arg1),
1392 TREE_INT_CST_HIGH (arg1));
1393 TREE_TYPE (t) = type;
1394 /* Indicate an overflow if (1) ARG1 already overflowed,
1395 or (2) force_fit_type indicates an overflow.
1396 Tell force_fit_type that an overflow has already occurred
1397 if ARG1 is a too-large unsigned value and T is signed. */
1399 = (TREE_OVERFLOW (arg1)
1400 | force_fit_type (t,
1401 (TREE_INT_CST_HIGH (arg1) < 0
1402 & (TREE_UNSIGNED (type)
1403 < TREE_UNSIGNED (TREE_TYPE (arg1))))));
1404 TREE_CONSTANT_OVERFLOW (t)
1405 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
1407 #if !defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
1408 else if (TREE_CODE (arg1) == REAL_CST)
1410 /* Don't initialize these, use assignments.
1411 Initialized local aggregates don't work on old compilers. */
1415 tree type1 = TREE_TYPE (arg1);
1417 x = TREE_REAL_CST (arg1);
1418 l = real_value_from_int_cst (type1, TYPE_MIN_VALUE (type));
1419 u = real_value_from_int_cst (type1, TYPE_MAX_VALUE (type));
1420 /* See if X will be in range after truncation towards 0.
1421 To compensate for truncation, move the bounds away from 0,
1422 but reject if X exactly equals the adjusted bounds. */
1423 #ifdef REAL_ARITHMETIC
1424 REAL_ARITHMETIC (l, MINUS_EXPR, l, dconst1);
1425 REAL_ARITHMETIC (u, PLUS_EXPR, u, dconst1);
1430 /* If X is a NaN, use zero instead and show we have an overflow.
1431 Otherwise, range check. */
1432 if (REAL_VALUE_ISNAN (x))
1433 overflow = 1, x = dconst0;
1434 else if (! (REAL_VALUES_LESS (l, x) && REAL_VALUES_LESS (x, u)))
/* Without REAL_ARITHMETIC, split the host double into two half-words
   by hand to form the double-word integer constant.  */
1437 #ifndef REAL_ARITHMETIC
1439 HOST_WIDE_INT low, high;
1440 HOST_WIDE_INT half_word
1441 = (HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2);
1446 high = (HOST_WIDE_INT) (x / half_word / half_word);
1447 x -= (REAL_VALUE_TYPE) high * half_word * half_word;
1448 if (x >= (REAL_VALUE_TYPE) half_word * half_word / 2)
1450 low = x - (REAL_VALUE_TYPE) half_word * half_word / 2;
1451 low |= (HOST_WIDE_INT) -1 << (HOST_BITS_PER_WIDE_INT - 1);
1454 low = (HOST_WIDE_INT) x;
1455 if (TREE_REAL_CST (arg1) < 0)
1456 neg_double (low, high, &low, &high);
1457 t = build_int_2 (low, high);
1461 HOST_WIDE_INT low, high;
1462 REAL_VALUE_TO_INT (&low, &high, x);
1463 t = build_int_2 (low, high);
1466 TREE_TYPE (t) = type;
1468 = TREE_OVERFLOW (arg1) | force_fit_type (t, overflow);
1469 TREE_CONSTANT_OVERFLOW (t)
1470 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
1472 #endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
1473 TREE_TYPE (t) = type;
/* Real target: fold int->real directly, and real->real through
   real_value_truncate under a float trap handler.  */
1475 else if (TREE_CODE (type) == REAL_TYPE)
1477 #if !defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
1478 if (TREE_CODE (arg1) == INTEGER_CST)
1479 return build_real_from_int_cst (type, arg1);
1480 #endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
1481 if (TREE_CODE (arg1) == REAL_CST)
1483 if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1)))
1486 TREE_TYPE (arg1) = type;
/* Floating exception during conversion: fall back to copying the
   operand (setjmp target for float_error below). */
1489 else if (setjmp (float_error))
1492 t = copy_node (arg1);
1495 set_float_handler (float_error);
1497 t = build_real (type, real_value_truncate (TYPE_MODE (type),
1498 TREE_REAL_CST (arg1)));
1499 set_float_handler (NULL_PTR);
1503 = TREE_OVERFLOW (arg1) | force_fit_type (t, overflow);
1504 TREE_CONSTANT_OVERFLOW (t)
1505 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
1509 TREE_CONSTANT (t) = 1;
1513 /* Return an expr equal to X but certainly not valid as an lvalue.
1514 Also make sure it is not valid as an null pointer constant. */
1522 /* These things are certainly not lvalues. */
1523 if (TREE_CODE (x) == NON_LVALUE_EXPR
1524 || TREE_CODE (x) == INTEGER_CST
1525 || TREE_CODE (x) == REAL_CST
1526 || TREE_CODE (x) == STRING_CST
1527 || TREE_CODE (x) == ADDR_EXPR)
1529 if (TREE_CODE (x) == INTEGER_CST && integer_zerop (x))
1531 /* Use NOP_EXPR instead of NON_LVALUE_EXPR
1532 so convert_for_assignment won't strip it.
1533 This is so this 0 won't be treated as a null pointer constant. */
1534 result = build1 (NOP_EXPR, TREE_TYPE (x), x);
1535 TREE_CONSTANT (result) = TREE_CONSTANT (x);
1541 result = build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
1542 TREE_CONSTANT (result) = TREE_CONSTANT (x);
1546 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
1547 Zero means allow extended lvalues. */
1549 int pedantic_lvalues;
1551 /* When pedantic, return an expr equal to X but certainly not valid as a
1552 pedantic lvalue. Otherwise, return X. */
1555 pedantic_non_lvalue (x)
1558 if (pedantic_lvalues)
1559 return non_lvalue (x);
1564 /* Given a tree comparison code, return the code that is the logical inverse
1565 of the given code. It is not safe to do this for floating-point
1566 comparisons, except for NE_EXPR and EQ_EXPR. */
1568 static enum tree_code
1569 invert_tree_comparison (code)
1570 enum tree_code code;
1591 /* Similar, but return the comparison that results if the operands are
1592 swapped. This is safe for floating-point. */
1594 static enum tree_code
1595 swap_tree_comparison (code)
1596 enum tree_code code;
1616 /* Return nonzero if CODE is a tree code that represents a truth value. */
1619 truth_value_p (code)
1620 enum tree_code code;
1622 return (TREE_CODE_CLASS (code) == '<'
1623 || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
1624 || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
1625 || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
1628 /* Return nonzero if two operands are necessarily equal.
1629 If ONLY_CONST is non-zero, only return non-zero for constants.
1630 This function tests whether the operands are indistinguishable;
1631 it does not test whether they are equal using C's == operation.
1632 The distinction is important for IEEE floating point, because
1633 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
1634 (2) two NaNs may be indistinguishable, but NaN!=NaN. */
1637 operand_equal_p (arg0, arg1, only_const)
1641 /* If both types don't have the same signedness, then we can't consider
1642 them equal. We must check this before the STRIP_NOPS calls
1643 because they may change the signedness of the arguments. */
1644 if (TREE_UNSIGNED (TREE_TYPE (arg0)) != TREE_UNSIGNED (TREE_TYPE (arg1)))
1650 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
1651 We don't care about side effects in that case because the SAVE_EXPR
1652 takes care of that for us. */
1653 if (TREE_CODE (arg0) == SAVE_EXPR && arg0 == arg1)
1654 return ! only_const;
/* Expressions with side effects can never be known equal.  */
1656 if (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1))
/* Two ADDR_EXPRs of the identical object are equal.  */
1659 if (TREE_CODE (arg0) == TREE_CODE (arg1)
1660 && TREE_CODE (arg0) == ADDR_EXPR
1661 && TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0))
/* Integer constants are equal when both halves of the double-word
   value match.  */
1664 if (TREE_CODE (arg0) == TREE_CODE (arg1)
1665 && TREE_CODE (arg0) == INTEGER_CST
1666 && TREE_INT_CST_LOW (arg0) == TREE_INT_CST_LOW (arg1)
1667 && TREE_INT_CST_HIGH (arg0) == TREE_INT_CST_HIGH (arg1))
1670 /* Detect when real constants are equal. */
1671 if (TREE_CODE (arg0) == TREE_CODE (arg1)
1672 && TREE_CODE (arg0) == REAL_CST)
/* Bitwise comparison of the representations distinguishes -0.0
   from 0.0 and matches identical NaN patterns, as documented above.  */
1673 return !bcmp ((char *) &TREE_REAL_CST (arg0),
1674 (char *) &TREE_REAL_CST (arg1),
1675 sizeof (REAL_VALUE_TYPE));
1683 if (TREE_CODE (arg0) != TREE_CODE (arg1))
1685 /* This is needed for conversions and for COMPONENT_REF.
1686 Might as well play it safe and always test this. */
1687 if (TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
1690 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
1693 /* Two conversions are equal only if signedness and modes match. */
1694 if ((TREE_CODE (arg0) == NOP_EXPR || TREE_CODE (arg0) == CONVERT_EXPR)
1695 && (TREE_UNSIGNED (TREE_TYPE (arg0))
1696 != TREE_UNSIGNED (TREE_TYPE (arg1))))
1699 return operand_equal_p (TREE_OPERAND (arg0, 0),
1700 TREE_OPERAND (arg1, 0), 0);
/* Binary operators: both operand pairs must match.  */
1704 return (operand_equal_p (TREE_OPERAND (arg0, 0),
1705 TREE_OPERAND (arg1, 0), 0)
1706 && operand_equal_p (TREE_OPERAND (arg0, 1),
1707 TREE_OPERAND (arg1, 1), 0));
1710 switch (TREE_CODE (arg0))
1713 return operand_equal_p (TREE_OPERAND (arg0, 0),
1714 TREE_OPERAND (arg1, 0), 0);
1718 return (operand_equal_p (TREE_OPERAND (arg0, 0),
1719 TREE_OPERAND (arg1, 0), 0)
1720 && operand_equal_p (TREE_OPERAND (arg0, 1),
1721 TREE_OPERAND (arg1, 1), 0));
/* Ternary reference (e.g. BIT_FIELD_REF): all three operands.  */
1724 return (operand_equal_p (TREE_OPERAND (arg0, 0),
1725 TREE_OPERAND (arg1, 0), 0)
1726 && operand_equal_p (TREE_OPERAND (arg0, 1),
1727 TREE_OPERAND (arg1, 1), 0)
1728 && operand_equal_p (TREE_OPERAND (arg0, 2),
1729 TREE_OPERAND (arg1, 2), 0));
1737 /* Similar to operand_equal_p, but see if ARG0 might have been made by
1738 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
1740 When in doubt, return 0. */
1743 operand_equal_for_comparison_p (arg0, arg1, other)
1747 int unsignedp1, unsignedpo;
1748 tree primarg1, primother;
1749 unsigned correct_width;
/* Trivially equal operands need no shortening analysis.  */
1751 if (operand_equal_p (arg0, arg1, 0))
/* The shortening transformation only applies to integral types.  */
1754 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
1755 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
1758 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
1759 actual comparison operand, ARG0.
1761 First throw away any conversions to wider types
1762 already present in the operands. */
1764 primarg1 = get_narrower (arg1, &unsignedp1);
1765 primother = get_narrower (other, &unsignedpo);
1767 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
1768 if (unsignedp1 == unsignedpo
1769 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
1770 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
1772 tree type = TREE_TYPE (arg0);
1774 /* Make sure shorter operand is extended the right way
1775 to match the longer operand. */
1776 primarg1 = convert (signed_or_unsigned_type (unsignedp1,
1777 TREE_TYPE (primarg1)),
1780 if (operand_equal_p (arg0, convert (type, primarg1), 0))
1787 /* See if ARG is an expression that is either a comparison or is performing
1788 arithmetic on comparisons. The comparisons must only be comparing
1789 two different values, which will be stored in *CVAL1 and *CVAL2; if
1790 they are non-zero it means that some operands have already been found.
1791 No variables may be used anywhere else in the expression except in the
1792 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
1793 the expression and save_expr needs to be called with CVAL1 and CVAL2.
1795 If this is true, return 1. Otherwise, return zero. */
1798 twoval_comparison_p (arg, cval1, cval2, save_p)
1800 tree *cval1, *cval2;
1803 enum tree_code code = TREE_CODE (arg);
1804 char class = TREE_CODE_CLASS (code);
1806 /* We can handle some of the 'e' cases here. */
1807 if (class == 'e' && code == TRUTH_NOT_EXPR)
1809 else if (class == 'e'
1810 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
1811 || code == COMPOUND_EXPR))
1814 /* ??? Disable this since the SAVE_EXPR might already be in use outside
1815 the expression. There may be no way to make this work, but it needs
1816 to be looked at again for 2.6. */
1818 else if (class == 'e' && code == SAVE_EXPR && SAVE_EXPR_RTL (arg) == 0)
1820 /* If we've already found a CVAL1 or CVAL2, this expression is
1821 too complex to handle. */
1822 if (*cval1 || *cval2)
/* Unary expressions: recurse into the single operand.  */
1833 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
/* Binary expressions: both operands must qualify.  */
1836 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
1837 && twoval_comparison_p (TREE_OPERAND (arg, 1),
1838 cval1, cval2, save_p));
1844 if (code == COND_EXPR)
1845 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
1846 cval1, cval2, save_p)
1847 && twoval_comparison_p (TREE_OPERAND (arg, 1),
1848 cval1, cval2, save_p)
1849 && twoval_comparison_p (TREE_OPERAND (arg, 2),
1850 cval1, cval2, save_p));
1854 /* First see if we can handle the first operand, then the second. For
1855 the second operand, we know *CVAL1 can't be zero. It must be that
1856 one side of the comparison is each of the values; test for the
1857 case where this isn't true by failing if the two operands
1860 if (operand_equal_p (TREE_OPERAND (arg, 0),
1861 TREE_OPERAND (arg, 1), 0))
1865 *cval1 = TREE_OPERAND (arg, 0);
1866 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
1868 else if (*cval2 == 0)
1869 *cval2 = TREE_OPERAND (arg, 0);
1870 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
/* Now record or match the second comparison operand.  */
1875 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
1877 else if (*cval2 == 0)
1878 *cval2 = TREE_OPERAND (arg, 1);
1879 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
1890 /* ARG is a tree that is known to contain just arithmetic operations and
1891 comparisons. Evaluate the operations in the tree substituting NEW0 for
1892 any occurrence of OLD0 as an operand of a comparison and likewise for
1896 eval_subst (arg, old0, new0, old1, new1)
1898 tree old0, new0, old1, new1;
1900 tree type = TREE_TYPE (arg);
1901 enum tree_code code = TREE_CODE (arg);
1902 char class = TREE_CODE_CLASS (code);
1904 /* We can handle some of the 'e' cases here. */
1905 if (class == 'e' && code == TRUTH_NOT_EXPR)
1907 else if (class == 'e'
1908 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
/* Unary case: rebuild with the substituted operand and re-fold.  */
1914 return fold (build1 (code, type,
1915 eval_subst (TREE_OPERAND (arg, 0),
1916 old0, new0, old1, new1)));
/* Binary case: substitute in both operands.  */
1919 return fold (build (code, type,
1920 eval_subst (TREE_OPERAND (arg, 0),
1921 old0, new0, old1, new1),
1922 eval_subst (TREE_OPERAND (arg, 1),
1923 old0, new0, old1, new1)));
1929 return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);
1932 return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);
/* COND_EXPR-style case: substitute in all three operands.  */
1935 return fold (build (code, type,
1936 eval_subst (TREE_OPERAND (arg, 0),
1937 old0, new0, old1, new1),
1938 eval_subst (TREE_OPERAND (arg, 1),
1939 old0, new0, old1, new1),
1940 eval_subst (TREE_OPERAND (arg, 2),
1941 old0, new0, old1, new1)));
1946 tree arg0 = TREE_OPERAND (arg, 0);
1947 tree arg1 = TREE_OPERAND (arg, 1);
1949 /* We need to check both for exact equality and tree equality. The
1950 former will be true if the operand has a side-effect. In that
1951 case, we know the operand occurred exactly once. */
1953 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
1955 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
1958 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
1960 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
1963 return fold (build (code, type, arg0, arg1));
1970 /* Return a tree for the case when the result of an expression is RESULT
1971 converted to TYPE and OMITTED was previously an operand of the expression
1972 but is now not needed (e.g., we folded OMITTED * 0).
1974 If OMITTED has side effects, we must evaluate it. Otherwise, just do
1975 the conversion of RESULT to TYPE. */
1978 omit_one_operand (type, result, omitted)
1979 tree type, result, omitted;
1981 tree t = convert (type, result);
1983 if (TREE_SIDE_EFFECTS (omitted))
1984 return build (COMPOUND_EXPR, type, omitted, t);
1986 return non_lvalue (t);
1989 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
1992 pedantic_omit_one_operand (type, result, omitted)
1993 tree type, result, omitted;
1995 tree t = convert (type, result);
1997 if (TREE_SIDE_EFFECTS (omitted))
1998 return build (COMPOUND_EXPR, type, omitted, t);
2000 return pedantic_non_lvalue (t);
2005 /* Return a simplified tree node for the truth-negation of ARG. This
2006 never alters ARG itself. We assume that ARG is an operation that
2007 returns a truth value (0 or 1). */
2010 invert_truthvalue (arg)
2013 tree type = TREE_TYPE (arg);
2014 enum tree_code code = TREE_CODE (arg);
2016 if (code == ERROR_MARK)
2019 /* If this is a comparison, we can simply invert it, except for
2020 floating-point non-equality comparisons, in which case we just
2021 enclose a TRUTH_NOT_EXPR around what we have. */
2023 if (TREE_CODE_CLASS (code) == '<')
2025 if (FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (arg, 0)))
2026 && code != NE_EXPR && code != EQ_EXPR)
2027 return build1 (TRUTH_NOT_EXPR, type, arg);
2029 return build (invert_tree_comparison (code), type,
2030 TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
/* Constant: negate the truth value directly (nonzero -> 0, 0 -> 1).  */
2036 return convert (type, build_int_2 (TREE_INT_CST_LOW (arg) == 0
2037 && TREE_INT_CST_HIGH (arg) == 0, 0));
2039 case TRUTH_AND_EXPR:
/* De Morgan: !(a && b) == !a || !b.  */
2040 return build (TRUTH_OR_EXPR, type,
2041 invert_truthvalue (TREE_OPERAND (arg, 0)),
2042 invert_truthvalue (TREE_OPERAND (arg, 1)));
/* De Morgan: !(a || b) == !a && !b.  */
2045 return build (TRUTH_AND_EXPR, type,
2046 invert_truthvalue (TREE_OPERAND (arg, 0)),
2047 invert_truthvalue (TREE_OPERAND (arg, 1)));
2049 case TRUTH_XOR_EXPR:
2050 /* Here we can invert either operand. We invert the first operand
2051 unless the second operand is a TRUTH_NOT_EXPR in which case our
2052 result is the XOR of the first operand with the inside of the
2053 negation of the second operand. */
2055 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
2056 return build (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
2057 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
2059 return build (TRUTH_XOR_EXPR, type,
2060 invert_truthvalue (TREE_OPERAND (arg, 0)),
2061 TREE_OPERAND (arg, 1));
2063 case TRUTH_ANDIF_EXPR:
2064 return build (TRUTH_ORIF_EXPR, type,
2065 invert_truthvalue (TREE_OPERAND (arg, 0)),
2066 invert_truthvalue (TREE_OPERAND (arg, 1)));
2068 case TRUTH_ORIF_EXPR:
2069 return build (TRUTH_ANDIF_EXPR, type,
2070 invert_truthvalue (TREE_OPERAND (arg, 0)),
2071 invert_truthvalue (TREE_OPERAND (arg, 1)));
2073 case TRUTH_NOT_EXPR:
/* Double negation cancels.  */
2074 return TREE_OPERAND (arg, 0);
/* Conditional: invert both arms, leave the condition alone.  */
2077 return build (COND_EXPR, type, TREE_OPERAND (arg, 0),
2078 invert_truthvalue (TREE_OPERAND (arg, 1)),
2079 invert_truthvalue (TREE_OPERAND (arg, 2)));
/* Compound: only the final (value-producing) operand is inverted.  */
2082 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
2083 invert_truthvalue (TREE_OPERAND (arg, 1)));
2085 case NON_LVALUE_EXPR:
2086 return invert_truthvalue (TREE_OPERAND (arg, 0));
/* Conversion-like unary node: push the inversion inside.  */
2091 return build1 (TREE_CODE (arg), type,
2092 invert_truthvalue (TREE_OPERAND (arg, 0)));
2095 if (!integer_onep (TREE_OPERAND (arg, 1)))
2097 return build (EQ_EXPR, type, arg, convert (type, integer_zero_node));
2100 return build1 (TRUTH_NOT_EXPR, type, arg);
2102 case CLEANUP_POINT_EXPR:
2103 return build1 (CLEANUP_POINT_EXPR, type,
2104 invert_truthvalue (TREE_OPERAND (arg, 0)));
2106 if (TREE_CODE (TREE_TYPE (arg)) != BOOLEAN_TYPE)
2108 return build1 (TRUTH_NOT_EXPR, type, arg);
2111 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
2112 operands are another bit-wise operation with a common input. If so,
2113 distribute the bit operations to save an operation and possibly two if
2114 constants are involved. For example, convert
2115 (A | B) & (A | C) into A | (B & C)
2116 Further simplification will occur if B and C are constants.
2118 If this optimization cannot be done, 0 will be returned. */
2121 distribute_bit_expr (code, type, arg0, arg1)
2122 enum tree_code code;
/* Both operands must be the same kind of bit operation, distinct from
   CODE itself, and one of AND/IOR.  */
2129 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2130 || TREE_CODE (arg0) == code
2131 || (TREE_CODE (arg0) != BIT_AND_EXPR
2132 && TREE_CODE (arg0) != BIT_IOR_EXPR))
/* Find the operand common to both sides; the other two operands
   become LEFT and RIGHT.  All four orderings are tried.  */
2135 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
2137 common = TREE_OPERAND (arg0, 0);
2138 left = TREE_OPERAND (arg0, 1);
2139 right = TREE_OPERAND (arg1, 1);
2141 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
2143 common = TREE_OPERAND (arg0, 0);
2144 left = TREE_OPERAND (arg0, 1);
2145 right = TREE_OPERAND (arg1, 0);
2147 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
2149 common = TREE_OPERAND (arg0, 1);
2150 left = TREE_OPERAND (arg0, 0);
2151 right = TREE_OPERAND (arg1, 1);
2153 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
2155 common = TREE_OPERAND (arg0, 1);
2156 left = TREE_OPERAND (arg0, 0);
2157 right = TREE_OPERAND (arg1, 0);
/* Rebuild as COMMON <op0-code> (LEFT CODE RIGHT), folding the inner
   operation first so constant operands simplify.  */
2162 return fold (build (TREE_CODE (arg0), type, common,
2163 fold (build (code, type, left, right))));
2166 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
2167 starting at BITPOS. The field is unsigned if UNSIGNEDP is non-zero. */
2170 make_bit_field_ref (inner, type, bitsize, bitpos, unsignedp)
2173 int bitsize, bitpos;
2176 tree result = build (BIT_FIELD_REF, type, inner,
2177 size_int (bitsize), size_int (bitpos));
2179 TREE_UNSIGNED (result) = unsignedp;
2184 /* Optimize a bit-field compare.
2186 There are two cases: First is a compare against a constant and the
2187 second is a comparison of two items where the fields are at the same
2188 bit position relative to the start of a chunk (byte, halfword, word)
2189 large enough to contain it. In these cases we can avoid the shift
2190 implicit in bitfield extractions.
2192 For constants, we emit a compare of the shifted constant with the
2193 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
2194 compared. For two fields at the same position, we do the ANDs with the
2195 similar mask and compare the result of the ANDs.
2197 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
2198 COMPARE_TYPE is the type of the comparison, and LHS and RHS
2199 are the left and right operands of the comparison, respectively.
2201 If the optimization described above can be done, we return the resulting
2202 tree. Otherwise we return zero. */
2205 optimize_bit_field_compare (code, compare_type, lhs, rhs)
2206 enum tree_code code;
2210 int lbitpos, lbitsize, rbitpos, rbitsize;
2211 int lnbitpos, lnbitsize, rnbitpos, rnbitsize;
2212 tree type = TREE_TYPE (lhs);
2213 tree signed_type, unsigned_type;
2214 int const_p = TREE_CODE (rhs) == INTEGER_CST;
2215 enum machine_mode lmode, rmode, lnmode, rnmode;
2216 int lunsignedp, runsignedp;
2217 int lvolatilep = 0, rvolatilep = 0;
2218 tree linner, rinner;
2222 /* Get all the information about the extractions being done. If the bit size
2223 if the same as the size of the underlying object, we aren't doing an
2224 extraction at all and so can do nothing. */
2225 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
2226 &lunsignedp, &lvolatilep);
2227 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
2233 /* If this is not a constant, we can only do something if bit positions,
2234 sizes, and signedness are the same. */
2235 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset,
2236 &rmode, &runsignedp, &rvolatilep);
2238 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
2239 || lunsignedp != runsignedp || offset != 0)
2243 /* See if we can find a mode to refer to this field. We should be able to,
2244 but fail if we can't. */
2245 lnmode = get_best_mode (lbitsize, lbitpos,
2246 TYPE_ALIGN (TREE_TYPE (linner)), word_mode,
2248 if (lnmode == VOIDmode)
2251 /* Set signed and unsigned types of the precision of this mode for the
2253 signed_type = type_for_mode (lnmode, 0);
2254 unsigned_type = type_for_mode (lnmode, 1);
/* For the two-field case, also find a mode for the right-hand field.  */
2258 rnmode = get_best_mode (rbitsize, rbitpos,
2259 TYPE_ALIGN (TREE_TYPE (rinner)), word_mode,
2261 if (rnmode == VOIDmode)
2265 /* Compute the bit position and size for the new reference and our offset
2266 within it. If the new reference is the same size as the original, we
2267 won't optimize anything, so return zero. */
2268 lnbitsize = GET_MODE_BITSIZE (lnmode);
2269 lnbitpos = lbitpos & ~ (lnbitsize - 1);
2270 lbitpos -= lnbitpos;
2271 if (lnbitsize == lbitsize)
2276 rnbitsize = GET_MODE_BITSIZE (rnmode);
2277 rnbitpos = rbitpos & ~ (rnbitsize - 1);
2278 rbitpos -= rnbitpos;
2279 if (rnbitsize == rbitsize)
/* Adjust for endianness: bit positions count from the other end on
   big-endian targets.  */
2283 if (BYTES_BIG_ENDIAN)
2284 lbitpos = lnbitsize - lbitsize - lbitpos;
2286 /* Make the mask to be used against the extracted field. */
2287 mask = build_int_2 (~0, ~0);
2288 TREE_TYPE (mask) = unsigned_type;
2289 force_fit_type (mask, 0);
2290 mask = convert (unsigned_type, mask);
2291 mask = const_binop (LSHIFT_EXPR, mask, size_int (lnbitsize - lbitsize), 0);
2292 mask = const_binop (RSHIFT_EXPR, mask,
2293 size_int (lnbitsize - lbitsize - lbitpos), 0);
2296 /* If not comparing with constant, just rework the comparison
2298 return build (code, compare_type,
2299 build (BIT_AND_EXPR, unsigned_type,
2300 make_bit_field_ref (linner, unsigned_type,
2301 lnbitsize, lnbitpos, 1),
2303 build (BIT_AND_EXPR, unsigned_type,
2304 make_bit_field_ref (rinner, unsigned_type,
2305 rnbitsize, rnbitpos, 1),
2308 /* Otherwise, we are handling the constant case. See if the constant is too
2309 big for the field. Warn and return a tree of for 0 (false) if so. We do
2310 this not only for its own sake, but to avoid having to test for this
2311 error case below. If we didn't, we might generate wrong code.
2313 For unsigned fields, the constant shifted right by the field length should
2314 be all zero. For signed fields, the high-order bits should agree with
2319 if (! integer_zerop (const_binop (RSHIFT_EXPR,
2320 convert (unsigned_type, rhs),
2321 size_int (lbitsize), 0)))
2323 warning ("comparison is always %s due to width of bitfield",
2324 code == NE_EXPR ? "one" : "zero");
2325 return convert (compare_type,
2327 ? integer_one_node : integer_zero_node));
/* Signed-field check: the bits above the field must all equal the
   field's sign bit.  */
2332 tree tem = const_binop (RSHIFT_EXPR, convert (signed_type, rhs),
2333 size_int (lbitsize - 1), 0);
2334 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
2336 warning ("comparison is always %s due to width of bitfield",
2337 code == NE_EXPR ? "one" : "zero");
2338 return convert (compare_type,
2340 ? integer_one_node : integer_zero_node));
2344 /* Single-bit compares should always be against zero. */
2345 if (lbitsize == 1 && ! integer_zerop (rhs))
2347 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
2348 rhs = convert (type, integer_zero_node);
2351 /* Make a new bitfield reference, shift the constant over the
2352 appropriate number of bits and mask it with the computed mask
2353 (in case this was a signed field). If we changed it, make a new one. */
2354 lhs = make_bit_field_ref (linner, unsigned_type, lnbitsize, lnbitpos, 1);
/* Preserve volatility of the original reference on the new one.  */
2357 TREE_SIDE_EFFECTS (lhs) = 1;
2358 TREE_THIS_VOLATILE (lhs) = 1;
2361 rhs = fold (const_binop (BIT_AND_EXPR,
2362 const_binop (LSHIFT_EXPR,
2363 convert (unsigned_type, rhs),
2364 size_int (lbitpos), 0),
2367 return build (code, compare_type,
2368 build (BIT_AND_EXPR, unsigned_type, lhs, mask),
2372 /* Subroutine for fold_truthop: decode a field reference.
2374 If EXP is a comparison reference, we return the innermost reference.
2376 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
2377 set to the starting bit number.
2379 If the innermost field can be completely contained in a mode-sized
2380 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
2382 *PVOLATILEP is set to 1 if the any expression encountered is volatile;
2383 otherwise it is not changed.
2385 *PUNSIGNEDP is set to the signedness of the field.
2387 *PMASK is set to the mask used. This is either contained in a
2388 BIT_AND_EXPR or derived from the width of the field.
2390 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
2392 Return 0 if this is not a component reference or is one that we can't
2393 do anything with. */
2396 decode_field_reference (exp, pbitsize, pbitpos, pmode, punsignedp,
2397 pvolatilep, pmask, pand_mask)
2399 int *pbitsize, *pbitpos;
2400 enum machine_mode *pmode;
2401 int *punsignedp, *pvolatilep;
2406 tree mask, inner, offset;
2410 /* All the optimizations using this function assume integer fields.
2411 There are problems with FP fields since the type_for_size call
2412 below can fail for, e.g., XFmode. */
2413 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
/* Peel off an explicit AND mask, remembering it for the caller.  */
2418 if (TREE_CODE (exp) == BIT_AND_EXPR)
2420 and_mask = TREE_OPERAND (exp, 1);
2421 exp = TREE_OPERAND (exp, 0);
2422 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
2423 if (TREE_CODE (and_mask) != INTEGER_CST)
2428 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
2429 punsignedp, pvolatilep);
2430 if ((inner == exp && and_mask == 0)
2431 || *pbitsize < 0 || offset != 0)
2434 /* Compute the mask to access the bitfield. */
2435 unsigned_type = type_for_size (*pbitsize, 1);
2436 precision = TYPE_PRECISION (unsigned_type);
/* Build a mask of *PBITSIZE low-order ones by shifting an all-ones
   value left then right again.  */
2438 mask = build_int_2 (~0, ~0);
2439 TREE_TYPE (mask) = unsigned_type;
2440 force_fit_type (mask, 0);
2441 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
2442 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
2444 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
2446 mask = fold (build (BIT_AND_EXPR, unsigned_type,
2447 convert (unsigned_type, and_mask), mask));
2450 *pand_mask = and_mask;
2454 /* Return non-zero if MASK represents a mask of SIZE ones in the low-order
2458 all_ones_mask_p (mask, size)
2462 tree type = TREE_TYPE (mask);
2463 int precision = TYPE_PRECISION (type);
2466 tmask = build_int_2 (~0, ~0);
2467 TREE_TYPE (tmask) = signed_type (type);
2468 force_fit_type (tmask, 0);
2470 tree_int_cst_equal (mask,
2471 const_binop (RSHIFT_EXPR,
2472 const_binop (LSHIFT_EXPR, tmask,
2473 size_int (precision - size),
2475 size_int (precision - size), 0));
2478 /* Subroutine for fold_truthop: determine if an operand is simple enough
2479 to be evaluated unconditionally. */
2482 simple_operand_p (exp)
2485 /* Strip any conversions that don't change the machine mode. */
2486 while ((TREE_CODE (exp) == NOP_EXPR
2487 || TREE_CODE (exp) == CONVERT_EXPR)
2488 && (TYPE_MODE (TREE_TYPE (exp))
2489 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
2490 exp = TREE_OPERAND (exp, 0);
2492 return (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c'
2493 || (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd'
2494 && ! TREE_ADDRESSABLE (exp)
2495 && ! TREE_THIS_VOLATILE (exp)
2496 && ! DECL_NONLOCAL (exp)
2497 /* Don't regard global variables as simple. They may be
2498 allocated in ways unknown to the compiler (shared memory,
2499 #pragma weak, etc). */
2500 && ! TREE_PUBLIC (exp)
2501 && ! DECL_EXTERNAL (exp)
2502 /* Loading a static variable is unduly expensive, but global
2503 registers aren't expensive. */
2504 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
2507 /* Subroutine for fold_truthop: try to optimize a range test.
2509 For example, "i >= 2 && i <= 9" can be done as "(unsigned) (i - 2) <= 7".
2511 JCODE is the logical combination of the two terms. It is TRUTH_AND_EXPR
2512 (representing TRUTH_ANDIF_EXPR and TRUTH_AND_EXPR) or TRUTH_OR_EXPR
2513 (representing TRUTH_ORIF_EXPR and TRUTH_OR_EXPR). TYPE is the type of
2516 VAR is the value being tested. LO_CODE and HI_CODE are the comparison
2517 operators comparing VAR to LO_CST and HI_CST. LO_CST is known to be no
2518 larger than HI_CST (they may be equal).
2520 We return the simplified tree or 0 if no optimization is possible. */
2523 range_test (jcode, type, lo_code, hi_code, var, lo_cst, hi_cst)
2524 enum tree_code jcode, lo_code, hi_code;
2525 tree type, var, lo_cst, hi_cst;
2528 enum tree_code rcode;
2530 /* See if this is a range test and normalize the constant terms. */
2532 if (jcode == TRUTH_AND_EXPR)
2537 /* See if we have VAR != CST && VAR != CST+1. */
2538 if (! (hi_code == NE_EXPR
2539 && TREE_INT_CST_LOW (hi_cst) - TREE_INT_CST_LOW (lo_cst) == 1
2540 && tree_int_cst_equal (integer_one_node,
2541 const_binop (MINUS_EXPR,
2542 hi_cst, lo_cst, 0))))
/* Normalize strict bounds to the inclusive forms >= and <=.  */
2550 if (hi_code == LT_EXPR)
2551 hi_cst = const_binop (MINUS_EXPR, hi_cst, integer_one_node, 0);
2552 else if (hi_code != LE_EXPR)
2555 if (lo_code == GT_EXPR)
2556 lo_cst = const_binop (PLUS_EXPR, lo_cst, integer_one_node, 0);
2558 /* We now have VAR >= LO_CST && VAR <= HI_CST. */
2571 /* See if we have VAR == CST || VAR == CST+1. */
2572 if (! (hi_code == EQ_EXPR
2573 && TREE_INT_CST_LOW (hi_cst) - TREE_INT_CST_LOW (lo_cst) == 1
2574 && tree_int_cst_equal (integer_one_node,
2575 const_binop (MINUS_EXPR,
2576 hi_cst, lo_cst, 0))))
/* Normalize strict bounds to the exclusive forms < and >.  */
2584 if (hi_code == GE_EXPR)
2585 hi_cst = const_binop (MINUS_EXPR, hi_cst, integer_one_node, 0);
2586 else if (hi_code != GT_EXPR)
2589 if (lo_code == LE_EXPR)
2590 lo_cst = const_binop (PLUS_EXPR, lo_cst, integer_one_node, 0);
2592 /* We now have VAR < LO_CST || VAR > HI_CST. */
2601 /* When normalizing, it is possible to both increment the smaller constant
2602 and decrement the larger constant. See if they are still ordered. */
2603 if (tree_int_cst_lt (hi_cst, lo_cst))
2606 /* Fail if VAR isn't an integer. */
2607 utype = TREE_TYPE (var);
2608 if (! INTEGRAL_TYPE_P (utype))
2611 /* The range test is invalid if subtracting the two constants results
2612 in overflow. This can happen in traditional mode. */
2613 if (! int_fits_type_p (hi_cst, TREE_TYPE (var))
2614 || ! int_fits_type_p (lo_cst, TREE_TYPE (var)))
/* For signed VAR, do the subtraction in the signed type first, then
   compare in the corresponding unsigned type.  */
2617 if (! TREE_UNSIGNED (utype))
2619 utype = unsigned_type (utype);
2620 return fold (convert (type,
2621 build (rcode, utype,
2623 build (MINUS_EXPR, TREE_TYPE (var),
2626 const_binop (MINUS_EXPR, hi_cst,
/* Unsigned VAR: subtract and compare directly in UTYPE.  */
2630 return fold (convert (type,
2631 build (rcode, utype,
2632 build (MINUS_EXPR, utype, var, lo_cst),
2633 const_binop (MINUS_EXPR, hi_cst,
2637 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
2638 bit value. Arrange things so the extra bits will be set to zero if and
2639 only if C is signed-extended to its full width. If MASK is nonzero,
2640 it is an INTEGER_CST that should be AND'ed with the extra bits. */
/* Strip a sign extension from constant C (see header comment above): produce
   a value whose bits above position P are zero exactly when C sign-extends
   cleanly to the mode width, by isolating the sign bit at position P-1,
   smearing it upward, and XORing it back in.
   NOTE(review): the parameter declarations (original lines 2644-2648) and
   the early-return bodies are missing from this extraction; P is presumably
   the bit-field width and MASK the optional AND mask — confirm against the
   full source.  */
2643 unextend (c, p, unsignedp, mask)
2649 tree type = TREE_TYPE (c);
2650 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
/* Full-width or unsigned values need no adjustment (early exit; body of
   this if is not visible here).  */
2653 if (p == modesize || unsignedp)
/* Work in the signed variant of TYPE so the shifts below sign-extend.  */
2656 if (TREE_UNSIGNED (type))
2657 c = convert (signed_type (type), c);
2659 /* We work by getting just the sign bit into the low-order bit, then
2660 into the high-order bit, then sign-extend. We then XOR that value
2662 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
2663 temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
2664 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
2665 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
/* Optionally restrict the smeared sign bits to MASK before XORing.  */
2667 temp = const_binop (BIT_AND_EXPR, temp, convert (TREE_TYPE (c), mask), 0);
2669 return convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
2672 /* Find ways of folding logical expressions of LHS and RHS:
2673 Try to merge two comparisons to the same innermost item.
2674 Look for range tests like "ch >= '0' && ch <= '9'".
2675 Look for combinations of simple terms on machines with expensive branches
2676 and evaluate the RHS unconditionally.
2678 For example, if we have p->a == 2 && p->b == 4 and we can make an
2679 object large enough to span both A and B, we can do this with a comparison
2680 against the object ANDed with the a mask.
2682 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
2683 operations to do this with one comparison.
2685 We check for both normal comparisons and the BIT_AND_EXPRs made by this
2686 function and the one above.
2688 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
2689 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
2691 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
2694 We return the simplified tree or 0 if no optimization is possible. */
/* Merge two comparisons joined by CODE (a TRUTH_AND/OR variant) into one:
   range tests, unconditional-RHS evaluation on expensive-branch targets,
   and bit-field comparison merging (see the long header comment above).
   Returns the simplified tree or 0.
   NOTE(review): extraction gaps — many original lines (closing braces,
   "return 0" failure paths, some trailing call arguments) are missing
   between the visible lines; the code text below is byte-identical to
   what is visible.  */
2697 fold_truthop (code, truth_type, lhs, rhs)
2698 enum tree_code code;
2699 tree truth_type, lhs, rhs;
2701 /* If this is the "or" of two comparisons, we can do something if
2702 the comparisons are NE_EXPR. If this is the "and", we can do something
2703 if the comparisons are EQ_EXPR. I.e.,
2704 (a->b == 2 && a->c == 4) can become (a->new == NEW).
2706 WANTED_CODE is this operation code. For single bit fields, we can
2707 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
2708 comparison for one-bit fields. */
2710 enum tree_code wanted_code;
2711 enum tree_code lcode, rcode;
/* Naming scheme: l/r = left/right operand of CODE; second letter l/r =
   left/right operand of that comparison (so "lr" = RHS of the left
   comparison).  */
2712 tree ll_arg, lr_arg, rl_arg, rr_arg;
2713 tree ll_inner, lr_inner, rl_inner, rr_inner;
2714 int ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
2715 int rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
2716 int xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
2717 int lnbitsize, lnbitpos, rnbitsize, rnbitpos;
2718 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
2719 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
2720 enum machine_mode lnmode, rnmode;
2721 tree ll_mask, lr_mask, rl_mask, rr_mask;
2722 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
2723 tree l_const, r_const;
2725 int first_bit, end_bit;
2728 /* Start by getting the comparison codes and seeing if this looks like
2729 a range test. Fail if anything is volatile. If one operand is a
2730 BIT_AND_EXPR with the constant one, treat it as if it were surrounded
2733 if (TREE_SIDE_EFFECTS (lhs)
2734 || TREE_SIDE_EFFECTS (rhs))
2737 lcode = TREE_CODE (lhs);
2738 rcode = TREE_CODE (rhs);
/* Canonicalize (x & 1) operands to (x & 1) != 0 so they look like
   comparisons for the rest of this function.  */
2740 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
2741 lcode = NE_EXPR, lhs = build (NE_EXPR, truth_type, lhs, integer_zero_node);
2743 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
2744 rcode = NE_EXPR, rhs = build (NE_EXPR, truth_type, rhs, integer_zero_node);
2746 if (TREE_CODE_CLASS (lcode) != '<'
2747 || (TREE_CODE_CLASS (rcode) != '<')
2750 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
2751 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
2753 ll_arg = TREE_OPERAND (lhs, 0);
2754 lr_arg = TREE_OPERAND (lhs, 1);
2755 rl_arg = TREE_OPERAND (rhs, 0);
2756 rr_arg = TREE_OPERAND (rhs, 1);
/* Phase 1: same variable compared against two constants — try the
   range-test optimization, passing the smaller constant first.  */
2758 if (TREE_CODE (lr_arg) == INTEGER_CST
2759 && TREE_CODE (rr_arg) == INTEGER_CST
2760 && operand_equal_p (ll_arg, rl_arg, 0))
2762 if (tree_int_cst_lt (lr_arg, rr_arg))
2763 result = range_test (code, truth_type, lcode, rcode,
2764 ll_arg, lr_arg, rr_arg);
2766 result = range_test (code, truth_type, rcode, lcode,
2767 ll_arg, rr_arg, lr_arg);
2769 /* If this isn't a range test, it also isn't a comparison that
2770 can be merged. However, it wins to evaluate the RHS unconditionally
2771 on machines with expensive branches. */
2773 if (result == 0 && BRANCH_COST >= 2)
2775 if (TREE_CODE (ll_arg) != VAR_DECL
2776 && TREE_CODE (ll_arg) != PARM_DECL)
2778 /* Avoid evaluating the variable part twice. */
2779 ll_arg = save_expr (ll_arg);
2780 lhs = build (lcode, TREE_TYPE (lhs), ll_arg, lr_arg);
2781 rhs = build (rcode, TREE_TYPE (rhs), ll_arg, rr_arg);
2783 return build (code, truth_type, lhs, rhs);
2788 /* If the RHS can be evaluated unconditionally and its operands are
2789 simple, it wins to evaluate the RHS unconditionally on machines
2790 with expensive branches. In this case, this isn't a comparison
2791 that can be merged. */
2793 /* @@ I'm not sure it wins on the m88110 to do this if the comparisons
2794 are with zero (tmw). */
2796 if (BRANCH_COST >= 2
2797 && INTEGRAL_TYPE_P (TREE_TYPE (rhs))
2798 && simple_operand_p (rl_arg)
2799 && simple_operand_p (rr_arg))
2800 return build (code, truth_type, lhs, rhs);
2802 /* See if the comparisons can be merged. Then get all the parameters for
/* Phase 2: bit-field merging.  Only equality comparisons qualify.  */
2805 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
2806 || (rcode != EQ_EXPR && rcode != NE_EXPR))
/* Decompose all four comparison operands into (inner object, bit position,
   size, mode, signedness, mask).  A null inner means "not a field ref".  */
2810 ll_inner = decode_field_reference (ll_arg,
2811 &ll_bitsize, &ll_bitpos, &ll_mode,
2812 &ll_unsignedp, &volatilep, &ll_mask,
2814 lr_inner = decode_field_reference (lr_arg,
2815 &lr_bitsize, &lr_bitpos, &lr_mode,
2816 &lr_unsignedp, &volatilep, &lr_mask,
2818 rl_inner = decode_field_reference (rl_arg,
2819 &rl_bitsize, &rl_bitpos, &rl_mode,
2820 &rl_unsignedp, &volatilep, &rl_mask,
2822 rr_inner = decode_field_reference (rr_arg,
2823 &rr_bitsize, &rr_bitpos, &rr_mode,
2824 &rr_unsignedp, &volatilep, &rr_mask,
2827 /* It must be true that the inner operation on the lhs of each
2828 comparison must be the same if we are to be able to do anything.
2829 Then see if we have constants. If not, the same must be true for
2831 if (volatilep || ll_inner == 0 || rl_inner == 0
2832 || ! operand_equal_p (ll_inner, rl_inner, 0))
2835 if (TREE_CODE (lr_arg) == INTEGER_CST
2836 && TREE_CODE (rr_arg) == INTEGER_CST)
2837 l_const = lr_arg, r_const = rr_arg;
2838 else if (lr_inner == 0 || rr_inner == 0
2839 || ! operand_equal_p (lr_inner, rr_inner, 0))
2842 l_const = r_const = 0;
2844 /* If either comparison code is not correct for our logical operation,
2845 fail. However, we can convert a one-bit comparison against zero into
2846 the opposite comparison against that bit being set in the field. */
2848 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
2849 if (lcode != wanted_code)
2851 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
2857 if (rcode != wanted_code)
2859 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
2865 /* See if we can find a mode that contains both fields being compared on
2866 the left. If we can't, fail. Otherwise, update all constants and masks
2867 to be relative to a field of that size. */
2868 first_bit = MIN (ll_bitpos, rl_bitpos);
2869 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
2870 lnmode = get_best_mode (end_bit - first_bit, first_bit,
2871 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
2873 if (lnmode == VOIDmode)
2876 lnbitsize = GET_MODE_BITSIZE (lnmode);
2877 lnbitpos = first_bit & ~ (lnbitsize - 1);
2878 type = type_for_size (lnbitsize, 1);
/* Re-express each field's bit position relative to the chosen mode.  */
2879 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
2881 if (BYTES_BIG_ENDIAN)
2883 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
2884 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
2887 ll_mask = const_binop (LSHIFT_EXPR, convert (type, ll_mask),
2888 size_int (xll_bitpos), 0);
2889 rl_mask = const_binop (LSHIFT_EXPR, convert (type, rl_mask),
2890 size_int (xrl_bitpos), 0);
/* Shift the constants into place; if any constant has bits outside its
   field's mask, the whole condition is statically known — warn and fold
   to the constant truth value.  */
2894 l_const = convert (type, l_const);
2895 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
2896 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
2897 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
2898 fold (build1 (BIT_NOT_EXPR,
2902 warning ("comparison is always %s",
2903 wanted_code == NE_EXPR ? "one" : "zero");
2905 return convert (truth_type,
2906 wanted_code == NE_EXPR
2907 ? integer_one_node : integer_zero_node);
2912 r_const = convert (type, r_const);
2913 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
2914 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
2915 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
2916 fold (build1 (BIT_NOT_EXPR,
2920 warning ("comparison is always %s",
2921 wanted_code == NE_EXPR ? "one" : "zero");
2923 return convert (truth_type,
2924 wanted_code == NE_EXPR
2925 ? integer_one_node : integer_zero_node);
2929 /* If the right sides are not constant, do the same for it. Also,
2930 disallow this optimization if a size or signedness mismatch occurs
2931 between the left and right sides. */
2934 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
2935 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
2936 /* Make sure the two fields on the right
2937 correspond to the left without being swapped. */
2938 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
2941 first_bit = MIN (lr_bitpos, rr_bitpos);
2942 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
2943 rnmode = get_best_mode (end_bit - first_bit, first_bit,
2944 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
2946 if (rnmode == VOIDmode)
2949 rnbitsize = GET_MODE_BITSIZE (rnmode);
2950 rnbitpos = first_bit & ~ (rnbitsize - 1);
2951 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
2953 if (BYTES_BIG_ENDIAN)
2955 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
2956 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
2959 lr_mask = const_binop (LSHIFT_EXPR, convert (type, lr_mask),
2960 size_int (xlr_bitpos), 0);
2961 rr_mask = const_binop (LSHIFT_EXPR, convert (type, rr_mask),
2962 size_int (xrr_bitpos), 0);
2964 /* Make a mask that corresponds to both fields being compared.
2965 Do this for both items being compared. If the masks agree,
2966 we can do this by masking both and comparing the masked
2968 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
2969 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
2970 if (operand_equal_p (ll_mask, lr_mask, 0) && lnbitsize == rnbitsize)
2972 lhs = make_bit_field_ref (ll_inner, type, lnbitsize, lnbitpos,
2973 ll_unsignedp || rl_unsignedp);
2974 rhs = make_bit_field_ref (lr_inner, type, rnbitsize, rnbitpos,
2975 lr_unsignedp || rr_unsignedp);
/* Skip the AND when the mask already covers the whole word.  */
2976 if (! all_ones_mask_p (ll_mask, lnbitsize))
2978 lhs = build (BIT_AND_EXPR, type, lhs, ll_mask);
2979 rhs = build (BIT_AND_EXPR, type, rhs, ll_mask);
2981 return build (wanted_code, truth_type, lhs, rhs);
2984 /* There is still another way we can do something: If both pairs of
2985 fields being compared are adjacent, we may be able to make a wider
2986 field containing them both. */
2987 if ((ll_bitsize + ll_bitpos == rl_bitpos
2988 && lr_bitsize + lr_bitpos == rr_bitpos)
2989 || (ll_bitpos == rl_bitpos + rl_bitsize
2990 && lr_bitpos == rr_bitpos + rr_bitsize))
2991 return build (wanted_code, truth_type,
2992 make_bit_field_ref (ll_inner, type,
2993 ll_bitsize + rl_bitsize,
2994 MIN (ll_bitpos, rl_bitpos),
2996 make_bit_field_ref (lr_inner, type,
2997 lr_bitsize + rr_bitsize,
2998 MIN (lr_bitpos, rr_bitpos),
3004 /* Handle the case of comparisons with constants. If there is something in
3005 common between the masks, those bits of the constants must be the same.
3006 If not, the condition is always false. Test for this to avoid generating
3007 incorrect code below. */
3008 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
3009 if (! integer_zerop (result)
3010 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
3011 const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
3013 if (wanted_code == NE_EXPR)
3015 warning ("`or' of unmatched not-equal tests is always 1");
3016 return convert (truth_type, integer_one_node);
3020 warning ("`and' of mutually exclusive equal-tests is always zero");
3021 return convert (truth_type, integer_zero_node);
3025 /* Construct the expression we will return. First get the component
3026 reference we will make. Unless the mask is all ones the width of
3027 that field, perform the mask operation. Then compare with the
3029 result = make_bit_field_ref (ll_inner, type, lnbitsize, lnbitpos,
3030 ll_unsignedp || rl_unsignedp);
3032 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
3033 if (! all_ones_mask_p (ll_mask, lnbitsize))
3034 result = build (BIT_AND_EXPR, type, result, ll_mask);
/* Final form: (inner & combined_mask) wanted_code (l_const | r_const).  */
3036 return build (wanted_code, truth_type, result,
3037 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
3040 /* If T contains a COMPOUND_EXPR which was inserted merely to evaluate
3041 S, a SAVE_EXPR, return the expression actually being evaluated. Note
3042 that we may sometimes modify the tree. */
/* Remove from T the COMPOUND_EXPR that was inserted only to evaluate the
   SAVE_EXPR S (see header comment above), rewriting operands recursively
   and in place for COND_EXPR, unary, binary, and comparison nodes.
   NOTE(review): the parameter declarations (original lines 3046-3048) and
   the final "return t;" (presumably at the missing lines 3072-3075) are
   not visible in this extraction; `type` is declared but unused in the
   visible lines — confirm against the full source.  */
3045 strip_compound_expr (t, s)
3049 tree type = TREE_TYPE (t);
3050 enum tree_code code = TREE_CODE (t);
3052 /* See if this is the COMPOUND_EXPR we want to eliminate. */
/* The pattern matched is (convert (s), value): drop the evaluation of S
   and keep only the value operand.  */
3053 if (code == COMPOUND_EXPR && TREE_CODE (TREE_OPERAND (t, 0)) == CONVERT_EXPR
3054 && TREE_OPERAND (TREE_OPERAND (t, 0), 0) == s
3055 return TREE_OPERAND (t, 1);
3057 /* See if this is a COND_EXPR or a simple arithmetic operator. We
3058 don't bother handling any other types. */
3059 else if (code == COND_EXPR)
3061 TREE_OPERAND (t, 0) = strip_compound_expr (TREE_OPERAND (t, 0), s);
3062 TREE_OPERAND (t, 1) = strip_compound_expr (TREE_OPERAND (t, 1), s);
3063 TREE_OPERAND (t, 2) = strip_compound_expr (TREE_OPERAND (t, 2), s);
3065 else if (TREE_CODE_CLASS (code) == '1')
3066 TREE_OPERAND (t, 0) = strip_compound_expr (TREE_OPERAND (t, 0), s);
3067 else if (TREE_CODE_CLASS (code) == '<'
3068 || TREE_CODE_CLASS (code) == '2')
3070 TREE_OPERAND (t, 0) = strip_compound_expr (TREE_OPERAND (t, 0), s);
3071 TREE_OPERAND (t, 1) = strip_compound_expr (TREE_OPERAND (t, 1), s);
3077 /* Perform constant folding and related simplification of EXPR.
3078 The related simplifications include x*1 => x, x*0 => 0, etc.,
3079 and application of the associative law.
3080 NOP_EXPR conversions may be removed freely (as long as we
3081 are careful not to change the C type of the overall expression)
3082 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
3083 but we can constant-fold them if they have constant operands. */
3089 register tree t = expr;
3090 tree t1 = NULL_TREE;
3092 tree type = TREE_TYPE (expr);
3093 register tree arg0, arg1;
3094 register enum tree_code code = TREE_CODE (t);
3098 /* WINS will be nonzero when the switch is done
3099 if all operands are constant. */
3103 /* Don't try to process an RTL_EXPR since its operands aren't trees.
3104 Likewise for a SAVE_EXPR that's already been evaluated. */
3105 if (code == RTL_EXPR || (code == SAVE_EXPR && SAVE_EXPR_RTL (t)) != 0)
3108 /* Return right away if already constant. */
3109 if (TREE_CONSTANT (t))
3111 if (code == CONST_DECL)
3112 return DECL_INITIAL (t);
3116 kind = TREE_CODE_CLASS (code);
3117 if (code == NOP_EXPR || code == FLOAT_EXPR || code == CONVERT_EXPR)
3121 /* Special case for conversion ops that can have fixed point args. */
3122 arg0 = TREE_OPERAND (t, 0);
3124 /* Don't use STRIP_NOPS, because signedness of argument type matters. */
3126 STRIP_TYPE_NOPS (arg0);
3128 if (arg0 != 0 && TREE_CODE (arg0) == COMPLEX_CST)
3129 subop = TREE_REALPART (arg0);
3133 if (subop != 0 && TREE_CODE (subop) != INTEGER_CST
3134 #if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
3135 && TREE_CODE (subop) != REAL_CST
3136 #endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
3138 /* Note that TREE_CONSTANT isn't enough:
3139 static var addresses are constant but we can't
3140 do arithmetic on them. */
3143 else if (kind == 'e' || kind == '<'
3144 || kind == '1' || kind == '2' || kind == 'r')
3146 register int len = tree_code_length[(int) code];
3148 for (i = 0; i < len; i++)
3150 tree op = TREE_OPERAND (t, i);
3154 continue; /* Valid for CALL_EXPR, at least. */
3156 if (kind == '<' || code == RSHIFT_EXPR)
3158 /* Signedness matters here. Perhaps we can refine this
3160 STRIP_TYPE_NOPS (op);
3164 /* Strip any conversions that don't change the mode. */
3168 if (TREE_CODE (op) == COMPLEX_CST)
3169 subop = TREE_REALPART (op);
3173 if (TREE_CODE (subop) != INTEGER_CST
3174 #if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
3175 && TREE_CODE (subop) != REAL_CST
3176 #endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
3178 /* Note that TREE_CONSTANT isn't enough:
3179 static var addresses are constant but we can't
3180 do arithmetic on them. */
3190 /* If this is a commutative operation, and ARG0 is a constant, move it
3191 to ARG1 to reduce the number of tests below. */
3192 if ((code == PLUS_EXPR || code == MULT_EXPR || code == MIN_EXPR
3193 || code == MAX_EXPR || code == BIT_IOR_EXPR || code == BIT_XOR_EXPR
3194 || code == BIT_AND_EXPR)
3195 && (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST))
3197 tem = arg0; arg0 = arg1; arg1 = tem;
3199 tem = TREE_OPERAND (t, 0); TREE_OPERAND (t, 0) = TREE_OPERAND (t, 1);
3200 TREE_OPERAND (t, 1) = tem;
3203 /* Now WINS is set as described above,
3204 ARG0 is the first operand of EXPR,
3205 and ARG1 is the second operand (if it has more than one operand).
3207 First check for cases where an arithmetic operation is applied to a
3208 compound, conditional, or comparison operation. Push the arithmetic
3209 operation inside the compound or conditional to see if any folding
3210 can then be done. Convert comparison to conditional for this purpose.
3211 The also optimizes non-constant cases that used to be done in
3214 Before we do that, see if this is a BIT_AND_EXPR or a BIT_OR_EXPR,
3215 one of the operands is a comparison and the other is a comparison, a
3216 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
3217 code below would make the expression more complex. Change it to a
3218 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
3219 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
3221 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
3222 || code == EQ_EXPR || code == NE_EXPR)
3223 && ((truth_value_p (TREE_CODE (arg0))
3224 && (truth_value_p (TREE_CODE (arg1))
3225 || (TREE_CODE (arg1) == BIT_AND_EXPR
3226 && integer_onep (TREE_OPERAND (arg1, 1)))))
3227 || (truth_value_p (TREE_CODE (arg1))
3228 && (truth_value_p (TREE_CODE (arg0))
3229 || (TREE_CODE (arg0) == BIT_AND_EXPR
3230 && integer_onep (TREE_OPERAND (arg0, 1)))))))
3232 t = fold (build (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
3233 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
3237 if (code == EQ_EXPR)
3238 t = invert_truthvalue (t);
3243 if (TREE_CODE_CLASS (code) == '1')
3245 if (TREE_CODE (arg0) == COMPOUND_EXPR)
3246 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
3247 fold (build1 (code, type, TREE_OPERAND (arg0, 1))));
3248 else if (TREE_CODE (arg0) == COND_EXPR)
3250 t = fold (build (COND_EXPR, type, TREE_OPERAND (arg0, 0),
3251 fold (build1 (code, type, TREE_OPERAND (arg0, 1))),
3252 fold (build1 (code, type, TREE_OPERAND (arg0, 2)))));
3254 /* If this was a conversion, and all we did was to move into
3255 inside the COND_EXPR, bring it back out. But leave it if
3256 it is a conversion from integer to integer and the
3257 result precision is no wider than a word since such a
3258 conversion is cheap and may be optimized away by combine,
3259 while it couldn't if it were outside the COND_EXPR. Then return
3260 so we don't get into an infinite recursion loop taking the
3261 conversion out and then back in. */
3263 if ((code == NOP_EXPR || code == CONVERT_EXPR
3264 || code == NON_LVALUE_EXPR)
3265 && TREE_CODE (t) == COND_EXPR
3266 && TREE_CODE (TREE_OPERAND (t, 1)) == code
3267 && TREE_CODE (TREE_OPERAND (t, 2)) == code
3268 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0))
3269 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 2), 0)))
3270 && ! (INTEGRAL_TYPE_P (TREE_TYPE (t))
3271 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0)))
3272 && TYPE_PRECISION (TREE_TYPE (t)) <= BITS_PER_WORD))
3273 t = build1 (code, type,
3275 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0)),
3276 TREE_OPERAND (t, 0),
3277 TREE_OPERAND (TREE_OPERAND (t, 1), 0),
3278 TREE_OPERAND (TREE_OPERAND (t, 2), 0)));
3281 else if (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<')
3282 return fold (build (COND_EXPR, type, arg0,
3283 fold (build1 (code, type, integer_one_node)),
3284 fold (build1 (code, type, integer_zero_node))));
3286 else if (TREE_CODE_CLASS (code) == '2'
3287 || TREE_CODE_CLASS (code) == '<')
3289 if (TREE_CODE (arg1) == COMPOUND_EXPR)
3290 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
3291 fold (build (code, type,
3292 arg0, TREE_OPERAND (arg1, 1))));
3293 else if (TREE_CODE (arg1) == COND_EXPR
3294 || TREE_CODE_CLASS (TREE_CODE (arg1)) == '<')
3296 tree test, true_value, false_value;
3298 if (TREE_CODE (arg1) == COND_EXPR)
3300 test = TREE_OPERAND (arg1, 0);
3301 true_value = TREE_OPERAND (arg1, 1);
3302 false_value = TREE_OPERAND (arg1, 2);
3306 tree testtype = TREE_TYPE (arg1);
3308 true_value = convert (testtype, integer_one_node);
3309 false_value = convert (testtype, integer_zero_node);
3312 /* If ARG0 is complex we want to make sure we only evaluate
3313 it once. Though this is only required if it is volatile, it
3314 might be more efficient even if it is not. However, if we
3315 succeed in folding one part to a constant, we do not need
3316 to make this SAVE_EXPR. Since we do this optimization
3317 primarily to see if we do end up with constant and this
3318 SAVE_EXPR interferes with later optimizations, suppressing
3319 it when we can is important. */
3321 if (TREE_CODE (arg0) != SAVE_EXPR
3322 && ((TREE_CODE (arg0) != VAR_DECL
3323 && TREE_CODE (arg0) != PARM_DECL)
3324 || TREE_SIDE_EFFECTS (arg0)))
3326 tree lhs = fold (build (code, type, arg0, true_value));
3327 tree rhs = fold (build (code, type, arg0, false_value));
3329 if (TREE_CONSTANT (lhs) || TREE_CONSTANT (rhs))
3330 return fold (build (COND_EXPR, type, test, lhs, rhs));
3332 arg0 = save_expr (arg0);
3335 test = fold (build (COND_EXPR, type, test,
3336 fold (build (code, type, arg0, true_value)),
3337 fold (build (code, type, arg0, false_value))));
3338 if (TREE_CODE (arg0) == SAVE_EXPR)
3339 return build (COMPOUND_EXPR, type,
3340 convert (void_type_node, arg0),
3341 strip_compound_expr (test, arg0));
3343 return convert (type, test);
3346 else if (TREE_CODE (arg0) == COMPOUND_EXPR)
3347 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
3348 fold (build (code, type, TREE_OPERAND (arg0, 1), arg1)));
3349 else if (TREE_CODE (arg0) == COND_EXPR
3350 || TREE_CODE_CLASS (TREE_CODE (arg0)) == '<')
3352 tree test, true_value, false_value;
3354 if (TREE_CODE (arg0) == COND_EXPR)
3356 test = TREE_OPERAND (arg0, 0);
3357 true_value = TREE_OPERAND (arg0, 1);
3358 false_value = TREE_OPERAND (arg0, 2);
3362 tree testtype = TREE_TYPE (arg0);
3364 true_value = convert (testtype, integer_one_node);
3365 false_value = convert (testtype, integer_zero_node);
3368 if (TREE_CODE (arg1) != SAVE_EXPR
3369 && ((TREE_CODE (arg1) != VAR_DECL
3370 && TREE_CODE (arg1) != PARM_DECL)
3371 || TREE_SIDE_EFFECTS (arg1)))
3373 tree lhs = fold (build (code, type, true_value, arg1));
3374 tree rhs = fold (build (code, type, false_value, arg1));
3376 if (TREE_CONSTANT (lhs) || TREE_CONSTANT (rhs)
3377 || TREE_CONSTANT (arg1))
3378 return fold (build (COND_EXPR, type, test, lhs, rhs));
3380 arg1 = save_expr (arg1);
3383 test = fold (build (COND_EXPR, type, test,
3384 fold (build (code, type, true_value, arg1)),
3385 fold (build (code, type, false_value, arg1))));
3386 if (TREE_CODE (arg1) == SAVE_EXPR)
3387 return build (COMPOUND_EXPR, type,
3388 convert (void_type_node, arg1),
3389 strip_compound_expr (test, arg1));
3391 return convert (type, test);
3394 else if (TREE_CODE_CLASS (code) == '<'
3395 && TREE_CODE (arg0) == COMPOUND_EXPR)
3396 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
3397 fold (build (code, type, TREE_OPERAND (arg0, 1), arg1)));
3398 else if (TREE_CODE_CLASS (code) == '<'
3399 && TREE_CODE (arg1) == COMPOUND_EXPR)
3400 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
3401 fold (build (code, type, arg0, TREE_OPERAND (arg1, 1))));
3413 return fold (DECL_INITIAL (t));
3418 case FIX_TRUNC_EXPR:
3419 /* Other kinds of FIX are not handled properly by fold_convert. */
3421 if (TREE_TYPE (TREE_OPERAND (t, 0)) == TREE_TYPE (t))
3422 return TREE_OPERAND (t, 0);
3424 /* Handle cases of two conversions in a row. */
3425 if (TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
3426 || TREE_CODE (TREE_OPERAND (t, 0)) == CONVERT_EXPR)
3428 tree inside_type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
3429 tree inter_type = TREE_TYPE (TREE_OPERAND (t, 0));
3430 tree final_type = TREE_TYPE (t);
3431 int inside_int = INTEGRAL_TYPE_P (inside_type);
3432 int inside_ptr = POINTER_TYPE_P (inside_type);
3433 int inside_float = FLOAT_TYPE_P (inside_type);
3434 int inside_prec = TYPE_PRECISION (inside_type);
3435 int inside_unsignedp = TREE_UNSIGNED (inside_type);
3436 int inter_int = INTEGRAL_TYPE_P (inter_type);
3437 int inter_ptr = POINTER_TYPE_P (inter_type);
3438 int inter_float = FLOAT_TYPE_P (inter_type);
3439 int inter_prec = TYPE_PRECISION (inter_type);
3440 int inter_unsignedp = TREE_UNSIGNED (inter_type);
3441 int final_int = INTEGRAL_TYPE_P (final_type);
3442 int final_ptr = POINTER_TYPE_P (final_type);
3443 int final_float = FLOAT_TYPE_P (final_type);
3444 int final_prec = TYPE_PRECISION (final_type);
3445 int final_unsignedp = TREE_UNSIGNED (final_type);
3447 /* In addition to the cases of two conversions in a row
3448 handled below, if we are converting something to its own
3449 type via an object of identical or wider precision, neither
3450 conversion is needed. */
3451 if (inside_type == final_type
3452 && ((inter_int && final_int) || (inter_float && final_float))
3453 && inter_prec >= final_prec)
3454 return TREE_OPERAND (TREE_OPERAND (t, 0), 0);
3456 /* Likewise, if the intermediate and final types are either both
3457 float or both integer, we don't need the middle conversion if
3458 it is wider than the final type and doesn't change the signedness
3459 (for integers). Avoid this if the final type is a pointer
3460 since then we sometimes need the inner conversion. Likewise if
3461 the outer has a precision not equal to the size of its mode. */
3462 if ((((inter_int || inter_ptr) && (inside_int || inside_ptr))
3463 || (inter_float && inside_float))
3464 && inter_prec >= inside_prec
3465 && (inter_float || inter_unsignedp == inside_unsignedp)
3466 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (final_type))
3467 && TYPE_MODE (final_type) == TYPE_MODE (inter_type))
3469 return convert (final_type, TREE_OPERAND (TREE_OPERAND (t, 0), 0));
3471 /* Two conversions in a row are not needed unless:
3472 - some conversion is floating-point (overstrict for now), or
3473 - the intermediate type is narrower than both initial and
3475 - the intermediate type and innermost type differ in signedness,
3476 and the outermost type is wider than the intermediate, or
3477 - the initial type is a pointer type and the precisions of the
3478 intermediate and final types differ, or
3479 - the final type is a pointer type and the precisions of the
3480 initial and intermediate types differ. */
3481 if (! inside_float && ! inter_float && ! final_float
3482 && (inter_prec > inside_prec || inter_prec > final_prec)
3483 && ! (inside_int && inter_int
3484 && inter_unsignedp != inside_unsignedp
3485 && inter_prec < final_prec)
3486 && ((inter_unsignedp && inter_prec > inside_prec)
3487 == (final_unsignedp && final_prec > inter_prec))
3488 && ! (inside_ptr && inter_prec != final_prec)
3489 && ! (final_ptr && inside_prec != inter_prec)
3490 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (final_type))
3491 && TYPE_MODE (final_type) == TYPE_MODE (inter_type))
3493 return convert (final_type, TREE_OPERAND (TREE_OPERAND (t, 0), 0));
3496 if (TREE_CODE (TREE_OPERAND (t, 0)) == MODIFY_EXPR
3497 && TREE_CONSTANT (TREE_OPERAND (TREE_OPERAND (t, 0), 1))
3498 /* Detect assigning a bitfield. */
3499 && !(TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 0)) == COMPONENT_REF
3500 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t, 0), 0), 1))))
3502 /* Don't leave an assignment inside a conversion
3503 unless assigning a bitfield. */
3504 tree prev = TREE_OPERAND (t, 0);
3505 TREE_OPERAND (t, 0) = TREE_OPERAND (prev, 1);
3506 /* First do the assignment, then return converted constant. */
3507 t = build (COMPOUND_EXPR, TREE_TYPE (t), prev, fold (t));
3513 TREE_CONSTANT (t) = TREE_CONSTANT (arg0);
3516 return fold_convert (t, arg0);
3518 #if 0 /* This loses on &"foo"[0]. */
3523 /* Fold an expression like: "foo"[2] */
3524 if (TREE_CODE (arg0) == STRING_CST
3525 && TREE_CODE (arg1) == INTEGER_CST
3526 && !TREE_INT_CST_HIGH (arg1)
3527 && (i = TREE_INT_CST_LOW (arg1)) < TREE_STRING_LENGTH (arg0))
3529 t = build_int_2 (TREE_STRING_POINTER (arg0)[i], 0);
3530 TREE_TYPE (t) = TREE_TYPE (TREE_TYPE (arg0));
3531 force_fit_type (t, 0);
3538 if (TREE_CODE (arg0) == CONSTRUCTOR)
3540 tree m = purpose_member (arg1, CONSTRUCTOR_ELTS (arg0));
3547 TREE_CONSTANT (t) = wins;
3553 if (TREE_CODE (arg0) == INTEGER_CST)
3555 HOST_WIDE_INT low, high;
3556 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
3557 TREE_INT_CST_HIGH (arg0),
3559 t = build_int_2 (low, high);
3560 TREE_TYPE (t) = type;
3562 = (TREE_OVERFLOW (arg0)
3563 | force_fit_type (t, overflow && !TREE_UNSIGNED (type)));
3564 TREE_CONSTANT_OVERFLOW (t)
3565 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg0);
3567 else if (TREE_CODE (arg0) == REAL_CST)
3568 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
3569 TREE_TYPE (t) = type;
3571 else if (TREE_CODE (arg0) == NEGATE_EXPR)
3572 return TREE_OPERAND (arg0, 0);
3574 /* Convert - (a - b) to (b - a) for non-floating-point. */
3575 else if (TREE_CODE (arg0) == MINUS_EXPR && ! FLOAT_TYPE_P (type))
3576 return build (MINUS_EXPR, type, TREE_OPERAND (arg0, 1),
3577 TREE_OPERAND (arg0, 0));
3584 if (TREE_CODE (arg0) == INTEGER_CST)
3586 if (! TREE_UNSIGNED (type)
3587 && TREE_INT_CST_HIGH (arg0) < 0)
3589 HOST_WIDE_INT low, high;
3590 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
3591 TREE_INT_CST_HIGH (arg0),
3593 t = build_int_2 (low, high);
3594 TREE_TYPE (t) = type;
3596 = (TREE_OVERFLOW (arg0)
3597 | force_fit_type (t, overflow));
3598 TREE_CONSTANT_OVERFLOW (t)
3599 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg0);
3602 else if (TREE_CODE (arg0) == REAL_CST)
3604 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
3605 t = build_real (type,
3606 REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
3608 TREE_TYPE (t) = type;
3610 else if (TREE_CODE (arg0) == ABS_EXPR || TREE_CODE (arg0) == NEGATE_EXPR)
3611 return build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0));
3615 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
3617 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
3618 return build (COMPLEX_EXPR, TREE_TYPE (arg0),
3619 TREE_OPERAND (arg0, 0),
3620 fold (build1 (NEGATE_EXPR,
3621 TREE_TYPE (TREE_TYPE (arg0)),
3622 TREE_OPERAND (arg0, 1))));
3623 else if (TREE_CODE (arg0) == COMPLEX_CST)
3624 return build_complex (TREE_OPERAND (arg0, 0),
3625 fold (build1 (NEGATE_EXPR,
3626 TREE_TYPE (TREE_TYPE (arg0)),
3627 TREE_OPERAND (arg0, 1))));
3628 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
3629 return fold (build (TREE_CODE (arg0), type,
3630 fold (build1 (CONJ_EXPR, type,
3631 TREE_OPERAND (arg0, 0))),
3632 fold (build1 (CONJ_EXPR,
3633 type, TREE_OPERAND (arg0, 1)))));
3634 else if (TREE_CODE (arg0) == CONJ_EXPR)
3635 return TREE_OPERAND (arg0, 0);
3641 if (TREE_CODE (arg0) == INTEGER_CST)
3642 t = build_int_2 (~ TREE_INT_CST_LOW (arg0),
3643 ~ TREE_INT_CST_HIGH (arg0));
3644 TREE_TYPE (t) = type;
3645 force_fit_type (t, 0);
3646 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg0);
3647 TREE_CONSTANT_OVERFLOW (t) = TREE_CONSTANT_OVERFLOW (arg0);
3649 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
3650 return TREE_OPERAND (arg0, 0);
3654 /* A + (-B) -> A - B */
3655 if (TREE_CODE (arg1) == NEGATE_EXPR)
3656 return fold (build (MINUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
3657 else if (! FLOAT_TYPE_P (type))
3659 if (integer_zerop (arg1))
3660 return non_lvalue (convert (type, arg0));
3662 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
3663 with a constant, and the two constants have no bits in common,
3664 we should treat this as a BIT_IOR_EXPR since this may produce more
3666 if (TREE_CODE (arg0) == BIT_AND_EXPR
3667 && TREE_CODE (arg1) == BIT_AND_EXPR
3668 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
3669 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
3670 && integer_zerop (const_binop (BIT_AND_EXPR,
3671 TREE_OPERAND (arg0, 1),
3672 TREE_OPERAND (arg1, 1), 0)))
3674 code = BIT_IOR_EXPR;
3678 /* (A * C) + (B * C) -> (A+B) * C. Since we are most concerned
3679 about the case where C is a constant, just try one of the
3680 four possibilities. */
3682 if (TREE_CODE (arg0) == MULT_EXPR && TREE_CODE (arg1) == MULT_EXPR
3683 && operand_equal_p (TREE_OPERAND (arg0, 1),
3684 TREE_OPERAND (arg1, 1), 0))
3685 return fold (build (MULT_EXPR, type,
3686 fold (build (PLUS_EXPR, type,
3687 TREE_OPERAND (arg0, 0),
3688 TREE_OPERAND (arg1, 0))),
3689 TREE_OPERAND (arg0, 1)));
3691 /* In IEEE floating point, x+0 may not equal x. */
3692 else if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
3694 && real_zerop (arg1))
3695 return non_lvalue (convert (type, arg0));
3697 /* In most languages, can't associate operations on floats
3698 through parentheses. Rather than remember where the parentheses
3699 were, we don't associate floats at all. It shouldn't matter much.
3700 However, associating multiplications is only very slightly
3701 inaccurate, so do that if -ffast-math is specified. */
3702 if (FLOAT_TYPE_P (type)
3703 && ! (flag_fast_math && code == MULT_EXPR))
3706 /* The varsign == -1 cases happen only for addition and subtraction.
3707 It says that the arg that was split was really CON minus VAR.
3708 The rest of the code applies to all associative operations. */
3714 if (split_tree (arg0, code, &var, &con, &varsign))
3718 /* EXPR is (CON-VAR) +- ARG1. */
3719 /* If it is + and VAR==ARG1, return just CONST. */
3720 if (code == PLUS_EXPR && operand_equal_p (var, arg1, 0))
3721 return convert (TREE_TYPE (t), con);
3723 /* If ARG0 is a constant, don't change things around;
3724 instead keep all the constant computations together. */
3726 if (TREE_CONSTANT (arg0))
3729 /* Otherwise return (CON +- ARG1) - VAR. */
3730 t = build (MINUS_EXPR, type,
3731 fold (build (code, type, con, arg1)), var);
3735 /* EXPR is (VAR+CON) +- ARG1. */
3736 /* If it is - and VAR==ARG1, return just CONST. */
3737 if (code == MINUS_EXPR && operand_equal_p (var, arg1, 0))
3738 return convert (TREE_TYPE (t), con);
3740 /* If ARG0 is a constant, don't change things around;
3741 instead keep all the constant computations together. */
3743 if (TREE_CONSTANT (arg0))
3746 /* Otherwise return VAR +- (ARG1 +- CON). */
3747 tem = fold (build (code, type, arg1, con));
3748 t = build (code, type, var, tem);
3750 if (integer_zerop (tem)
3751 && (code == PLUS_EXPR || code == MINUS_EXPR))
3752 return convert (type, var);
3753 /* If we have x +/- (c - d) [c an explicit integer]
3754 change it to x -/+ (d - c) since if d is relocatable
3755 then the latter can be a single immediate insn
3756 and the former cannot. */
3757 if (TREE_CODE (tem) == MINUS_EXPR
3758 && TREE_CODE (TREE_OPERAND (tem, 0)) == INTEGER_CST)
3760 tree tem1 = TREE_OPERAND (tem, 1);
3761 TREE_OPERAND (tem, 1) = TREE_OPERAND (tem, 0);
3762 TREE_OPERAND (tem, 0) = tem1;
3764 (code == PLUS_EXPR ? MINUS_EXPR : PLUS_EXPR));
3770 if (split_tree (arg1, code, &var, &con, &varsign))
3772 if (TREE_CONSTANT (arg1))
3777 (code == PLUS_EXPR ? MINUS_EXPR : PLUS_EXPR));
3779 /* EXPR is ARG0 +- (CON +- VAR). */
3780 if (TREE_CODE (t) == MINUS_EXPR
3781 && operand_equal_p (var, arg0, 0))
3783 /* If VAR and ARG0 cancel, return just CON or -CON. */
3784 if (code == PLUS_EXPR)
3785 return convert (TREE_TYPE (t), con);
3786 return fold (build1 (NEGATE_EXPR, TREE_TYPE (t),
3787 convert (TREE_TYPE (t), con)));
3790 t = build (TREE_CODE (t), type,
3791 fold (build (code, TREE_TYPE (t), arg0, con)), var);
3793 if (integer_zerop (TREE_OPERAND (t, 0))
3794 && TREE_CODE (t) == PLUS_EXPR)
3795 return convert (TREE_TYPE (t), var);
3800 #if defined (REAL_IS_NOT_DOUBLE) && ! defined (REAL_ARITHMETIC)
3801 if (TREE_CODE (arg1) == REAL_CST)
3803 #endif /* REAL_IS_NOT_DOUBLE, and no REAL_ARITHMETIC */
3805 t1 = const_binop (code, arg0, arg1, 0);
3806 if (t1 != NULL_TREE)
3808 /* The return value should always have
3809 the same type as the original expression. */
3810 TREE_TYPE (t1) = TREE_TYPE (t);
3816 if (! FLOAT_TYPE_P (type))
3818 if (! wins && integer_zerop (arg0))
3819 return build1 (NEGATE_EXPR, type, arg1);
3820 if (integer_zerop (arg1))
3821 return non_lvalue (convert (type, arg0));
3823 /* (A * C) - (B * C) -> (A-B) * C. Since we are most concerned
3824 about the case where C is a constant, just try one of the
3825 four possibilities. */
3827 if (TREE_CODE (arg0) == MULT_EXPR && TREE_CODE (arg1) == MULT_EXPR
3828 && operand_equal_p (TREE_OPERAND (arg0, 1),
3829 TREE_OPERAND (arg1, 1), 0))
3830 return fold (build (MULT_EXPR, type,
3831 fold (build (MINUS_EXPR, type,
3832 TREE_OPERAND (arg0, 0),
3833 TREE_OPERAND (arg1, 0))),
3834 TREE_OPERAND (arg0, 1)));
3836 /* Convert A - (-B) to A + B. */
3837 else if (TREE_CODE (arg1) == NEGATE_EXPR)
3838 return fold (build (PLUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
3840 else if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
3843 /* Except with IEEE floating point, 0-x equals -x. */
3844 if (! wins && real_zerop (arg0))
3845 return build1 (NEGATE_EXPR, type, arg1);
3846 /* Except with IEEE floating point, x-0 equals x. */
3847 if (real_zerop (arg1))
3848 return non_lvalue (convert (type, arg0));
3851 /* Fold &x - &x. This can happen from &x.foo - &x.
3852 This is unsafe for certain floats even in non-IEEE formats.
3853 In IEEE, it is unsafe because it does wrong for NaNs.
3854 Also note that operand_equal_p is always false if an operand
3857 if ((! FLOAT_TYPE_P (type) || flag_fast_math)
3858 && operand_equal_p (arg0, arg1, 0))
3859 return convert (type, integer_zero_node);
3864 if (! FLOAT_TYPE_P (type))
3866 if (integer_zerop (arg1))
3867 return omit_one_operand (type, arg1, arg0);
3868 if (integer_onep (arg1))
3869 return non_lvalue (convert (type, arg0));
3871 /* ((A / C) * C) is A if the division is an
3872 EXACT_DIV_EXPR. Since C is normally a constant,
3873 just check for one of the four possibilities. */
3875 if (TREE_CODE (arg0) == EXACT_DIV_EXPR
3876 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
3877 return TREE_OPERAND (arg0, 0);
3879 /* (a * (1 << b)) is (a << b) */
3880 if (TREE_CODE (arg1) == LSHIFT_EXPR
3881 && integer_onep (TREE_OPERAND (arg1, 0)))
3882 return fold (build (LSHIFT_EXPR, type, arg0,
3883 TREE_OPERAND (arg1, 1)));
3884 if (TREE_CODE (arg0) == LSHIFT_EXPR
3885 && integer_onep (TREE_OPERAND (arg0, 0)))
3886 return fold (build (LSHIFT_EXPR, type, arg1,
3887 TREE_OPERAND (arg0, 1)));
3891 /* x*0 is 0, except for IEEE floating point. */
3892 if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
3894 && real_zerop (arg1))
3895 return omit_one_operand (type, arg1, arg0);
3896 /* In IEEE floating point, x*1 is not equivalent to x for snans.
3897 However, ANSI says we can drop signals,
3898 so we can do this anyway. */
3899 if (real_onep (arg1))
3900 return non_lvalue (convert (type, arg0));
3902 if (! wins && real_twop (arg1))
3904 tree arg = save_expr (arg0);
3905 return build (PLUS_EXPR, type, arg, arg);
3913 register enum tree_code code0, code1;
3915 if (integer_all_onesp (arg1))
3916 return omit_one_operand (type, arg1, arg0);
3917 if (integer_zerop (arg1))
3918 return non_lvalue (convert (type, arg0));
3919 t1 = distribute_bit_expr (code, type, arg0, arg1);
3920 if (t1 != NULL_TREE)
3923 /* (A << C1) | (A >> C2) if A is unsigned and C1+C2 is the size of A
3924 is a rotate of A by C1 bits. */
3925 /* (A << B) | (A >> (Z - B)) if A is unsigned and Z is the size of A
3926 is a rotate of A by B bits. */
3928 code0 = TREE_CODE (arg0);
3929 code1 = TREE_CODE (arg1);
3930 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
3931 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
3932 && operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1,0), 0)
3933 && TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
3935 register tree tree01, tree11;
3936 register enum tree_code code01, code11;
3938 tree01 = TREE_OPERAND (arg0, 1);
3939 tree11 = TREE_OPERAND (arg1, 1);
3940 code01 = TREE_CODE (tree01);
3941 code11 = TREE_CODE (tree11);
3942 if (code01 == INTEGER_CST
3943 && code11 == INTEGER_CST
3944 && TREE_INT_CST_HIGH (tree01) == 0
3945 && TREE_INT_CST_HIGH (tree11) == 0
3946 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
3947 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
3948 return build (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
3949 code0 == LSHIFT_EXPR ? tree01 : tree11);
3950 else if (code11 == MINUS_EXPR
3951 && TREE_CODE (TREE_OPERAND (tree11, 0)) == INTEGER_CST
3952 && TREE_INT_CST_HIGH (TREE_OPERAND (tree11, 0)) == 0
3953 && TREE_INT_CST_LOW (TREE_OPERAND (tree11, 0))
3954 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))
3955 && operand_equal_p (tree01, TREE_OPERAND (tree11, 1), 0))
3956 return build (code0 == LSHIFT_EXPR ? LROTATE_EXPR : RROTATE_EXPR,
3957 type, TREE_OPERAND (arg0, 0), tree01);
3958 else if (code01 == MINUS_EXPR
3959 && TREE_CODE (TREE_OPERAND (tree01, 0)) == INTEGER_CST
3960 && TREE_INT_CST_HIGH (TREE_OPERAND (tree01, 0)) == 0
3961 && TREE_INT_CST_LOW (TREE_OPERAND (tree01, 0))
3962 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))
3963 && operand_equal_p (tree11, TREE_OPERAND (tree01, 1), 0))
3964 return build (code0 != LSHIFT_EXPR ? LROTATE_EXPR : RROTATE_EXPR,
3965 type, TREE_OPERAND (arg0, 0), tree11);
3972 if (integer_zerop (arg1))
3973 return non_lvalue (convert (type, arg0));
3974 if (integer_all_onesp (arg1))
3975 return fold (build1 (BIT_NOT_EXPR, type, arg0));
3980 if (integer_all_onesp (arg1))
3981 return non_lvalue (convert (type, arg0));
3982 if (integer_zerop (arg1))
3983 return omit_one_operand (type, arg1, arg0);
3984 t1 = distribute_bit_expr (code, type, arg0, arg1);
3985 if (t1 != NULL_TREE)
3987 /* Simplify ((int)c & 0x377) into (int)c, if c is unsigned char. */
3988 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == NOP_EXPR
3989 && TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0))))
3991 int prec = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0)));
3992 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
3993 && (~TREE_INT_CST_LOW (arg0)
3994 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
3995 return build1 (NOP_EXPR, type, TREE_OPERAND (arg1, 0));
3997 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
3998 && TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
4000 int prec = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
4001 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
4002 && (~TREE_INT_CST_LOW (arg1)
4003 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
4004 return build1 (NOP_EXPR, type, TREE_OPERAND (arg0, 0));
4008 case BIT_ANDTC_EXPR:
4009 if (integer_all_onesp (arg0))
4010 return non_lvalue (convert (type, arg1));
4011 if (integer_zerop (arg0))
4012 return omit_one_operand (type, arg0, arg1);
4013 if (TREE_CODE (arg1) == INTEGER_CST)
4015 arg1 = fold (build1 (BIT_NOT_EXPR, type, arg1));
4016 code = BIT_AND_EXPR;
4022 /* In most cases, do nothing with a divide by zero. */
4023 #if !defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
4024 #ifndef REAL_INFINITY
4025 if (TREE_CODE (arg1) == REAL_CST && real_zerop (arg1))
4028 #endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
4030 /* In IEEE floating point, x/1 is not equivalent to x for snans.
4031 However, ANSI says we can drop signals, so we can do this anyway. */
4032 if (real_onep (arg1))
4033 return non_lvalue (convert (type, arg0));
4035 /* If ARG1 is a constant, we can convert this to a multiply by the
4036 reciprocal. This does not have the same rounding properties,
4037 so only do this if -ffast-math. We can actually always safely
4038 do it if ARG1 is a power of two, but it's hard to tell if it is
4039 or not in a portable manner. */
4040 if (TREE_CODE (arg1) == REAL_CST && flag_fast_math
4041 && 0 != (tem = const_binop (code, build_real (type, dconst1),
4043 return fold (build (MULT_EXPR, type, arg0, tem));
4047 case TRUNC_DIV_EXPR:
4048 case ROUND_DIV_EXPR:
4049 case FLOOR_DIV_EXPR:
4051 case EXACT_DIV_EXPR:
4052 if (integer_onep (arg1))
4053 return non_lvalue (convert (type, arg0));
4054 if (integer_zerop (arg1))
4057 /* If we have ((a / C1) / C2) where both division are the same type, try
4058 to simplify. First see if C1 * C2 overflows or not. */
4059 if (TREE_CODE (arg0) == code && TREE_CODE (arg1) == INTEGER_CST
4060 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
4064 new_divisor = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 1), arg1, 0);
4065 tem = const_binop (FLOOR_DIV_EXPR, new_divisor, arg1, 0);
4067 if (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) == TREE_INT_CST_LOW (tem)
4068 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == TREE_INT_CST_HIGH (tem))
4070 /* If no overflow, divide by C1*C2. */
4071 return fold (build (code, type, TREE_OPERAND (arg0, 0), new_divisor));
4075 /* Look for ((a * C1) / C3) or (((a * C1) + C2) / C3),
4076 where C1 % C3 == 0 or C3 % C1 == 0. We can simplify these
4077 expressions, which often appear in the offsets or sizes of
4078 objects with a varying size. Only deal with positive divisors
4079 and multiplicands. If C2 is negative, we must have C2 % C3 == 0.
4081 Look for NOPs and SAVE_EXPRs inside. */
4083 if (TREE_CODE (arg1) == INTEGER_CST
4084 && tree_int_cst_sgn (arg1) >= 0)
4086 int have_save_expr = 0;
4087 tree c2 = integer_zero_node;
4090 if (TREE_CODE (xarg0) == SAVE_EXPR && SAVE_EXPR_RTL (xarg0) == 0)
4091 have_save_expr = 1, xarg0 = TREE_OPERAND (xarg0, 0);
4095 if (TREE_CODE (xarg0) == PLUS_EXPR
4096 && TREE_CODE (TREE_OPERAND (xarg0, 1)) == INTEGER_CST)
4097 c2 = TREE_OPERAND (xarg0, 1), xarg0 = TREE_OPERAND (xarg0, 0);
4098 else if (TREE_CODE (xarg0) == MINUS_EXPR
4099 && TREE_CODE (TREE_OPERAND (xarg0, 1)) == INTEGER_CST
4100 /* If we are doing this computation unsigned, the negate
4102 && ! TREE_UNSIGNED (type))
4104 c2 = fold (build1 (NEGATE_EXPR, type, TREE_OPERAND (xarg0, 1)));
4105 xarg0 = TREE_OPERAND (xarg0, 0);
4108 if (TREE_CODE (xarg0) == SAVE_EXPR && SAVE_EXPR_RTL (xarg0) == 0)
4109 have_save_expr = 1, xarg0 = TREE_OPERAND (xarg0, 0);
4113 if (TREE_CODE (xarg0) == MULT_EXPR
4114 && TREE_CODE (TREE_OPERAND (xarg0, 1)) == INTEGER_CST
4115 && tree_int_cst_sgn (TREE_OPERAND (xarg0, 1)) >= 0
4116 && (integer_zerop (const_binop (TRUNC_MOD_EXPR,
4117 TREE_OPERAND (xarg0, 1), arg1, 1))
4118 || integer_zerop (const_binop (TRUNC_MOD_EXPR, arg1,
4119 TREE_OPERAND (xarg0, 1), 1)))
4120 && (tree_int_cst_sgn (c2) >= 0
4121 || integer_zerop (const_binop (TRUNC_MOD_EXPR, c2,
4124 tree outer_div = integer_one_node;
4125 tree c1 = TREE_OPERAND (xarg0, 1);
4128 /* If C3 > C1, set them equal and do a divide by
4129 C3/C1 at the end of the operation. */
4130 if (tree_int_cst_lt (c1, c3))
4131 outer_div = const_binop (code, c3, c1, 0), c3 = c1;
4133 /* The result is A * (C1/C3) + (C2/C3). */
4134 t = fold (build (PLUS_EXPR, type,
4135 fold (build (MULT_EXPR, type,
4136 TREE_OPERAND (xarg0, 0),
4137 const_binop (code, c1, c3, 1))),
4138 const_binop (code, c2, c3, 1)));
4140 if (! integer_onep (outer_div))
4141 t = fold (build (code, type, t, convert (type, outer_div)));
4153 case FLOOR_MOD_EXPR:
4154 case ROUND_MOD_EXPR:
4155 case TRUNC_MOD_EXPR:
4156 if (integer_onep (arg1))
4157 return omit_one_operand (type, integer_zero_node, arg0);
4158 if (integer_zerop (arg1))
4161 /* Look for ((a * C1) % C3) or (((a * C1) + C2) % C3),
4162 where C1 % C3 == 0. Handle similarly to the division case,
4163 but don't bother with SAVE_EXPRs. */
4165 if (TREE_CODE (arg1) == INTEGER_CST
4166 && ! integer_zerop (arg1))
4168 tree c2 = integer_zero_node;
4171 if (TREE_CODE (xarg0) == PLUS_EXPR
4172 && TREE_CODE (TREE_OPERAND (xarg0, 1)) == INTEGER_CST)
4173 c2 = TREE_OPERAND (xarg0, 1), xarg0 = TREE_OPERAND (xarg0, 0);
4174 else if (TREE_CODE (xarg0) == MINUS_EXPR
4175 && TREE_CODE (TREE_OPERAND (xarg0, 1)) == INTEGER_CST
4176 && ! TREE_UNSIGNED (type))
4178 c2 = fold (build1 (NEGATE_EXPR, type, TREE_OPERAND (xarg0, 1)));
4179 xarg0 = TREE_OPERAND (xarg0, 0);
4184 if (TREE_CODE (xarg0) == MULT_EXPR
4185 && TREE_CODE (TREE_OPERAND (xarg0, 1)) == INTEGER_CST
4186 && integer_zerop (const_binop (TRUNC_MOD_EXPR,
4187 TREE_OPERAND (xarg0, 1),
4189 && tree_int_cst_sgn (c2) >= 0)
4190 /* The result is (C2%C3). */
4191 return omit_one_operand (type, const_binop (code, c2, arg1, 1),
4192 TREE_OPERAND (xarg0, 0));
4201 if (integer_zerop (arg1))
4202 return non_lvalue (convert (type, arg0));
4203 /* Since negative shift count is not well-defined,
4204 don't try to compute it in the compiler. */
4205 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
4207 /* Rewrite an LROTATE_EXPR by a constant into an
4208 RROTATE_EXPR by a new constant. */
4209 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
4211 TREE_SET_CODE (t, RROTATE_EXPR);
4212 code = RROTATE_EXPR;
4213 TREE_OPERAND (t, 1) = arg1
4216 convert (TREE_TYPE (arg1),
4217 build_int_2 (GET_MODE_BITSIZE (TYPE_MODE (type)), 0)),
4219 if (tree_int_cst_sgn (arg1) < 0)
4223 /* If we have a rotate of a bit operation with the rotate count and
4224 the second operand of the bit operation both constant,
4225 permute the two operations. */
4226 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
4227 && (TREE_CODE (arg0) == BIT_AND_EXPR
4228 || TREE_CODE (arg0) == BIT_ANDTC_EXPR
4229 || TREE_CODE (arg0) == BIT_IOR_EXPR
4230 || TREE_CODE (arg0) == BIT_XOR_EXPR)
4231 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
4232 return fold (build (TREE_CODE (arg0), type,
4233 fold (build (code, type,
4234 TREE_OPERAND (arg0, 0), arg1)),
4235 fold (build (code, type,
4236 TREE_OPERAND (arg0, 1), arg1))));
4238 /* Two consecutive rotates adding up to the width of the mode can
4240 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
4241 && TREE_CODE (arg0) == RROTATE_EXPR
4242 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
4243 && TREE_INT_CST_HIGH (arg1) == 0
4244 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
4245 && ((TREE_INT_CST_LOW (arg1)
4246 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
4247 == GET_MODE_BITSIZE (TYPE_MODE (type))))
4248 return TREE_OPERAND (arg0, 0);
4253 if (operand_equal_p (arg0, arg1, 0))
4255 if (INTEGRAL_TYPE_P (type)
4256 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), 1))
4257 return omit_one_operand (type, arg1, arg0);
4261 if (operand_equal_p (arg0, arg1, 0))
4263 if (INTEGRAL_TYPE_P (type)
4264 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), 1))
4265 return omit_one_operand (type, arg1, arg0);
4268 case TRUTH_NOT_EXPR:
4269 /* Note that the operand of this must be an int
4270 and its values must be 0 or 1.
4271 ("true" is a fixed value perhaps depending on the language,
4272 but we don't handle values other than 1 correctly yet.) */
4273 tem = invert_truthvalue (arg0);
4274 /* Avoid infinite recursion. */
4275 if (TREE_CODE (tem) == TRUTH_NOT_EXPR)
4277 return convert (type, tem);
4279 case TRUTH_ANDIF_EXPR:
4280 /* Note that the operands of this must be ints
4281 and their values must be 0 or 1.
4282 ("true" is a fixed value perhaps depending on the language.) */
4283 /* If first arg is constant zero, return it. */
4284 if (integer_zerop (arg0))
4286 case TRUTH_AND_EXPR:
4287 /* If either arg is constant true, drop it. */
4288 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
4289 return non_lvalue (arg1);
4290 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
4291 return non_lvalue (arg0);
4292 /* If second arg is constant zero, result is zero, but first arg
4293 must be evaluated. */
4294 if (integer_zerop (arg1))
4295 return omit_one_operand (type, arg1, arg0);
4298 /* We only do these simplifications if we are optimizing. */
4302 /* Check for things like (A || B) && (A || C). We can convert this
4303 to A || (B && C). Note that either operator can be any of the four
4304 truth and/or operations and the transformation will still be
4305 valid. Also note that we only care about order for the
4306 ANDIF and ORIF operators. */
4307 if (TREE_CODE (arg0) == TREE_CODE (arg1)
4308 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
4309 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
4310 || TREE_CODE (arg0) == TRUTH_AND_EXPR
4311 || TREE_CODE (arg0) == TRUTH_OR_EXPR))
4313 tree a00 = TREE_OPERAND (arg0, 0);
4314 tree a01 = TREE_OPERAND (arg0, 1);
4315 tree a10 = TREE_OPERAND (arg1, 0);
4316 tree a11 = TREE_OPERAND (arg1, 1);
4317 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
4318 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
4319 && (code == TRUTH_AND_EXPR
4320 || code == TRUTH_OR_EXPR));
4322 if (operand_equal_p (a00, a10, 0))
4323 return fold (build (TREE_CODE (arg0), type, a00,
4324 fold (build (code, type, a01, a11))));
4325 else if (commutative && operand_equal_p (a00, a11, 0))
4326 return fold (build (TREE_CODE (arg0), type, a00,
4327 fold (build (code, type, a01, a10))));
4328 else if (commutative && operand_equal_p (a01, a10, 0))
4329 return fold (build (TREE_CODE (arg0), type, a01,
4330 fold (build (code, type, a00, a11))));
4332 /* This case is tricky because we must either have commutative
4333 operators or else A10 must not have side-effects. */
4335 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
4336 && operand_equal_p (a01, a11, 0))
4337 return fold (build (TREE_CODE (arg0), type,
4338 fold (build (code, type, a00, a10)),
4342 /* Check for the possibility of merging component references. If our
4343 lhs is another similar operation, try to merge its rhs with our
4344 rhs. Then try to merge our lhs and rhs. */
4345 if (TREE_CODE (arg0) == code
4346 && 0 != (tem = fold_truthop (code, type,
4347 TREE_OPERAND (arg0, 1), arg1)))
4348 return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));
4350 if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
4355 case TRUTH_ORIF_EXPR:
4356 /* Note that the operands of this must be ints
4357 and their values must be 0 or true.
4358 ("true" is a fixed value perhaps depending on the language.) */
4359 /* If first arg is constant true, return it. */
4360 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
4363 /* If either arg is constant zero, drop it. */
4364 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
4365 return non_lvalue (arg1);
4366 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1))
4367 return non_lvalue (arg0);
4368 /* If second arg is constant true, result is true, but we must
4369 evaluate first arg. */
4370 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
4371 return omit_one_operand (type, arg1, arg0);
4374 case TRUTH_XOR_EXPR:
4375 /* If either arg is constant zero, drop it. */
4376 if (integer_zerop (arg0))
4377 return non_lvalue (arg1);
4378 if (integer_zerop (arg1))
4379 return non_lvalue (arg0);
4380 /* If either arg is constant true, this is a logical inversion. */
4381 if (integer_onep (arg0))
4382 return non_lvalue (invert_truthvalue (arg1));
4383 if (integer_onep (arg1))
4384 return non_lvalue (invert_truthvalue (arg0));
4393 /* If one arg is a constant integer, put it last. */
4394 if (TREE_CODE (arg0) == INTEGER_CST
4395 && TREE_CODE (arg1) != INTEGER_CST)
4397 TREE_OPERAND (t, 0) = arg1;
4398 TREE_OPERAND (t, 1) = arg0;
4399 arg0 = TREE_OPERAND (t, 0);
4400 arg1 = TREE_OPERAND (t, 1);
4401 code = swap_tree_comparison (code);
4402 TREE_SET_CODE (t, code);
4405 /* Convert foo++ == CONST into ++foo == CONST + INCR.
4406 First, see if one arg is constant; find the constant arg
4407 and the other one. */
4409 tree constop = 0, varop;
4410 int constopnum = -1;
4412 if (TREE_CONSTANT (arg1))
4413 constopnum = 1, constop = arg1, varop = arg0;
4414 if (TREE_CONSTANT (arg0))
4415 constopnum = 0, constop = arg0, varop = arg1;
4417 if (constop && TREE_CODE (varop) == POSTINCREMENT_EXPR)
4419 /* This optimization is invalid for ordered comparisons
4420 if CONST+INCR overflows or if foo+incr might overflow.
4421 This optimization is invalid for floating point due to rounding.
4422 For pointer types we assume overflow doesn't happen. */
4423 if (TREE_CODE (TREE_TYPE (varop)) == POINTER_TYPE
4424 || (! FLOAT_TYPE_P (TREE_TYPE (varop))
4425 && (code == EQ_EXPR || code == NE_EXPR)))
4428 = fold (build (PLUS_EXPR, TREE_TYPE (varop),
4429 constop, TREE_OPERAND (varop, 1)));
4430 TREE_SET_CODE (varop, PREINCREMENT_EXPR);
4432 /* If VAROP is a reference to a bitfield, we must mask
4433 the constant by the width of the field. */
4434 if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
4435 && DECL_BIT_FIELD(TREE_OPERAND
4436 (TREE_OPERAND (varop, 0), 1)))
4439 = TREE_INT_CST_LOW (DECL_SIZE
4441 (TREE_OPERAND (varop, 0), 1)));
4443 newconst = fold (build (BIT_AND_EXPR,
4444 TREE_TYPE (varop), newconst,
4445 convert (TREE_TYPE (varop),
4446 build_int_2 (size, 0))));
4450 t = build (code, type, TREE_OPERAND (t, 0),
4451 TREE_OPERAND (t, 1));
4452 TREE_OPERAND (t, constopnum) = newconst;
4456 else if (constop && TREE_CODE (varop) == POSTDECREMENT_EXPR)
4458 if (TREE_CODE (TREE_TYPE (varop)) == POINTER_TYPE
4459 || (! FLOAT_TYPE_P (TREE_TYPE (varop))
4460 && (code == EQ_EXPR || code == NE_EXPR)))
4463 = fold (build (MINUS_EXPR, TREE_TYPE (varop),
4464 constop, TREE_OPERAND (varop, 1)));
4465 TREE_SET_CODE (varop, PREDECREMENT_EXPR);
4467 if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
4468 && DECL_BIT_FIELD(TREE_OPERAND
4469 (TREE_OPERAND (varop, 0), 1)))
4472 = TREE_INT_CST_LOW (DECL_SIZE
4474 (TREE_OPERAND (varop, 0), 1)));
4476 newconst = fold (build (BIT_AND_EXPR,
4477 TREE_TYPE (varop), newconst,
4478 convert (TREE_TYPE (varop),
4479 build_int_2 (size, 0))));
4483 t = build (code, type, TREE_OPERAND (t, 0),
4484 TREE_OPERAND (t, 1));
4485 TREE_OPERAND (t, constopnum) = newconst;
4491 /* Change X >= CST to X > (CST - 1) if CST is positive. */
4492 if (TREE_CODE (arg1) == INTEGER_CST
4493 && TREE_CODE (arg0) != INTEGER_CST
4494 && tree_int_cst_sgn (arg1) > 0)
4496 switch (TREE_CODE (t))
4500 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
4501 t = build (code, type, TREE_OPERAND (t, 0), arg1);
4506 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
4507 t = build (code, type, TREE_OPERAND (t, 0), arg1);
4512 /* If this is an EQ or NE comparison with zero and ARG0 is
4513 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
4514 two operations, but the latter can be done in one less insn
4515 on machines that have only two-operand insns or on which a
4516 constant cannot be the first operand. */
4517 if (integer_zerop (arg1) && (code == EQ_EXPR || code == NE_EXPR)
4518 && TREE_CODE (arg0) == BIT_AND_EXPR)
4520 if (TREE_CODE (TREE_OPERAND (arg0, 0)) == LSHIFT_EXPR
4521 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 0), 0)))
4523 fold (build (code, type,
4524 build (BIT_AND_EXPR, TREE_TYPE (arg0),
4526 TREE_TYPE (TREE_OPERAND (arg0, 0)),
4527 TREE_OPERAND (arg0, 1),
4528 TREE_OPERAND (TREE_OPERAND (arg0, 0), 1)),
4529 convert (TREE_TYPE (arg0),
4532 else if (TREE_CODE (TREE_OPERAND (arg0, 1)) == LSHIFT_EXPR
4533 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 1), 0)))
4535 fold (build (code, type,
4536 build (BIT_AND_EXPR, TREE_TYPE (arg0),
4538 TREE_TYPE (TREE_OPERAND (arg0, 1)),
4539 TREE_OPERAND (arg0, 0),
4540 TREE_OPERAND (TREE_OPERAND (arg0, 1), 1)),
4541 convert (TREE_TYPE (arg0),
4546 /* If this is an NE or EQ comparison of zero against the result of a
4547 signed MOD operation whose second operand is a power of 2, make
4548 the MOD operation unsigned since it is simpler and equivalent. */
4549 if ((code == NE_EXPR || code == EQ_EXPR)
4550 && integer_zerop (arg1)
4551 && ! TREE_UNSIGNED (TREE_TYPE (arg0))
4552 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
4553 || TREE_CODE (arg0) == CEIL_MOD_EXPR
4554 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
4555 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
4556 && integer_pow2p (TREE_OPERAND (arg0, 1)))
4558 tree newtype = unsigned_type (TREE_TYPE (arg0));
4559 tree newmod = build (TREE_CODE (arg0), newtype,
4560 convert (newtype, TREE_OPERAND (arg0, 0)),
4561 convert (newtype, TREE_OPERAND (arg0, 1)));
4563 return build (code, type, newmod, convert (newtype, arg1));
4566 /* If this is an NE comparison of zero with an AND of one, remove the
4567 comparison since the AND will give the correct value. */
4568 if (code == NE_EXPR && integer_zerop (arg1)
4569 && TREE_CODE (arg0) == BIT_AND_EXPR
4570 && integer_onep (TREE_OPERAND (arg0, 1)))
4571 return convert (type, arg0);
4573 /* If we have (A & C) == C where C is a power of 2, convert this into
4574 (A & C) != 0. Similarly for NE_EXPR. */
4575 if ((code == EQ_EXPR || code == NE_EXPR)
4576 && TREE_CODE (arg0) == BIT_AND_EXPR
4577 && integer_pow2p (TREE_OPERAND (arg0, 1))
4578 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
4579 return build (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
4580 arg0, integer_zero_node);
4582 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
4583 and similarly for >= into !=. */
4584 if ((code == LT_EXPR || code == GE_EXPR)
4585 && TREE_UNSIGNED (TREE_TYPE (arg0))
4586 && TREE_CODE (arg1) == LSHIFT_EXPR
4587 && integer_onep (TREE_OPERAND (arg1, 0)))
4588 return build (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
4589 build (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
4590 TREE_OPERAND (arg1, 1)),
4591 convert (TREE_TYPE (arg0), integer_zero_node));
4593 else if ((code == LT_EXPR || code == GE_EXPR)
4594 && TREE_UNSIGNED (TREE_TYPE (arg0))
4595 && (TREE_CODE (arg1) == NOP_EXPR
4596 || TREE_CODE (arg1) == CONVERT_EXPR)
4597 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
4598 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
4600 build (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
4601 convert (TREE_TYPE (arg0),
4602 build (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
4603 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1))),
4604 convert (TREE_TYPE (arg0), integer_zero_node));
4606 /* Simplify comparison of something with itself. (For IEEE
4607 floating-point, we can only do some of these simplifications.) */
4608 if (operand_equal_p (arg0, arg1, 0))
4615 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
4617 t = build_int_2 (1, 0);
4618 TREE_TYPE (t) = type;
4622 TREE_SET_CODE (t, code);
4626 /* For NE, we can only do this simplification if integer. */
4627 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
4629 /* ... fall through ... */
4632 t = build_int_2 (0, 0);
4633 TREE_TYPE (t) = type;
4638 /* An unsigned comparison against 0 can be simplified. */
4639 if (integer_zerop (arg1)
4640 && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
4641 || TREE_CODE (TREE_TYPE (arg1)) == POINTER_TYPE)
4642 && TREE_UNSIGNED (TREE_TYPE (arg1)))
4644 switch (TREE_CODE (t))
4648 TREE_SET_CODE (t, NE_EXPR);
4652 TREE_SET_CODE (t, EQ_EXPR);
4655 return omit_one_operand (type,
4656 convert (type, integer_one_node),
4659 return omit_one_operand (type,
4660 convert (type, integer_zero_node),
4665 /* If we are comparing an expression that just has comparisons
4666 of two integer values, arithmetic expressions of those comparisons,
4667 and constants, we can simplify it. There are only three cases
4668 to check: the two values can either be equal, the first can be
4669 greater, or the second can be greater. Fold the expression for
4670 those three values. Since each value must be 0 or 1, we have
4671 eight possibilities, each of which corresponds to the constant 0
4672 or 1 or one of the six possible comparisons.
4674 This handles common cases like (a > b) == 0 but also handles
4675 expressions like ((x > y) - (y > x)) > 0, which supposedly
4676 occur in macroized code. */
4678 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
4680 tree cval1 = 0, cval2 = 0;
4683 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
4684 /* Don't handle degenerate cases here; they should already
4685 have been handled anyway. */
4686 && cval1 != 0 && cval2 != 0
4687 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
4688 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
4689 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
4690 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
4691 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
4693 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
4694 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
4696 /* We can't just pass T to eval_subst in case cval1 or cval2
4697 was the same as ARG1. */
4700 = fold (build (code, type,
4701 eval_subst (arg0, cval1, maxval, cval2, minval),
4704 = fold (build (code, type,
4705 eval_subst (arg0, cval1, maxval, cval2, maxval),
4708 = fold (build (code, type,
4709 eval_subst (arg0, cval1, minval, cval2, maxval),
4712 /* All three of these results should be 0 or 1. Confirm they
4713 are. Then use those values to select the proper code
4716 if ((integer_zerop (high_result)
4717 || integer_onep (high_result))
4718 && (integer_zerop (equal_result)
4719 || integer_onep (equal_result))
4720 && (integer_zerop (low_result)
4721 || integer_onep (low_result)))
4723 /* Make a 3-bit mask with the high-order bit being the
4724 value for `>', the next for '=', and the low for '<'. */
4725 switch ((integer_onep (high_result) * 4)
4726 + (integer_onep (equal_result) * 2)
4727 + integer_onep (low_result))
4731 return omit_one_operand (type, integer_zero_node, arg0);
4752 return omit_one_operand (type, integer_one_node, arg0);
4755 t = build (code, type, cval1, cval2);
4757 return save_expr (t);
4764 /* If this is a comparison of a field, we may be able to simplify it. */
4765 if ((TREE_CODE (arg0) == COMPONENT_REF
4766 || TREE_CODE (arg0) == BIT_FIELD_REF)
4767 && (code == EQ_EXPR || code == NE_EXPR)
4768 /* Handle the constant case even without -O
4769 to make sure the warnings are given. */
4770 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
4772 t1 = optimize_bit_field_compare (code, type, arg0, arg1);
4776 /* If this is a comparison of complex values and either or both
4777 sides are a COMPLEX_EXPR, it is best to split up the comparisons
4778 and join them with a TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR. This
4779 may prevent needless evaluations. */
4780 if ((code == EQ_EXPR || code == NE_EXPR)
4781 && TREE_CODE (TREE_TYPE (arg0)) == COMPLEX_TYPE
4782 && (TREE_CODE (arg0) == COMPLEX_EXPR
4783 || TREE_CODE (arg1) == COMPLEX_EXPR))
4785 tree subtype = TREE_TYPE (TREE_TYPE (arg0));
4786 tree real0 = fold (build1 (REALPART_EXPR, subtype, arg0));
4787 tree imag0 = fold (build1 (IMAGPART_EXPR, subtype, arg0));
4788 tree real1 = fold (build1 (REALPART_EXPR, subtype, arg1));
4789 tree imag1 = fold (build1 (IMAGPART_EXPR, subtype, arg1));
4791 return fold (build ((code == EQ_EXPR ? TRUTH_ANDIF_EXPR
4794 fold (build (code, type, real0, real1)),
4795 fold (build (code, type, imag0, imag1))));
4798 /* From here on, the only cases we handle are when the result is
4799 known to be a constant.
4801 To compute GT, swap the arguments and do LT.
4802 To compute GE, do LT and invert the result.
4803 To compute LE, swap the arguments, do LT and invert the result.
4804 To compute NE, do EQ and invert the result.
4806 Therefore, the code below must handle only EQ and LT. */
4808 if (code == LE_EXPR || code == GT_EXPR)
4810 tem = arg0, arg0 = arg1, arg1 = tem;
4811 code = swap_tree_comparison (code);
4814 /* Note that it is safe to invert for real values here because we
4815 will check below in the one case that it matters. */
4818 if (code == NE_EXPR || code == GE_EXPR)
4821 code = invert_tree_comparison (code);
4824 /* Compute a result for LT or EQ if args permit;
4825 otherwise return T. */
4826 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
4828 if (code == EQ_EXPR)
4829 t1 = build_int_2 ((TREE_INT_CST_LOW (arg0)
4830 == TREE_INT_CST_LOW (arg1))
4831 && (TREE_INT_CST_HIGH (arg0)
4832 == TREE_INT_CST_HIGH (arg1)),
4835 t1 = build_int_2 ((TREE_UNSIGNED (TREE_TYPE (arg0))
4836 ? INT_CST_LT_UNSIGNED (arg0, arg1)
4837 : INT_CST_LT (arg0, arg1)),
4841 /* Assume a nonexplicit constant cannot equal an explicit one,
4842 since such code would be undefined anyway.
4843 Exception: on sysvr4, using #pragma weak,
4844 a label can come out as 0. */
4845 else if (TREE_CODE (arg1) == INTEGER_CST
4846 && !integer_zerop (arg1)
4847 && TREE_CONSTANT (arg0)
4848 && TREE_CODE (arg0) == ADDR_EXPR
4850 t1 = build_int_2 (0, 0);
4852 /* Two real constants can be compared explicitly. */
4853 else if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
4855 /* If either operand is a NaN, the result is false with two
4856 exceptions: First, an NE_EXPR is true on NaNs, but that case
4857 is already handled correctly since we will be inverting the
4858 result for NE_EXPR. Second, if we had inverted a LE_EXPR
4859 or a GE_EXPR into a LT_EXPR, we must return true so that it
4860 will be inverted into false. */
4862 if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
4863 || REAL_VALUE_ISNAN (TREE_REAL_CST (arg1)))
4864 t1 = build_int_2 (invert && code == LT_EXPR, 0);
4866 else if (code == EQ_EXPR)
4867 t1 = build_int_2 (REAL_VALUES_EQUAL (TREE_REAL_CST (arg0),
4868 TREE_REAL_CST (arg1)),
4871 t1 = build_int_2 (REAL_VALUES_LESS (TREE_REAL_CST (arg0),
4872 TREE_REAL_CST (arg1)),
4876 if (t1 == NULL_TREE)
4880 TREE_INT_CST_LOW (t1) ^= 1;
4882 TREE_TYPE (t1) = type;
4886 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
4887 so all simple results must be passed through pedantic_non_lvalue. */
4888 if (TREE_CODE (arg0) == INTEGER_CST)
4889 return pedantic_non_lvalue
4890 (TREE_OPERAND (t, (integer_zerop (arg0) ? 2 : 1)));
4891 else if (operand_equal_p (arg1, TREE_OPERAND (expr, 2), 0))
4892 return pedantic_omit_one_operand (type, arg1, arg0);
4894 /* If the second operand is zero, invert the comparison and swap
4895 the second and third operands. Likewise if the second operand
4896 is constant and the third is not or if the third operand is
4897 equivalent to the first operand of the comparison. */
4899 if (integer_zerop (arg1)
4900 || (TREE_CONSTANT (arg1) && ! TREE_CONSTANT (TREE_OPERAND (t, 2)))
4901 || (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
4902 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
4903 TREE_OPERAND (t, 2),
4904 TREE_OPERAND (arg0, 1))))
4906 /* See if this can be inverted. If it can't, possibly because
4907 it was a floating-point inequality comparison, don't do
4909 tem = invert_truthvalue (arg0);
4911 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
4913 t = build (code, type, tem,
4914 TREE_OPERAND (t, 2), TREE_OPERAND (t, 1));
4916 arg1 = TREE_OPERAND (t, 2);
4921 /* If we have A op B ? A : C, we may be able to convert this to a
4922 simpler expression, depending on the operation and the values
4923 of B and C. IEEE floating point prevents this though,
4924 because A or B might be -0.0 or a NaN. */
4926 if (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
4927 && (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
4928 || ! FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 0)))
4930 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
4931 arg1, TREE_OPERAND (arg0, 1)))
4933 tree arg2 = TREE_OPERAND (t, 2);
4934 enum tree_code comp_code = TREE_CODE (arg0);
4938 /* If we have A op 0 ? A : -A, this is A, -A, abs (A), or abs (-A),
4939 depending on the comparison operation. */
4940 if ((FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 1)))
4941 ? real_zerop (TREE_OPERAND (arg0, 1))
4942 : integer_zerop (TREE_OPERAND (arg0, 1)))
4943 && TREE_CODE (arg2) == NEGATE_EXPR
4944 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4948 return pedantic_non_lvalue
4949 (fold (build1 (NEGATE_EXPR, type, arg1)));
4951 return pedantic_non_lvalue (convert (type, arg1));
4954 return pedantic_non_lvalue
4955 (convert (type, fold (build1 (ABS_EXPR,
4956 TREE_TYPE (arg1), arg1))));
4959 return pedantic_non_lvalue
4960 (fold (build1 (NEGATE_EXPR, type,
4962 fold (build1 (ABS_EXPR,
4967 /* If this is A != 0 ? A : 0, this is simply A. For ==, it is
4970 if (integer_zerop (TREE_OPERAND (arg0, 1)) && integer_zerop (arg2))
4972 if (comp_code == NE_EXPR)
4973 return pedantic_non_lvalue (convert (type, arg1));
4974 else if (comp_code == EQ_EXPR)
4975 return pedantic_non_lvalue (convert (type, integer_zero_node));
4978 /* If this is A op B ? A : B, this is either A, B, min (A, B),
4979 or max (A, B), depending on the operation. */
4981 if (operand_equal_for_comparison_p (TREE_OPERAND (arg0, 1),
4982 arg2, TREE_OPERAND (arg0, 0)))
4984 tree comp_op0 = TREE_OPERAND (arg0, 0);
4985 tree comp_op1 = TREE_OPERAND (arg0, 1);
4986 tree comp_type = TREE_TYPE (comp_op0);
4991 return pedantic_non_lvalue (convert (type, arg2));
4993 return pedantic_non_lvalue (convert (type, arg1));
4996 return pedantic_non_lvalue
4997 (convert (type, (fold (build (MIN_EXPR, comp_type,
4998 comp_op0, comp_op1)))));
5001 return pedantic_non_lvalue
5002 (convert (type, fold (build (MAX_EXPR, comp_type,
5003 comp_op0, comp_op1))));
5007 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
5008 we might still be able to simplify this. For example,
5009 if C1 is one less or one more than C2, this might have started
5010 out as a MIN or MAX and been transformed by this function.
5011 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
5013 if (INTEGRAL_TYPE_P (type)
5014 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
5015 && TREE_CODE (arg2) == INTEGER_CST)
5019 /* We can replace A with C1 in this case. */
5020 arg1 = convert (type, TREE_OPERAND (arg0, 1));
5021 t = build (code, type, TREE_OPERAND (t, 0), arg1,
5022 TREE_OPERAND (t, 2));
5026 /* If C1 is C2 + 1, this is min(A, C2). */
5027 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type), 1)
5028 && operand_equal_p (TREE_OPERAND (arg0, 1),
5029 const_binop (PLUS_EXPR, arg2,
5030 integer_one_node, 0), 1))
5031 return pedantic_non_lvalue
5032 (fold (build (MIN_EXPR, type, arg1, arg2)));
5036 /* If C1 is C2 - 1, this is min(A, C2). */
5037 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type), 1)
5038 && operand_equal_p (TREE_OPERAND (arg0, 1),
5039 const_binop (MINUS_EXPR, arg2,
5040 integer_one_node, 0), 1))
5041 return pedantic_non_lvalue
5042 (fold (build (MIN_EXPR, type, arg1, arg2)));
5046 /* If C1 is C2 - 1, this is max(A, C2). */
5047 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type), 1)
5048 && operand_equal_p (TREE_OPERAND (arg0, 1),
5049 const_binop (MINUS_EXPR, arg2,
5050 integer_one_node, 0), 1))
5051 return pedantic_non_lvalue
5052 (fold (build (MAX_EXPR, type, arg1, arg2)));
5056 /* If C1 is C2 + 1, this is max(A, C2). */
5057 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type), 1)
5058 && operand_equal_p (TREE_OPERAND (arg0, 1),
5059 const_binop (PLUS_EXPR, arg2,
5060 integer_one_node, 0), 1))
5061 return pedantic_non_lvalue
5062 (fold (build (MAX_EXPR, type, arg1, arg2)));
5067 /* If the second operand is simpler than the third, swap them
5068 since that produces better jump optimization results. */
5069 if ((TREE_CONSTANT (arg1) || TREE_CODE_CLASS (TREE_CODE (arg1)) == 'd'
5070 || TREE_CODE (arg1) == SAVE_EXPR)
5071 && ! (TREE_CONSTANT (TREE_OPERAND (t, 2))
5072 || TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (t, 2))) == 'd'
5073 || TREE_CODE (TREE_OPERAND (t, 2)) == SAVE_EXPR))
5075 /* See if this can be inverted. If it can't, possibly because
5076 it was a floating-point inequality comparison, don't do
5078 tem = invert_truthvalue (arg0);
5080 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
5082 t = build (code, type, tem,
5083 TREE_OPERAND (t, 2), TREE_OPERAND (t, 1));
5085 arg1 = TREE_OPERAND (t, 2);
5090 /* Convert A ? 1 : 0 to simply A. */
5091 if (integer_onep (TREE_OPERAND (t, 1))
5092 && integer_zerop (TREE_OPERAND (t, 2))
5093 /* If we try to convert TREE_OPERAND (t, 0) to our type, the
5094 call to fold will try to move the conversion inside
5095 a COND, which will recurse. In that case, the COND_EXPR
5096 is probably the best choice, so leave it alone. */
5097 && type == TREE_TYPE (arg0))
5098 return pedantic_non_lvalue (arg0);
5100 /* Look for expressions of the form A & 2 ? 2 : 0. The result of this
5101 operation is simply A & 2. */
5103 if (integer_zerop (TREE_OPERAND (t, 2))
5104 && TREE_CODE (arg0) == NE_EXPR
5105 && integer_zerop (TREE_OPERAND (arg0, 1))
5106 && integer_pow2p (arg1)
5107 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
5108 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
5110 return pedantic_non_lvalue (convert (type, TREE_OPERAND (arg0, 0)));
5115 /* When pedantic, a compound expression can be neither an lvalue
5116 nor an integer constant expression. */
5117 if (TREE_SIDE_EFFECTS (arg0) || pedantic)
5119 /* Don't let (0, 0) be null pointer constant. */
5120 if (integer_zerop (arg1))
5121 return non_lvalue (arg1);
5126 return build_complex (arg0, arg1);
5130 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
5132 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
5133 return omit_one_operand (type, TREE_OPERAND (arg0, 0),
5134 TREE_OPERAND (arg0, 1));
5135 else if (TREE_CODE (arg0) == COMPLEX_CST)
5136 return TREE_REALPART (arg0);
5137 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
5138 return fold (build (TREE_CODE (arg0), type,
5139 fold (build1 (REALPART_EXPR, type,
5140 TREE_OPERAND (arg0, 0))),
5141 fold (build1 (REALPART_EXPR,
5142 type, TREE_OPERAND (arg0, 1)))));
5146 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
5147 return convert (type, integer_zero_node);
5148 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
5149 return omit_one_operand (type, TREE_OPERAND (arg0, 1),
5150 TREE_OPERAND (arg0, 0));
5151 else if (TREE_CODE (arg0) == COMPLEX_CST)
5152 return TREE_IMAGPART (arg0);
5153 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
5154 return fold (build (TREE_CODE (arg0), type,
5155 fold (build1 (IMAGPART_EXPR, type,
5156 TREE_OPERAND (arg0, 0))),
5157 fold (build1 (IMAGPART_EXPR, type,
5158 TREE_OPERAND (arg0, 1)))));
5161 /* Pull arithmetic ops out of the CLEANUP_POINT_EXPR where
5163 case CLEANUP_POINT_EXPR:
5164 if (! TREE_SIDE_EFFECTS (arg0))
5165 return TREE_OPERAND (t, 0);
5168 enum tree_code code0 = TREE_CODE (arg0);
5169 int kind0 = TREE_CODE_CLASS (code0);
5170 tree arg00 = TREE_OPERAND (arg0, 0);
5173 if (kind0 == '1' || code0 == TRUTH_NOT_EXPR)
5174 return fold (build1 (code0, type,
5175 fold (build1 (CLEANUP_POINT_EXPR,
5176 TREE_TYPE (arg00), arg00))));
5178 if (kind0 == '<' || kind0 == '2'
5179 || code0 == TRUTH_ANDIF_EXPR || code0 == TRUTH_ORIF_EXPR
5180 || code0 == TRUTH_AND_EXPR || code0 == TRUTH_OR_EXPR
5181 || code0 == TRUTH_XOR_EXPR)
5183 arg01 = TREE_OPERAND (arg0, 1);
5185 if (! TREE_SIDE_EFFECTS (arg00))
5186 return fold (build (code0, type, arg00,
5187 fold (build1 (CLEANUP_POINT_EXPR,
5188 TREE_TYPE (arg01), arg01))));
5190 if (! TREE_SIDE_EFFECTS (arg01))
5191 return fold (build (code0, type,
5192 fold (build1 (CLEANUP_POINT_EXPR,
5193 TREE_TYPE (arg00), arg00)),
5202 } /* switch (code) */