1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987, 1988, 1992, 1993, 1994 Free Software Foundation, Inc.
4 This file is part of GNU CC.
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA. */
20 /*@@ Fix lossage on folding division of big integers. */
22 /*@@ This file should be rewritten to use an arbitrary precision
23 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
24 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
25 @@ The routines that translate from the ap rep should
26 @@ warn if precision et. al. is lost.
27 @@ This would also make life easier when this technology is used
28 @@ for cross-compilers. */
31 /* The entry points in this file are fold, size_int and size_binop.
33 fold takes a tree as argument and returns a simplified tree.
35 size_binop takes a tree code for an arithmetic operation
36 and two operands that are trees, and produces a tree for the
37 result, assuming the type comes from `sizetype'.
39 size_int takes an integer value, and creates a tree constant
40 with type from `sizetype'. */
48 /* Handle floating overflow for `const_binop'. */
49 static jmp_buf float_error;
/* Forward declarations for the file-local helpers defined below.
   PROTO is the usual K&R/ANSI prototype compatibility macro.  */
51 static void encode PROTO((short *, HOST_WIDE_INT, HOST_WIDE_INT));
52 static void decode PROTO((short *, HOST_WIDE_INT *, HOST_WIDE_INT *));
53 static int div_and_round_double PROTO((enum tree_code, int, HOST_WIDE_INT,
54 HOST_WIDE_INT, HOST_WIDE_INT,
55 HOST_WIDE_INT, HOST_WIDE_INT *,
56 HOST_WIDE_INT *, HOST_WIDE_INT *,
58 static int split_tree PROTO((tree, enum tree_code, tree *, tree *, int *));
59 static tree const_binop PROTO((enum tree_code, tree, tree, int));
60 static tree fold_convert PROTO((tree, tree));
61 static enum tree_code invert_tree_comparison PROTO((enum tree_code));
62 static enum tree_code swap_tree_comparison PROTO((enum tree_code));
63 static int truth_value_p PROTO((enum tree_code));
64 static int operand_equal_for_comparison_p PROTO((tree, tree, tree));
65 static int twoval_comparison_p PROTO((tree, tree *, tree *, int *));
66 static tree eval_subst PROTO((tree, tree, tree, tree, tree));
67 static tree omit_one_operand PROTO((tree, tree, tree));
68 static tree distribute_bit_expr PROTO((enum tree_code, tree, tree, tree));
69 static tree make_bit_field_ref PROTO((tree, tree, int, int, int));
70 static tree optimize_bit_field_compare PROTO((enum tree_code, tree,
72 static tree decode_field_reference PROTO((tree, int *, int *,
73 enum machine_mode *, int *,
75 static int all_ones_mask_p PROTO((tree, int));
76 static int simple_operand_p PROTO((tree));
77 static tree range_test PROTO((enum tree_code, tree, enum tree_code,
78 enum tree_code, tree, tree, tree));
79 static tree fold_truthop PROTO((enum tree_code, tree, tree, tree));
80 static tree strip_compound_expr PROTO((tree, tree));
86 /* Yield nonzero if a signed left shift of A by B bits overflows. */
/* NOTE(review): this relies on right-shifting a signed value being an
   arithmetic shift, which is implementation-defined in C; it holds on
   the 2's-complement hosts this compiler targets.  */
87 #define left_shift_overflows(a, b) ((a) != ((a) << (b)) >> (b))
89 /* Suppose A1 + B1 = SUM1, using 2's complement arithmetic ignoring overflow.
90 Suppose A, B and SUM have the same respective signs as A1, B1, and SUM1.
91 Then this yields nonzero if overflow occurred during the addition.
92 Overflow occurs if A and B have the same sign, but A and SUM differ in sign.
93 Use `^' to test whether signs differ, and `< 0' to isolate the sign. */
94 #define overflow_sum_sign(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
96 /* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
97 We do that by representing the two-word integer as MAX_SHORTS shorts,
98 with only 8 bits stored in each short, as a positive number. */
100 /* Unpack a two-word integer into MAX_SHORTS shorts.
101 LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
102 SHORTS points to the array of shorts. */
105 encode (shorts, low, hi)
107 HOST_WIDE_INT low, hi;
/* Store LOW in the first MAX_SHORTS/2 elements and HI in the rest,
   8 bits per element, least-significant byte first.  */
111 for (i = 0; i < MAX_SHORTS / 2; i++)
113 shorts[i] = (low >> (i * 8)) & 0xff;
/* Note: `>>' binds tighter than `&', so the parenthesization below is
   equivalent to ((hi >> (i * 8)) & 0xff) used on the previous line.  */
114 shorts[i + MAX_SHORTS / 2] = (hi >> (i * 8) & 0xff);
118 /* Pack an array of MAX_SHORTS shorts into a two-word integer.
119 SHORTS points to the array of shorts.
120 The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces. */
123 decode (shorts, low, hi)
125 HOST_WIDE_INT *low, *hi;
/* Rebuild each half from its MAX_SHORTS/2 bytes, low byte first,
   into locals; the final stores through *LOW and *HI are on lines
   elided from this excerpt.  */
128 HOST_WIDE_INT lv = 0, hv = 0;
130 for (i = 0; i < MAX_SHORTS / 2; i++)
132 lv |= (HOST_WIDE_INT) shorts[i] << (i * 8);
133 hv |= (HOST_WIDE_INT) shorts[i + MAX_SHORTS / 2] << (i * 8);
139 /* Make the integer constant T valid for its type
140 by setting to 0 or 1 all the bits in the constant
141 that don't belong in the type.
142 Yield 1 if a signed overflow occurs, 0 otherwise.
143 If OVERFLOW is nonzero, a signed overflow has already occurred
144 in calculating T, so propagate it.
146 Make the real constant T valid for its type by calling CHECK_FLOAT_VALUE,
150 force_fit_type (t, overflow)
154 HOST_WIDE_INT low, high;
157 if (TREE_CODE (t) == REAL_CST)
159 #ifdef CHECK_FLOAT_VALUE
/* For a REAL_CST, validity checking is delegated to the target's
   CHECK_FLOAT_VALUE macro when one is defined.  */
160 CHECK_FLOAT_VALUE (TYPE_MODE (TREE_TYPE (t)), TREE_REAL_CST (t),
/* Anything other than an INTEGER_CST needs no adjustment.  */
166 else if (TREE_CODE (t) != INTEGER_CST)
/* Save the incoming value so we can detect below whether truncation
   or sign extension changed it.  */
169 low = TREE_INT_CST_LOW (t);
170 high = TREE_INT_CST_HIGH (t);
/* Pointer constants get special handling; the branch body is elided
   in this excerpt.  */
172 if (TREE_CODE (TREE_TYPE (t)) == POINTER_TYPE)
175 prec = TYPE_PRECISION (TREE_TYPE (t));
177 /* First clear all bits that are beyond the type's precision. */
179 if (prec == 2 * HOST_BITS_PER_WIDE_INT)
181 else if (prec > HOST_BITS_PER_WIDE_INT)
183 TREE_INT_CST_HIGH (t)
184 &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
/* Precision fits in one word, so the high word is entirely excess.  */
188 TREE_INT_CST_HIGH (t) = 0;
189 if (prec < HOST_BITS_PER_WIDE_INT)
190 TREE_INT_CST_LOW (t) &= ~((HOST_WIDE_INT) (-1) << prec);
193 /* Unsigned types do not suffer sign extension or overflow. */
194 if (TREE_UNSIGNED (TREE_TYPE (t)))
197 /* If the value's sign bit is set, extend the sign. */
198 if (prec != 2 * HOST_BITS_PER_WIDE_INT
199 && (prec > HOST_BITS_PER_WIDE_INT
200 ? (TREE_INT_CST_HIGH (t)
201 & ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)))
202 : TREE_INT_CST_LOW (t) & ((HOST_WIDE_INT) 1 << (prec - 1))))
204 /* Value is negative:
205 set to 1 all the bits that are outside this type's precision. */
206 if (prec > HOST_BITS_PER_WIDE_INT)
208 TREE_INT_CST_HIGH (t)
209 |= ((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
213 TREE_INT_CST_HIGH (t) = -1;
214 if (prec < HOST_BITS_PER_WIDE_INT)
215 TREE_INT_CST_LOW (t) |= ((HOST_WIDE_INT) (-1) << prec);
219 /* Yield nonzero if signed overflow occurred. */
/* Overflow = the propagated flag, or either half of the stored value
   having been changed by the masking/extension above.  */
221 ((overflow | (low ^ TREE_INT_CST_LOW (t)) | (high ^ TREE_INT_CST_HIGH (t)))
225 /* Add two doubleword integers with doubleword result.
226 Each argument is given as two `HOST_WIDE_INT' pieces.
227 One argument is L1 and H1; the other, L2 and H2.
228 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.
229 We use the 8-shorts representation internally. */
232 add_double (l1, h1, l2, h2, lv, hv)
233 HOST_WIDE_INT l1, h1, l2, h2;
234 HOST_WIDE_INT *lv, *hv;
236 short arg1[MAX_SHORTS];
237 short arg2[MAX_SHORTS];
238 register int carry = 0;
241 encode (arg1, l1, h1);
242 encode (arg2, l2, h2);
/* Byte-wise ripple-carry addition in base 256; the carry shift between
   iterations is on a line elided from this excerpt.  */
244 for (i = 0; i < MAX_SHORTS; i++)
246 carry += arg1[i] + arg2[i];
247 arg1[i] = carry & 0xff;
251 decode (arg1, lv, hv);
/* Signed overflow iff the addends agree in sign but the sum does not.  */
252 return overflow_sum_sign (h1, h2, *hv);
255 /* Negate a doubleword integer with doubleword result.
256 Return nonzero if the operation overflows, assuming it's signed.
257 The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
258 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.
259 We use the 8-shorts representation internally. */
262 neg_double (l1, h1, lv, hv)
263 HOST_WIDE_INT l1, h1;
264 HOST_WIDE_INT *lv, *hv;
/* The negation itself is on lines elided from this excerpt.  The only
   overflow case is negating the most negative value, in which event the
   result keeps the operand's negative sign — which is what this tests.  */
270 return (*hv & h1) < 0;
280 /* Multiply two doubleword integers with doubleword result.
281 Return nonzero if the operation overflows, assuming it's signed.
282 Each argument is given as two `HOST_WIDE_INT' pieces.
283 One argument is L1 and H1; the other, L2 and H2.
284 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.
285 We use the 8-shorts representation internally. */
288 mul_double (l1, h1, l2, h2, lv, hv)
289 HOST_WIDE_INT l1, h1, l2, h2;
290 HOST_WIDE_INT *lv, *hv;
292 short arg1[MAX_SHORTS];
293 short arg2[MAX_SHORTS];
294 short prod[MAX_SHORTS * 2];
295 register int carry = 0;
296 register int i, j, k;
297 HOST_WIDE_INT toplow, tophigh, neglow, neghigh;
299 /* These cases are used extensively, arising from pointer combinations. */
/* Fast paths: multiply by a small power of two via shifts; (temp < l1)
   detects the carry out of the doubled low word.  The case labels and
   intermediate doubling steps are elided from this excerpt.  */
304 int overflow = left_shift_overflows (h1, 1);
305 unsigned HOST_WIDE_INT temp = l1 + l1;
306 *hv = (h1 << 1) + (temp < l1);
312 int overflow = left_shift_overflows (h1, 2);
313 unsigned HOST_WIDE_INT temp = l1 + l1;
314 h1 = (h1 << 2) + ((temp < l1) << 1);
324 int overflow = left_shift_overflows (h1, 3);
325 unsigned HOST_WIDE_INT temp = l1 + l1;
326 h1 = (h1 << 3) + ((temp < l1) << 2);
329 h1 += (temp < l1) << 1;
/* General case: schoolbook long multiplication in base 256.  */
339 encode (arg1, l1, h1);
340 encode (arg2, l2, h2);
342 bzero (prod, sizeof prod);
344 for (i = 0; i < MAX_SHORTS; i++)
345 for (j = 0; j < MAX_SHORTS; j++)
348 carry = arg1[i] * arg2[j];
352 prod[k] = carry & 0xff;
358 decode (prod, lv, hv); /* This ignores
359 prod[MAX_SHORTS] -> prod[MAX_SHORTS*2-1] */
361 /* Check for overflow by calculating the top half of the answer in full;
362 it should agree with the low half's sign bit. */
/* The product above is unsigned; for each negative multiplicand,
   correct the top half by subtracting the other operand once.  */
363 decode (prod+MAX_SHORTS, &toplow, &tophigh);
366 neg_double (l2, h2, &neglow, &neghigh);
367 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
371 neg_double (l1, h1, &neglow, &neghigh);
372 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
/* No overflow iff the top half is the sign extension of the result:
   all ones when *HV is negative, all zeros otherwise.  */
374 return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
377 /* Shift the doubleword integer in L1, H1 left by COUNT places
378 keeping only PREC bits of result.
379 Shift right if COUNT is negative.
380 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
381 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
384 lshift_double (l1, h1, count, prec, lv, hv, arith)
385 HOST_WIDE_INT l1, h1, count;
387 HOST_WIDE_INT *lv, *hv;
390 short arg1[MAX_SHORTS];
/* A negative COUNT means shift right instead.  */
396 rshift_double (l1, h1, - count, prec, lv, hv, arith);
400 encode (arg1, l1, h1);
/* Shift left one bit at a time, rippling each byte's top bit into the
   next byte via CARRY; the outer per-COUNT loop and the carry update
   are on lines elided from this excerpt.  */
408 for (i = 0; i < MAX_SHORTS; i++)
410 carry += arg1[i] << 1;
411 arg1[i] = carry & 0xff;
417 decode (arg1, lv, hv);
420 /* Shift the doubleword integer in L1, H1 right by COUNT places
421 keeping only PREC bits of result. COUNT must be positive.
422 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
423 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
426 rshift_double (l1, h1, count, prec, lv, hv, arith)
427 HOST_WIDE_INT l1, h1, count;
429 HOST_WIDE_INT *lv, *hv;
432 short arg1[MAX_SHORTS];
436 encode (arg1, l1, h1);
/* For an arithmetic shift, replicate the sign bit (bit 7 of the most
   significant byte); logical shifts bring in zeros.
   NOTE(review): `arg1[7]' hard-codes MAX_SHORTS == 8 — the sibling
   routines spell this `arg1[MAX_SHORTS - 1]'; confirm intent.  */
443 carry = arith && arg1[7] >> 7;
444 for (i = MAX_SHORTS - 1; i >= 0; i--)
448 arg1[i] = (carry >> 1) & 0xff;
453 decode (arg1, lv, hv);
456 /* Rotate the doubleword integer in L1, H1 left by COUNT places
457 keeping only PREC bits of result.
458 Rotate right if COUNT is negative.
459 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
462 lrotate_double (l1, h1, count, prec, lv, hv)
463 HOST_WIDE_INT l1, h1, count;
465 HOST_WIDE_INT *lv, *hv;
467 short arg1[MAX_SHORTS];
/* A negative COUNT means rotate right instead.  */
473 rrotate_double (l1, h1, - count, prec, lv, hv);
477 encode (arg1, l1, h1);
/* The bit rotated out of the top byte re-enters at the bottom via
   CARRY; the outer per-COUNT loop is elided from this excerpt.  */
482 carry = arg1[MAX_SHORTS - 1] >> 7;
485 for (i = 0; i < MAX_SHORTS; i++)
487 carry += arg1[i] << 1;
488 arg1[i] = carry & 0xff;
494 decode (arg1, lv, hv);
497 /* Rotate the doubleword integer in L1, H1 right by COUNT places
498 keeping only PREC bits of result. COUNT must be positive.
499 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
502 rrotate_double (l1, h1, count, prec, lv, hv)
503 HOST_WIDE_INT l1, h1, count;
505 HOST_WIDE_INT *lv, *hv;
507 short arg1[MAX_SHORTS];
511 encode (arg1, l1, h1);
/* Rotate right one bit per pass, walking the bytes high-to-low; the
   carry setup and the outer per-COUNT loop are on lines elided from
   this excerpt.  */
519 for (i = MAX_SHORTS - 1; i >= 0; i--)
523 arg1[i] = (carry >> 1) & 0xff;
528 decode (arg1, lv, hv);
531 /* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
532 for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
533 CODE is a tree code for a kind of division, one of
534 TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
536 It controls how the quotient is rounded to an integer.
537 Return nonzero if the operation overflows.
538 UNS nonzero says do unsigned division. */
541 div_and_round_double (code, uns,
542 lnum_orig, hnum_orig, lden_orig, hden_orig,
543 lquo, hquo, lrem, hrem)
546 HOST_WIDE_INT lnum_orig, hnum_orig; /* num == numerator == dividend */
547 HOST_WIDE_INT lden_orig, hden_orig; /* den == denominator == divisor */
548 HOST_WIDE_INT *lquo, *hquo, *lrem, *hrem;
551 short num[MAX_SHORTS + 1]; /* extra element for scaling. */
552 short den[MAX_SHORTS], quo[MAX_SHORTS];
553 register int i, j, work;
554 register int carry = 0;
555 HOST_WIDE_INT lnum = lnum_orig;
556 HOST_WIDE_INT hnum = hnum_orig;
557 HOST_WIDE_INT lden = lden_orig;
558 HOST_WIDE_INT hden = hden_orig;
/* Division by zero: the handling (abort) is elided from this excerpt.  */
561 if ((hden == 0) && (lden == 0))
564 /* calculate quotient sign and convert operands to unsigned. */
570 /* (minimum integer) / (-1) is the only overflow case. */
571 if (neg_double (lnum, hnum, &lnum, &hnum) && (lden & hden) == -1)
577 neg_double (lden, hden, &lden, &hden);
581 if (hnum == 0 && hden == 0)
582 { /* single precision */
584 /* This unsigned division rounds toward zero. */
585 *lquo = lnum / (unsigned HOST_WIDE_INT) lden;
590 { /* trivial case: dividend < divisor */
591 /* hden != 0 already checked. */
598 bzero (quo, sizeof quo);
600 bzero (num, sizeof num); /* to zero 9th element */
601 bzero (den, sizeof den);
603 encode (num, lnum, hnum);
604 encode (den, lden, hden);
606 /* This code requires more than just hden == 0.
607 We also have to require that we don't need more than three bytes
608 to hold CARRY. If we ever did need four bytes to hold it, we
609 would lose part of it when computing WORK on the next round. */
610 if (hden == 0 && (((unsigned HOST_WIDE_INT) lden << 8) >> 8) == lden)
611 { /* simpler algorithm */
612 /* hnum != 0 already checked. */
613 for (i = MAX_SHORTS - 1; i >= 0; i--)
615 work = num[i] + (carry << 8);
616 quo[i] = work / (unsigned HOST_WIDE_INT) lden;
617 carry = work % (unsigned HOST_WIDE_INT) lden;
620 else { /* full double precision,
621 with thanks to Don Knuth's
622 "Seminumerical Algorithms". */
624 int quo_est, scale, num_hi_sig, den_hi_sig, quo_hi_sig;
626 /* Find the highest non-zero divisor digit. */
627 for (i = MAX_SHORTS - 1; ; i--)
632 for (i = MAX_SHORTS - 1; ; i--)
637 quo_hi_sig = num_hi_sig - den_hi_sig + 1;
639 /* Ensure that the first digit of the divisor is at least BASE/2.
640 This is required by the quotient digit estimation algorithm. */
642 scale = BASE / (den[den_hi_sig] + 1);
643 if (scale > 1) { /* scale divisor and dividend */
645 for (i = 0; i <= MAX_SHORTS - 1; i++) {
646 work = (num[i] * scale) + carry;
647 num[i] = work & 0xff;
649 if (num[i] != 0) num_hi_sig = i;
652 for (i = 0; i <= MAX_SHORTS - 1; i++) {
653 work = (den[i] * scale) + carry;
654 den[i] = work & 0xff;
656 if (den[i] != 0) den_hi_sig = i;
/* Main loop of Knuth's Algorithm D: one quotient digit per pass.  */
661 for (i = quo_hi_sig; i > 0; i--) {
662 /* guess the next quotient digit, quo_est, by dividing the first
663 two remaining dividend digits by the high order quotient digit.
664 quo_est is never low and is at most 2 high. */
666 int num_hi; /* index of highest remaining dividend digit */
668 num_hi = i + den_hi_sig;
670 work = (num[num_hi] * BASE) + (num_hi > 0 ? num[num_hi - 1] : 0);
671 if (num[num_hi] != den[den_hi_sig]) {
672 quo_est = work / den[den_hi_sig];
678 /* refine quo_est so it's usually correct, and at most one high. */
679 while ((den[den_hi_sig - 1] * quo_est)
680 > (((work - (quo_est * den[den_hi_sig])) * BASE)
681 + ((num_hi - 1) > 0 ? num[num_hi - 2] : 0)))
684 /* Try QUO_EST as the quotient digit, by multiplying the
685 divisor by QUO_EST and subtracting from the remaining dividend.
686 Keep in mind that QUO_EST is the I - 1st digit. */
690 for (j = 0; j <= den_hi_sig; j++)
694 work = num[i + j - 1] - (quo_est * den[j]) + carry;
702 num[i + j - 1] = digit;
705 /* if quo_est was high by one, then num[i] went negative and
706 we need to correct things. */
711 carry = 0; /* add divisor back in */
712 for (j = 0; j <= den_hi_sig; j++)
714 work = num[i + j - 1] + den[j] + carry;
724 num[i + j - 1] = work;
726 num [num_hi] += carry;
729 /* store the quotient digit. */
730 quo[i - 1] = quo_est;
734 decode (quo, lquo, hquo);
737 /* if result is negative, make it so. */
739 neg_double (*lquo, *hquo, lquo, hquo);
741 /* compute trial remainder: rem = num - (quo * den) */
742 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
743 neg_double (*lrem, *hrem, lrem, hrem);
744 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
/* Round the truncated quotient as CODE directs.  The MOD variants
   round the same way as the corresponding DIV; the caller picks the
   quotient or remainder outputs as appropriate.  */
749 case TRUNC_MOD_EXPR: /* round toward zero */
750 case EXACT_DIV_EXPR: /* for this one, it shouldn't matter */
754 case FLOOR_MOD_EXPR: /* round toward negative infinity */
755 if (quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio < 0 && rem != 0 */
758 add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
761 else return overflow;
765 case CEIL_MOD_EXPR: /* round toward positive infinity */
766 if (!quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio > 0 && rem != 0 */
768 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
771 else return overflow;
775 case ROUND_MOD_EXPR: /* round to closest integer */
777 HOST_WIDE_INT labs_rem = *lrem, habs_rem = *hrem;
778 HOST_WIDE_INT labs_den = lden, habs_den = hden, ltwice, htwice;
780 /* get absolute values */
781 if (*hrem < 0) neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
782 if (hden < 0) neg_double (lden, hden, &labs_den, &habs_den);
784 /* if (2 * abs (lrem) >= abs (lden)) */
785 mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
786 labs_rem, habs_rem, &ltwice, &htwice);
787 if (((unsigned HOST_WIDE_INT) habs_den
788 < (unsigned HOST_WIDE_INT) htwice)
789 || (((unsigned HOST_WIDE_INT) habs_den
790 == (unsigned HOST_WIDE_INT) htwice)
791 && ((unsigned HOST_WIDE_INT) labs_den
792 < (unsigned HOST_WIDE_INT) ltwice)))
796 add_double (*lquo, *hquo,
797 (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
800 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
803 else return overflow;
811 /* compute true remainder: rem = num - (quo * den) */
812 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
813 neg_double (*lrem, *hrem, lrem, hrem);
814 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
818 #ifndef REAL_ARITHMETIC
819 /* Effectively truncate a real value to represent the nearest possible value
820 in a narrower mode. The result is actually represented in the same data
821 type as the argument, but its value is usually different.
823 A trap may occur during the FP operations and it is the responsibility
824 of the calling function to have a handler established. */
827 real_value_truncate (mode, arg)
828 enum machine_mode mode;
/* Delegate to the target's REAL_VALUE_TRUNCATE macro; the result keeps
   the argument's data type but usually holds a different value.  */
831 return REAL_VALUE_TRUNCATE (mode, arg);
834 #if TARGET_FLOAT_FORMAT == IEEE_FLOAT_FORMAT
836 /* Check for infinity in an IEEE double precision number. */
/* Each predicate below views the double through a union of bit-fields
   laid out for big-endian and little-endian hosts; the union and
   function-header lines are elided from this excerpt.
   An all-ones exponent (2047) marks Inf/NaN: zero mantissa means
   infinity, nonzero mantissa means NaN.  */
842 /* The IEEE 64-bit double format. */
847 unsigned exponent : 11;
848 unsigned mantissa1 : 20;
853 unsigned mantissa1 : 20;
854 unsigned exponent : 11;
/* NOTE(review): the test on `sign' here appears to be selecting the
   host layout; confirm against the elided surrounding code.  */
860 if (u.big_endian.sign == 1)
863 return (u.big_endian.exponent == 2047
864 && u.big_endian.mantissa1 == 0
865 && u.big_endian.mantissa2 == 0);
870 return (u.little_endian.exponent == 2047
871 && u.little_endian.mantissa1 == 0
872 && u.little_endian.mantissa2 == 0);
876 /* Check whether an IEEE double precision number is a NaN. */
882 /* The IEEE 64-bit double format. */
887 unsigned exponent : 11;
888 unsigned mantissa1 : 20;
893 unsigned mantissa1 : 20;
894 unsigned exponent : 11;
900 if (u.big_endian.sign == 1)
903 return (u.big_endian.exponent == 2047
904 && (u.big_endian.mantissa1 != 0
905 || u.big_endian.mantissa2 != 0));
910 return (u.little_endian.exponent == 2047
911 && (u.little_endian.mantissa1 != 0
912 || u.little_endian.mantissa2 != 0));
916 /* Check for a negative IEEE double precision number. */
922 /* The IEEE 64-bit double format. */
927 unsigned exponent : 11;
928 unsigned mantissa1 : 20;
933 unsigned mantissa1 : 20;
934 unsigned exponent : 11;
940 if (u.big_endian.sign == 1)
943 return u.big_endian.sign;
948 return u.little_endian.sign;
951 #else /* Target not IEEE */
953 /* Let's assume other float formats don't have infinity.
954 (This can be overridden by redefining REAL_VALUE_ISINF.) */
962 /* Let's assume other float formats don't have NaNs.
963 (This can be overridden by redefining REAL_VALUE_ISNAN.) */
971 /* Let's assume other float formats don't have minus zero.
972 (This can be overridden by redefining REAL_VALUE_NEGATIVE.) */
979 #endif /* Target not IEEE */
980 #endif /* no REAL_ARITHMETIC */
982 /* Split a tree IN into a constant and a variable part
983 that could be combined with CODE to make IN.
984 CODE must be a commutative arithmetic operation.
985 Store the constant part into *CONP and the variable in &VARP.
986 Return 1 if this was done; zero means the tree IN did not decompose
989 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.
990 Therefore, we must tell the caller whether the variable part
991 was subtracted. We do this by storing 1 or -1 into *VARSIGNP.
992 The value stored is the coefficient for the variable term.
993 The constant term we return should always be added;
994 we negate it if necessary. */
997 split_tree (in, code, varp, conp, varsignp)
1003 register tree outtype = TREE_TYPE (in);
1007 /* Strip any conversions that don't change the machine mode. */
1008 while ((TREE_CODE (in) == NOP_EXPR
1009 || TREE_CODE (in) == CONVERT_EXPR)
1010 && (TYPE_MODE (TREE_TYPE (in))
1011 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (in, 0)))))
1012 in = TREE_OPERAND (in, 0);
1014 if (TREE_CODE (in) == code
1015 || (! FLOAT_TYPE_P (TREE_TYPE (in))
1016 /* We can associate addition and subtraction together
1017 (even though the C standard doesn't say so)
1018 for integers because the value is not affected.
1019 For reals, the value might be affected, so we can't. */
1020 && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
1021 || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
/* Case 1: constant is the first operand.
   NOTE(review): this local `code' shadows the parameter `code'.  */
1023 enum tree_code code = TREE_CODE (TREE_OPERAND (in, 0));
1024 if (code == INTEGER_CST)
1026 *conp = TREE_OPERAND (in, 0);
1027 *varp = TREE_OPERAND (in, 1);
1028 if (TYPE_MODE (TREE_TYPE (*varp)) != TYPE_MODE (outtype)
1029 && TREE_TYPE (*varp) != outtype)
1030 *varp = convert (outtype, *varp);
1031 *varsignp = (TREE_CODE (in) == MINUS_EXPR) ? -1 : 1;
/* Case 2: constant is the second operand.  */
1034 if (TREE_CONSTANT (TREE_OPERAND (in, 1)))
1036 *conp = TREE_OPERAND (in, 1);
1037 *varp = TREE_OPERAND (in, 0);
1039 if (TYPE_MODE (TREE_TYPE (*varp)) != TYPE_MODE (outtype)
1040 && TREE_TYPE (*varp) != outtype)
1041 *varp = convert (outtype, *varp);
1042 if (TREE_CODE (in) == MINUS_EXPR)
1044 /* If operation is subtraction and constant is second,
1045 must negate it to get an additive constant.
1046 And this cannot be done unless it is a manifest constant.
1047 It could also be the address of a static variable.
1048 We cannot negate that, so give up. */
1049 if (TREE_CODE (*conp) == INTEGER_CST)
1050 /* Subtracting from integer_zero_node loses for long long. */
1051 *conp = fold (build1 (NEGATE_EXPR, TREE_TYPE (*conp), *conp));
/* Case 3: first operand is constant but not an INTEGER_CST.  */
1057 if (TREE_CONSTANT (TREE_OPERAND (in, 0)))
1059 *conp = TREE_OPERAND (in, 0);
1060 *varp = TREE_OPERAND (in, 1);
1061 if (TYPE_MODE (TREE_TYPE (*varp)) != TYPE_MODE (outtype)
1062 && TREE_TYPE (*varp) != outtype)
1063 *varp = convert (outtype, *varp);
1064 *varsignp = (TREE_CODE (in) == MINUS_EXPR) ? -1 : 1;
1071 /* Combine two constants NUM and ARG2 under operation CODE
1072 to produce a new constant.
1073 We assume ARG1 and ARG2 have the same data type,
1074 or at least are the same kind of constant and the same machine mode.
1076 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
1079 const_binop (code, arg1, arg2, notrunc)
1080 enum tree_code code;
1081 register tree arg1, arg2;
/* ---- Integer constants: two-word arithmetic via the helpers above.
   The switch statement and several case labels are elided from this
   excerpt.  */
1084 if (TREE_CODE (arg1) == INTEGER_CST)
1086 register HOST_WIDE_INT int1l = TREE_INT_CST_LOW (arg1);
1087 register HOST_WIDE_INT int1h = TREE_INT_CST_HIGH (arg1);
1088 HOST_WIDE_INT int2l = TREE_INT_CST_LOW (arg2);
1089 HOST_WIDE_INT int2h = TREE_INT_CST_HIGH (arg2);
1090 HOST_WIDE_INT low, hi;
1091 HOST_WIDE_INT garbagel, garbageh;
1093 int uns = TREE_UNSIGNED (TREE_TYPE (arg1));
/* Bitwise operations work independently on each word.  */
1099 t = build_int_2 (int1l | int2l, int1h | int2h);
1103 t = build_int_2 (int1l ^ int2l, int1h ^ int2h);
1107 t = build_int_2 (int1l & int2l, int1h & int2h);
1110 case BIT_ANDTC_EXPR:
1111 t = build_int_2 (int1l & ~int2l, int1h & ~int2h);
1117 /* It's unclear from the C standard whether shifts can overflow.
1118 The following code ignores overflow; perhaps a C standard
1119 interpretation ruling is needed. */
1120 lshift_double (int1l, int1h, int2l,
1121 TYPE_PRECISION (TREE_TYPE (arg1)),
1124 t = build_int_2 (low, hi);
1125 TREE_TYPE (t) = TREE_TYPE (arg1);
1127 force_fit_type (t, 0);
1128 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
1129 TREE_CONSTANT_OVERFLOW (t)
1130 = TREE_CONSTANT_OVERFLOW (arg1) | TREE_CONSTANT_OVERFLOW (arg2);
1136 lrotate_double (int1l, int1h, int2l,
1137 TYPE_PRECISION (TREE_TYPE (arg1)),
1139 t = build_int_2 (low, hi);
/* Addition/subtraction with fast paths when one operand is zero.  */
1146 if ((unsigned HOST_WIDE_INT) int2l < int1l)
1149 overflow = int2h < hi;
1151 t = build_int_2 (int2l, int2h);
1157 if ((unsigned HOST_WIDE_INT) int1l < int2l)
1160 overflow = int1h < hi;
1162 t = build_int_2 (int1l, int1h);
1165 overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
1166 t = build_int_2 (low, hi);
1170 if (int2h == 0 && int2l == 0)
1172 t = build_int_2 (int1l, int1h);
/* Subtraction is implemented as addition of the negation.  */
1175 neg_double (int2l, int2h, &low, &hi);
1176 add_double (int1l, int1h, low, hi, &low, &hi);
1177 overflow = overflow_sum_sign (hi, int2h, int1h);
1178 t = build_int_2 (low, hi);
/* Multiplication: small constant multipliers handled by shifts and
   adds, with (temp < int2l) detecting the low-word carry.  */
1182 /* Optimize simple cases. */
1185 unsigned HOST_WIDE_INT temp;
1190 t = build_int_2 (0, 0);
1193 t = build_int_2 (int2l, int2h);
1196 overflow = left_shift_overflows (int2h, 1);
1197 temp = int2l + int2l;
1198 int2h = (int2h << 1) + (temp < int2l);
1199 t = build_int_2 (temp, int2h);
1201 #if 0 /* This code can lose carries. */
1203 temp = int2l + int2l + int2l;
1204 int2h = int2h * 3 + (temp < int2l);
1205 t = build_int_2 (temp, int2h);
1209 overflow = left_shift_overflows (int2h, 2);
1210 temp = int2l + int2l;
1211 int2h = (int2h << 2) + ((temp < int2l) << 1);
1214 int2h += (temp < int2l);
1215 t = build_int_2 (temp, int2h);
1218 overflow = left_shift_overflows (int2h, 3);
1219 temp = int2l + int2l;
1220 int2h = (int2h << 3) + ((temp < int2l) << 2);
1223 int2h += (temp < int2l) << 1;
1226 int2h += (temp < int2l);
1227 t = build_int_2 (temp, int2h);
1238 t = build_int_2 (0, 0);
1243 t = build_int_2 (int1l, int1h);
1248 overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
1249 t = build_int_2 (low, hi);
1252 case TRUNC_DIV_EXPR:
1253 case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
1254 case EXACT_DIV_EXPR:
1255 /* This is a shortcut for a common special case.
1256 It reduces the number of tree nodes generated
1258 if (int2h == 0 && int2l > 0
1259 && TREE_TYPE (arg1) == sizetype
1260 && int1h == 0 && int1l >= 0)
1262 if (code == CEIL_DIV_EXPR)
1264 return size_int (int1l / int2l);
1266 case ROUND_DIV_EXPR:
1267 if (int2h == 0 && int2l == 1)
1269 t = build_int_2 (int1l, int1h);
1272 if (int1l == int2l && int1h == int2h)
1274 if ((int1l | int1h) == 0)
1276 t = build_int_2 (1, 0);
/* General division/modulus: div_and_round_double produces both the
   quotient and the remainder; the unwanted half goes to `garbage'.  */
1279 overflow = div_and_round_double (code, uns,
1280 int1l, int1h, int2l, int2h,
1281 &low, &hi, &garbagel, &garbageh);
1282 t = build_int_2 (low, hi);
1285 case TRUNC_MOD_EXPR: case ROUND_MOD_EXPR:
1286 case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
1287 overflow = div_and_round_double (code, uns,
1288 int1l, int1h, int2l, int2h,
1289 &garbagel, &garbageh, &low, &hi);
1290 t = build_int_2 (low, hi);
/* MIN/MAX: compare unsigned or signed per UNS, then pick one operand.  */
1297 low = (((unsigned HOST_WIDE_INT) int1h
1298 < (unsigned HOST_WIDE_INT) int2h)
1299 || (((unsigned HOST_WIDE_INT) int1h
1300 == (unsigned HOST_WIDE_INT) int2h)
1301 && ((unsigned HOST_WIDE_INT) int1l
1302 < (unsigned HOST_WIDE_INT) int2l)));
1306 low = ((int1h < int2h)
1307 || ((int1h == int2h)
1308 && ((unsigned HOST_WIDE_INT) int1l
1309 < (unsigned HOST_WIDE_INT) int2l)));
1311 if (low == (code == MIN_EXPR))
1312 t = build_int_2 (int1l, int1h);
1314 t = build_int_2 (int2l, int2h);
/* Common exit for the integer case: set the type and the overflow
   flags (truncating via force_fit_type unless NOTRUNC).  */
1321 TREE_TYPE (t) = TREE_TYPE (arg1);
1323 = ((notrunc ? !uns && overflow : force_fit_type (t, overflow))
1324 | TREE_OVERFLOW (arg1)
1325 | TREE_OVERFLOW (arg2));
1326 TREE_CONSTANT_OVERFLOW (t) = (TREE_OVERFLOW (t)
1327 | TREE_CONSTANT_OVERFLOW (arg1)
1328 | TREE_CONSTANT_OVERFLOW (arg2));
/* ---- Real constants: arithmetic under a setjmp-based FP trap
   handler, returning an overflowed copy of ARG1 on a trap.  */
1331 #if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
1332 if (TREE_CODE (arg1) == REAL_CST)
1337 REAL_VALUE_TYPE value;
1340 d1 = TREE_REAL_CST (arg1);
1341 d2 = TREE_REAL_CST (arg2);
1343 /* If either operand is a NaN, just return it. Otherwise, set up
1344 for floating-point trap; we return an overflow. */
1345 if (REAL_VALUE_ISNAN (d1))
1347 else if (REAL_VALUE_ISNAN (d2))
1349 else if (setjmp (float_error))
1351 t = copy_node (arg1);
1356 set_float_handler (float_error);
1358 #ifdef REAL_ARITHMETIC
1359 REAL_ARITHMETIC (value, code, d1, d2);
1376 #ifndef REAL_INFINITY
1385 value = MIN (d1, d2);
1389 value = MAX (d1, d2);
1395 #endif /* no REAL_ARITHMETIC */
1396 t = build_real (TREE_TYPE (arg1),
1397 real_value_truncate (TYPE_MODE (TREE_TYPE (arg1)), value));
1399 set_float_handler (NULL_PTR);
1402 = (force_fit_type (t, overflow)
1403 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
1404 TREE_CONSTANT_OVERFLOW (t)
1406 | TREE_CONSTANT_OVERFLOW (arg1)
1407 | TREE_CONSTANT_OVERFLOW (arg2);
1410 #endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
/* ---- Complex constants: combine real and imaginary parts by
   recursive calls using the textbook formulas.  */
1411 if (TREE_CODE (arg1) == COMPLEX_CST)
1413 register tree r1 = TREE_REALPART (arg1);
1414 register tree i1 = TREE_IMAGPART (arg1);
1415 register tree r2 = TREE_REALPART (arg2);
1416 register tree i2 = TREE_IMAGPART (arg2);
1422 t = build_complex (const_binop (PLUS_EXPR, r1, r2, notrunc),
1423 const_binop (PLUS_EXPR, i1, i2, notrunc));
1427 t = build_complex (const_binop (MINUS_EXPR, r1, r2, notrunc),
1428 const_binop (MINUS_EXPR, i1, i2, notrunc));
/* (r1 + i1*i)(r2 + i2*i) = (r1*r2 - i1*i2) + (r1*i2 + i1*r2)*i.  */
1432 t = build_complex (const_binop (MINUS_EXPR,
1433 const_binop (MULT_EXPR,
1435 const_binop (MULT_EXPR,
1438 const_binop (PLUS_EXPR,
1439 const_binop (MULT_EXPR,
1441 const_binop (MULT_EXPR,
/* Division: multiply by the conjugate and divide by |arg2|^2,
   using integer or real division to match the operand type.  */
1448 register tree magsquared
1449 = const_binop (PLUS_EXPR,
1450 const_binop (MULT_EXPR, r2, r2, notrunc),
1451 const_binop (MULT_EXPR, i2, i2, notrunc),
1455 (const_binop (INTEGRAL_TYPE_P (TREE_TYPE (r1))
1456 ? TRUNC_DIV_EXPR : RDIV_EXPR,
1457 const_binop (PLUS_EXPR,
1458 const_binop (MULT_EXPR, r1, r2,
1460 const_binop (MULT_EXPR, i1, i2,
1463 magsquared, notrunc),
1464 const_binop (INTEGRAL_TYPE_P (TREE_TYPE (r1))
1465 ? TRUNC_DIV_EXPR : RDIV_EXPR,
1466 const_binop (MINUS_EXPR,
1467 const_binop (MULT_EXPR, i1, r2,
1469 const_binop (MULT_EXPR, r1, i2,
1472 magsquared, notrunc));
1479 TREE_TYPE (t) = TREE_TYPE (arg1);
1485 /* Return an INTEGER_CST with value V and type from `sizetype'. */
1489 unsigned int number;
1492 /* Type-size nodes already made for small sizes. */
1493 static tree size_table[2*HOST_BITS_PER_WIDE_INT + 1];
/* Small values are cached: return the memoized node if present.  */
1495 if (number < 2*HOST_BITS_PER_WIDE_INT + 1
1496 && size_table[number] != 0)
1497 return size_table[number];
1498 if (number < 2*HOST_BITS_PER_WIDE_INT + 1)
/* First request for this small value: build the node on the
   permanent obstack so the cached tree outlives the function.  */
1500 push_obstacks_nochange ();
1501 /* Make this a permanent node. */
1502 end_temporary_allocation ();
1503 t = build_int_2 (number, 0);
1504 TREE_TYPE (t) = sizetype;
1505 size_table[number] = t;
/* Large values are not cached; build a fresh node each time.  */
1510 t = build_int_2 (number, 0);
1511 TREE_TYPE (t) = sizetype;
1516 /* Combine operands OP1 and OP2 with arithmetic operation CODE.
1517 CODE is a tree code. Data type is taken from `sizetype',
1518 If the operands are constant, so is the result. */
1521 size_binop (code, arg0, arg1)
1522 enum tree_code code;
1525 /* Handle the special case of two integer constants faster. */
1526 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
1528 /* And some specific cases even faster than that. */
/* 0 + x == x.  */
1529 if (code == PLUS_EXPR
1530 && TREE_INT_CST_LOW (arg0) == 0
1531 && TREE_INT_CST_HIGH (arg0) == 0)
/* x - 0 == x.  */
1533 if (code == MINUS_EXPR
1534 && TREE_INT_CST_LOW (arg1) == 0
1535 && TREE_INT_CST_HIGH (arg1) == 0)
/* 1 * x == x.  */
1537 if (code == MULT_EXPR
1538 && TREE_INT_CST_LOW (arg0) == 1
1539 && TREE_INT_CST_HIGH (arg0) == 0)
1541 /* Handle general case of two integer constants. */
1542 return const_binop (code, arg0, arg1, 1);
/* Propagate errors rather than folding them.  */
1545 if (arg0 == error_mark_node || arg1 == error_mark_node)
1546 return error_mark_node;
/* Non-constant operands: build the expression and let `fold' simplify.  */
1548 return fold (build (code, sizetype, arg0, arg1));
1551 /* Given T, a tree representing type conversion of ARG1, a constant,
1552 return a constant tree representing the result of conversion. */
1555 fold_convert (t, arg1)
1559 register tree type = TREE_TYPE (t);
/* Case 1: converting to an integral or pointer type.  */
1562 if (TREE_CODE (type) == POINTER_TYPE || INTEGRAL_TYPE_P (type))
1564 if (TREE_CODE (arg1) == INTEGER_CST)
1566 /* Given an integer constant, make new constant with new type,
1567 appropriately sign-extended or truncated. */
1568 t = build_int_2 (TREE_INT_CST_LOW (arg1),
1569 TREE_INT_CST_HIGH (arg1));
1570 TREE_TYPE (t) = type;
1571 /* Indicate an overflow if (1) ARG1 already overflowed,
1572 or (2) force_fit_type indicates an overflow.
1573 Tell force_fit_type that an overflow has already occurred
1574 if ARG1 is a too-large unsigned value and T is signed. */
1576 = (TREE_OVERFLOW (arg1)
1577 | force_fit_type (t,
1578 (TREE_INT_CST_HIGH (arg1) < 0
1579 & (TREE_UNSIGNED (type)
1580 < TREE_UNSIGNED (TREE_TYPE (arg1))))));
1581 TREE_CONSTANT_OVERFLOW (t)
1582 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
1584 #if !defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
/* Real constant -> integral/pointer type: truncate toward zero,
   range-checking against the target type's bounds.  */
1585 else if (TREE_CODE (arg1) == REAL_CST)
1587 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
1588 REAL_VALUE_TYPE l = real_value_from_int_cst (TYPE_MIN_VALUE (type));
1589 REAL_VALUE_TYPE u = real_value_from_int_cst (TYPE_MAX_VALUE (type));
1591 /* See if X will be in range after truncation towards 0.
1592 To compensate for truncation, move the bounds away from 0,
1593 but reject if X exactly equals the adjusted bounds. */
1594 #ifdef REAL_ARITHMETIC
1595 REAL_ARITHMETIC (l, MINUS_EXPR, l, dconst1);
1596 REAL_ARITHMETIC (u, PLUS_EXPR, u, dconst1);
1601 /* If X is a NaN, use zero instead and show we have an overflow.
1602 Otherwise, range check. */
1603 if (REAL_VALUE_ISNAN (x))
1604 overflow = 1, x = dconst0;
1605 else if (! (REAL_VALUES_LESS (l, x) && REAL_VALUES_LESS (x, u)))
1608 #ifndef REAL_ARITHMETIC
/* No REAL_ARITHMETIC: convert via host floating point, splitting the
   value into two halfwords to build the double-word integer constant.  */
1610 HOST_WIDE_INT low, high;
1611 HOST_WIDE_INT half_word
1612 = (HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2);
1617 high = (HOST_WIDE_INT) (x / half_word / half_word);
1618 x -= (REAL_VALUE_TYPE) high * half_word * half_word;
1619 if (x >= (REAL_VALUE_TYPE) half_word * half_word / 2)
1621 low = x - (REAL_VALUE_TYPE) half_word * half_word / 2;
1622 low |= (HOST_WIDE_INT) -1 << (HOST_BITS_PER_WIDE_INT - 1);
1625 low = (HOST_WIDE_INT) x;
1626 if (TREE_REAL_CST (arg1) < 0)
1627 neg_double (low, high, &low, &high);
1628 t = build_int_2 (low, high);
1632 HOST_WIDE_INT low, high;
1633 REAL_VALUE_TO_INT (&low, &high, x);
1634 t = build_int_2 (low, high);
1637 TREE_TYPE (t) = type;
1639 = TREE_OVERFLOW (arg1) | force_fit_type (t, overflow);
1640 TREE_CONSTANT_OVERFLOW (t)
1641 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
1643 #endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
1644 TREE_TYPE (t) = type;
/* Case 2: converting to a floating-point type.  */
1646 else if (TREE_CODE (type) == REAL_TYPE)
1648 #if !defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
1649 if (TREE_CODE (arg1) == INTEGER_CST)
1650 return build_real_from_int_cst (type, arg1);
1651 #endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
1652 if (TREE_CODE (arg1) == REAL_CST)
1654 if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1)))
/* The truncation may trap (e.g. on overflow); catch that via the
   float handler and fall back to a copy of the original constant.  */
1656 else if (setjmp (float_error))
1659 t = copy_node (arg1);
1662 set_float_handler (float_error);
1664 t = build_real (type, real_value_truncate (TYPE_MODE (type),
1665 TREE_REAL_CST (arg1)));
1666 set_float_handler (NULL_PTR);
1670 = TREE_OVERFLOW (arg1) | force_fit_type (t, overflow);
1671 TREE_CONSTANT_OVERFLOW (t)
1672 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
1676 TREE_CONSTANT (t) = 1;
1680 /* Return an expr equal to X but certainly not valid as an lvalue.
1681 Also make sure it is not valid as an null pointer constant. */
1689 /* These things are certainly not lvalues. */
1690 if (TREE_CODE (x) == NON_LVALUE_EXPR
1691 || TREE_CODE (x) == INTEGER_CST
1692 || TREE_CODE (x) == REAL_CST
1693 || TREE_CODE (x) == STRING_CST
1694 || TREE_CODE (x) == ADDR_EXPR)
1696 if (TREE_CODE (x) == INTEGER_CST && integer_zerop (x))
1698 /* Use NOP_EXPR instead of NON_LVALUE_EXPR
1699 so convert_for_assignment won't strip it.
1700 This is so this 0 won't be treated as a null pointer constant. */
1701 result = build1 (NOP_EXPR, TREE_TYPE (x), x);
1702 TREE_CONSTANT (result) = TREE_CONSTANT (x);
/* General case: wrap X so it cannot be used as an lvalue, preserving
   constancy of the operand.  */
1708 result = build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
1709 TREE_CONSTANT (result) = TREE_CONSTANT (x);
1713 /* When pedantic, return an expr equal to X but certainly not valid as a
1714 pedantic lvalue. Otherwise, return X. */
1717 pedantic_non_lvalue (x)
/* Only wrap when -pedantic is in effect; otherwise X passes through.  */
1721 return non_lvalue (x);
1726 /* Given a tree comparison code, return the code that is the logical inverse
1727 of the given code. It is not safe to do this for floating-point
1728 comparisons, except for NE_EXPR and EQ_EXPR. */
1730 static enum tree_code
1731 invert_tree_comparison (code)
1732 enum tree_code code;
1753 /* Similar, but return the comparison that results if the operands are
1754 swapped. This is safe for floating-point. */
1756 static enum tree_code
1757 swap_tree_comparison (code)
1758 enum tree_code code;
1778 /* Return nonzero if CODE is a tree code that represents a truth value. */
1781 truth_value_p (code)
1782 enum tree_code code;
/* True for any comparison ('<' class) and for the boolean connectives.  */
1784 return (TREE_CODE_CLASS (code) == '<'
1785 || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
1786 || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
1787 || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
1790 /* Return nonzero if two operands are necessarily equal.
1791 If ONLY_CONST is non-zero, only return non-zero for constants.
1792 This function tests whether the operands are indistinguishable;
1793 it does not test whether they are equal using C's == operation.
1794 The distinction is important for IEEE floating point, because
1795 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
1796 (2) two NaNs may be indistinguishable, but NaN!=NaN. */
1799 operand_equal_p (arg0, arg1, only_const)
1803 /* If both types don't have the same signedness, then we can't consider
1804 them equal. We must check this before the STRIP_NOPS calls
1805 because they may change the signedness of the arguments. */
1806 if (TREE_UNSIGNED (TREE_TYPE (arg0)) != TREE_UNSIGNED (TREE_TYPE (arg1)))
1812 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
1813 We don't care about side effects in that case because the SAVE_EXPR
1814 takes care of that for us. */
1815 if (TREE_CODE (arg0) == SAVE_EXPR && arg0 == arg1)
1816 return ! only_const;
/* Expressions with side effects can never be proven equal.  */
1818 if (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1))
/* Two ADDR_EXPRs of the identical object are equal.  */
1821 if (TREE_CODE (arg0) == TREE_CODE (arg1)
1822 && TREE_CODE (arg0) == ADDR_EXPR
1823 && TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0))
/* Integer constants compare by their double-word value.  */
1826 if (TREE_CODE (arg0) == TREE_CODE (arg1)
1827 && TREE_CODE (arg0) == INTEGER_CST
1828 && TREE_INT_CST_LOW (arg0) == TREE_INT_CST_LOW (arg1)
1829 && TREE_INT_CST_HIGH (arg0) == TREE_INT_CST_HIGH (arg1))
1832 /* Detect when real constants are equal. */
1833 if (TREE_CODE (arg0) == TREE_CODE (arg1)
1834 && TREE_CODE (arg0) == REAL_CST)
/* Bitwise comparison distinguishes -0.0 from 0.0 and matches NaNs,
   which is exactly the "indistinguishable" semantics wanted here.  */
1835 return !bcmp (&TREE_REAL_CST (arg0), &TREE_REAL_CST (arg1),
1836 sizeof (REAL_VALUE_TYPE));
1844 if (TREE_CODE (arg0) != TREE_CODE (arg1))
1846 /* This is needed for conversions and for COMPONENT_REF.
1847 Might as well play it safe and always test this. */
1848 if (TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
/* Recurse structurally by tree-code class.  */
1851 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
1854 /* Two conversions are equal only if signedness and modes match. */
1855 if ((TREE_CODE (arg0) == NOP_EXPR || TREE_CODE (arg0) == CONVERT_EXPR)
1856 && (TREE_UNSIGNED (TREE_TYPE (arg0))
1857 != TREE_UNSIGNED (TREE_TYPE (arg1))))
1860 return operand_equal_p (TREE_OPERAND (arg0, 0),
1861 TREE_OPERAND (arg1, 0), 0);
/* Binary operators: both operands must match pairwise.  */
1865 return (operand_equal_p (TREE_OPERAND (arg0, 0),
1866 TREE_OPERAND (arg1, 0), 0)
1867 && operand_equal_p (TREE_OPERAND (arg0, 1),
1868 TREE_OPERAND (arg1, 1), 0));
1871 switch (TREE_CODE (arg0))
1874 return operand_equal_p (TREE_OPERAND (arg0, 0),
1875 TREE_OPERAND (arg1, 0), 0);
1879 return (operand_equal_p (TREE_OPERAND (arg0, 0),
1880 TREE_OPERAND (arg1, 0), 0)
1881 && operand_equal_p (TREE_OPERAND (arg0, 1),
1882 TREE_OPERAND (arg1, 1), 0));
/* Three-operand references: all three operands must match.  */
1885 return (operand_equal_p (TREE_OPERAND (arg0, 0),
1886 TREE_OPERAND (arg1, 0), 0)
1887 && operand_equal_p (TREE_OPERAND (arg0, 1),
1888 TREE_OPERAND (arg1, 1), 0)
1889 && operand_equal_p (TREE_OPERAND (arg0, 2),
1890 TREE_OPERAND (arg1, 2), 0));
1898 /* Similar to operand_equal_p, but see if ARG0 might have been made by
1899 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
1901 When in doubt, return 0. */
1904 operand_equal_for_comparison_p (arg0, arg1, other)
1908 int unsignedp1, unsignedpo;
1909 tree primarg1, primother;
1910 unsigned correct_width;
/* Trivially equal operands are always acceptable.  */
1912 if (operand_equal_p (arg0, arg1, 0))
/* Only integral comparisons can have been shortened.  */
1915 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
1918 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
1919 actual comparison operand, ARG0.
1921 First throw away any conversions to wider types
1922 already present in the operands. */
1924 primarg1 = get_narrower (arg1, &unsignedp1);
1925 primother = get_narrower (other, &unsignedpo);
1927 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
1928 if (unsignedp1 == unsignedpo
1929 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
1930 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
1932 tree type = TREE_TYPE (arg0);
1934 /* Make sure shorter operand is extended the right way
1935 to match the longer operand. */
1936 primarg1 = convert (signed_or_unsigned_type (unsignedp1,
1937 TREE_TYPE (primarg1)),
/* If re-widening the narrowed operand reproduces ARG0, they match.  */
1940 if (operand_equal_p (arg0, convert (type, primarg1), 0))
1947 /* See if ARG is an expression that is either a comparison or is performing
1948 arithmetic on comparisons. The comparisons must only be comparing
1949 two different values, which will be stored in *CVAL1 and *CVAL2; if
1950 they are non-zero it means that some operands have already been found.
1951 No variables may be used anywhere else in the expression except in the
1952 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
1953 the expression and save_expr needs to be called with CVAL1 and CVAL2.
1955 If this is true, return 1. Otherwise, return zero. */
1958 twoval_comparison_p (arg, cval1, cval2, save_p)
1960 tree *cval1, *cval2;
1963 enum tree_code code = TREE_CODE (arg);
1964 char class = TREE_CODE_CLASS (code);
1966 /* We can handle some of the 'e' cases here. */
1967 if (class == 'e' && code == TRUTH_NOT_EXPR)
1969 else if (class == 'e'
1970 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
1971 || code == COMPOUND_EXPR))
1974 /* ??? Disable this since the SAVE_EXPR might already be in use outside
1975 the expression. There may be no way to make this work, but it needs
1976 to be looked at again for 2.6. */
1978 else if (class == 'e' && code == SAVE_EXPR && SAVE_EXPR_RTL (arg) == 0)
1980 /* If we've already found a CVAL1 or CVAL2, this expression is
1981 too complex to handle. */
1982 if (*cval1 || *cval2)
/* Unary case: recurse into the single operand.  */
1993 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
/* Binary case: both operands must themselves qualify.  */
1996 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
1997 && twoval_comparison_p (TREE_OPERAND (arg, 1),
1998 cval1, cval2, save_p));
2004 if (code == COND_EXPR)
2005 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2006 cval1, cval2, save_p)
2007 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2008 cval1, cval2, save_p)
2009 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2010 cval1, cval2, save_p));
2014 /* First see if we can handle the first operand, then the second. For
2015 the second operand, we know *CVAL1 can't be zero. It must be that
2016 one side of the comparison is each of the values; test for the
2017 case where this isn't true by failing if the two operands
/* Reject comparisons of a value with itself — only two distinct
   values may appear.  */
2020 if (operand_equal_p (TREE_OPERAND (arg, 0),
2021 TREE_OPERAND (arg, 1), 0))
/* Record or match the first comparison operand against CVAL1/CVAL2.  */
2025 *cval1 = TREE_OPERAND (arg, 0);
2026 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2028 else if (*cval2 == 0)
2029 *cval2 = TREE_OPERAND (arg, 0);
2030 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
/* Likewise for the second comparison operand.  */
2035 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2037 else if (*cval2 == 0)
2038 *cval2 = TREE_OPERAND (arg, 1);
2039 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2050 /* ARG is a tree that is known to contain just arithmetic operations and
2051 comparisons. Evaluate the operations in the tree substituting NEW0 for
2052 any occurrence of OLD0 as an operand of a comparison and likewise for
2056 eval_subst (arg, old0, new0, old1, new1)
2058 tree old0, new0, old1, new1;
2060 tree type = TREE_TYPE (arg);
2061 enum tree_code code = TREE_CODE (arg);
2062 char class = TREE_CODE_CLASS (code);
2064 /* We can handle some of the 'e' cases here. */
2065 if (class == 'e' && code == TRUTH_NOT_EXPR)
2067 else if (class == 'e'
2068 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
/* Unary expression: rebuild with the substituted operand and re-fold.  */
2074 return fold (build1 (code, type,
2075 eval_subst (TREE_OPERAND (arg, 0),
2076 old0, new0, old1, new1)));
/* Binary expression: substitute into both operands.  */
2079 return fold (build (code, type,
2080 eval_subst (TREE_OPERAND (arg, 0),
2081 old0, new0, old1, new1),
2082 eval_subst (TREE_OPERAND (arg, 1),
2083 old0, new0, old1, new1)));
2089 return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);
2092 return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);
/* Ternary expression (e.g. COND_EXPR): substitute into all three.  */
2095 return fold (build (code, type,
2096 eval_subst (TREE_OPERAND (arg, 0),
2097 old0, new0, old1, new1),
2098 eval_subst (TREE_OPERAND (arg, 1),
2099 old0, new0, old1, new1),
2100 eval_subst (TREE_OPERAND (arg, 2),
2101 old0, new0, old1, new1)));
/* Comparison: this is where the actual OLD->NEW replacement happens.  */
2106 tree arg0 = TREE_OPERAND (arg, 0);
2107 tree arg1 = TREE_OPERAND (arg, 1);
2109 /* We need to check both for exact equality and tree equality. The
2110 former will be true if the operand has a side-effect. In that
2111 case, we know the operand occurred exactly once. */
2113 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
2115 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
2118 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
2120 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
2123 return fold (build (code, type, arg0, arg1));
2130 /* Return a tree for the case when the result of an expression is RESULT
2131 converted to TYPE and OMITTED was previously an operand of the expression
2132 but is now not needed (e.g., we folded OMITTED * 0).
2134 If OMITTED has side effects, we must evaluate it. Otherwise, just do
2135 the conversion of RESULT to TYPE. */
2138 omit_one_operand (type, result, omitted)
2139 tree type, result, omitted;
2141 tree t = convert (type, result);
/* Keep OMITTED in the tree (via a comma expression) when dropping it
   would lose its side effects.  */
2143 if (TREE_SIDE_EFFECTS (omitted))
2144 return build (COMPOUND_EXPR, type, omitted, t);
/* Wrap in non_lvalue so the folded result can't be assigned to.  */
2146 return non_lvalue (t);
2149 /* Return a simplified tree node for the truth-negation of ARG. This
2150 never alters ARG itself. We assume that ARG is an operation that
2151 returns a truth value (0 or 1). */
2154 invert_truthvalue (arg)
2157 tree type = TREE_TYPE (arg);
2158 enum tree_code code = TREE_CODE (arg);
2160 if (code == ERROR_MARK)
2163 /* If this is a comparison, we can simply invert it, except for
2164 floating-point non-equality comparisons, in which case we just
2165 enclose a TRUTH_NOT_EXPR around what we have. */
2167 if (TREE_CODE_CLASS (code) == '<')
2169 if (FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (arg, 0)))
2170 && code != NE_EXPR && code != EQ_EXPR)
2171 return build1 (TRUTH_NOT_EXPR, type, arg)
2173 return build (invert_tree_comparison (code), type,
2174 TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
/* Constant: the negation is 1 iff the constant is zero.  */
2180 return convert (type, build_int_2 (TREE_INT_CST_LOW (arg) == 0
2181 && TREE_INT_CST_HIGH (arg) == 0, 0));
/* De Morgan: !(a && b) == !a || !b.  */
2183 case TRUTH_AND_EXPR:
2184 return build (TRUTH_OR_EXPR, type,
2185 invert_truthvalue (TREE_OPERAND (arg, 0)),
2186 invert_truthvalue (TREE_OPERAND (arg, 1)));
/* De Morgan: !(a || b) == !a && !b.  */
2189 return build (TRUTH_AND_EXPR, type,
2190 invert_truthvalue (TREE_OPERAND (arg, 0)),
2191 invert_truthvalue (TREE_OPERAND (arg, 1)));
2193 case TRUTH_XOR_EXPR:
2194 /* Here we can invert either operand. We invert the first operand
2195 unless the second operand is a TRUTH_NOT_EXPR in which case our
2196 result is the XOR of the first operand with the inside of the
2197 negation of the second operand. */
2199 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
2200 return build (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
2201 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
2203 return build (TRUTH_XOR_EXPR, type,
2204 invert_truthvalue (TREE_OPERAND (arg, 0)),
2205 TREE_OPERAND (arg, 1));
/* Short-circuit forms invert like their non-short-circuit twins.  */
2207 case TRUTH_ANDIF_EXPR:
2208 return build (TRUTH_ORIF_EXPR, type,
2209 invert_truthvalue (TREE_OPERAND (arg, 0)),
2210 invert_truthvalue (TREE_OPERAND (arg, 1)));
2212 case TRUTH_ORIF_EXPR:
2213 return build (TRUTH_ANDIF_EXPR, type,
2214 invert_truthvalue (TREE_OPERAND (arg, 0)),
2215 invert_truthvalue (TREE_OPERAND (arg, 1)));
/* Double negation cancels.  */
2217 case TRUTH_NOT_EXPR:
2218 return TREE_OPERAND (arg, 0);
/* !(c ? a : b) == c ? !a : !b.  */
2221 return build (COND_EXPR, type, TREE_OPERAND (arg, 0),
2222 invert_truthvalue (TREE_OPERAND (arg, 1)),
2223 invert_truthvalue (TREE_OPERAND (arg, 2)));
/* !(a, b) == (a, !b).  */
2226 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
2227 invert_truthvalue (TREE_OPERAND (arg, 1)));
2229 case NON_LVALUE_EXPR:
2230 return invert_truthvalue (TREE_OPERAND (arg, 0));
/* Push the negation through a conversion.  */
2235 return build1 (TREE_CODE (arg), type,
2236 invert_truthvalue (TREE_OPERAND (arg, 0)));
/* (x & 1) can only be inverted as a truth value when the mask is 1.  */
2239 if (!integer_onep (TREE_OPERAND (arg, 1)))
2241 return build (EQ_EXPR, type, arg, convert (type, integer_zero_node));
2244 return build1 (TRUTH_NOT_EXPR, type, arg);
2246 if (TREE_CODE (TREE_TYPE (arg)) != BOOLEAN_TYPE)
2248 return build1 (TRUTH_NOT_EXPR, type, arg);
2251 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
2252 operands are another bit-wise operation with a common input. If so,
2253 distribute the bit operations to save an operation and possibly two if
2254 constants are involved. For example, convert
2255 (A | B) & (A | C) into A | (B & C)
2256 Further simplification will occur if B and C are constants.
2258 If this optimization cannot be done, 0 will be returned. */
2261 distribute_bit_expr (code, type, arg0, arg1)
2262 enum tree_code code;
/* Both operands must be the same kind of AND/IOR, different from CODE
   (same code would make the "distribution" a no-op).  */
2269 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2270 || TREE_CODE (arg0) == code
2271 || (TREE_CODE (arg0) != BIT_AND_EXPR
2272 && TREE_CODE (arg0) != BIT_IOR_EXPR))
/* Find the shared operand; bit-ops are commutative, so check all four
   operand pairings.  */
2275 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
2277 common = TREE_OPERAND (arg0, 0);
2278 left = TREE_OPERAND (arg0, 1);
2279 right = TREE_OPERAND (arg1, 1);
2281 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
2283 common = TREE_OPERAND (arg0, 0);
2284 left = TREE_OPERAND (arg0, 1);
2285 right = TREE_OPERAND (arg1, 0);
2287 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
2289 common = TREE_OPERAND (arg0, 1);
2290 left = TREE_OPERAND (arg0, 0);
2291 right = TREE_OPERAND (arg1, 1);
2293 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
2295 common = TREE_OPERAND (arg0, 1);
2296 left = TREE_OPERAND (arg0, 0);
2297 right = TREE_OPERAND (arg1, 0);
/* Rebuild as COMMON op (LEFT code RIGHT), folding the inner part.  */
2302 return fold (build (TREE_CODE (arg0), type, common,
2303 fold (build (code, type, left, right))));
2306 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
2307 starting at BITPOS. The field is unsigned if UNSIGNEDP is non-zero. */
2310 make_bit_field_ref (inner, type, bitsize, bitpos, unsignedp)
2313 int bitsize, bitpos;
2316 tree result = build (BIT_FIELD_REF, type, inner,
2317 size_int (bitsize), size_int (bitpos));
2319 TREE_UNSIGNED (result) = unsignedp;
2324 /* Optimize a bit-field compare.
2326 There are two cases: First is a compare against a constant and the
2327 second is a comparison of two items where the fields are at the same
2328 bit position relative to the start of a chunk (byte, halfword, word)
2329 large enough to contain it. In these cases we can avoid the shift
2330 implicit in bitfield extractions.
2332 For constants, we emit a compare of the shifted constant with the
2333 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
2334 compared. For two fields at the same position, we do the ANDs with the
2335 similar mask and compare the result of the ANDs.
2337 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
2338 COMPARE_TYPE is the type of the comparison, and LHS and RHS
2339 are the left and right operands of the comparison, respectively.
2341 If the optimization described above can be done, we return the resulting
2342 tree. Otherwise we return zero. */
2345 optimize_bit_field_compare (code, compare_type, lhs, rhs)
2346 enum tree_code code;
2350 int lbitpos, lbitsize, rbitpos, rbitsize;
2351 int lnbitpos, lnbitsize, rnbitpos, rnbitsize;
2352 tree type = TREE_TYPE (lhs);
2353 tree signed_type, unsigned_type;
2354 int const_p = TREE_CODE (rhs) == INTEGER_CST;
2355 enum machine_mode lmode, rmode, lnmode, rnmode;
2356 int lunsignedp, runsignedp;
2357 int lvolatilep = 0, rvolatilep = 0;
2358 tree linner, rinner;
2362 /* Get all the information about the extractions being done. If the bit size
2363 if the same as the size of the underlying object, we aren't doing an
2364 extraction at all and so can do nothing. */
2365 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
2366 &lunsignedp, &lvolatilep);
2367 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
2373 /* If this is not a constant, we can only do something if bit positions,
2374 sizes, and signedness are the same. */
2375 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset,
2376 &rmode, &runsignedp, &rvolatilep);
2378 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
2379 || lunsignedp != runsignedp || offset != 0)
2383 /* See if we can find a mode to refer to this field. We should be able to,
2384 but fail if we can't. */
2385 lnmode = get_best_mode (lbitsize, lbitpos,
2386 TYPE_ALIGN (TREE_TYPE (linner)), word_mode,
2388 if (lnmode == VOIDmode)
2391 /* Set signed and unsigned types of the precision of this mode for the
2393 signed_type = type_for_mode (lnmode, 0);
2394 unsigned_type = type_for_mode (lnmode, 1);
/* Likewise find a mode for the right-hand field (non-constant case).  */
2398 rnmode = get_best_mode (rbitsize, rbitpos,
2399 TYPE_ALIGN (TREE_TYPE (rinner)), word_mode,
2401 if (rnmode == VOIDmode)
2405 /* Compute the bit position and size for the new reference and our offset
2406 within it. If the new reference is the same size as the original, we
2407 won't optimize anything, so return zero. */
2408 lnbitsize = GET_MODE_BITSIZE (lnmode);
2409 lnbitpos = lbitpos & ~ (lnbitsize - 1);
2410 lbitpos -= lnbitpos;
2411 if (lnbitsize == lbitsize)
/* Same computation for the right-hand reference.  */
2416 rnbitsize = GET_MODE_BITSIZE (rnmode);
2417 rnbitpos = rbitpos & ~ (rnbitsize - 1);
2418 rbitpos -= rnbitpos;
2419 if (rnbitsize == rbitsize)
/* On big-endian targets, bit positions count from the other end of the
   containing unit.  */
2423 #if BYTES_BIG_ENDIAN
2424 lbitpos = lnbitsize - lbitsize - lbitpos;
2427 /* Make the mask to be used against the extracted field. */
2428 mask = build_int_2 (~0, ~0);
2429 TREE_TYPE (mask) = unsigned_type;
2430 force_fit_type (mask, 0);
2431 mask = convert (unsigned_type, mask);
/* Shift left then right to leave exactly LBITSIZE ones positioned at
   LBITPOS within the containing unit.  */
2432 mask = const_binop (LSHIFT_EXPR, mask, size_int (lnbitsize - lbitsize), 0);
2433 mask = const_binop (RSHIFT_EXPR, mask,
2434 size_int (lnbitsize - lbitsize - lbitpos), 0);
2437 /* If not comparing with constant, just rework the comparison
/* Non-constant case: (lhs-word & mask) CODE (rhs-word & mask).  */
2439 return build (code, compare_type,
2440 build (BIT_AND_EXPR, unsigned_type,
2441 make_bit_field_ref (linner, unsigned_type,
2442 lnbitsize, lnbitpos, 1),
2444 build (BIT_AND_EXPR, unsigned_type,
2445 make_bit_field_ref (rinner, unsigned_type,
2446 rnbitsize, rnbitpos, 1),
2449 /* Otherwise, we are handling the constant case. See if the constant is too
2450 big for the field. Warn and return a tree of for 0 (false) if so. We do
2451 this not only for its own sake, but to avoid having to test for this
2452 error case below. If we didn't, we might generate wrong code.
2454 For unsigned fields, the constant shifted right by the field length should
2455 be all zero. For signed fields, the high-order bits should agree with
/* Unsigned field: high bits of RHS beyond the field must be zero.  */
2460 if (! integer_zerop (const_binop (RSHIFT_EXPR,
2461 convert (unsigned_type, rhs),
2462 size_int (lbitsize), 0)))
2464 warning ("comparison is always %s due to width of bitfield",
2465 code == NE_EXPR ? "one" : "zero");
2466 return convert (compare_type,
2468 ? integer_one_node : integer_zero_node));
/* Signed field: bits above the sign bit must all match the sign bit.  */
2473 tree tem = const_binop (RSHIFT_EXPR, convert (signed_type, rhs),
2474 size_int (lbitsize - 1), 0);
2475 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
2477 warning ("comparison is always %s due to width of bitfield",
2478 code == NE_EXPR ? "one" : "zero");
2479 return convert (compare_type,
2481 ? integer_one_node : integer_zero_node));
2485 /* Single-bit compares should always be against zero. */
2486 if (lbitsize == 1 && ! integer_zerop (rhs))
2488 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
2489 rhs = convert (type, integer_zero_node);
2492 /* Make a new bitfield reference, shift the constant over the
2493 appropriate number of bits and mask it with the computed mask
2494 (in case this was a signed field). If we changed it, make a new one. */
2495 lhs = make_bit_field_ref (linner, unsigned_type, lnbitsize, lnbitpos, 1);
/* Preserve volatility on the new reference.  */
2498 TREE_SIDE_EFFECTS (lhs) = 1;
2499 TREE_THIS_VOLATILE (lhs) = 1;
2502 rhs = fold (const_binop (BIT_AND_EXPR,
2503 const_binop (LSHIFT_EXPR,
2504 convert (unsigned_type, rhs),
2505 size_int (lbitpos), 0),
2508 return build (code, compare_type,
2509 build (BIT_AND_EXPR, unsigned_type, lhs, mask),
2513 /* Subroutine for fold_truthop: decode a field reference.
2515 If EXP is a comparison reference, we return the innermost reference.
2517 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
2518 set to the starting bit number.
2520 If the innermost field can be completely contained in a mode-sized
2521 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
2523 *PVOLATILEP is set to 1 if the any expression encountered is volatile;
2524 otherwise it is not changed.
2526 *PUNSIGNEDP is set to the signedness of the field.
2528 *PMASK is set to the mask used. This is either contained in a
2529 BIT_AND_EXPR or derived from the width of the field.
2531 Return 0 if this is not a component reference or is one that we can't
2532 do anything with. */
2535 decode_field_reference (exp, pbitsize, pbitpos, pmode, punsignedp,
2538 int *pbitsize, *pbitpos;
2539 enum machine_mode *pmode;
2540 int *punsignedp, *pvolatilep;
2547 /* All the optimizations using this function assume integer fields.
2548 There are problems with FP fields since the type_for_size call
2549 below can fail for, e.g., XFmode. */
2550 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
/* Peel off an explicit BIT_AND_EXPR mask, if present; the mask must
   be a constant for us to use it.  */
2555 if (TREE_CODE (exp) == BIT_AND_EXPR)
2557 mask = TREE_OPERAND (exp, 1);
2558 exp = TREE_OPERAND (exp, 0);
2559 STRIP_NOPS (exp); STRIP_NOPS (mask);
2560 if (TREE_CODE (mask) != INTEGER_CST)
2564 if (TREE_CODE (exp) != COMPONENT_REF && TREE_CODE (exp) != ARRAY_REF
2565 && TREE_CODE (exp) != BIT_FIELD_REF)
2568 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
2569 punsignedp, pvolatilep);
2570 if (inner == exp || *pbitsize < 0 || offset != 0)
/* No explicit mask: derive one of *PBITSIZE low-order ones from the
   field width.  */
2575 tree unsigned_type = type_for_size (*pbitsize, 1);
2576 int precision = TYPE_PRECISION (unsigned_type);
2578 mask = build_int_2 (~0, ~0);
2579 TREE_TYPE (mask) = unsigned_type;
2580 force_fit_type (mask, 0);
2581 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
2582 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
2589 /* Return non-zero if MASK represents a mask of SIZE ones in the low-order
2593 all_ones_mask_p (mask, size)
2597 tree type = TREE_TYPE (mask);
2598 int precision = TYPE_PRECISION (type);
/* Build an all-ones constant of the same precision...  */
2601 tmask = build_int_2 (~0, ~0);
2602 TREE_TYPE (tmask) = signed_type (type);
2603 force_fit_type (tmask, 0);
/* ...then shift left and right by (precision - size) to keep exactly
   SIZE low-order ones, and compare against MASK.  */
2605 operand_equal_p (mask,
2606 const_binop (RSHIFT_EXPR,
2607 const_binop (LSHIFT_EXPR, tmask,
2608 size_int (precision - size), 0),
2609 size_int (precision - size), 0),
2613 /* Subroutine for fold_truthop: determine if an operand is simple enough
2614 to be evaluated unconditionally. */
2617 simple_operand_p (exp)
2620 /* Strip any conversions that don't change the machine mode. */
2621 while ((TREE_CODE (exp) == NOP_EXPR
2622 || TREE_CODE (exp) == CONVERT_EXPR)
2623 && (TYPE_MODE (TREE_TYPE (exp))
2624 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
2625 exp = TREE_OPERAND (exp, 0);
/* Simple means: a constant ('c' class), or a declaration ('d' class)
   that is cheap and safe to load speculatively.  */
2627 return (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c'
2628 || (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd'
2629 && ! TREE_ADDRESSABLE (exp)
2630 && ! TREE_THIS_VOLATILE (exp)
2631 && ! DECL_NONLOCAL (exp)
2632 /* Don't regard global variables as simple. They may be
2633 allocated in ways unknown to the compiler (shared memory,
2634 #pragma weak, etc). */
2635 && ! TREE_PUBLIC (exp)
2636 && ! DECL_EXTERNAL (exp)
2637 /* Loading a static variable is unduly expensive, but global
2638 registers aren't expensive. */
2639 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
2642 /* Subroutine for fold_truthop: try to optimize a range test.
2644 For example, "i >= 2 && i <= 9" can be done as "(unsigned) (i - 2) <= 7".
2646 JCODE is the logical combination of the two terms. It is TRUTH_AND_EXPR
2647 (representing TRUTH_ANDIF_EXPR and TRUTH_AND_EXPR) or TRUTH_OR_EXPR
2648 (representing TRUTH_ORIF_EXPR and TRUTH_OR_EXPR). TYPE is the type of
2651 VAR is the value being tested. LO_CODE and HI_CODE are the comparison
2652 operators comparing VAR to LO_CST and HI_CST. LO_CST is known to be no
2653 larger than HI_CST (they may be equal).
2655 We return the simplified tree or 0 if no optimization is possible. */
2658 range_test (jcode, type, lo_code, hi_code, var, lo_cst, hi_cst)
2659 enum tree_code jcode, lo_code, hi_code;
2660 tree type, var, lo_cst, hi_cst;
2663 enum tree_code rcode;
2665 /* See if this is a range test and normalize the constant terms. */
2667 if (jcode == TRUTH_AND_EXPR)
2672 /* See if we have VAR != CST && VAR != CST+1. */
2673 if (! (hi_code == NE_EXPR
2674 && TREE_INT_CST_LOW (hi_cst) - TREE_INT_CST_LOW (lo_cst) == 1
2675 && tree_int_cst_equal (integer_one_node,
2676 const_binop (MINUS_EXPR,
2677 hi_cst, lo_cst, 0))))
/* Normalize the AND case to VAR >= LO_CST && VAR <= HI_CST by
   adjusting strict comparisons into non-strict ones.  */
2685 if (hi_code == LT_EXPR)
2686 hi_cst = const_binop (MINUS_EXPR, hi_cst, integer_one_node, 0);
2687 else if (hi_code != LE_EXPR)
2690 if (lo_code == GT_EXPR)
2691 lo_cst = const_binop (PLUS_EXPR, lo_cst, integer_one_node, 0);
2693 /* We now have VAR >= LO_CST && VAR <= HI_CST. */
2706 /* See if we have VAR == CST || VAR == CST+1. */
2707 if (! (hi_code == EQ_EXPR
2708 && TREE_INT_CST_LOW (hi_cst) - TREE_INT_CST_LOW (lo_cst) == 1
2709 && tree_int_cst_equal (integer_one_node,
2710 const_binop (MINUS_EXPR,
2711 hi_cst, lo_cst, 0))))
/* Normalize the OR case to VAR < LO_CST || VAR > HI_CST.  */
2719 if (hi_code == GE_EXPR)
2720 hi_cst = const_binop (MINUS_EXPR, hi_cst, integer_one_node, 0);
2721 else if (hi_code != GT_EXPR)
2724 if (lo_code == LE_EXPR)
2725 lo_cst = const_binop (PLUS_EXPR, lo_cst, integer_one_node, 0);
2727 /* We now have VAR < LO_CST || VAR > HI_CST. */
2736 /* When normalizing, it is possible to both increment the smaller constant
2737 and decrement the larger constant. See if they are still ordered. */
2738 if (tree_int_cst_lt (hi_cst, lo_cst))
2741 /* Fail if VAR isn't an integer. */
2742 utype = TREE_TYPE (var);
2743 if (! INTEGRAL_TYPE_P (utype))
2746 /* The range test is invalid if subtracting the two constants results
2747 in overflow. This can happen in traditional mode. */
2748 if (! int_fits_type_p (hi_cst, TREE_TYPE (var))
2749 || ! int_fits_type_p (lo_cst, TREE_TYPE (var)))
/* Do the subtraction and compare in the unsigned version of VAR's
   type so the single comparison covers the whole range.  */
2752 if (! TREE_UNSIGNED (utype))
2754 utype = unsigned_type (utype);
2755 var = convert (utype, var);
2756 lo_cst = convert (utype, lo_cst);
2757 hi_cst = convert (utype, hi_cst);
/* Emit (unsigned)(VAR - LO_CST) RCODE (HI_CST - LO_CST).  */
2760 return fold (convert (type,
2761 build (rcode, utype,
2762 build (MINUS_EXPR, utype, var, lo_cst),
2763 const_binop (MINUS_EXPR, hi_cst, lo_cst, 0))));
2766 /* Find ways of folding logical expressions of LHS and RHS:
2767 Try to merge two comparisons to the same innermost item.
2768 Look for range tests like "ch >= '0' && ch <= '9'".
2769 Look for combinations of simple terms on machines with expensive branches
2770 and evaluate the RHS unconditionally.
2772 For example, if we have p->a == 2 && p->b == 4 and we can make an
2773 object large enough to span both A and B, we can do this with a comparison
2774 against the object ANDed with a mask.
2776 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
2777 operations to do this with one comparison.
2779 We check for both normal comparisons and the BIT_AND_EXPRs made by this
2780 function and the one above.
2782 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
2783 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
2785 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
2788 We return the simplified tree or 0 if no optimization is possible. */
2791 fold_truthop (code, truth_type, lhs, rhs)
2792 enum tree_code code;
2793 tree truth_type, lhs, rhs;
2795 /* If this is the "or" of two comparisons, we can do something if
2796 the comparisons are NE_EXPR. If this is the "and", we can do something
2797 if the comparisons are EQ_EXPR. I.e.,
2798 (a->b == 2 && a->c == 4) can become (a->new == NEW).
2800 WANTED_CODE is this operation code. For single bit fields, we can
2801 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
2802 comparison for one-bit fields. */
/* Naming convention for the locals below: the first letter says which
   comparison (l = LHS, r = RHS) and the second says which operand of
   that comparison (l = operand 0, r = operand 1).  So ll_* describes
   the left operand of the left comparison, rr_* the right operand of
   the right comparison, and so on.  */
2804 enum tree_code wanted_code;
2805 enum tree_code lcode, rcode;
2806 tree ll_arg, lr_arg, rl_arg, rr_arg;
2807 tree ll_inner, lr_inner, rl_inner, rr_inner;
2808 int ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
2809 int rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
2810 int xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
2811 int lnbitsize, lnbitpos, rnbitsize, rnbitpos;
2812 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
2813 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
2814 enum machine_mode lnmode, rnmode;
2815 tree ll_mask, lr_mask, rl_mask, rr_mask;
2816 tree l_const, r_const;
2818 int first_bit, end_bit;
2821 /* Start by getting the comparison codes and seeing if this looks like
2822 a range test. Fail if anything is volatile. If one operand is a
2823 BIT_AND_EXPR with the constant one, treat it as if it were surrounded
2826 if (TREE_SIDE_EFFECTS (lhs)
2827 || TREE_SIDE_EFFECTS (rhs))
2830 lcode = TREE_CODE (lhs);
2831 rcode = TREE_CODE (rhs);
/* Rewrite (X & 1) as (X & 1) != 0 so it is handled as a comparison.  */
2833 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
2834 lcode = NE_EXPR, lhs = build (NE_EXPR, truth_type, lhs, integer_zero_node);
2836 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
2837 rcode = NE_EXPR, rhs = build (NE_EXPR, truth_type, rhs, integer_zero_node);
/* Both sides must be comparisons (tree-code class '<') to proceed.  */
2839 if (TREE_CODE_CLASS (lcode) != '<'
2840 || TREE_CODE_CLASS (rcode) != '<')
/* Collapse the short-circuit codes to their simple counterparts; by
   this point both operands are known to be side-effect free.  */
2843 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
2844 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
2846 ll_arg = TREE_OPERAND (lhs, 0);
2847 lr_arg = TREE_OPERAND (lhs, 1);
2848 rl_arg = TREE_OPERAND (rhs, 0);
2849 rr_arg = TREE_OPERAND (rhs, 1);
/* Range-test candidate: the same value compared against two integer
   constants.  Pass the smaller constant as LO_CST to range_test.  */
2851 if (TREE_CODE (lr_arg) == INTEGER_CST
2852 && TREE_CODE (rr_arg) == INTEGER_CST
2853 && operand_equal_p (ll_arg, rl_arg, 0))
2855 if (tree_int_cst_lt (lr_arg, rr_arg))
2856 result = range_test (code, truth_type, lcode, rcode,
2857 ll_arg, lr_arg, rr_arg)
2859 result = range_test (code, truth_type, rcode, lcode,
2860 ll_arg, rr_arg, lr_arg);
2862 /* If this isn't a range test, it also isn't a comparison that
2863 can be merged. However, it wins to evaluate the RHS unconditionally
2864 on machines with expensive branches. */
2866 if (result == 0 && BRANCH_COST >= 2)
2868 if (TREE_CODE (ll_arg) != VAR_DECL
2869 && TREE_CODE (ll_arg) != PARM_DECL)
2871 /* Avoid evaluating the variable part twice. */
2872 ll_arg = save_expr (ll_arg);
2873 lhs = build (lcode, TREE_TYPE (lhs), ll_arg, lr_arg);
2874 rhs = build (rcode, TREE_TYPE (rhs), ll_arg, rr_arg);
2876 return build (code, truth_type, lhs, rhs);
2881 /* If the RHS can be evaluated unconditionally and its operands are
2882 simple, it wins to evaluate the RHS unconditionally on machines
2883 with expensive branches. In this case, this isn't a comparison
2884 that can be merged. */
2886 /* @@ I'm not sure it wins on the m88110 to do this if the comparisons
2887 are with zero (tmw). */
2889 if (BRANCH_COST >= 2
2890 && INTEGRAL_TYPE_P (TREE_TYPE (rhs))
2891 && simple_operand_p (rl_arg)
2892 && simple_operand_p (rr_arg))
2893 return build (code, truth_type, lhs, rhs);
2895 /* See if the comparisons can be merged. Then get all the parameters for
2898 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
2899 || (rcode != EQ_EXPR && rcode != NE_EXPR))
/* Decompose each of the four comparison operands into a bit-field
   reference: the innermost object plus bit size/position, mode,
   signedness, and mask.  VOLATILEP is set if anything is volatile.  */
2903 ll_inner = decode_field_reference (ll_arg,
2904 &ll_bitsize, &ll_bitpos, &ll_mode,
2905 &ll_unsignedp, &volatilep, &ll_mask);
2906 lr_inner = decode_field_reference (lr_arg,
2907 &lr_bitsize, &lr_bitpos, &lr_mode,
2908 &lr_unsignedp, &volatilep, &lr_mask);
2909 rl_inner = decode_field_reference (rl_arg,
2910 &rl_bitsize, &rl_bitpos, &rl_mode,
2911 &rl_unsignedp, &volatilep, &rl_mask);
2912 rr_inner = decode_field_reference (rr_arg,
2913 &rr_bitsize, &rr_bitpos, &rr_mode,
2914 &rr_unsignedp, &volatilep, &rr_mask);
2916 /* It must be true that the inner operation on the lhs of each
2917 comparison must be the same if we are to be able to do anything.
2918 Then see if we have constants. If not, the same must be true for
2920 if (volatilep || ll_inner == 0 || rl_inner == 0
2921 || ! operand_equal_p (ll_inner, rl_inner, 0))
2924 if (TREE_CODE (lr_arg) == INTEGER_CST
2925 && TREE_CODE (rr_arg) == INTEGER_CST)
2926 l_const = lr_arg, r_const = rr_arg;
2927 else if (lr_inner == 0 || rr_inner == 0
2928 || ! operand_equal_p (lr_inner, rr_inner, 0))
/* Non-constant right-hand sides: note that by leaving L_CONST and
   R_CONST zero so the field-vs-field path below is taken.  */
2931 l_const = r_const = 0;
2933 /* If either comparison code is not correct for our logical operation,
2934 fail. However, we can convert a one-bit comparison against zero into
2935 the opposite comparison against that bit being set in the field. */
2937 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
2938 if (lcode != wanted_code)
2940 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
2946 if (rcode != wanted_code)
2948 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
2954 /* See if we can find a mode that contains both fields being compared on
2955 the left. If we can't, fail. Otherwise, update all constants and masks
2956 to be relative to a field of that size. */
2957 first_bit = MIN (ll_bitpos, rl_bitpos);
2958 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
2959 lnmode = get_best_mode (end_bit - first_bit, first_bit,
2960 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
2962 if (lnmode == VOIDmode)
2965 lnbitsize = GET_MODE_BITSIZE (lnmode);
2966 lnbitpos = first_bit & ~ (lnbitsize - 1);
2967 type = type_for_size (lnbitsize, 1);
2968 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
/* On big-endian targets bit positions count from the other end of the
   containing word, so mirror the offsets within the new field.  */
2970 #if BYTES_BIG_ENDIAN
2971 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
2972 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
/* Shift each mask to its field's position inside the wider field.  */
2975 ll_mask = const_binop (LSHIFT_EXPR, convert (type, ll_mask),
2976 size_int (xll_bitpos), 0);
2977 rl_mask = const_binop (LSHIFT_EXPR, convert (type, rl_mask),
2978 size_int (xrl_bitpos), 0);
2980 /* Make sure the constants are interpreted as unsigned, so we
2981 don't have sign bits outside the range of their type. */
2985 l_const = convert (unsigned_type (TREE_TYPE (l_const)), l_const);
2986 l_const = const_binop (LSHIFT_EXPR, convert (type, l_const),
2987 size_int (xll_bitpos), 0);
2991 r_const = convert (unsigned_type (TREE_TYPE (r_const)), r_const);
2992 r_const = const_binop (LSHIFT_EXPR, convert (type, r_const),
2993 size_int (xrl_bitpos), 0);
2996 /* If the right sides are not constant, do the same for it. Also,
2997 disallow this optimization if a size or signedness mismatch occurs
2998 between the left and right sides. */
3001 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
3002 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
3003 /* Make sure the two fields on the right
3004 correspond to the left without being swapped. */
3005 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
3008 first_bit = MIN (lr_bitpos, rr_bitpos);
3009 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
3010 rnmode = get_best_mode (end_bit - first_bit, first_bit,
3011 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
3013 if (rnmode == VOIDmode)
3016 rnbitsize = GET_MODE_BITSIZE (rnmode);
3017 rnbitpos = first_bit & ~ (rnbitsize - 1);
3018 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
3020 #if BYTES_BIG_ENDIAN
3021 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
3022 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
3025 lr_mask = const_binop (LSHIFT_EXPR, convert (type, lr_mask),
3026 size_int (xlr_bitpos), 0);
3027 rr_mask = const_binop (LSHIFT_EXPR, convert (type, rr_mask),
3028 size_int (xrr_bitpos), 0);
3030 /* Make a mask that corresponds to both fields being compared.
3031 Do this for both items being compared. If the masks agree,
3032 we can do this by masking both and comparing the masked
3034 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
3035 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
3036 if (operand_equal_p (ll_mask, lr_mask, 0) && lnbitsize == rnbitsize)
3038 lhs = make_bit_field_ref (ll_inner, type, lnbitsize, lnbitpos,
3039 ll_unsignedp || rl_unsignedp);
3040 rhs = make_bit_field_ref (lr_inner, type, rnbitsize, rnbitpos,
3041 lr_unsignedp || rr_unsignedp);
/* Skip the AND when the mask covers the entire field.  */
3042 if (! all_ones_mask_p (ll_mask, lnbitsize))
3044 lhs = build (BIT_AND_EXPR, type, lhs, ll_mask);
3045 rhs = build (BIT_AND_EXPR, type, rhs, ll_mask);
3047 return build (wanted_code, truth_type, lhs, rhs);
3050 /* There is still another way we can do something: If both pairs of
3051 fields being compared are adjacent, we may be able to make a wider
3052 field containing them both. */
3053 if ((ll_bitsize + ll_bitpos == rl_bitpos
3054 && lr_bitsize + lr_bitpos == rr_bitpos)
3055 || (ll_bitpos == rl_bitpos + rl_bitsize
3056 && lr_bitpos == rr_bitpos + rr_bitsize))
3057 return build (wanted_code, truth_type,
3058 make_bit_field_ref (ll_inner, type,
3059 ll_bitsize + rl_bitsize,
3060 MIN (ll_bitpos, rl_bitpos),
3062 make_bit_field_ref (lr_inner, type,
3063 lr_bitsize + rr_bitsize,
3064 MIN (lr_bitpos, rr_bitpos),
3070 /* Handle the case of comparisons with constants. If there is something in
3071 common between the masks, those bits of the constants must be the same.
3072 If not, the condition is always false. Test for this to avoid generating
3073 incorrect code below. */
3074 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
3075 if (! integer_zerop (result)
3076 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
3077 const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
3079 if (wanted_code == NE_EXPR)
3081 warning ("`or' of unmatched not-equal tests is always 1");
3082 return convert (truth_type, integer_one_node);
3086 warning ("`and' of mutually exclusive equal-tests is always zero");
3087 return convert (truth_type, integer_zero_node);
3091 /* Construct the expression we will return. First get the component
3092 reference we will make. Unless the mask is all ones the width of
3093 that field, perform the mask operation. Then compare with the
3095 result = make_bit_field_ref (ll_inner, type, lnbitsize, lnbitpos,
3096 ll_unsignedp || rl_unsignedp);
3098 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
3099 if (! all_ones_mask_p (ll_mask, lnbitsize))
3100 result = build (BIT_AND_EXPR, type, result, ll_mask);
/* Compare the merged field against the OR of the two shifted
   constants: (FIELD & MASK) wanted_code (L_CONST | R_CONST).  */
3102 return build (wanted_code, truth_type, result,
3103 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
3106 /* If T contains a COMPOUND_EXPR which was inserted merely to evaluate
3107 S, a SAVE_EXPR, return the expression actually being evaluated. Note
3108 that we may sometimes modify the tree. */
3111 strip_compound_expr (t, s)
/* T is the tree to walk; S is the SAVE_EXPR whose wrapping
   COMPOUND_EXPR (a (void) S conversion on operand 0) is removed.
   Operands of T are rewritten in place via the recursive calls.  */
3115 tree type = TREE_TYPE (t);
3116 enum tree_code code = TREE_CODE (t);
3118 /* See if this is the COMPOUND_EXPR we want to eliminate. */
3119 if (code == COMPOUND_EXPR && TREE_CODE (TREE_OPERAND (t, 0)) == CONVERT_EXPR
3120 && TREE_OPERAND (TREE_OPERAND (t, 0), 0) == s)
3121 return TREE_OPERAND (t, 1);
3123 /* See if this is a COND_EXPR or a simple arithmetic operator. We
3124 don't bother handling any other types. */
3125 else if (code == COND_EXPR)
3127 TREE_OPERAND (t, 0) = strip_compound_expr (TREE_OPERAND (t, 0), s);
3128 TREE_OPERAND (t, 1) = strip_compound_expr (TREE_OPERAND (t, 1), s);
3129 TREE_OPERAND (t, 2) = strip_compound_expr (TREE_OPERAND (t, 2), s);
/* Class '1' = unary operator: recurse into the single operand.  */
3131 else if (TREE_CODE_CLASS (code) == '1')
3132 TREE_OPERAND (t, 0) = strip_compound_expr (TREE_OPERAND (t, 0), s);
/* Class '<' = comparison, class '2' = binary operator: recurse into
   both operands.  */
3133 else if (TREE_CODE_CLASS (code) == '<'
3134 || TREE_CODE_CLASS (code) == '2')
3136 TREE_OPERAND (t, 0) = strip_compound_expr (TREE_OPERAND (t, 0), s);
3137 TREE_OPERAND (t, 1) = strip_compound_expr (TREE_OPERAND (t, 1), s);
3143 /* Perform constant folding and related simplification of EXPR.
3144 The related simplifications include x*1 => x, x*0 => 0, etc.,
3145 and application of the associative law.
3146 NOP_EXPR conversions may be removed freely (as long as we
3147 are careful not to change the C type of the overall expression)
3148 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
3149 but we can constant-fold them if they have constant operands. */
3155 register tree t = expr;
3156 tree t1 = NULL_TREE;
3158 tree type = TREE_TYPE (expr);
3159 register tree arg0, arg1;
3160 register enum tree_code code = TREE_CODE (t);
3164 /* WINS will be nonzero when the switch is done
3165 if all operands are constant. */
3169 /* Don't try to process an RTL_EXPR since its operands aren't trees. */
3170 if (code == RTL_EXPR)
3173 /* Return right away if already constant. */
3174 if (TREE_CONSTANT (t))
3176 if (code == CONST_DECL)
3177 return DECL_INITIAL (t);
3181 kind = TREE_CODE_CLASS (code);
3182 if (code == NOP_EXPR || code == FLOAT_EXPR || code == CONVERT_EXPR)
3186 /* Special case for conversion ops that can have fixed point args. */
3187 arg0 = TREE_OPERAND (t, 0);
3189 /* Don't use STRIP_NOPS, because signedness of argument type matters. */
3191 STRIP_TYPE_NOPS (arg0);
3193 if (arg0 != 0 && TREE_CODE (arg0) == COMPLEX_CST)
3194 subop = TREE_REALPART (arg0);
3198 if (subop != 0 && TREE_CODE (subop) != INTEGER_CST
3199 #if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
3200 && TREE_CODE (subop) != REAL_CST
3201 #endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
3203 /* Note that TREE_CONSTANT isn't enough:
3204 static var addresses are constant but we can't
3205 do arithmetic on them. */
3208 else if (kind == 'e' || kind == '<'
3209 || kind == '1' || kind == '2' || kind == 'r')
3211 register int len = tree_code_length[(int) code];
3213 for (i = 0; i < len; i++)
3215 tree op = TREE_OPERAND (t, i);
3219 continue; /* Valid for CALL_EXPR, at least. */
3221 if (kind == '<' || code == RSHIFT_EXPR)
3223 /* Signedness matters here. Perhaps we can refine this
3225 STRIP_TYPE_NOPS (op);
3229 /* Strip any conversions that don't change the mode. */
3233 if (TREE_CODE (op) == COMPLEX_CST)
3234 subop = TREE_REALPART (op);
3238 if (TREE_CODE (subop) != INTEGER_CST
3239 #if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
3240 && TREE_CODE (subop) != REAL_CST
3241 #endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
3243 /* Note that TREE_CONSTANT isn't enough:
3244 static var addresses are constant but we can't
3245 do arithmetic on them. */
3255 /* If this is a commutative operation, and ARG0 is a constant, move it
3256 to ARG1 to reduce the number of tests below. */
3257 if ((code == PLUS_EXPR || code == MULT_EXPR || code == MIN_EXPR
3258 || code == MAX_EXPR || code == BIT_IOR_EXPR || code == BIT_XOR_EXPR
3259 || code == BIT_AND_EXPR)
3260 && (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST))
3262 tem = arg0; arg0 = arg1; arg1 = tem;
3264 tem = TREE_OPERAND (t, 0); TREE_OPERAND (t, 0) = TREE_OPERAND (t, 1);
3265 TREE_OPERAND (t, 1) = tem;
3268 /* Now WINS is set as described above,
3269 ARG0 is the first operand of EXPR,
3270 and ARG1 is the second operand (if it has more than one operand).
3272 First check for cases where an arithmetic operation is applied to a
3273 compound, conditional, or comparison operation. Push the arithmetic
3274 operation inside the compound or conditional to see if any folding
3275 can then be done. Convert comparison to conditional for this purpose.
3276 The also optimizes non-constant cases that used to be done in
3279 Before we do that, see if this is a BIT_AND_EXPR or a BIT_OR_EXPR,
3280 one of the operands is a comparison and the other is a comparison, a
3281 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
3282 code below would make the expression more complex. Change it to a
3283 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
3284 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
3286 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
3287 || code == EQ_EXPR || code == NE_EXPR)
3288 && ((truth_value_p (TREE_CODE (arg0))
3289 && (truth_value_p (TREE_CODE (arg1))
3290 || (TREE_CODE (arg1) == BIT_AND_EXPR
3291 && integer_onep (TREE_OPERAND (arg1, 1)))))
3292 || (truth_value_p (TREE_CODE (arg1))
3293 && (truth_value_p (TREE_CODE (arg0))
3294 || (TREE_CODE (arg0) == BIT_AND_EXPR
3295 && integer_onep (TREE_OPERAND (arg0, 1)))))))
3297 t = fold (build (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
3298 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
3302 if (code == EQ_EXPR)
3303 t = invert_truthvalue (t);
3308 if (TREE_CODE_CLASS (code) == '1')
3310 if (TREE_CODE (arg0) == COMPOUND_EXPR)
3311 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
3312 fold (build1 (code, type, TREE_OPERAND (arg0, 1))));
3313 else if (TREE_CODE (arg0) == COND_EXPR)
3315 t = fold (build (COND_EXPR, type, TREE_OPERAND (arg0, 0),
3316 fold (build1 (code, type, TREE_OPERAND (arg0, 1))),
3317 fold (build1 (code, type, TREE_OPERAND (arg0, 2)))));
3319 /* If this was a conversion, and all we did was to move into
3320 inside the COND_EXPR, bring it back out. Then return so we
3321 don't get into an infinite recursion loop taking the conversion
3322 out and then back in. */
3324 if ((code == NOP_EXPR || code == CONVERT_EXPR
3325 || code == NON_LVALUE_EXPR)
3326 && TREE_CODE (t) == COND_EXPR
3327 && TREE_CODE (TREE_OPERAND (t, 1)) == code
3328 && TREE_CODE (TREE_OPERAND (t, 2)) == code
3329 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0))
3330 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 2), 0))))
3331 t = build1 (code, type,
3333 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0)),
3334 TREE_OPERAND (t, 0),
3335 TREE_OPERAND (TREE_OPERAND (t, 1), 0),
3336 TREE_OPERAND (TREE_OPERAND (t, 2), 0)));
3339 else if (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<')
3340 return fold (build (COND_EXPR, type, arg0,
3341 fold (build1 (code, type, integer_one_node)),
3342 fold (build1 (code, type, integer_zero_node))));
3344 else if (TREE_CODE_CLASS (code) == '2'
3345 || TREE_CODE_CLASS (code) == '<')
3347 if (TREE_CODE (arg1) == COMPOUND_EXPR)
3348 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
3349 fold (build (code, type,
3350 arg0, TREE_OPERAND (arg1, 1))));
3351 else if (TREE_CODE (arg1) == COND_EXPR
3352 || TREE_CODE_CLASS (TREE_CODE (arg1)) == '<')
3354 tree test, true_value, false_value;
3356 if (TREE_CODE (arg1) == COND_EXPR)
3358 test = TREE_OPERAND (arg1, 0);
3359 true_value = TREE_OPERAND (arg1, 1);
3360 false_value = TREE_OPERAND (arg1, 2);
3365 true_value = integer_one_node;
3366 false_value = integer_zero_node;
3369 /* If ARG0 is complex we want to make sure we only evaluate
3370 it once. Though this is only required if it is volatile, it
3371 might be more efficient even if it is not. However, if we
3372 succeed in folding one part to a constant, we do not need
3373 to make this SAVE_EXPR. Since we do this optimization
3374 primarily to see if we do end up with constant and this
3375 SAVE_EXPR interfers with later optimizations, suppressing
3376 it when we can is important. */
3378 if (TREE_CODE (arg0) != SAVE_EXPR
3379 && ((TREE_CODE (arg0) != VAR_DECL
3380 && TREE_CODE (arg0) != PARM_DECL)
3381 || TREE_SIDE_EFFECTS (arg0)))
3383 tree lhs = fold (build (code, type, arg0, true_value));
3384 tree rhs = fold (build (code, type, arg0, false_value));
3386 if (TREE_CONSTANT (lhs) || TREE_CONSTANT (rhs))
3387 return fold (build (COND_EXPR, type, test, lhs, rhs));
3389 arg0 = save_expr (arg0);
3392 test = fold (build (COND_EXPR, type, test,
3393 fold (build (code, type, arg0, true_value)),
3394 fold (build (code, type, arg0, false_value))));
3395 if (TREE_CODE (arg0) == SAVE_EXPR)
3396 return build (COMPOUND_EXPR, type,
3397 convert (void_type_node, arg0),
3398 strip_compound_expr (test, arg0));
3400 return convert (type, test);
3403 else if (TREE_CODE (arg0) == COMPOUND_EXPR)
3404 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
3405 fold (build (code, type, TREE_OPERAND (arg0, 1), arg1)));
3406 else if (TREE_CODE (arg0) == COND_EXPR
3407 || TREE_CODE_CLASS (TREE_CODE (arg0)) == '<')
3409 tree test, true_value, false_value;
3411 if (TREE_CODE (arg0) == COND_EXPR)
3413 test = TREE_OPERAND (arg0, 0);
3414 true_value = TREE_OPERAND (arg0, 1);
3415 false_value = TREE_OPERAND (arg0, 2);
3420 true_value = integer_one_node;
3421 false_value = integer_zero_node;
3424 if (TREE_CODE (arg1) != SAVE_EXPR
3425 && ((TREE_CODE (arg1) != VAR_DECL
3426 && TREE_CODE (arg1) != PARM_DECL)
3427 || TREE_SIDE_EFFECTS (arg1)))
3429 tree lhs = fold (build (code, type, true_value, arg1));
3430 tree rhs = fold (build (code, type, false_value, arg1));
3432 if (TREE_CONSTANT (lhs) || TREE_CONSTANT (rhs)
3433 || TREE_CONSTANT (arg1))
3434 return fold (build (COND_EXPR, type, test, lhs, rhs));
3436 arg1 = save_expr (arg1);
3439 test = fold (build (COND_EXPR, type, test,
3440 fold (build (code, type, true_value, arg1)),
3441 fold (build (code, type, false_value, arg1))));
3442 if (TREE_CODE (arg1) == SAVE_EXPR)
3443 return build (COMPOUND_EXPR, type,
3444 convert (void_type_node, arg1),
3445 strip_compound_expr (test, arg1));
3447 return convert (type, test);
3450 else if (TREE_CODE_CLASS (code) == '<'
3451 && TREE_CODE (arg0) == COMPOUND_EXPR)
3452 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
3453 fold (build (code, type, TREE_OPERAND (arg0, 1), arg1)));
3454 else if (TREE_CODE_CLASS (code) == '<'
3455 && TREE_CODE (arg1) == COMPOUND_EXPR)
3456 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
3457 fold (build (code, type, arg0, TREE_OPERAND (arg1, 1))));
3469 return fold (DECL_INITIAL (t));
3474 case FIX_TRUNC_EXPR:
3475 /* Other kinds of FIX are not handled properly by fold_convert. */
3477 /* In addition to the cases of two conversions in a row
3478 handled below, if we are converting something to its own
3479 type via an object of identical or wider precision, neither
3480 conversion is needed. */
3481 if ((TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
3482 || TREE_CODE (TREE_OPERAND (t, 0)) == CONVERT_EXPR)
3483 && TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0)) == TREE_TYPE (t)
3484 && ((INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 0)))
3485 && INTEGRAL_TYPE_P (TREE_TYPE (t)))
3486 || (FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 0)))
3487 && FLOAT_TYPE_P (TREE_TYPE (t))))
3488 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (t, 0)))
3489 >= TYPE_PRECISION (TREE_TYPE (t))))
3490 return TREE_OPERAND (TREE_OPERAND (t, 0), 0);
3492 /* Two conversions in a row are not needed unless:
3493 - the intermediate type is narrower than both initial and final, or
3494 - the intermediate type and innermost type differ in signedness,
3495 and the outermost type is wider than the intermediate, or
3496 - the initial type is a pointer type and the precisions of the
3497 intermediate and final types differ, or
3498 - the final type is a pointer type and the precisions of the
3499 initial and intermediate types differ. */
3500 if ((TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
3501 || TREE_CODE (TREE_OPERAND (t, 0)) == CONVERT_EXPR)
3502 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (t, 0)))
3503 > TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0)))
3505 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (t, 0)))
3506 > TYPE_PRECISION (TREE_TYPE (t)))
3507 && ! ((TREE_CODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0)))
3509 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (t, 0)))
3511 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (t, 0)))
3512 != TREE_UNSIGNED (TREE_OPERAND (TREE_OPERAND (t, 0), 0)))
3513 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (t, 0)))
3514 < TYPE_PRECISION (TREE_TYPE (t))))
3515 && ((TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (t, 0)))
3516 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (t, 0)))
3517 > TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0)))))
3519 (TREE_UNSIGNED (TREE_TYPE (t))
3520 && (TYPE_PRECISION (TREE_TYPE (t))
3521 > TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (t, 0))))))
3522 && ! ((TREE_CODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0)))
3524 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (t, 0)))
3525 != TYPE_PRECISION (TREE_TYPE (t))))
3526 && ! (TREE_CODE (TREE_TYPE (t)) == POINTER_TYPE
3527 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0)))
3528 != TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (t, 0))))))
3529 return convert (TREE_TYPE (t), TREE_OPERAND (TREE_OPERAND (t, 0), 0));
3531 if (TREE_CODE (TREE_OPERAND (t, 0)) == MODIFY_EXPR
3532 && TREE_CONSTANT (TREE_OPERAND (TREE_OPERAND (t, 0), 1))
3533 /* Detect assigning a bitfield. */
3534 && !(TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 0)) == COMPONENT_REF
3535 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t, 0), 0), 1))))
3537 /* Don't leave an assignment inside a conversion
3538 unless assigning a bitfield. */
3539 tree prev = TREE_OPERAND (t, 0);
3540 TREE_OPERAND (t, 0) = TREE_OPERAND (prev, 1);
3541 /* First do the assignment, then return converted constant. */
3542 t = build (COMPOUND_EXPR, TREE_TYPE (t), prev, fold (t));
3548 TREE_CONSTANT (t) = TREE_CONSTANT (arg0);
3551 return fold_convert (t, arg0);
3553 #if 0 /* This loses on &"foo"[0]. */
3558 /* Fold an expression like: "foo"[2] */
3559 if (TREE_CODE (arg0) == STRING_CST
3560 && TREE_CODE (arg1) == INTEGER_CST
3561 && !TREE_INT_CST_HIGH (arg1)
3562 && (i = TREE_INT_CST_LOW (arg1)) < TREE_STRING_LENGTH (arg0))
3564 t = build_int_2 (TREE_STRING_POINTER (arg0)[i], 0);
3565 TREE_TYPE (t) = TREE_TYPE (TREE_TYPE (arg0));
3566 force_fit_type (t, 0);
3573 TREE_CONSTANT (t) = wins;
3579 if (TREE_CODE (arg0) == INTEGER_CST)
3581 HOST_WIDE_INT low, high;
3582 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
3583 TREE_INT_CST_HIGH (arg0),
3585 t = build_int_2 (low, high);
3586 TREE_TYPE (t) = type;
3588 = (TREE_OVERFLOW (arg0)
3589 | force_fit_type (t, overflow));
3590 TREE_CONSTANT_OVERFLOW (t)
3591 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg0);
3593 else if (TREE_CODE (arg0) == REAL_CST)
3594 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
3595 TREE_TYPE (t) = type;
3597 else if (TREE_CODE (arg0) == NEGATE_EXPR)
3598 return TREE_OPERAND (arg0, 0);
3600 /* Convert - (a - b) to (b - a) for non-floating-point. */
3601 else if (TREE_CODE (arg0) == MINUS_EXPR && ! FLOAT_TYPE_P (type))
3602 return build (MINUS_EXPR, type, TREE_OPERAND (arg0, 1),
3603 TREE_OPERAND (arg0, 0));
3610 if (TREE_CODE (arg0) == INTEGER_CST)
3612 if (! TREE_UNSIGNED (type)
3613 && TREE_INT_CST_HIGH (arg0) < 0)
3615 HOST_WIDE_INT low, high;
3616 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
3617 TREE_INT_CST_HIGH (arg0),
3619 t = build_int_2 (low, high);
3620 TREE_TYPE (t) = type;
3622 = (TREE_OVERFLOW (arg0)
3623 | force_fit_type (t, overflow));
3624 TREE_CONSTANT_OVERFLOW (t)
3625 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg0);
3628 else if (TREE_CODE (arg0) == REAL_CST)
3630 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
3631 t = build_real (type,
3632 REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
3634 TREE_TYPE (t) = type;
3636 else if (TREE_CODE (arg0) == ABS_EXPR || TREE_CODE (arg0) == NEGATE_EXPR)
3637 return build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0));
3641 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
3643 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
3644 return build (COMPLEX_EXPR, TREE_TYPE (arg0),
3645 TREE_OPERAND (arg0, 0),
3646 fold (build1 (NEGATE_EXPR,
3647 TREE_TYPE (TREE_TYPE (arg0)),
3648 TREE_OPERAND (arg0, 1))));
3649 else if (TREE_CODE (arg0) == COMPLEX_CST)
3650 return build_complex (TREE_OPERAND (arg0, 0),
3651 fold (build1 (NEGATE_EXPR,
3652 TREE_TYPE (TREE_TYPE (arg0)),
3653 TREE_OPERAND (arg0, 1))));
3654 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
3655 return fold (build (TREE_CODE (arg0), type,
3656 fold (build1 (CONJ_EXPR, type,
3657 TREE_OPERAND (arg0, 0))),
3658 fold (build1 (CONJ_EXPR,
3659 type, TREE_OPERAND (arg0, 1)))));
3660 else if (TREE_CODE (arg0) == CONJ_EXPR)
3661 return TREE_OPERAND (arg0, 0);
3667 if (TREE_CODE (arg0) == INTEGER_CST)
3668 t = build_int_2 (~ TREE_INT_CST_LOW (arg0),
3669 ~ TREE_INT_CST_HIGH (arg0));
3670 TREE_TYPE (t) = type;
3671 force_fit_type (t, 0);
3672 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg0);
3673 TREE_CONSTANT_OVERFLOW (t) = TREE_CONSTANT_OVERFLOW (arg0);
3675 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
3676 return TREE_OPERAND (arg0, 0);
3680 /* A + (-B) -> A - B */
3681 if (TREE_CODE (arg1) == NEGATE_EXPR)
3682 return fold (build (MINUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
3683 else if (! FLOAT_TYPE_P (type))
3685 if (integer_zerop (arg1))
3686 return non_lvalue (convert (type, arg0));
3688 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
3689 with a constant, and the two constants have no bits in common,
3690 we should treat this as a BIT_IOR_EXPR since this may produce more
3692 if (TREE_CODE (arg0) == BIT_AND_EXPR
3693 && TREE_CODE (arg1) == BIT_AND_EXPR
3694 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
3695 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
3696 && integer_zerop (const_binop (BIT_AND_EXPR,
3697 TREE_OPERAND (arg0, 1),
3698 TREE_OPERAND (arg1, 1), 0)))
3700 code = BIT_IOR_EXPR;
3704 /* (A * C) + (B * C) -> (A+B) * C. Since we are most concerned
3705 about the case where C is a constant, just try one of the
3706 four possibilities. */
3708 if (TREE_CODE (arg0) == MULT_EXPR && TREE_CODE (arg1) == MULT_EXPR
3709 && operand_equal_p (TREE_OPERAND (arg0, 1),
3710 TREE_OPERAND (arg1, 1), 0))
3711 return fold (build (MULT_EXPR, type,
3712 fold (build (PLUS_EXPR, type,
3713 TREE_OPERAND (arg0, 0),
3714 TREE_OPERAND (arg1, 0))),
3715 TREE_OPERAND (arg0, 1)));
3717 /* In IEEE floating point, x+0 may not equal x. */
3718 else if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
3720 && real_zerop (arg1))
3721 return non_lvalue (convert (type, arg0));
3723 /* In most languages, can't associate operations on floats
3724 through parentheses. Rather than remember where the parentheses
3725 were, we don't associate floats at all. It shouldn't matter much.
3726 However, associating multiplications is only very slightly
3727 inaccurate, so do that if -ffast-math is specified. */
3728 if (FLOAT_TYPE_P (type)
3729 && ! (flag_fast_math && code == MULT_EXPR))
3732 /* The varsign == -1 cases happen only for addition and subtraction.
3733 It says that the arg that was split was really CON minus VAR.
3734 The rest of the code applies to all associative operations. */
3740 if (split_tree (arg0, code, &var, &con, &varsign))
3744 /* EXPR is (CON-VAR) +- ARG1. */
3745 /* If it is + and VAR==ARG1, return just CONST. */
3746 if (code == PLUS_EXPR && operand_equal_p (var, arg1, 0))
3747 return convert (TREE_TYPE (t), con);
3749 /* If ARG0 is a constant, don't change things around;
3750 instead keep all the constant computations together. */
3752 if (TREE_CONSTANT (arg0))
3755 /* Otherwise return (CON +- ARG1) - VAR. */
3756 TREE_SET_CODE (t, MINUS_EXPR);
3757 TREE_OPERAND (t, 1) = var;
3759 = fold (build (code, TREE_TYPE (t), con, arg1));
3763 /* EXPR is (VAR+CON) +- ARG1. */
3764 /* If it is - and VAR==ARG1, return just CONST. */
3765 if (code == MINUS_EXPR && operand_equal_p (var, arg1, 0))
3766 return convert (TREE_TYPE (t), con);
3768 /* If ARG0 is a constant, don't change things around;
3769 instead keep all the constant computations together. */
3771 if (TREE_CONSTANT (arg0))
3774 /* Otherwise return VAR +- (ARG1 +- CON). */
3775 TREE_OPERAND (t, 1) = tem
3776 = fold (build (code, TREE_TYPE (t), arg1, con));
3777 TREE_OPERAND (t, 0) = var;
3778 if (integer_zerop (tem)
3779 && (code == PLUS_EXPR || code == MINUS_EXPR))
3780 return convert (type, var);
3781 /* If we have x +/- (c - d) [c an explicit integer]
3782 change it to x -/+ (d - c) since if d is relocatable
3783 then the latter can be a single immediate insn
3784 and the former cannot. */
3785 if (TREE_CODE (tem) == MINUS_EXPR
3786 && TREE_CODE (TREE_OPERAND (tem, 0)) == INTEGER_CST)
3788 tree tem1 = TREE_OPERAND (tem, 1);
3789 TREE_OPERAND (tem, 1) = TREE_OPERAND (tem, 0);
3790 TREE_OPERAND (tem, 0) = tem1;
3792 (code == PLUS_EXPR ? MINUS_EXPR : PLUS_EXPR));
3798 if (split_tree (arg1, code, &var, &con, &varsign))
3800 if (TREE_CONSTANT (arg1))
3805 (code == PLUS_EXPR ? MINUS_EXPR : PLUS_EXPR));
3807 /* EXPR is ARG0 +- (CON +- VAR). */
3808 if (TREE_CODE (t) == MINUS_EXPR
3809 && operand_equal_p (var, arg0, 0))
3811 /* If VAR and ARG0 cancel, return just CON or -CON. */
3812 if (code == PLUS_EXPR)
3813 return convert (TREE_TYPE (t), con);
3814 return fold (build1 (NEGATE_EXPR, TREE_TYPE (t),
3815 convert (TREE_TYPE (t), con)));
3819 = fold (build (code, TREE_TYPE (t), arg0, con));
3820 TREE_OPERAND (t, 1) = var;
3821 if (integer_zerop (TREE_OPERAND (t, 0))
3822 && TREE_CODE (t) == PLUS_EXPR)
3823 return convert (TREE_TYPE (t), var);
3828 #if defined (REAL_IS_NOT_DOUBLE) && ! defined (REAL_ARITHMETIC)
3829 if (TREE_CODE (arg1) == REAL_CST)
3831 #endif /* REAL_IS_NOT_DOUBLE, and no REAL_ARITHMETIC */
3833 t1 = const_binop (code, arg0, arg1, 0);
3834 if (t1 != NULL_TREE)
3836 /* The return value should always have
3837 the same type as the original expression. */
3838 TREE_TYPE (t1) = TREE_TYPE (t);
3844 if (! FLOAT_TYPE_P (type))
3846 if (! wins && integer_zerop (arg0))
3847 return build1 (NEGATE_EXPR, type, arg1);
3848 if (integer_zerop (arg1))
3849 return non_lvalue (convert (type, arg0));
3851 /* (A * C) - (B * C) -> (A-B) * C. Since we are most concerned
3852 about the case where C is a constant, just try one of the
3853 four possibilities. */
3855 if (TREE_CODE (arg0) == MULT_EXPR && TREE_CODE (arg1) == MULT_EXPR
3856 && operand_equal_p (TREE_OPERAND (arg0, 1),
3857 TREE_OPERAND (arg1, 1), 0))
3858 return fold (build (MULT_EXPR, type,
3859 fold (build (MINUS_EXPR, type,
3860 TREE_OPERAND (arg0, 0),
3861 TREE_OPERAND (arg1, 0))),
3862 TREE_OPERAND (arg0, 1)));
3864 /* Convert A - (-B) to A + B. */
3865 else if (TREE_CODE (arg1) == NEGATE_EXPR)
3866 return fold (build (PLUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
3868 else if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
3871 /* Except with IEEE floating point, 0-x equals -x. */
3872 if (! wins && real_zerop (arg0))
3873 return build1 (NEGATE_EXPR, type, arg1);
3874 /* Except with IEEE floating point, x-0 equals x. */
3875 if (real_zerop (arg1))
3876 return non_lvalue (convert (type, arg0));
3879 /* Fold &x - &x. This can happen from &x.foo - &x.
3880 This is unsafe for certain floats even in non-IEEE formats.
3881 In IEEE, it is unsafe because it gives the wrong result for NaNs.
3882 Also note that operand_equal_p is always false if an operand
3885 if (operand_equal_p (arg0, arg1,
3886 FLOAT_TYPE_P (type) && ! flag_fast_math))
3887 return convert (type, integer_zero_node);
3892 if (! FLOAT_TYPE_P (type))
3894 if (integer_zerop (arg1))
3895 return omit_one_operand (type, arg1, arg0);
3896 if (integer_onep (arg1))
3897 return non_lvalue (convert (type, arg0));
3899 /* ((A / C) * C) is A if the division is an
3900 EXACT_DIV_EXPR. Since C is normally a constant,
3901 just check for one of the four possibilities. */
3903 if (TREE_CODE (arg0) == EXACT_DIV_EXPR
3904 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
3905 return TREE_OPERAND (arg0, 0);
3907 /* (a * (1 << b)) is (a << b) */
3908 if (TREE_CODE (arg1) == LSHIFT_EXPR
3909 && integer_onep (TREE_OPERAND (arg1, 0)))
3910 return fold (build (LSHIFT_EXPR, type, arg0,
3911 TREE_OPERAND (arg1, 1)));
3912 if (TREE_CODE (arg0) == LSHIFT_EXPR
3913 && integer_onep (TREE_OPERAND (arg0, 0)))
3914 return fold (build (LSHIFT_EXPR, type, arg1,
3915 TREE_OPERAND (arg0, 1)));
3919 /* x*0 is 0, except for IEEE floating point. */
3920 if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
3922 && real_zerop (arg1))
3923 return omit_one_operand (type, arg1, arg0);
3924 /* In IEEE floating point, x*1 is not equivalent to x for snans.
3925 However, ANSI says we can drop signals,
3926 so we can do this anyway. */
3927 if (real_onep (arg1))
3928 return non_lvalue (convert (type, arg0));
3930 if (! wins && real_twop (arg1))
3932 tree arg = save_expr (arg0);
3933 return build (PLUS_EXPR, type, arg, arg);
3940 if (integer_all_onesp (arg1))
3941 return omit_one_operand (type, arg1, arg0);
3942 if (integer_zerop (arg1))
3943 return non_lvalue (convert (type, arg0));
3944 t1 = distribute_bit_expr (code, type, arg0, arg1);
3945 if (t1 != NULL_TREE)
3948 /* (a << C1) | (a >> C2) if A is unsigned and C1+C2 is the size of A
3949 is a rotate of A by C1 bits. */
3951 if ((TREE_CODE (arg0) == RSHIFT_EXPR
3952 || TREE_CODE (arg0) == LSHIFT_EXPR)
3953 && (TREE_CODE (arg1) == RSHIFT_EXPR
3954 || TREE_CODE (arg1) == LSHIFT_EXPR)
3955 && TREE_CODE (arg0) != TREE_CODE (arg1)
3956 && operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1,0), 0)
3957 && TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0)))
3958 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
3959 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
3960 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
3961 && TREE_INT_CST_HIGH (TREE_OPERAND (arg1, 1)) == 0
3962 && ((TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
3963 + TREE_INT_CST_LOW (TREE_OPERAND (arg1, 1)))
3964 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
3965 return build (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
3966 TREE_CODE (arg0) == LSHIFT_EXPR
3967 ? TREE_OPERAND (arg0, 1) : TREE_OPERAND (arg1, 1));
3972 if (integer_zerop (arg1))
3973 return non_lvalue (convert (type, arg0));
3974 if (integer_all_onesp (arg1))
3975 return fold (build1 (BIT_NOT_EXPR, type, arg0));
3980 if (integer_all_onesp (arg1))
3981 return non_lvalue (convert (type, arg0));
3982 if (integer_zerop (arg1))
3983 return omit_one_operand (type, arg1, arg0);
3984 t1 = distribute_bit_expr (code, type, arg0, arg1);
3985 if (t1 != NULL_TREE)
3987 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
3988 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == NOP_EXPR
3989 && TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0))))
3991 int prec = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0)));
3992 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
3993 && (~TREE_INT_CST_LOW (arg0)
3994 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
3995 return build1 (NOP_EXPR, type, TREE_OPERAND (arg1, 0));
3997 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
3998 && TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
4000 int prec = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
4001 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
4002 && (~TREE_INT_CST_LOW (arg1)
4003 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
4004 return build1 (NOP_EXPR, type, TREE_OPERAND (arg0, 0));
4008 case BIT_ANDTC_EXPR:
4009 if (integer_all_onesp (arg0))
4010 return non_lvalue (convert (type, arg1));
4011 if (integer_zerop (arg0))
4012 return omit_one_operand (type, arg0, arg1);
4013 if (TREE_CODE (arg1) == INTEGER_CST)
4015 arg1 = fold (build1 (BIT_NOT_EXPR, type, arg1));
4016 code = BIT_AND_EXPR;
4022 /* In most cases, do nothing with a divide by zero. */
4023 #if !defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
4024 #ifndef REAL_INFINITY
4025 if (TREE_CODE (arg1) == REAL_CST && real_zerop (arg1))
4028 #endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
4030 /* In IEEE floating point, x/1 is not equivalent to x for snans.
4031 However, ANSI says we can drop signals, so we can do this anyway. */
4032 if (real_onep (arg1))
4033 return non_lvalue (convert (type, arg0));
4035 /* If ARG1 is a constant, we can convert this to a multiply by the
4036 reciprocal. This does not have the same rounding properties,
4037 so only do this if -ffast-math. We can actually always safely
4038 do it if ARG1 is a power of two, but it's hard to tell if it is
4039 or not in a portable manner. */
4040 if (TREE_CODE (arg1) == REAL_CST && flag_fast_math
4041 && 0 != (tem = const_binop (code, build_real (type, dconst1),
4043 return fold (build (MULT_EXPR, type, arg0, tem));
4047 case TRUNC_DIV_EXPR:
4048 case ROUND_DIV_EXPR:
4049 case FLOOR_DIV_EXPR:
4051 case EXACT_DIV_EXPR:
4052 if (integer_onep (arg1))
4053 return non_lvalue (convert (type, arg0));
4054 if (integer_zerop (arg1))
4057 /* If we have ((a / C1) / C2) where both divisions are the same type, try
4058 to simplify. First see if C1 * C2 overflows or not. */
4059 if (TREE_CODE (arg0) == code && TREE_CODE (arg1) == INTEGER_CST
4060 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
4062 tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 1), arg1, 0);
4064 /* If it overflows, the result is +/- 1 or zero, depending on
4065 the signs of the constants and remaining operand and on which
4066 division operation is being performed. */
4068 if (TREE_OVERFLOW (tem))
4070 /* 1 if C1 * C2 is negative (i.e., C1 and C2 have
4071 different signs). */
4072 int c_neg = ((tree_int_cst_sgn (arg1) < 0)
4073 == (tree_int_cst_sgn (TREE_OPERAND (arg0, 1)) < 0));
4077 case EXACT_DIV_EXPR:
4078 /* If this overflowed, it couldn't have been exact. */
4081 case TRUNC_DIV_EXPR:
4082 return omit_one_operand (type, integer_zero_node,
4083 TREE_OPERAND (arg0, 0));
4085 case FLOOR_DIV_EXPR:
4086 /* -1 or zero, depending on signs of remaining
4087 operand and constants. */
4088 tem = build (c_neg ? GE_EXPR : LE_EXPR, integer_type_node,
4089 TREE_OPERAND (arg0, 0),
4090 convert (type, integer_zero_node));
4091 return fold (build (NEGATE_EXPR, type,
4092 convert (type, fold (tem))));
4095 /* Zero or 1, depending on signs of remaining
4096 operand and constants. */
4097 tem = build (c_neg ? LE_EXPR : GE_EXPR, integer_type_node,
4098 TREE_OPERAND (arg0, 0),
4099 convert (type, integer_zero_node));
4101 return convert (type, fold (tem));
4105 /* If no overflow, divide by C1*C2. */
4106 return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));
4109 /* Look for ((a * C1) / C3) or (((a * C1) + C2) / C3),
4110 where C1 % C3 == 0 or C3 % C1 == 0. We can simplify these
4111 expressions, which often appear in the offsets or sizes of
4112 objects with a varying size. Only deal with positive divisors
4113 and multiplicands. If C2 is negative, we must have C2 % C3 == 0.
4115 Look for NOPs and SAVE_EXPRs inside. */
4117 if (TREE_CODE (arg1) == INTEGER_CST
4118 && tree_int_cst_sgn (arg1) >= 0)
4120 int have_save_expr = 0;
4121 tree c2 = integer_zero_node;
4124 if (TREE_CODE (xarg0) == SAVE_EXPR)
4125 have_save_expr = 1, xarg0 = TREE_OPERAND (xarg0, 0);
4129 if (TREE_CODE (xarg0) == PLUS_EXPR
4130 && TREE_CODE (TREE_OPERAND (xarg0, 1)) == INTEGER_CST)
4131 c2 = TREE_OPERAND (xarg0, 1), xarg0 = TREE_OPERAND (xarg0, 0);
4132 else if (TREE_CODE (xarg0) == MINUS_EXPR
4133 && TREE_CODE (TREE_OPERAND (xarg0, 1)) == INTEGER_CST
4134 /* If we are doing this computation unsigned, the negate
4136 && ! TREE_UNSIGNED (type))
4138 c2 = fold (build1 (NEGATE_EXPR, type, TREE_OPERAND (xarg0, 1)));
4139 xarg0 = TREE_OPERAND (xarg0, 0);
4142 if (TREE_CODE (xarg0) == SAVE_EXPR)
4143 have_save_expr = 1, xarg0 = TREE_OPERAND (xarg0, 0);
4147 if (TREE_CODE (xarg0) == MULT_EXPR
4148 && TREE_CODE (TREE_OPERAND (xarg0, 1)) == INTEGER_CST
4149 && tree_int_cst_sgn (TREE_OPERAND (xarg0, 1)) >= 0
4150 && (integer_zerop (const_binop (TRUNC_MOD_EXPR,
4151 TREE_OPERAND (xarg0, 1), arg1, 1))
4152 || integer_zerop (const_binop (TRUNC_MOD_EXPR, arg1,
4153 TREE_OPERAND (xarg0, 1), 1)))
4154 && (tree_int_cst_sgn (c2) >= 0
4155 || integer_zerop (const_binop (TRUNC_MOD_EXPR, c2,
4158 tree outer_div = integer_one_node;
4159 tree c1 = TREE_OPERAND (xarg0, 1);
4162 /* If C3 > C1, set them equal and do a divide by
4163 C3/C1 at the end of the operation. */
4164 if (tree_int_cst_lt (c1, c3))
4165 outer_div = const_binop (code, c3, c1, 0), c3 = c1;
4167 /* The result is A * (C1/C3) + (C2/C3). */
4168 t = fold (build (PLUS_EXPR, type,
4169 fold (build (MULT_EXPR, type,
4170 TREE_OPERAND (xarg0, 0),
4171 const_binop (code, c1, c3, 1))),
4172 const_binop (code, c2, c3, 1)));
4174 if (! integer_onep (outer_div))
4175 t = fold (build (code, type, t, convert (type, outer_div)));
4187 case FLOOR_MOD_EXPR:
4188 case ROUND_MOD_EXPR:
4189 case TRUNC_MOD_EXPR:
4190 if (integer_onep (arg1))
4191 return omit_one_operand (type, integer_zero_node, arg0);
4192 if (integer_zerop (arg1))
4195 /* Look for ((a * C1) % C3) or (((a * C1) + C2) % C3),
4196 where C1 % C3 == 0. Handle similarly to the division case,
4197 but don't bother with SAVE_EXPRs. */
4199 if (TREE_CODE (arg1) == INTEGER_CST
4200 && ! integer_zerop (arg1))
4202 tree c2 = integer_zero_node;
4205 if (TREE_CODE (xarg0) == PLUS_EXPR
4206 && TREE_CODE (TREE_OPERAND (xarg0, 1)) == INTEGER_CST)
4207 c2 = TREE_OPERAND (xarg0, 1), xarg0 = TREE_OPERAND (xarg0, 0);
4208 else if (TREE_CODE (xarg0) == MINUS_EXPR
4209 && TREE_CODE (TREE_OPERAND (xarg0, 1)) == INTEGER_CST
4210 && ! TREE_UNSIGNED (type))
4212 c2 = fold (build1 (NEGATE_EXPR, type, TREE_OPERAND (xarg0, 1)));
4213 xarg0 = TREE_OPERAND (xarg0, 0);
4218 if (TREE_CODE (xarg0) == MULT_EXPR
4219 && TREE_CODE (TREE_OPERAND (xarg0, 1)) == INTEGER_CST
4220 && integer_zerop (const_binop (TRUNC_MOD_EXPR,
4221 TREE_OPERAND (xarg0, 1),
4223 && tree_int_cst_sgn (c2) >= 0)
4224 /* The result is (C2%C3). */
4225 return omit_one_operand (type, const_binop (code, c2, arg1, 1),
4226 TREE_OPERAND (xarg0, 0));
4235 if (integer_zerop (arg1))
4236 return non_lvalue (convert (type, arg0));
4237 /* Since negative shift count is not well-defined,
4238 don't try to compute it in the compiler. */
4239 if (tree_int_cst_sgn (arg1) < 0)
4244 if (operand_equal_p (arg0, arg1, 0))
4246 if (INTEGRAL_TYPE_P (type)
4247 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), 1))
4248 return omit_one_operand (type, arg1, arg0);
4252 if (operand_equal_p (arg0, arg1, 0))
4254 if (INTEGRAL_TYPE_P (type)
4255 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), 1))
4256 return omit_one_operand (type, arg1, arg0);
4259 case TRUTH_NOT_EXPR:
4260 /* Note that the operand of this must be an int
4261 and its values must be 0 or 1.
4262 ("true" is a fixed value perhaps depending on the language,
4263 but we don't handle values other than 1 correctly yet.) */
4264 return invert_truthvalue (arg0);
4266 case TRUTH_ANDIF_EXPR:
4267 /* Note that the operands of this must be ints
4268 and their values must be 0 or 1.
4269 ("true" is a fixed value perhaps depending on the language.) */
4270 /* If first arg is constant zero, return it. */
4271 if (integer_zerop (arg0))
4273 case TRUTH_AND_EXPR:
4274 /* If either arg is constant true, drop it. */
4275 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
4276 return non_lvalue (arg1);
4277 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
4278 return non_lvalue (arg0);
4279 /* If second arg is constant zero, result is zero, but first arg
4280 must be evaluated. */
4281 if (integer_zerop (arg1))
4282 return omit_one_operand (type, arg1, arg0);
4285 /* We only do these simplifications if we are optimizing. */
4289 /* Check for things like (A || B) && (A || C). We can convert this
4290 to A || (B && C). Note that either operator can be any of the four
4291 truth and/or operations and the transformation will still be
4292 valid. Also note that we only care about order for the
4293 ANDIF and ORIF operators. */
4294 if (TREE_CODE (arg0) == TREE_CODE (arg1)
4295 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
4296 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
4297 || TREE_CODE (arg0) == TRUTH_AND_EXPR
4298 || TREE_CODE (arg0) == TRUTH_OR_EXPR))
4300 tree a00 = TREE_OPERAND (arg0, 0);
4301 tree a01 = TREE_OPERAND (arg0, 1);
4302 tree a10 = TREE_OPERAND (arg1, 0);
4303 tree a11 = TREE_OPERAND (arg1, 1);
4304 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
4305 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
4306 && (code == TRUTH_AND_EXPR
4307 || code == TRUTH_OR_EXPR));
4309 if (operand_equal_p (a00, a10, 0))
4310 return fold (build (TREE_CODE (arg0), type, a00,
4311 fold (build (code, type, a01, a11))));
4312 else if (commutative && operand_equal_p (a00, a11, 0))
4313 return fold (build (TREE_CODE (arg0), type, a00,
4314 fold (build (code, type, a01, a10))));
4315 else if (commutative && operand_equal_p (a01, a10, 0))
4316 return fold (build (TREE_CODE (arg0), type, a01,
4317 fold (build (code, type, a00, a11))));
4319 /* This case is tricky because we must either have commutative
4320 operators or else A10 must not have side-effects. */
4322 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
4323 && operand_equal_p (a01, a11, 0))
4324 return fold (build (TREE_CODE (arg0), type,
4325 fold (build (code, type, a00, a10)),
4329 /* Check for the possibility of merging component references. If our
4330 lhs is another similar operation, try to merge its rhs with our
4331 rhs. Then try to merge our lhs and rhs. */
4332 if (TREE_CODE (arg0) == code
4333 && 0 != (tem = fold_truthop (code, type,
4334 TREE_OPERAND (arg0, 1), arg1)))
4335 return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));
4337 if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
4342 case TRUTH_ORIF_EXPR:
4343 /* Note that the operands of this must be ints
4344 and their values must be 0 or true.
4345 ("true" is a fixed value perhaps depending on the language.) */
4346 /* If first arg is constant true, return it. */
4347 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
4350 /* If either arg is constant zero, drop it. */
4351 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
4352 return non_lvalue (arg1);
4353 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1))
4354 return non_lvalue (arg0);
4355 /* If second arg is constant true, result is true, but we must
4356 evaluate first arg. */
4357 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
4358 return omit_one_operand (type, arg1, arg0);
4361 case TRUTH_XOR_EXPR:
4362 /* If either arg is constant zero, drop it. */
4363 if (integer_zerop (arg0))
4364 return non_lvalue (arg1);
4365 if (integer_zerop (arg1))
4366 return non_lvalue (arg0);
4367 /* If either arg is constant true, this is a logical inversion. */
4368 if (integer_onep (arg0))
4369 return non_lvalue (invert_truthvalue (arg1));
4370 if (integer_onep (arg1))
4371 return non_lvalue (invert_truthvalue (arg0));
4380 /* If one arg is a constant integer, put it last. */
4381 if (TREE_CODE (arg0) == INTEGER_CST
4382 && TREE_CODE (arg1) != INTEGER_CST)
4384 TREE_OPERAND (t, 0) = arg1;
4385 TREE_OPERAND (t, 1) = arg0;
4386 arg0 = TREE_OPERAND (t, 0);
4387 arg1 = TREE_OPERAND (t, 1);
4388 code = swap_tree_comparison (code);
4389 TREE_SET_CODE (t, code);
4392 /* Convert foo++ == CONST into ++foo == CONST + INCR.
4393 First, see if one arg is constant; find the constant arg
4394 and the other one. */
4396 tree constop = 0, varop;
4399 if (TREE_CONSTANT (arg1))
4400 constoploc = &TREE_OPERAND (t, 1), constop = arg1, varop = arg0;
4401 if (TREE_CONSTANT (arg0))
4402 constoploc = &TREE_OPERAND (t, 0), constop = arg0, varop = arg1;
4404 if (constop && TREE_CODE (varop) == POSTINCREMENT_EXPR)
4406 /* This optimization is invalid for ordered comparisons
4407 if CONST+INCR overflows or if foo+incr might overflow.
4408 This optimization is invalid for floating point due to rounding.
4409 For pointer types we assume overflow doesn't happen. */
4410 if (TREE_CODE (TREE_TYPE (varop)) == POINTER_TYPE
4411 || (! FLOAT_TYPE_P (TREE_TYPE (varop))
4412 && (code == EQ_EXPR || code == NE_EXPR)))
4415 = fold (build (PLUS_EXPR, TREE_TYPE (varop),
4416 constop, TREE_OPERAND (varop, 1)));
4417 TREE_SET_CODE (varop, PREINCREMENT_EXPR);
4418 *constoploc = newconst;
4422 else if (constop && TREE_CODE (varop) == POSTDECREMENT_EXPR)
4424 if (TREE_CODE (TREE_TYPE (varop)) == POINTER_TYPE
4425 || (! FLOAT_TYPE_P (TREE_TYPE (varop))
4426 && (code == EQ_EXPR || code == NE_EXPR)))
4429 = fold (build (MINUS_EXPR, TREE_TYPE (varop),
4430 constop, TREE_OPERAND (varop, 1)));
4431 TREE_SET_CODE (varop, PREDECREMENT_EXPR);
4432 *constoploc = newconst;
4438 /* Change X >= CST to X > (CST - 1) if CST is positive. */
4439 if (TREE_CODE (arg1) == INTEGER_CST
4440 && TREE_CODE (arg0) != INTEGER_CST
4441 && tree_int_cst_sgn (arg1) > 0)
4443 switch (TREE_CODE (t))
4447 TREE_SET_CODE (t, code);
4448 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
4449 TREE_OPERAND (t, 1) = arg1;
4454 TREE_SET_CODE (t, code);
4455 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
4456 TREE_OPERAND (t, 1) = arg1;
4460 /* If this is an EQ or NE comparison with zero and ARG0 is
4461 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
4462 two operations, but the latter can be done in one less insn
4463 on machines that have only two-operand insns or on which a
4464 constant cannot be the first operand. */
4465 if (integer_zerop (arg1) && (code == EQ_EXPR || code == NE_EXPR)
4466 && TREE_CODE (arg0) == BIT_AND_EXPR)
4468 if (TREE_CODE (TREE_OPERAND (arg0, 0)) == LSHIFT_EXPR
4469 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 0), 0)))
4471 fold (build (code, type,
4472 build (BIT_AND_EXPR, TREE_TYPE (arg0),
4474 TREE_TYPE (TREE_OPERAND (arg0, 0)),
4475 TREE_OPERAND (arg0, 1),
4476 TREE_OPERAND (TREE_OPERAND (arg0, 0), 1)),
4477 convert (TREE_TYPE (arg0),
4480 else if (TREE_CODE (TREE_OPERAND (arg0, 1)) == LSHIFT_EXPR
4481 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 1), 0)))
4483 fold (build (code, type,
4484 build (BIT_AND_EXPR, TREE_TYPE (arg0),
4486 TREE_TYPE (TREE_OPERAND (arg0, 1)),
4487 TREE_OPERAND (arg0, 0),
4488 TREE_OPERAND (TREE_OPERAND (arg0, 1), 1)),
4489 convert (TREE_TYPE (arg0),
4494 /* If this is an NE or EQ comparison of zero against the result of a
4495 signed MOD operation whose second operand is a power of 2, make
4496 the MOD operation unsigned since it is simpler and equivalent. */
4497 if ((code == NE_EXPR || code == EQ_EXPR)
4498 && integer_zerop (arg1)
4499 && ! TREE_UNSIGNED (TREE_TYPE (arg0))
4500 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
4501 || TREE_CODE (arg0) == CEIL_MOD_EXPR
4502 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
4503 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
4504 && integer_pow2p (TREE_OPERAND (arg0, 1)))
4506 tree newtype = unsigned_type (TREE_TYPE (arg0));
4507 tree newmod = build (TREE_CODE (arg0), newtype,
4508 convert (newtype, TREE_OPERAND (arg0, 0)),
4509 convert (newtype, TREE_OPERAND (arg0, 1)));
4511 return build (code, type, newmod, convert (newtype, arg1));
4514 /* If this is an NE comparison of zero with an AND of one, remove the
4515 comparison since the AND will give the correct value. */
4516 if (code == NE_EXPR && integer_zerop (arg1)
4517 && TREE_CODE (arg0) == BIT_AND_EXPR
4518 && integer_onep (TREE_OPERAND (arg0, 1)))
4519 return convert (type, arg0);
4521 /* If we have (A & C) == C where C is a power of 2, convert this into
4522 (A & C) != 0. Similarly for NE_EXPR. */
4523 if ((code == EQ_EXPR || code == NE_EXPR)
4524 && TREE_CODE (arg0) == BIT_AND_EXPR
4525 && integer_pow2p (TREE_OPERAND (arg0, 1))
4526 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
4527 return build (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
4528 arg0, integer_zero_node);
4530 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
4531 and similarly for >= into !=. */
4532 if ((code == LT_EXPR || code == GE_EXPR)
4533 && TREE_UNSIGNED (TREE_TYPE (arg0))
4534 && TREE_CODE (arg1) == LSHIFT_EXPR
4535 && integer_onep (TREE_OPERAND (arg1, 0)))
4536 return build (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
4537 build (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
4538 TREE_OPERAND (arg1, 1)),
4539 convert (TREE_TYPE (arg0), integer_zero_node));
4541 else if ((code == LT_EXPR || code == GE_EXPR)
4542 && TREE_UNSIGNED (TREE_TYPE (arg0))
4543 && (TREE_CODE (arg1) == NOP_EXPR
4544 || TREE_CODE (arg1) == CONVERT_EXPR)
4545 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
4546 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
4548 build (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
4549 convert (TREE_TYPE (arg0),
4550 build (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
4551 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1))),
4552 convert (TREE_TYPE (arg0), integer_zero_node));
4554 /* Simplify comparison of something with itself. (For IEEE
4555 floating-point, we can only do some of these simplifications.) */
4556 if (operand_equal_p (arg0, arg1, 0))
4563 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
4565 t = build_int_2 (1, 0);
4566 TREE_TYPE (t) = type;
4570 TREE_SET_CODE (t, code);
4574 /* For NE, we can only do this simplification if integer. */
4575 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
4577 /* ... fall through ... */
4580 t = build_int_2 (0, 0);
4581 TREE_TYPE (t) = type;
4586 /* An unsigned comparison against 0 can be simplified. */
4587 if (integer_zerop (arg1)
4588 && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
4589 || TREE_CODE (TREE_TYPE (arg1)) == POINTER_TYPE)
4590 && TREE_UNSIGNED (TREE_TYPE (arg1)))
4592 switch (TREE_CODE (t))
4596 TREE_SET_CODE (t, NE_EXPR);
4600 TREE_SET_CODE (t, EQ_EXPR);
4603 return omit_one_operand (type,
4604 convert (type, integer_one_node),
4607 return omit_one_operand (type,
4608 convert (type, integer_zero_node),
4613 /* If we are comparing an expression that just has comparisons
4614 of two integer values, arithmetic expressions of those comparisons,
4615 and constants, we can simplify it. There are only three cases
4616 to check: the two values can either be equal, the first can be
4617 greater, or the second can be greater. Fold the expression for
4618 those three values. Since each value must be 0 or 1, we have
4619 eight possibilities, each of which corresponds to the constant 0
4620 or 1 or one of the six possible comparisons.
4622 This handles common cases like (a > b) == 0 but also handles
4623 expressions like ((x > y) - (y > x)) > 0, which supposedly
4624 occur in macroized code. */
4626 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
4628 tree cval1 = 0, cval2 = 0;
4631 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
4632 /* Don't handle degenerate cases here; they should already
4633 have been handled anyway. */
4634 && cval1 != 0 && cval2 != 0
4635 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
4636 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
4637 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
4638 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
4639 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
4641 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
4642 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
4644 /* We can't just pass T to eval_subst in case cval1 or cval2
4645 was the same as ARG1. */
4648 = fold (build (code, type,
4649 eval_subst (arg0, cval1, maxval, cval2, minval),
4652 = fold (build (code, type,
4653 eval_subst (arg0, cval1, maxval, cval2, maxval),
4656 = fold (build (code, type,
4657 eval_subst (arg0, cval1, minval, cval2, maxval),
4660 /* All three of these results should be 0 or 1. Confirm they
4661 are. Then use those values to select the proper code
4664 if ((integer_zerop (high_result)
4665 || integer_onep (high_result))
4666 && (integer_zerop (equal_result)
4667 || integer_onep (equal_result))
4668 && (integer_zerop (low_result)
4669 || integer_onep (low_result)))
4671 /* Make a 3-bit mask with the high-order bit being the
4672 value for `>', the next for '=', and the low for '<'. */
4673 switch ((integer_onep (high_result) * 4)
4674 + (integer_onep (equal_result) * 2)
4675 + integer_onep (low_result))
4679 return omit_one_operand (type, integer_zero_node, arg0);
4700 return omit_one_operand (type, integer_one_node, arg0);
4703 t = build (code, type, cval1, cval2);
4705 return save_expr (t);
4712 /* If this is a comparison of a field, we may be able to simplify it. */
4713 if ((TREE_CODE (arg0) == COMPONENT_REF
4714 || TREE_CODE (arg0) == BIT_FIELD_REF)
4715 && (code == EQ_EXPR || code == NE_EXPR)
4716 /* Handle the constant case even without -O
4717 to make sure the warnings are given. */
4718 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
4720 t1 = optimize_bit_field_compare (code, type, arg0, arg1);
4724 /* If this is a comparison of complex values and either or both
4725 sides are a COMPLEX_EXPR, it is best to split up the comparisons
4726 and join them with a TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR. This
4727 may prevent needless evaluations. */
4728 if ((code == EQ_EXPR || code == NE_EXPR)
4729 && TREE_CODE (TREE_TYPE (arg0)) == COMPLEX_TYPE
4730 && (TREE_CODE (arg0) == COMPLEX_EXPR
4731 || TREE_CODE (arg1) == COMPLEX_EXPR))
4733 tree subtype = TREE_TYPE (TREE_TYPE (arg0));
4734 tree real0 = fold (build1 (REALPART_EXPR, subtype, arg0));
4735 tree imag0 = fold (build1 (IMAGPART_EXPR, subtype, arg0));
4736 tree real1 = fold (build1 (REALPART_EXPR, subtype, arg1));
4737 tree imag1 = fold (build1 (IMAGPART_EXPR, subtype, arg1));
4739 return fold (build ((code == EQ_EXPR ? TRUTH_ANDIF_EXPR
4742 fold (build (code, type, real0, real1)),
4743 fold (build (code, type, imag0, imag1))));
4746 /* From here on, the only cases we handle are when the result is
4747 known to be a constant.
4749 To compute GT, swap the arguments and do LT.
4750 To compute GE, do LT and invert the result.
4751 To compute LE, swap the arguments, do LT and invert the result.
4752 To compute NE, do EQ and invert the result.
4754 Therefore, the code below must handle only EQ and LT. */
4756 if (code == LE_EXPR || code == GT_EXPR)
4758 tem = arg0, arg0 = arg1, arg1 = tem;
4759 code = swap_tree_comparison (code);
4762 /* Note that it is safe to invert for real values here because we
4763 will check below in the one case that it matters. */
4766 if (code == NE_EXPR || code == GE_EXPR)
4769 code = invert_tree_comparison (code);
4772 /* Compute a result for LT or EQ if args permit;
4773 otherwise return T. */
4774 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
4776 if (code == EQ_EXPR)
4777 t1 = build_int_2 ((TREE_INT_CST_LOW (arg0)
4778 == TREE_INT_CST_LOW (arg1))
4779 && (TREE_INT_CST_HIGH (arg0)
4780 == TREE_INT_CST_HIGH (arg1)),
4783 t1 = build_int_2 ((TREE_UNSIGNED (TREE_TYPE (arg0))
4784 ? INT_CST_LT_UNSIGNED (arg0, arg1)
4785 : INT_CST_LT (arg0, arg1)),
4789 /* Assume a nonexplicit constant cannot equal an explicit one,
4790 since such code would be undefined anyway.
4791 Exception: on sysvr4, using #pragma weak,
4792 a label can come out as 0. */
4793 else if (TREE_CODE (arg1) == INTEGER_CST
4794 && !integer_zerop (arg1)
4795 && TREE_CONSTANT (arg0)
4796 && TREE_CODE (arg0) == ADDR_EXPR
4798 t1 = build_int_2 (0, 0);
4800 /* Two real constants can be compared explicitly. */
4801 else if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
4803 /* If either operand is a NaN, the result is false with two
4804 exceptions: First, an NE_EXPR is true on NaNs, but that case
4805 is already handled correctly since we will be inverting the
4806 result for NE_EXPR. Second, if we had inverted a LE_EXPR
4807 or a GE_EXPR into a LT_EXPR, we must return true so that it
4808 will be inverted into false. */
4810 if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
4811 || REAL_VALUE_ISNAN (TREE_REAL_CST (arg1)))
4812 t1 = build_int_2 (invert && code == LT_EXPR, 0);
4814 else if (code == EQ_EXPR)
4815 t1 = build_int_2 (REAL_VALUES_EQUAL (TREE_REAL_CST (arg0),
4816 TREE_REAL_CST (arg1)),
4819 t1 = build_int_2 (REAL_VALUES_LESS (TREE_REAL_CST (arg0),
4820 TREE_REAL_CST (arg1)),
4824 if (t1 == NULL_TREE)
4828 TREE_INT_CST_LOW (t1) ^= 1;
4830 TREE_TYPE (t1) = type;
4834 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
4835 so all simple results must be passed through pedantic_non_lvalue. */
4836 if (TREE_CODE (arg0) == INTEGER_CST)
4837 return pedantic_non_lvalue
4838 (TREE_OPERAND (t, (integer_zerop (arg0) ? 2 : 1)));
4839 else if (operand_equal_p (arg1, TREE_OPERAND (expr, 2), 0))
4840 return pedantic_non_lvalue (omit_one_operand (type, arg1, arg0));
4842 /* If the second operand is zero, invert the comparison and swap
4843 the second and third operands. Likewise if the second operand
4844 is constant and the third is not or if the third operand is
4845 equivalent to the first operand of the comparison. */
4847 if (integer_zerop (arg1)
4848 || (TREE_CONSTANT (arg1) && ! TREE_CONSTANT (TREE_OPERAND (t, 2)))
4849 || (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
4850 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
4851 TREE_OPERAND (t, 2),
4852 TREE_OPERAND (arg0, 1))))
4854 /* See if this can be inverted. If it can't, possibly because
4855 it was a floating-point inequality comparison, don't do
4857 tem = invert_truthvalue (arg0);
4859 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
4861 arg0 = TREE_OPERAND (t, 0) = tem;
4862 TREE_OPERAND (t, 1) = TREE_OPERAND (t, 2);
4863 TREE_OPERAND (t, 2) = arg1;
4864 arg1 = TREE_OPERAND (t, 1);
4868 /* If we have A op B ? A : C, we may be able to convert this to a
4869 simpler expression, depending on the operation and the values
4870 of B and C. IEEE floating point prevents this though,
4871 because A or B might be -0.0 or a NaN. */
4873 if (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
4874 && (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
4875 || ! FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 0)))
4877 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
4878 arg1, TREE_OPERAND (arg0, 1)))
4880 tree arg2 = TREE_OPERAND (t, 2);
4881 enum tree_code comp_code = TREE_CODE (arg0);
4883 /* If we have A op 0 ? A : -A, this is A, -A, abs (A), or abs (-A),
4884 depending on the comparison operation. */
4885 if (integer_zerop (TREE_OPERAND (arg0, 1))
4886 && TREE_CODE (arg2) == NEGATE_EXPR
4887 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4891 return pedantic_non_lvalue
4892 (fold (build1 (NEGATE_EXPR, type, arg1)));
4894 return pedantic_non_lvalue (convert (type, arg1));
4897 return pedantic_non_lvalue
4898 (fold (build1 (ABS_EXPR, type, arg1)));
4901 return pedantic_non_lvalue
4902 (fold (build1 (NEGATE_EXPR, type,
4903 fold (build1 (ABS_EXPR, type, arg1)))));
4906 /* If this is A != 0 ? A : 0, this is simply A. For ==, it is
4909 if (integer_zerop (TREE_OPERAND (arg0, 1)) && integer_zerop (arg2))
4911 if (comp_code == NE_EXPR)
4912 return pedantic_non_lvalue (convert (type, arg1));
4913 else if (comp_code == EQ_EXPR)
4914 return pedantic_non_lvalue (convert (type, integer_zero_node));
4917 /* If this is A op B ? A : B, this is either A, B, min (A, B),
4918 or max (A, B), depending on the operation. */
4920 if (operand_equal_for_comparison_p (TREE_OPERAND (arg0, 1),
4921 arg2, TREE_OPERAND (arg0, 0)))
4925 return pedantic_non_lvalue (convert (type, arg2));
4927 return pedantic_non_lvalue (convert (type, arg1));
4930 return pedantic_non_lvalue
4931 (fold (build (MIN_EXPR, type, arg1, arg2)));
4934 return pedantic_non_lvalue
4935 (fold (build (MAX_EXPR, type, arg1, arg2)));
4938 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4939 we might still be able to simplify this. For example,
4940 if C1 is one less or one more than C2, this might have started
4941 out as a MIN or MAX and been transformed by this function.
4942 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
4944 if (INTEGRAL_TYPE_P (type)
4945 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
4946 && TREE_CODE (arg2) == INTEGER_CST)
4950 /* We can replace A with C1 in this case. */
4951 arg1 = TREE_OPERAND (t, 1)
4952 = convert (type, TREE_OPERAND (arg0, 1));
4956 /* If C1 is C2 + 1, this is min(A, C2). */
4957 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type), 1)
4958 && operand_equal_p (TREE_OPERAND (arg0, 1),
4959 const_binop (PLUS_EXPR, arg2,
4960 integer_one_node, 0), 1))
4961 return pedantic_non_lvalue
4962 (fold (build (MIN_EXPR, type, arg1, arg2)));
4966 /* If C1 is C2 - 1, this is min(A, C2). */
4967 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type), 1)
4968 && operand_equal_p (TREE_OPERAND (arg0, 1),
4969 const_binop (MINUS_EXPR, arg2,
4970 integer_one_node, 0), 1))
4971 return pedantic_non_lvalue
4972 (fold (build (MIN_EXPR, type, arg1, arg2)));
4976 /* If C1 is C2 - 1, this is max(A, C2). */
4977 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type), 1)
4978 && operand_equal_p (TREE_OPERAND (arg0, 1),
4979 const_binop (MINUS_EXPR, arg2,
4980 integer_one_node, 0), 1))
4981 return pedantic_non_lvalue
4982 (fold (build (MAX_EXPR, type, arg1, arg2)));
4986 /* If C1 is C2 + 1, this is max(A, C2). */
4987 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type), 1)
4988 && operand_equal_p (TREE_OPERAND (arg0, 1),
4989 const_binop (PLUS_EXPR, arg2,
4990 integer_one_node, 0), 1))
4991 return pedantic_non_lvalue
4992 (fold (build (MAX_EXPR, type, arg1, arg2)));
4997 /* Convert A ? 1 : 0 to simply A. */
4998 if (integer_onep (TREE_OPERAND (t, 1))
4999 && integer_zerop (TREE_OPERAND (t, 2))
5000 /* If we try to convert TREE_OPERAND (t, 0) to our type, the
5001 call to fold will try to move the conversion inside
5002 a COND, which will recurse. In that case, the COND_EXPR
5003 is probably the best choice, so leave it alone. */
5004 && type == TREE_TYPE (arg0))
5005 return pedantic_non_lvalue (arg0);
5008 /* Look for expressions of the form A & 2 ? 2 : 0. The result of this
5009 operation is simply A & 2. */
5011 if (integer_zerop (TREE_OPERAND (t, 2))
5012 && TREE_CODE (arg0) == NE_EXPR
5013 && integer_zerop (TREE_OPERAND (arg0, 1))
5014 && integer_pow2p (arg1)
5015 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
5016 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
5018 return pedantic_non_lvalue (convert (type, TREE_OPERAND (arg0, 0)));
5023 /* When pedantic, a compound expression can be neither an lvalue
5024 nor an integer constant expression. */
5025 if (TREE_SIDE_EFFECTS (arg0) || pedantic)
5027 /* Don't let (0, 0) be null pointer constant. */
5028 if (integer_zerop (arg1))
5029 return non_lvalue (arg1);
5034 return build_complex (arg0, arg1);
5038 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
5040 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
5041 return omit_one_operand (type, TREE_OPERAND (arg0, 0),
5042 TREE_OPERAND (arg0, 1));
5043 else if (TREE_CODE (arg0) == COMPLEX_CST)
5044 return TREE_REALPART (arg0);
5045 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
5046 return fold (build (TREE_CODE (arg0), type,
5047 fold (build1 (REALPART_EXPR, type,
5048 TREE_OPERAND (arg0, 0))),
5049 fold (build1 (REALPART_EXPR,
5050 type, TREE_OPERAND (arg0, 1)))));
5054 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
5055 return convert (type, integer_zero_node);
5056 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
5057 return omit_one_operand (type, TREE_OPERAND (arg0, 1),
5058 TREE_OPERAND (arg0, 0));
5059 else if (TREE_CODE (arg0) == COMPLEX_CST)
5060 return TREE_IMAGPART (arg0);
5061 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
5062 return fold (build (TREE_CODE (arg0), type,
5063 fold (build1 (IMAGPART_EXPR, type,
5064 TREE_OPERAND (arg0, 0))),
5065 fold (build1 (IMAGPART_EXPR, type,
5066 TREE_OPERAND (arg0, 1)))));
5071 } /* switch (code) */