1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987, 1988, 1992, 1993, 1994 Free Software Foundation, Inc.
4 This file is part of GNU CC.
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA. */
20 /*@@ Fix lossage on folding division of big integers. */
22 /*@@ This file should be rewritten to use an arbitrary precision
23 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
24 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
25 @@ The routines that translate from the ap rep should
26 @@ warn if precision et al. is lost.
27 @@ This would also make life easier when this technology is used
28 @@ for cross-compilers. */
31 /* The entry points in this file are fold, size_int and size_binop.
33 fold takes a tree as argument and returns a simplified tree.
35 size_binop takes a tree code for an arithmetic operation
36 and two operands that are trees, and produces a tree for the
37 result, assuming the type comes from `sizetype'.
39 size_int takes an integer value, and creates a tree constant
40 with type from `sizetype'. */
/* Forward declarations for the static helpers defined later in this file.
   NOTE(review): this listing has dropped lines; the PROTO argument lists of
   div_and_round_double, optimize_bit_field_compare and
   decode_field_reference are missing their closing line(s) -- compare with
   a complete copy of the source before compiling.  */
48 /* Handle floating overflow for `const_binop'. */
49 static jmp_buf float_error;
51 static void encode PROTO((short *, HOST_WIDE_INT, HOST_WIDE_INT));
52 static void decode PROTO((short *, HOST_WIDE_INT *, HOST_WIDE_INT *));
53 static int div_and_round_double PROTO((enum tree_code, int, HOST_WIDE_INT,
54 HOST_WIDE_INT, HOST_WIDE_INT,
55 HOST_WIDE_INT, HOST_WIDE_INT *,
56 HOST_WIDE_INT *, HOST_WIDE_INT *,
58 static int split_tree PROTO((tree, enum tree_code, tree *, tree *, int *));
59 static tree const_binop PROTO((enum tree_code, tree, tree, int));
60 static tree fold_convert PROTO((tree, tree));
61 static enum tree_code invert_tree_comparison PROTO((enum tree_code));
62 static enum tree_code swap_tree_comparison PROTO((enum tree_code));
63 static int truth_value_p PROTO((enum tree_code));
64 static int operand_equal_for_comparison_p PROTO((tree, tree, tree));
65 static int twoval_comparison_p PROTO((tree, tree *, tree *, int *));
66 static tree eval_subst PROTO((tree, tree, tree, tree, tree));
67 static tree omit_one_operand PROTO((tree, tree, tree));
68 static tree distribute_bit_expr PROTO((enum tree_code, tree, tree, tree));
69 static tree make_bit_field_ref PROTO((tree, tree, int, int, int));
70 static tree optimize_bit_field_compare PROTO((enum tree_code, tree,
72 static tree decode_field_reference PROTO((tree, int *, int *,
73 enum machine_mode *, int *,
75 static int all_ones_mask_p PROTO((tree, int));
76 static int simple_operand_p PROTO((tree));
77 static tree range_test PROTO((enum tree_code, tree, enum tree_code,
78 enum tree_code, tree, tree, tree));
79 static tree fold_truthop PROTO((enum tree_code, tree, tree, tree));
85 /* Yield nonzero if a signed left shift of A by B bits overflows. */
/* NOTE(review): this macro evaluates A twice and left-shifts a possibly
   negative value, which the C standard leaves undefined; it relies on the
   host compiler's two's-complement wraparound behavior, as was customary
   when this code was written.  */
86 #define left_shift_overflows(a, b) ((a) != ((a) << (b)) >> (b))
88 /* Suppose A1 + B1 = SUM1, using 2's complement arithmetic ignoring overflow.
89 Suppose A, B and SUM have the same respective signs as A1, B1, and SUM1.
90 Then this yields nonzero if overflow occurred during the addition.
91 Overflow occurs if A and B have the same sign, but A and SUM differ in sign.
92 Use `^' to test whether signs differ, and `< 0' to isolate the sign. */
93 #define overflow_sum_sign(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
95 /* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
96 We do that by representing the two-word integer as MAX_SHORTS shorts,
97 with only 8 bits stored in each short, as a positive number. */
99 /* Unpack a two-word integer into MAX_SHORTS shorts.
100 LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
101 SHORTS points to the array of shorts. */
104 encode (shorts, low, hi)
106 HOST_WIDE_INT low, hi;
110 for (i = 0; i < MAX_SHORTS / 2; i++)
112 shorts[i] = (low >> (i * 8)) & 0xff;
113 shorts[i + MAX_SHORTS / 2] = (hi >> (i * 8) & 0xff);
117 /* Pack an array of MAX_SHORTS shorts into a two-word integer.
118 SHORTS points to the array of shorts.
119 The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces. */
122 decode (shorts, low, hi)
124 HOST_WIDE_INT *low, *hi;
127 HOST_WIDE_INT lv = 0, hv = 0;
129 for (i = 0; i < MAX_SHORTS / 2; i++)
131 lv |= (HOST_WIDE_INT) shorts[i] << (i * 8);
132 hv |= (HOST_WIDE_INT) shorts[i + MAX_SHORTS / 2] << (i * 8);
138 /* Make the integer constant T valid for its type
139 by setting to 0 or 1 all the bits in the constant
140 that don't belong in the type.
141 Yield 1 if a signed overflow occurs, 0 otherwise.
142 If OVERFLOW is nonzero, a signed overflow has already occurred
143 in calculating T, so propagate it. */
146 force_fit_type (t, overflow)
150 HOST_WIDE_INT low, high;
153 if (TREE_CODE (t) != INTEGER_CST)
156 low = TREE_INT_CST_LOW (t);
157 high = TREE_INT_CST_HIGH (t);
159 if (TREE_CODE (TREE_TYPE (t)) == POINTER_TYPE)
162 prec = TYPE_PRECISION (TREE_TYPE (t));
164 /* First clear all bits that are beyond the type's precision. */
166 if (prec == 2 * HOST_BITS_PER_WIDE_INT)
168 else if (prec > HOST_BITS_PER_WIDE_INT)
170 TREE_INT_CST_HIGH (t)
171 &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
175 TREE_INT_CST_HIGH (t) = 0;
176 if (prec < HOST_BITS_PER_WIDE_INT)
177 TREE_INT_CST_LOW (t) &= ~((HOST_WIDE_INT) (-1) << prec);
180 /* Unsigned types do not suffer sign extension or overflow. */
181 if (TREE_UNSIGNED (TREE_TYPE (t)))
184 /* If the value's sign bit is set, extend the sign. */
185 if (prec != 2 * HOST_BITS_PER_WIDE_INT
186 && (prec > HOST_BITS_PER_WIDE_INT
187 ? (TREE_INT_CST_HIGH (t)
188 & ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)))
189 : TREE_INT_CST_LOW (t) & ((HOST_WIDE_INT) 1 << (prec - 1))))
191 /* Value is negative:
192 set to 1 all the bits that are outside this type's precision. */
193 if (prec > HOST_BITS_PER_WIDE_INT)
195 TREE_INT_CST_HIGH (t)
196 |= ((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
200 TREE_INT_CST_HIGH (t) = -1;
201 if (prec < HOST_BITS_PER_WIDE_INT)
202 TREE_INT_CST_LOW (t) |= ((HOST_WIDE_INT) (-1) << prec);
206 /* Yield nonzero if signed overflow occurred. */
208 ((overflow | (low ^ TREE_INT_CST_LOW (t)) | (high ^ TREE_INT_CST_HIGH (t)))
212 /* Add two doubleword integers with doubleword result.
213 Each argument is given as two `HOST_WIDE_INT' pieces.
214 One argument is L1 and H1; the other, L2 and H2.
215 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.
216 We use the 8-shorts representation internally. */
219 add_double (l1, h1, l2, h2, lv, hv)
220 HOST_WIDE_INT l1, h1, l2, h2;
221 HOST_WIDE_INT *lv, *hv;
223 short arg1[MAX_SHORTS];
224 short arg2[MAX_SHORTS];
225 register int carry = 0;
228 encode (arg1, l1, h1);
229 encode (arg2, l2, h2);
231 for (i = 0; i < MAX_SHORTS; i++)
233 carry += arg1[i] + arg2[i];
234 arg1[i] = carry & 0xff;
238 decode (arg1, lv, hv);
239 return overflow_sum_sign (h1, h2, *hv);
242 /* Negate a doubleword integer with doubleword result.
243 Return nonzero if the operation overflows, assuming it's signed.
244 The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
245 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.
246 We use the 8-shorts representation internally. */
249 neg_double (l1, h1, lv, hv)
250 HOST_WIDE_INT l1, h1;
251 HOST_WIDE_INT *lv, *hv;
257 return (*hv & h1) < 0;
267 /* Multiply two doubleword integers with doubleword result.
268 Return nonzero if the operation overflows, assuming it's signed.
269 Each argument is given as two `HOST_WIDE_INT' pieces.
270 One argument is L1 and H1; the other, L2 and H2.
271 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.
272 We use the 8-shorts representation internally. */
/* NOTE(review): this listing has dropped lines throughout the function
   (the `int' return type, braces, the conditions guarding the fast paths,
   their `return' statements, and the inner-loop index/carry bookkeeping).
   The surviving lines are kept verbatim below.  */
275 mul_double (l1, h1, l2, h2, lv, hv)
276 HOST_WIDE_INT l1, h1, l2, h2;
277 HOST_WIDE_INT *lv, *hv;
279 short arg1[MAX_SHORTS];
280 short arg2[MAX_SHORTS];
281 short prod[MAX_SHORTS * 2];
282 register int carry = 0;
283 register int i, j, k;
284 HOST_WIDE_INT toplow, tophigh, neglow, neghigh;
286 /* These cases are used extensively, arising from pointer combinations. */
/* NOTE(review): the fragments below appear to be fast paths multiplying by
   small powers of two via repeated doubling of the low word with an
   explicit carry into the high word -- confirm against a complete copy.  */
291 int overflow = left_shift_overflows (h1, 1);
292 unsigned HOST_WIDE_INT temp = l1 + l1;
293 *hv = (h1 << 1) + (temp < l1);
299 int overflow = left_shift_overflows (h1, 2);
300 unsigned HOST_WIDE_INT temp = l1 + l1;
301 h1 = (h1 << 2) + ((temp < l1) << 1);
311 int overflow = left_shift_overflows (h1, 3);
312 unsigned HOST_WIDE_INT temp = l1 + l1;
313 h1 = (h1 << 3) + ((temp < l1) << 2);
316 h1 += (temp < l1) << 1;
/* General case: byte-by-byte schoolbook multiplication into PROD,
   which holds the full double-width product.  */
326 encode (arg1, l1, h1);
327 encode (arg2, l2, h2);
329 bzero (prod, sizeof prod);
331 for (i = 0; i < MAX_SHORTS; i++)
332 for (j = 0; j < MAX_SHORTS; j++)
335 carry = arg1[i] * arg2[j];
339 prod[k] = carry & 0xff;
345 decode (prod, lv, hv); /* This ignores
346 prod[MAX_SHORTS] -> prod[MAX_SHORTS*2-1] */
348 /* Check for overflow by calculating the top half of the answer in full;
349 it should agree with the low half's sign bit. */
350 decode (prod+MAX_SHORTS, &toplow, &tophigh);
/* NOTE(review): the `if' statements guarding the two corrections below
   (presumably testing h2 < 0 and h1 < 0, to adjust the unsigned top half
   for signed operands) are elided in this listing.  */
353 neg_double (l2, h2, &neglow, &neghigh);
354 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
358 neg_double (l1, h1, &neglow, &neghigh);
359 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
361 return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
364 /* Shift the doubleword integer in L1, H1 left by COUNT places
365 keeping only PREC bits of result.
366 Shift right if COUNT is negative.
367 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
368 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
371 lshift_double (l1, h1, count, prec, lv, hv, arith)
372 HOST_WIDE_INT l1, h1, count;
374 HOST_WIDE_INT *lv, *hv;
377 short arg1[MAX_SHORTS];
383 rshift_double (l1, h1, - count, prec, lv, hv, arith);
387 encode (arg1, l1, h1);
395 for (i = 0; i < MAX_SHORTS; i++)
397 carry += arg1[i] << 1;
398 arg1[i] = carry & 0xff;
404 decode (arg1, lv, hv);
407 /* Shift the doubleword integer in L1, H1 right by COUNT places
408 keeping only PREC bits of result. COUNT must be positive.
409 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
410 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
413 rshift_double (l1, h1, count, prec, lv, hv, arith)
414 HOST_WIDE_INT l1, h1, count;
416 HOST_WIDE_INT *lv, *hv;
419 short arg1[MAX_SHORTS];
423 encode (arg1, l1, h1);
430 carry = arith && arg1[7] >> 7;
431 for (i = MAX_SHORTS - 1; i >= 0; i--)
435 arg1[i] = (carry >> 1) & 0xff;
440 decode (arg1, lv, hv);
443 /* Rotate the doubldword integer in L1, H1 left by COUNT places
444 keeping only PREC bits of result.
445 Rotate right if COUNT is negative.
446 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
449 lrotate_double (l1, h1, count, prec, lv, hv)
450 HOST_WIDE_INT l1, h1, count;
452 HOST_WIDE_INT *lv, *hv;
454 short arg1[MAX_SHORTS];
460 rrotate_double (l1, h1, - count, prec, lv, hv);
464 encode (arg1, l1, h1);
469 carry = arg1[MAX_SHORTS - 1] >> 7;
472 for (i = 0; i < MAX_SHORTS; i++)
474 carry += arg1[i] << 1;
475 arg1[i] = carry & 0xff;
481 decode (arg1, lv, hv);
484 /* Rotate the doubleword integer in L1, H1 left by COUNT places
485 keeping only PREC bits of result. COUNT must be positive.
486 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
489 rrotate_double (l1, h1, count, prec, lv, hv)
490 HOST_WIDE_INT l1, h1, count;
492 HOST_WIDE_INT *lv, *hv;
494 short arg1[MAX_SHORTS];
498 encode (arg1, l1, h1);
506 for (i = MAX_SHORTS - 1; i >= 0; i--)
510 arg1[i] = (carry >> 1) & 0xff;
515 decode (arg1, lv, hv);
518 /* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
519 for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
520 CODE is a tree code for a kind of division, one of
521 TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
523 It controls how the quotient is rounded to an integer.
524 Return nonzero if the operation overflows.
525 UNS nonzero says do unsigned division. */
/* NOTE(review): this listing has dropped many lines of this function
   (the `static int' header line, braces, `abort ()' calls, several `case'
   labels, the digit/carry bookkeeping inside the Knuth division loop, and
   the final `return overflow;').  The surviving lines are verbatim.  */
528 div_and_round_double (code, uns,
529 lnum_orig, hnum_orig, lden_orig, hden_orig,
530 lquo, hquo, lrem, hrem)
533 HOST_WIDE_INT lnum_orig, hnum_orig; /* num == numerator == dividend */
534 HOST_WIDE_INT lden_orig, hden_orig; /* den == denominator == divisor */
535 HOST_WIDE_INT *lquo, *hquo, *lrem, *hrem;
538 short num[MAX_SHORTS + 1]; /* extra element for scaling. */
539 short den[MAX_SHORTS], quo[MAX_SHORTS];
540 register int i, j, work;
541 register int carry = 0;
542 HOST_WIDE_INT lnum = lnum_orig;
543 HOST_WIDE_INT hnum = hnum_orig;
544 HOST_WIDE_INT lden = lden_orig;
545 HOST_WIDE_INT hden = hden_orig;
548 if ((hden == 0) && (lden == 0))
551 /* calculate quotient sign and convert operands to unsigned. */
557 /* (minimum integer) / (-1) is the only overflow case. */
558 if (neg_double (lnum, hnum, &lnum, &hnum) && (lden & hden) == -1)
564 neg_double (lden, hden, &lden, &hden);
568 if (hnum == 0 && hden == 0)
569 { /* single precision */
571 /* This unsigned division rounds toward zero. */
572 *lquo = lnum / (unsigned HOST_WIDE_INT) lden;
577 { /* trivial case: dividend < divisor */
578 /* hden != 0 already checked. */
585 bzero (quo, sizeof quo);
587 bzero (num, sizeof num); /* to zero 9th element */
588 bzero (den, sizeof den);
590 encode (num, lnum, hnum);
591 encode (den, lden, hden);
593 /* This code requires more than just hden == 0.
594 We also have to require that we don't need more than three bytes
595 to hold CARRY. If we ever did need four bytes to hold it, we
596 would lose part of it when computing WORK on the next round. */
597 if (hden == 0 && (((unsigned HOST_WIDE_INT) lden << 8) >> 8) == lden)
598 { /* simpler algorithm */
599 /* hnum != 0 already checked. */
600 for (i = MAX_SHORTS - 1; i >= 0; i--)
602 work = num[i] + (carry << 8);
603 quo[i] = work / (unsigned HOST_WIDE_INT) lden;
604 carry = work % (unsigned HOST_WIDE_INT) lden;
607 else { /* full double precision,
608 with thanks to Don Knuth's
609 "Seminumerical Algorithms". */
611 int quo_est, scale, num_hi_sig, den_hi_sig, quo_hi_sig;
613 /* Find the highest non-zero divisor digit. */
614 for (i = MAX_SHORTS - 1; ; i--)
619 for (i = MAX_SHORTS - 1; ; i--)
624 quo_hi_sig = num_hi_sig - den_hi_sig + 1;
626 /* Insure that the first digit of the divisor is at least BASE/2.
627 This is required by the quotient digit estimation algorithm. */
629 scale = BASE / (den[den_hi_sig] + 1);
630 if (scale > 1) { /* scale divisor and dividend */
632 for (i = 0; i <= MAX_SHORTS - 1; i++) {
633 work = (num[i] * scale) + carry;
634 num[i] = work & 0xff;
636 if (num[i] != 0) num_hi_sig = i;
639 for (i = 0; i <= MAX_SHORTS - 1; i++) {
640 work = (den[i] * scale) + carry;
641 den[i] = work & 0xff;
643 if (den[i] != 0) den_hi_sig = i;
648 for (i = quo_hi_sig; i > 0; i--) {
649 /* guess the next quotient digit, quo_est, by dividing the first
650 two remaining dividend digits by the high order quotient digit.
651 quo_est is never low and is at most 2 high. */
653 int num_hi; /* index of highest remaining dividend digit */
655 num_hi = i + den_hi_sig;
657 work = (num[num_hi] * BASE) + (num_hi > 0 ? num[num_hi - 1] : 0);
658 if (num[num_hi] != den[den_hi_sig]) {
659 quo_est = work / den[den_hi_sig];
665 /* refine quo_est so it's usually correct, and at most one high. */
666 while ((den[den_hi_sig - 1] * quo_est)
667 > (((work - (quo_est * den[den_hi_sig])) * BASE)
668 + ((num_hi - 1) > 0 ? num[num_hi - 2] : 0)))
671 /* Try QUO_EST as the quotient digit, by multiplying the
672 divisor by QUO_EST and subtracting from the remaining dividend.
673 Keep in mind that QUO_EST is the I - 1st digit. */
677 for (j = 0; j <= den_hi_sig; j++)
681 work = num[i + j - 1] - (quo_est * den[j]) + carry;
689 num[i + j - 1] = digit;
692 /* if quo_est was high by one, then num[i] went negative and
693 we need to correct things. */
698 carry = 0; /* add divisor back in */
699 for (j = 0; j <= den_hi_sig; j++)
701 work = num[i + j - 1] + den[j] + carry;
711 num[i + j - 1] = work;
713 num [num_hi] += carry;
716 /* store the quotient digit. */
717 quo[i - 1] = quo_est;
721 decode (quo, lquo, hquo);
724 /* if result is negative, make it so. */
726 neg_double (*lquo, *hquo, lquo, hquo);
728 /* compute trial remainder: rem = num - (quo * den) */
729 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
730 neg_double (*lrem, *hrem, lrem, hrem);
731 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
/* The switch on CODE adjusting the rounding direction follows; its
   TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR and ROUND_DIV_EXPR labels
   are among the elided lines.  */
736 case TRUNC_MOD_EXPR: /* round toward zero */
737 case EXACT_DIV_EXPR: /* for this one, it shouldn't matter */
741 case FLOOR_MOD_EXPR: /* round toward negative infinity */
742 if (quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio < 0 && rem != 0 */
745 add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
748 else return overflow;
752 case CEIL_MOD_EXPR: /* round toward positive infinity */
753 if (!quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio > 0 && rem != 0 */
755 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
758 else return overflow;
762 case ROUND_MOD_EXPR: /* round to closest integer */
764 HOST_WIDE_INT labs_rem = *lrem, habs_rem = *hrem;
765 HOST_WIDE_INT labs_den = lden, habs_den = hden, ltwice, htwice;
767 /* get absolute values */
768 if (*hrem < 0) neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
769 if (hden < 0) neg_double (lden, hden, &labs_den, &habs_den);
771 /* if (2 * abs (lrem) >= abs (lden)) */
772 mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
/* NOTE(review): `<wice' on the next line is an HTML-entity mangling of
   `&ltwice' (`&lt' was rendered as `<'); the argument should read
   `&ltwice, &htwice'.  */
773 labs_rem, habs_rem, <wice, &htwice);
774 if (((unsigned HOST_WIDE_INT) habs_den
775 < (unsigned HOST_WIDE_INT) htwice)
776 || (((unsigned HOST_WIDE_INT) habs_den
777 == (unsigned HOST_WIDE_INT) htwice)
/* NOTE(review): `(HOST_WIDE_INT unsigned)' below is legal C (type
   specifiers may appear in any order) but inconsistent with the
   `(unsigned HOST_WIDE_INT)' spelling used everywhere else.  */
778 && ((HOST_WIDE_INT unsigned) labs_den
779 < (unsigned HOST_WIDE_INT) ltwice)))
783 add_double (*lquo, *hquo,
784 (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
787 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
790 else return overflow;
798 /* compute true remainder: rem = num - (quo * den) */
799 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
800 neg_double (*lrem, *hrem, lrem, hrem);
801 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
805 #ifndef REAL_ARITHMETIC
806 /* Effectively truncate a real value to represent
807 the nearest possible value in a narrower mode.
808 The result is actually represented in the same data type as the argument,
809 but its value is usually different. */
/* NOTE(review): the `REAL_VALUE_TYPE' return type, the declaration of
   ARG, the `handled' variable's declaration, braces, the recovery path
   taken after the longjmp, and the final `return value;' are missing
   from this listing.  */
812 real_value_truncate (mode, arg)
813 enum machine_mode mode;
817 /* Make sure the value is actually stored in memory before we turn off
821 REAL_VALUE_TYPE value;
822 jmp_buf handler, old_handler;
825 if (setjmp (handler))
/* Reached via longjmp from the float-trap handler on overflow.  */
827 error ("floating overflow");
830 handled = push_float_handler (handler, old_handler);
831 value = REAL_VALUE_TRUNCATE (mode, arg);
832 pop_float_handler (handled, old_handler);
836 #if TARGET_FLOAT_FORMAT == IEEE_FLOAT_FORMAT
838 /* Check for infinity in an IEEE double precision number. */
/* NOTE(review): the function header (presumably `int target_isinf (x)'),
   the union declaration wrapping the two bit-field structs below, and the
   statements that store into `u' are missing from this listing.  The
   elided code presumably stores a known constant into the union first so
   that testing the sign bit reveals the host's byte order -- confirm
   against a complete copy of the source.  */
844 /* The IEEE 64-bit double format. */
849 unsigned exponent : 11;
850 unsigned mantissa1 : 20;
855 unsigned mantissa1 : 20;
856 unsigned exponent : 11;
862 if (u.big_endian.sign == 1)
/* IEEE infinity: all-ones exponent (2047) with a zero mantissa.  */
865 return (u.big_endian.exponent == 2047
866 && u.big_endian.mantissa1 == 0
867 && u.big_endian.mantissa2 == 0);
872 return (u.little_endian.exponent == 2047
873 && u.little_endian.mantissa1 == 0
874 && u.little_endian.mantissa2 == 0);
878 /* Check whether an IEEE double precision number is a NaN. */
/* NOTE(review): as with target_isinf above, the function header, the
   union declaration, and the stores into `u' are missing from this
   listing.  */
884 /* The IEEE 64-bit double format. */
889 unsigned exponent : 11;
890 unsigned mantissa1 : 20;
895 unsigned mantissa1 : 20;
896 unsigned exponent : 11;
902 if (u.big_endian.sign == 1)
/* IEEE NaN: all-ones exponent (2047) with a nonzero mantissa.  */
905 return (u.big_endian.exponent == 2047
906 && (u.big_endian.mantissa1 != 0
907 || u.big_endian.mantissa2 != 0));
912 return (u.little_endian.exponent == 2047
913 && (u.little_endian.mantissa1 != 0
914 || u.little_endian.mantissa2 != 0));
918 /* Check for a negative IEEE double precision number. */
/* NOTE(review): as with target_isinf above, the function header, the
   union declaration, and the stores into `u' are missing from this
   listing.  */
924 /* The IEEE 64-bit double format. */
929 unsigned exponent : 11;
930 unsigned mantissa1 : 20;
935 unsigned mantissa1 : 20;
936 unsigned exponent : 11;
942 if (u.big_endian.sign == 1)
/* Return the sign bit from whichever layout matches the host.  */
945 return u.big_endian.sign;
950 return u.little_endian.sign;
953 #else /* Target not IEEE */
/* NOTE(review): the stub definitions of target_isinf, target_isnan and
   target_negative for non-IEEE targets (each presumably returning 0, per
   the comments below) have been dropped from this listing; only their
   introductory comments survive.  */
955 /* Let's assume other float formats don't have infinity.
956 (This can be overridden by redefining REAL_VALUE_ISINF.) */
964 /* Let's assume other float formats don't have NaNs.
965 (This can be overridden by redefining REAL_VALUE_ISNAN.) */
973 /* Let's assume other float formats don't have minus zero.
974 (This can be overridden by redefining REAL_VALUE_NEGATIVE.) */
981 #endif /* Target not IEEE */
982 #endif /* no REAL_ARITHMETIC */
984 /* Split a tree IN into a constant and a variable part
985 that could be combined with CODE to make IN.
986 CODE must be a commutative arithmetic operation.
987 Store the constant part into *CONP and the variable in *VARP.
988 Return 1 if this was done; zero means the tree IN did not decompose
991 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.
992 Therefore, we must tell the caller whether the variable part
993 was subtracted. We do this by storing 1 or -1 into *VARSIGNP.
994 The value stored is the coefficient for the variable term.
995 The constant term we return should always be added;
996 we negate it if necessary. */
/* NOTE(review): the `static int' header line, parameter declarations for
   varp/conp/varsignp, braces, and several `return' statements are
   missing from this listing.  The surviving lines are verbatim.  */
999 split_tree (in, code, varp, conp, varsignp)
1001 enum tree_code code;
1005 register tree outtype = TREE_TYPE (in);
1009 /* Strip any conversions that don't change the machine mode. */
1010 while ((TREE_CODE (in) == NOP_EXPR
1011 || TREE_CODE (in) == CONVERT_EXPR)
1012 && (TYPE_MODE (TREE_TYPE (in))
1013 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (in, 0)))))
1014 in = TREE_OPERAND (in, 0);
1016 if (TREE_CODE (in) == code
1017 || (! FLOAT_TYPE_P (TREE_TYPE (in))
1018 /* We can associate addition and subtraction together
1019 (even though the C standard doesn't say so)
1020 for integers because the value is not affected.
1021 For reals, the value might be affected, so we can't. */
1022 && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
1023 || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
/* NOTE(review): the inner `code' below shadows the `code' parameter for
   the rest of this scope.  */
1025 enum tree_code code = TREE_CODE (TREE_OPERAND (in, 0));
1026 if (code == INTEGER_CST)
/* First operand is the constant: variable part is operand 1.  */
1028 *conp = TREE_OPERAND (in, 0);
1029 *varp = TREE_OPERAND (in, 1);
1030 if (TYPE_MODE (TREE_TYPE (*varp)) != TYPE_MODE (outtype)
1031 && TREE_TYPE (*varp) != outtype)
1032 *varp = convert (outtype, *varp);
1033 *varsignp = (TREE_CODE (in) == MINUS_EXPR) ? -1 : 1;
/* Second operand is constant: variable part is operand 0.  */
1036 if (TREE_CONSTANT (TREE_OPERAND (in, 1)))
1038 *conp = TREE_OPERAND (in, 1);
1039 *varp = TREE_OPERAND (in, 0);
1041 if (TYPE_MODE (TREE_TYPE (*varp)) != TYPE_MODE (outtype)
1042 && TREE_TYPE (*varp) != outtype)
1043 *varp = convert (outtype, *varp);
1044 if (TREE_CODE (in) == MINUS_EXPR)
1046 /* If operation is subtraction and constant is second,
1047 must negate it to get an additive constant.
1048 And this cannot be done unless it is a manifest constant.
1049 It could also be the address of a static variable.
1050 We cannot negate that, so give up. */
1051 if (TREE_CODE (*conp) == INTEGER_CST)
1052 /* Subtracting from integer_zero_node loses for long long. */
1053 *conp = fold (build1 (NEGATE_EXPR, TREE_TYPE (*conp), *conp));
/* First operand is a non-INTEGER_CST constant.  */
1059 if (TREE_CONSTANT (TREE_OPERAND (in, 0)))
1061 *conp = TREE_OPERAND (in, 0);
1062 *varp = TREE_OPERAND (in, 1);
1063 if (TYPE_MODE (TREE_TYPE (*varp)) != TYPE_MODE (outtype)
1064 && TREE_TYPE (*varp) != outtype)
1065 *varp = convert (outtype, *varp);
1066 *varsignp = (TREE_CODE (in) == MINUS_EXPR) ? -1 : 1;
1073 /* Combine two constants ARG1 and ARG2 under operation CODE
1074 to produce a new constant.
1075 We assume ARG1 and ARG2 have the same data type,
1076 or at least are the same kind of constant and the same machine mode.
1078 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
/* NOTE(review): this listing has dropped many lines of this function
   (the `static tree' header line, the `notrunc' declaration, braces,
   most `case' labels and `break' statements of the big switch, and the
   floating-point folding branch under !REAL_ARITHMETIC).  The surviving
   lines are verbatim.  */
1081 const_binop (code, arg1, arg2, notrunc)
1082 enum tree_code code;
1083 register tree arg1, arg2;
/* Integer-constant folding: operate on the two-word (low, high) pairs.  */
1086 if (TREE_CODE (arg1) == INTEGER_CST)
1088 register HOST_WIDE_INT int1l = TREE_INT_CST_LOW (arg1);
1089 register HOST_WIDE_INT int1h = TREE_INT_CST_HIGH (arg1);
1090 HOST_WIDE_INT int2l = TREE_INT_CST_LOW (arg2);
1091 HOST_WIDE_INT int2h = TREE_INT_CST_HIGH (arg2);
1092 HOST_WIDE_INT low, hi;
1093 HOST_WIDE_INT garbagel, garbageh;
1095 int uns = TREE_UNSIGNED (TREE_TYPE (arg1));
/* Bitwise operations apply word-wise with no carries.  */
1101 t = build_int_2 (int1l | int2l, int1h | int2h);
1105 t = build_int_2 (int1l ^ int2l, int1h ^ int2h);
1109 t = build_int_2 (int1l & int2l, int1h & int2h);
1112 case BIT_ANDTC_EXPR:
1113 t = build_int_2 (int1l & ~int2l, int1h & ~int2h);
1119 /* It's unclear from the C standard whether shifts can overflow.
1120 The following code ignores overflow; perhaps a C standard
1121 interpretation ruling is needed. */
1122 lshift_double (int1l, int1h, int2l,
1123 TYPE_PRECISION (TREE_TYPE (arg1)),
1126 t = build_int_2 (low, hi);
1127 TREE_TYPE (t) = TREE_TYPE (arg1);
1129 force_fit_type (t, 0);
1130 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
1131 TREE_CONSTANT_OVERFLOW (t)
1132 = TREE_CONSTANT_OVERFLOW (arg1) | TREE_CONSTANT_OVERFLOW (arg2);
1138 lrotate_double (int1l, int1h, int2l,
1139 TYPE_PRECISION (TREE_TYPE (arg1)),
1141 t = build_int_2 (low, hi);
/* MIN/MAX-style selection fragments (unsigned overflow checks).  */
1148 if ((unsigned HOST_WIDE_INT) int2l < int1l)
1151 overflow = int2h < hi;
1153 t = build_int_2 (int2l, int2h);
1159 if ((unsigned HOST_WIDE_INT) int1l < int2l)
1162 overflow = int1h < hi;
1164 t = build_int_2 (int1l, int1h);
1167 overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
1168 t = build_int_2 (low, hi);
/* Subtraction: x - 0 shortcut, then negate-and-add.  */
1172 if (int2h == 0 && int2l == 0)
1174 t = build_int_2 (int1l, int1h);
1177 neg_double (int2l, int2h, &low, &hi);
1178 add_double (int1l, int1h, low, hi, &low, &hi);
1179 overflow = overflow_sum_sign (hi, int2h, int1h);
1180 t = build_int_2 (low, hi);
/* Multiplication: dispatch on small operand values first.  */
1184 /* Optimize simple cases. */
1187 unsigned HOST_WIDE_INT temp;
1192 t = build_int_2 (0, 0);
1195 t = build_int_2 (int2l, int2h);
/* Multiply by 2: one doubling with explicit carry.  */
1198 overflow = left_shift_overflows (int2h, 1);
1199 temp = int2l + int2l;
1200 int2h = (int2h << 1) + (temp < int2l);
1201 t = build_int_2 (temp, int2h);
1203 #if 0 /* This code can lose carries. */
1205 temp = int2l + int2l + int2l;
1206 int2h = int2h * 3 + (temp < int2l);
1207 t = build_int_2 (temp, int2h);
/* Multiply by 4 and 8: repeated doubling (some steps elided).  */
1211 overflow = left_shift_overflows (int2h, 2);
1212 temp = int2l + int2l;
1213 int2h = (int2h << 2) + ((temp < int2l) << 1);
1216 int2h += (temp < int2l);
1217 t = build_int_2 (temp, int2h);
1220 overflow = left_shift_overflows (int2h, 3);
1221 temp = int2l + int2l;
1222 int2h = (int2h << 3) + ((temp < int2l) << 2);
1225 int2h += (temp < int2l) << 1;
1228 int2h += (temp < int2l);
1229 t = build_int_2 (temp, int2h);
1240 t = build_int_2 (0, 0);
1245 t = build_int_2 (int1l, int1h);
/* General multiplication falls through to mul_double.  */
1250 overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
1251 t = build_int_2 (low, hi);
1254 case TRUNC_DIV_EXPR:
1255 case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
1256 case EXACT_DIV_EXPR:
1257 /* This is a shortcut for a common special case.
1258 It reduces the number of tree nodes generated
1260 if (int2h == 0 && int2l > 0
1261 && TREE_TYPE (arg1) == sizetype
1262 && int1h == 0 && int1l >= 0)
1264 if (code == CEIL_DIV_EXPR)
1266 return size_int (int1l / int2l);
1268 case ROUND_DIV_EXPR:
1269 if (int2h == 0 && int2l == 1)
1271 t = build_int_2 (int1l, int1h);
1274 if (int1l == int2l && int1h == int2h)
1276 if ((int1l | int1h) == 0)
1278 t = build_int_2 (1, 0);
1281 overflow = div_and_round_double (code, uns,
1282 int1l, int1h, int2l, int2h,
1283 &low, &hi, &garbagel, &garbageh);
1284 t = build_int_2 (low, hi);
1287 case TRUNC_MOD_EXPR: case ROUND_MOD_EXPR:
1288 case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
1289 overflow = div_and_round_double (code, uns,
1290 int1l, int1h, int2l, int2h,
1291 &garbagel, &garbageh, &low, &hi);
1292 t = build_int_2 (low, hi);
/* MIN_EXPR/MAX_EXPR: compare, unsigned or signed per UNS.  */
1299 low = (((unsigned HOST_WIDE_INT) int1h
1300 < (unsigned HOST_WIDE_INT) int2h)
1301 || (((unsigned HOST_WIDE_INT) int1h
1302 == (unsigned HOST_WIDE_INT) int2h)
1303 && ((unsigned HOST_WIDE_INT) int1l
1304 < (unsigned HOST_WIDE_INT) int2l)));
1308 low = ((int1h < int2h)
1309 || ((int1h == int2h)
1310 && ((unsigned HOST_WIDE_INT) int1l
1311 < (unsigned HOST_WIDE_INT) int2l)));
1313 if (low == (code == MIN_EXPR))
1314 t = build_int_2 (int1l, int1h);
1316 t = build_int_2 (int2l, int2h);
/* Finish the integer case: set type and overflow flags.  */
1323 TREE_TYPE (t) = TREE_TYPE (arg1);
1325 = ((notrunc ? !uns && overflow : force_fit_type (t, overflow))
1326 | TREE_OVERFLOW (arg1)
1327 | TREE_OVERFLOW (arg2));
1328 TREE_CONSTANT_OVERFLOW (t) = (TREE_OVERFLOW (t)
1329 | TREE_CONSTANT_OVERFLOW (arg1)
1330 | TREE_CONSTANT_OVERFLOW (arg2));
/* Floating-point constant folding (guarded by a setjmp overflow trap).  */
1333 #if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
1334 if (TREE_CODE (arg1) == REAL_CST)
1338 REAL_VALUE_TYPE value;
1341 d1 = TREE_REAL_CST (arg1);
1342 d2 = TREE_REAL_CST (arg2);
1343 if (setjmp (float_error))
/* On overflow, warn and return the unfolded expression.  */
1345 pedwarn ("floating overflow in constant expression");
1346 return build (code, TREE_TYPE (arg1), arg1, arg2);
1348 set_float_handler (float_error);
1350 #ifdef REAL_ARITHMETIC
1351 REAL_ARITHMETIC (value, code, d1, d2);
/* NOTE(review): the plain-double folding branch (value = d1 + d2, etc.)
   that belongs between here and line 1387 is elided in this listing.  */
1368 #ifndef REAL_INFINITY
1377 value = MIN (d1, d2);
1381 value = MAX (d1, d2);
1387 #endif /* no REAL_ARITHMETIC */
1388 t = build_real (TREE_TYPE (arg1),
1389 real_value_truncate (TYPE_MODE (TREE_TYPE (arg1)), value));
1390 set_float_handler (NULL_PTR);
1393 #endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
/* Complex constant folding: fold real and imaginary parts recursively.  */
1394 if (TREE_CODE (arg1) == COMPLEX_CST)
1396 register tree r1 = TREE_REALPART (arg1);
1397 register tree i1 = TREE_IMAGPART (arg1);
1398 register tree r2 = TREE_REALPART (arg2);
1399 register tree i2 = TREE_IMAGPART (arg2);
1405 t = build_complex (const_binop (PLUS_EXPR, r1, r2, notrunc),
1406 const_binop (PLUS_EXPR, i1, i2, notrunc));
1410 t = build_complex (const_binop (MINUS_EXPR, r1, r2, notrunc),
1411 const_binop (MINUS_EXPR, i1, i2, notrunc));
/* Complex multiply: (r1*r2 - i1*i2) + (r1*i2 + i1*r2)i (operands of the
   inner const_binop calls are among the elided lines).  */
1415 t = build_complex (const_binop (MINUS_EXPR,
1416 const_binop (MULT_EXPR,
1418 const_binop (MULT_EXPR,
1421 const_binop (PLUS_EXPR,
1422 const_binop (MULT_EXPR,
1424 const_binop (MULT_EXPR,
/* Complex divide: scale by |arg2|^2 = r2*r2 + i2*i2.  */
1431 register tree magsquared
1432 = const_binop (PLUS_EXPR,
1433 const_binop (MULT_EXPR, r2, r2, notrunc),
1434 const_binop (MULT_EXPR, i2, i2, notrunc),
1438 (const_binop (INTEGRAL_TYPE_P (TREE_TYPE (r1))
1439 ? TRUNC_DIV_EXPR : RDIV_EXPR,
1440 const_binop (PLUS_EXPR,
1441 const_binop (MULT_EXPR, r1, r2,
1443 const_binop (MULT_EXPR, i1, i2,
1446 magsquared, notrunc),
1447 const_binop (INTEGRAL_TYPE_P (TREE_TYPE (r1))
1448 ? TRUNC_DIV_EXPR : RDIV_EXPR,
1449 const_binop (MINUS_EXPR,
1450 const_binop (MULT_EXPR, i1, r2,
1452 const_binop (MULT_EXPR, r1, i2,
1455 magsquared, notrunc));
1462 TREE_TYPE (t) = TREE_TYPE (arg1);
1468 /* Return an INTEGER_CST with value NUMBER and type from `sizetype'. */
/* NOTE(review): the function header (`tree size_int (number)'), the
   declaration of `t', braces, and the `return' statements are missing
   from this listing.  */
1472 unsigned int number;
1475 /* Type-size nodes already made for small sizes. */
1476 static tree size_table[2*HOST_BITS_PER_WIDE_INT + 1];
/* Small values are cached: return the memoized node when present.  */
1478 if (number < 2*HOST_BITS_PER_WIDE_INT + 1
1479 && size_table[number] != 0)
1480 return size_table[number];
1481 if (number < 2*HOST_BITS_PER_WIDE_INT + 1)
/* Build the cached node on the permanent obstack so it outlives the
   current function's temporary allocation.  */
1483 push_obstacks_nochange ();
1484 /* Make this a permanent node. */
1485 end_temporary_allocation ();
1486 t = build_int_2 (number, 0);
1487 TREE_TYPE (t) = sizetype;
1488 size_table[number] = t;
/* Values too large to cache get a fresh node each time.  */
1493 t = build_int_2 (number, 0);
1494 TREE_TYPE (t) = sizetype;
1499 /* Combine operands ARG0 and ARG1 with arithmetic operation CODE.
1500 CODE is a tree code. Data type is taken from `sizetype'.
1501 If the operands are constant, so is the result. */
/* NOTE(review): the `tree' return type, the parameter declarations for
   arg0/arg1, braces, and the fast-path `return' statements are missing
   from this listing.  */
1504 size_binop (code, arg0, arg1)
1505 enum tree_code code;
1508 /* Handle the special case of two integer constants faster. */
1509 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
1511 /* And some specific cases even faster than that. */
/* Identities 0 + x, x - 0 and 1 * x (their `return' lines are elided).  */
1512 if (code == PLUS_EXPR
1513 && TREE_INT_CST_LOW (arg0) == 0
1514 && TREE_INT_CST_HIGH (arg0) == 0)
1516 if (code == MINUS_EXPR
1517 && TREE_INT_CST_LOW (arg1) == 0
1518 && TREE_INT_CST_HIGH (arg1) == 0)
1520 if (code == MULT_EXPR
1521 && TREE_INT_CST_LOW (arg0) == 1
1522 && TREE_INT_CST_HIGH (arg0) == 0)
1524 /* Handle general case of two integer constants. */
1525 return const_binop (code, arg0, arg1, 1);
1528 if (arg0 == error_mark_node || arg1 == error_mark_node)
1529 return error_mark_node;
/* Non-constant operands: build the expression and let fold simplify.  */
1531 return fold (build (code, sizetype, arg0, arg1));
1534 /* Given T, a tree representing type conversion of ARG1, a constant,
1535 return a constant tree representing the result of conversion. */
/* NOTE(review): excerpt omits many lines of this function (declarations,
   braces, returns, some #else/#endif); comments describe visible code only.  */
1538 fold_convert (t, arg1)
1542 register tree type = TREE_TYPE (t);
/* Conversion to an integer or pointer type.  */
1544 if (TREE_CODE (type) == POINTER_TYPE || INTEGRAL_TYPE_P (type))
1546 if (TREE_CODE (arg1) == INTEGER_CST)
1548 /* Given an integer constant, make new constant with new type,
1549 appropriately sign-extended or truncated. */
1550 t = build_int_2 (TREE_INT_CST_LOW (arg1),
1551 TREE_INT_CST_HIGH (arg1));
1552 TREE_TYPE (t) = type;
1553 /* Indicate an overflow if (1) ARG1 already overflowed,
1554 or (2) force_fit_type indicates an overflow.
1555 Tell force_fit_type that an overflow has already occurred
1556 if ARG1 is a too-large unsigned value and T is signed. */
1558 = (TREE_OVERFLOW (arg1)
1559 | force_fit_type (t,
1560 (TREE_INT_CST_HIGH (arg1) < 0
1561 & (TREE_UNSIGNED (type)
1562 < TREE_UNSIGNED (TREE_TYPE (arg1))))));
1563 TREE_CONSTANT_OVERFLOW (t)
1564 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
/* Real constant converted to integer: range-check, then truncate.  */
1566 #if !defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
1567 else if (TREE_CODE (arg1) == REAL_CST)
1569 REAL_VALUE_TYPE l, x, u;
/* L and U are the integer type's bounds as reals; X is the value.  */
1571 l = real_value_from_int_cst (TYPE_MIN_VALUE (type));
1572 x = TREE_REAL_CST (arg1);
1573 u = real_value_from_int_cst (TYPE_MAX_VALUE (type));
1575 /* See if X will be in range after truncation towards 0.
1576 To compensate for truncation, move the bounds away from 0,
1577 but reject if X exactly equals the adjusted bounds. */
1578 #ifdef REAL_ARITHMETIC
1579 REAL_ARITHMETIC (l, MINUS_EXPR, l, dconst1);
1580 REAL_ARITHMETIC (u, PLUS_EXPR, u, dconst1);
/* Diagnose out-of-range values; the adjusted bounds are exclusive.  */
1585 if (! (REAL_VALUES_LESS (l, x) && REAL_VALUES_LESS (x, u)))
1587 pedwarn ("real constant out of range for integer conversion");
1590 #ifndef REAL_ARITHMETIC
/* No REAL_ARITHMETIC: split the (double) value into two HOST_WIDE_INT
   halves by dividing out HALF_WORD squared.  */
1593 HOST_WIDE_INT low, high;
1594 HOST_WIDE_INT half_word
1595 = (HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2);
1597 d = TREE_REAL_CST (arg1);
1601 high = (HOST_WIDE_INT) (d / half_word / half_word);
1602 d -= (REAL_VALUE_TYPE) high * half_word * half_word;
/* If the remainder would overflow a signed HOST_WIDE_INT, fold the
   top bit in by hand.  */
1603 if (d >= (REAL_VALUE_TYPE) half_word * half_word / 2)
1605 low = d - (REAL_VALUE_TYPE) half_word * half_word / 2;
1606 low |= (HOST_WIDE_INT) -1 << (HOST_BITS_PER_WIDE_INT - 1);
1609 low = (HOST_WIDE_INT) d;
/* Negative values were converted as positive; negate the pair now.  */
1610 if (TREE_REAL_CST (arg1) < 0)
1611 neg_double (low, high, &low, &high);
1612 t = build_int_2 (low, high);
/* REAL_ARITHMETIC path: target-independent conversion to a double int.  */
1616 HOST_WIDE_INT low, high;
1617 REAL_VALUE_TO_INT (&low, &high, (TREE_REAL_CST (arg1)));
1618 t = build_int_2 (low, high);
1621 TREE_TYPE (t) = type;
1622 force_fit_type (t, 0);
1624 #endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
1625 TREE_TYPE (t) = type;
/* Conversion to a real type.  */
1627 else if (TREE_CODE (type) == REAL_TYPE)
1629 #if !defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
1630 if (TREE_CODE (arg1) == INTEGER_CST)
1631 return build_real_from_int_cst (type, arg1);
1632 #endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
1633 if (TREE_CODE (arg1) == REAL_CST)
/* Truncate to the target mode under a float-overflow trap handler
   (see float_error in the file header).  */
1635 if (setjmp (float_error))
1637 pedwarn ("floating overflow in constant expression");
1640 set_float_handler (float_error);
1642 t = build_real (type, real_value_truncate (TYPE_MODE (type),
1643 TREE_REAL_CST (arg1)));
1644 set_float_handler (NULL_PTR);
1648 TREE_CONSTANT (t) = 1;
1652 /* Return an expr equal to X but certainly not valid as an lvalue.
1653 Also make sure it is not valid as a null pointer constant. */
/* NOTE(review): excerpt omits the function header and returns.  */
1661 /* These things are certainly not lvalues. */
1662 if (TREE_CODE (x) == NON_LVALUE_EXPR
1663 || TREE_CODE (x) == INTEGER_CST
1664 || TREE_CODE (x) == REAL_CST
1665 || TREE_CODE (x) == STRING_CST
1666 || TREE_CODE (x) == ADDR_EXPR)
1668 if (TREE_CODE (x) == INTEGER_CST && integer_zerop (x))
1670 /* Use NOP_EXPR instead of NON_LVALUE_EXPR
1671 so convert_for_assignment won't strip it.
1672 This is so this 0 won't be treated as a null pointer constant. */
1673 result = build1 (NOP_EXPR, TREE_TYPE (x), x);
1674 TREE_CONSTANT (result) = TREE_CONSTANT (x);
/* General case: wrap X so it can no longer be used as an lvalue.  */
1680 result = build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
1681 TREE_CONSTANT (result) = TREE_CONSTANT (x);
1685 /* When pedantic, return an expr equal to X but certainly not valid as a
1686 pedantic lvalue. Otherwise, return X. */
/* NOTE(review): excerpt omits the lines between the header and this call —
   presumably the `pedantic' test and a plain `return x' — confirm.  */
1689 pedantic_non_lvalue (x)
1693 return non_lvalue (x);
1698 /* Given a tree comparison code, return the code that is the logical inverse
1699 of the given code. It is not safe to do this for floating-point
1700 comparisons, except for NE_EXPR and EQ_EXPR. */
/* NOTE(review): the switch body mapping each code to its inverse is
   omitted from this excerpt.  */
1702 static enum tree_code
1703 invert_tree_comparison (code)
1704 enum tree_code code;
1725 /* Similar, but return the comparison that results if the operands are
1726 swapped. This is safe for floating-point. */
/* NOTE(review): the switch body is omitted from this excerpt.  */
1728 static enum tree_code
1729 swap_tree_comparison (code)
1730 enum tree_code code;
1750 /* Return nonzero if CODE is a tree code that represents a truth value. */
1753 truth_value_p (code)
1754 enum tree_code code;
/* Any comparison ('<' class) or explicit truth operator qualifies.  */
1756 return (TREE_CODE_CLASS (code) == '<'
1757 || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
1758 || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
1759 || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
1762 /* Return nonzero if two operands are necessarily equal.
1763 If ONLY_CONST is non-zero, only return non-zero for constants.
1764 This function tests whether the operands are indistinguishable;
1765 it does not test whether they are equal using C's == operation.
1766 The distinction is important for IEEE floating point, because
1767 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
1768 (2) two NaNs may be indistinguishable, but NaN!=NaN. */
/* NOTE(review): excerpt omits the function header, several returns,
   case labels and braces; comments describe visible code only.  */
1771 operand_equal_p (arg0, arg1, only_const)
1775 /* If both types don't have the same signedness, then we can't consider
1776 them equal. We must check this before the STRIP_NOPS calls
1777 because they may change the signedness of the arguments. */
1778 if (TREE_UNSIGNED (TREE_TYPE (arg0)) != TREE_UNSIGNED (TREE_TYPE (arg1)))
1784 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
1785 We don't care about side effects in that case because the SAVE_EXPR
1786 takes care of that for us. */
1787 if (TREE_CODE (arg0) == SAVE_EXPR && arg0 == arg1)
1788 return ! only_const;
/* Aside from the SAVE_EXPR case, expressions with side effects can
   never be considered equal.  */
1790 if (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1))
/* Two ADDR_EXPRs of the identical object are equal.  */
1793 if (TREE_CODE (arg0) == TREE_CODE (arg1)
1794 && TREE_CODE (arg0) == ADDR_EXPR
1795 && TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0))
/* Integer constants compare by both halves of their value.  */
1798 if (TREE_CODE (arg0) == TREE_CODE (arg1)
1799 && TREE_CODE (arg0) == INTEGER_CST
1800 && TREE_INT_CST_LOW (arg0) == TREE_INT_CST_LOW (arg1)
1801 && TREE_INT_CST_HIGH (arg0) == TREE_INT_CST_HIGH (arg1))
1804 /* Detect when real constants are equal. */
/* Bitwise comparison distinguishes -0.0 from 0.0 and matches NaNs
   bit-for-bit, as the header comment requires.  */
1805 if (TREE_CODE (arg0) == TREE_CODE (arg1)
1806 && TREE_CODE (arg0) == REAL_CST)
1807 return !bcmp (&TREE_REAL_CST (arg0), &TREE_REAL_CST (arg1),
1808 sizeof (REAL_VALUE_TYPE))
/* From here on, structural comparison: codes must match exactly.  */
1816 if (TREE_CODE (arg0) != TREE_CODE (arg1))
1818 /* This is needed for conversions and for COMPONENT_REF.
1819 Might as well play it safe and always test this. */
1820 if (TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
1823 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
1826 /* Two conversions are equal only if signedness and modes match. */
1827 if ((TREE_CODE (arg0) == NOP_EXPR || TREE_CODE (arg0) == CONVERT_EXPR)
1828 && (TREE_UNSIGNED (TREE_TYPE (arg0))
1829 != TREE_UNSIGNED (TREE_TYPE (arg1))))
1832 return operand_equal_p (TREE_OPERAND (arg0, 0),
1833 TREE_OPERAND (arg1, 0), 0);
/* Binary operators: both operand pairs must match.  */
1837 return (operand_equal_p (TREE_OPERAND (arg0, 0),
1838 TREE_OPERAND (arg1, 0), 0)
1839 && operand_equal_p (TREE_OPERAND (arg0, 1),
1840 TREE_OPERAND (arg1, 1), 0));
/* References: dispatch per reference kind on operand count.  */
1843 switch (TREE_CODE (arg0))
1846 return operand_equal_p (TREE_OPERAND (arg0, 0),
1847 TREE_OPERAND (arg1, 0), 0);
1851 return (operand_equal_p (TREE_OPERAND (arg0, 0),
1852 TREE_OPERAND (arg1, 0), 0)
1853 && operand_equal_p (TREE_OPERAND (arg0, 1),
1854 TREE_OPERAND (arg1, 1), 0));
1857 return (operand_equal_p (TREE_OPERAND (arg0, 0),
1858 TREE_OPERAND (arg1, 0), 0)
1859 && operand_equal_p (TREE_OPERAND (arg0, 1),
1860 TREE_OPERAND (arg1, 1), 0)
1861 && operand_equal_p (TREE_OPERAND (arg0, 2),
1862 TREE_OPERAND (arg1, 2), 0));
1870 /* Similar to operand_equal_p, but see if ARG0 might have been made by
1871 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
1873 When in doubt, return 0. */
/* NOTE(review): excerpt omits the function header, some returns and
   braces; comments describe visible code only.  */
1876 operand_equal_for_comparison_p (arg0, arg1, other)
1880 int unsignedp1, unsignedpo;
1881 tree primarg1, primother;
1882 unsigned correct_width;
/* Trivially equal operands need no shortening analysis.  */
1884 if (operand_equal_p (arg0, arg1, 0))
/* Only integral comparisons can have been shortened.  */
1887 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
1890 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
1891 actual comparison operand, ARG0.
1893 First throw away any conversions to wider types
1894 already present in the operands. */
1896 primarg1 = get_narrower (arg1, &unsignedp1);
1897 primother = get_narrower (other, &unsignedpo);
1899 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
/* Both operands must have narrowed consistently (same signedness,
   both strictly narrower than ARG1's type) for the transform to apply.  */
1900 if (unsignedp1 == unsignedpo
1901 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
1902 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
1904 tree type = TREE_TYPE (arg0);
1906 /* Make sure shorter operand is extended the right way
1907 to match the longer operand. */
1908 primarg1 = convert (signed_or_unsigned_type (unsignedp1,
1909 TREE_TYPE (primarg1)),
/* If re-widening the narrowed ARG1 reproduces ARG0, they match.  */
1912 if (operand_equal_p (arg0, convert (type, primarg1), 0))
1919 /* See if ARG is an expression that is either a comparison or is performing
1920 arithmetic on comparisons. The comparisons must only be comparing
1921 two different values, which will be stored in *CVAL1 and *CVAL2; if
1922 they are non-zero it means that some operands have already been found.
1923 No variables may be used anywhere else in the expression except in the
1924 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
1925 the expression and save_expr needs to be called with CVAL1 and CVAL2.
1927 If this is true, return 1. Otherwise, return zero. */
/* NOTE(review): excerpt omits the function header, several returns, case
   labels and braces; comments describe visible code only.  */
1930 twoval_comparison_p (arg, cval1, cval2, save_p)
1932 tree *cval1, *cval2;
1935 enum tree_code code = TREE_CODE (arg);
1936 char class = TREE_CODE_CLASS (code);
1938 /* We can handle some of the 'e' cases here. */
1939 if (class == 'e' && code == TRUTH_NOT_EXPR)
1941 else if (class == 'e'
1942 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
1943 || code == COMPOUND_EXPR))
1946 /* ??? Disable this since the SAVE_EXPR might already be in use outside
1947 the expression. There may be no way to make this work, but it needs
1948 to be looked at again for 2.6. */
1950 else if (class == 'e' && code == SAVE_EXPR && SAVE_EXPR_RTL (arg) == 0)
1952 /* If we've already found a CVAL1 or CVAL2, this expression is
1953 too complex to handle. */
1954 if (*cval1 || *cval2)
/* Unary case: recurse into the sole operand.  */
1965 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
/* Binary case: both operands must qualify.  */
1968 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
1969 && twoval_comparison_p (TREE_OPERAND (arg, 1),
1970 cval1, cval2, save_p));
/* COND_EXPR: all three operands must qualify.  */
1976 if (code == COND_EXPR)
1977 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
1978 cval1, cval2, save_p)
1979 && twoval_comparison_p (TREE_OPERAND (arg, 1),
1980 cval1, cval2, save_p)
1981 && twoval_comparison_p (TREE_OPERAND (arg, 2),
1982 cval1, cval2, save_p));
1986 /* First see if we can handle the first operand, then the second. For
1987 the second operand, we know *CVAL1 can't be zero. It must be that
1988 one side of the comparison is each of the values; test for the
1989 case where this isn't true by failing if the two operands
/* Reject a comparison whose two sides are the same value.  */
1992 if (operand_equal_p (TREE_OPERAND (arg, 0),
1993 TREE_OPERAND (arg, 1), 0))
/* Record operand 0 as CVAL1 or CVAL2, or match an existing one.  */
1997 *cval1 = TREE_OPERAND (arg, 0);
1998 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2000 else if (*cval2 == 0)
2001 *cval2 = TREE_OPERAND (arg, 0);
2002 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
/* Likewise for operand 1; *CVAL1 is known non-zero here.  */
2007 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2009 else if (*cval2 == 0)
2010 *cval2 = TREE_OPERAND (arg, 1);
2011 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2022 /* ARG is a tree that is known to contain just arithmetic operations and
2023 comparisons. Evaluate the operations in the tree substituting NEW0 for
2024 any occurrence of OLD0 as an operand of a comparison and likewise for
/* NOTE(review): excerpt omits part of this header comment, the function
   header, case labels and braces; comments describe visible code only.  */
2028 eval_subst (arg, old0, new0, old1, new1)
2030 tree old0, new0, old1, new1;
2032 tree type = TREE_TYPE (arg);
2033 enum tree_code code = TREE_CODE (arg);
2034 char class = TREE_CODE_CLASS (code);
2036 /* We can handle some of the 'e' cases here. */
2037 if (class == 'e' && code == TRUTH_NOT_EXPR)
2039 else if (class == 'e'
2040 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
/* Unary node: rebuild with the substituted operand and re-fold.  */
2046 return fold (build1 (code, type,
2047 eval_subst (TREE_OPERAND (arg, 0),
2048 old0, new0, old1, new1)));
/* Binary node: substitute in both operands.  */
2051 return fold (build (code, type,
2052 eval_subst (TREE_OPERAND (arg, 0),
2053 old0, new0, old1, new1),
2054 eval_subst (TREE_OPERAND (arg, 1),
2055 old0, new0, old1, new1)));
/* Expression cases that just forward to one operand.  */
2061 return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);
2064 return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);
/* Ternary node: substitute in all three operands.  */
2067 return fold (build (code, type,
2068 eval_subst (TREE_OPERAND (arg, 0),
2069 old0, new0, old1, new1),
2070 eval_subst (TREE_OPERAND (arg, 1),
2071 old0, new0, old1, new1),
2072 eval_subst (TREE_OPERAND (arg, 2),
2073 old0, new0, old1, new1)));
/* Comparison node: this is where OLD0/OLD1 are replaced by NEW0/NEW1.  */
2078 tree arg0 = TREE_OPERAND (arg, 0);
2079 tree arg1 = TREE_OPERAND (arg, 1);
2081 /* We need to check both for exact equality and tree equality. The
2082 former will be true if the operand has a side-effect. In that
2083 case, we know the operand occurred exactly once. */
2085 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
2087 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
2090 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
2092 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
2095 return fold (build (code, type, arg0, arg1));
2102 /* Return a tree for the case when the result of an expression is RESULT
2103 converted to TYPE and OMITTED was previously an operand of the expression
2104 but is now not needed (e.g., we folded OMITTED * 0).
2106 If OMITTED has side effects, we must evaluate it. Otherwise, just do
2107 the conversion of RESULT to TYPE. */
2110 omit_one_operand (type, result, omitted)
2111 tree type, result, omitted;
2113 tree t = convert (type, result);
/* Keep OMITTED alive via a COMPOUND_EXPR so its side effects still run.  */
2115 if (TREE_SIDE_EFFECTS (omitted))
2116 return build (COMPOUND_EXPR, type, omitted, t);
/* Otherwise the result must not be usable as an lvalue.  */
2118 return non_lvalue (t);
2121 /* Return a simplified tree node for the truth-negation of ARG. This
2122 never alters ARG itself. We assume that ARG is an operation that
2123 returns a truth value (0 or 1). */
/* NOTE(review): excerpt omits the function header, several case labels,
   braces and the switch statement itself; comments cover visible code only.  */
2126 invert_truthvalue (arg)
2129 tree type = TREE_TYPE (arg);
2130 enum tree_code code = TREE_CODE (arg);
2132 if (code == ERROR_MARK)
2135 /* If this is a comparison, we can simply invert it, except for
2136 floating-point non-equality comparisons, in which case we just
2137 enclose a TRUTH_NOT_EXPR around what we have. */
2139 if (TREE_CODE_CLASS (code) == '<')
2141 if (FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (arg, 0)))
2142 && code != NE_EXPR && code != EQ_EXPR)
2143 return build1 (TRUTH_NOT_EXPR, type, arg);
2145 return build (invert_tree_comparison (code), type,
2146 TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
/* Integer constant: its negation is 1 exactly when both halves are 0.  */
2152 return convert (type, build_int_2 (TREE_INT_CST_LOW (arg) == 0
2153 && TREE_INT_CST_HIGH (arg) == 0, 0));
/* De Morgan: !(a && b) == !a || !b.  */
2155 case TRUTH_AND_EXPR:
2156 return build (TRUTH_OR_EXPR, type,
2157 invert_truthvalue (TREE_OPERAND (arg, 0)),
2158 invert_truthvalue (TREE_OPERAND (arg, 1)));
/* De Morgan: !(a || b) == !a && !b.  */
2161 return build (TRUTH_AND_EXPR, type,
2162 invert_truthvalue (TREE_OPERAND (arg, 0)),
2163 invert_truthvalue (TREE_OPERAND (arg, 1)));
2165 case TRUTH_XOR_EXPR:
2166 /* Here we can invert either operand. We invert the first operand
2167 unless the second operand is a TRUTH_NOT_EXPR in which case our
2168 result is the XOR of the first operand with the inside of the
2169 negation of the second operand. */
2171 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
2172 return build (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
2173 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
2175 return build (TRUTH_XOR_EXPR, type,
2176 invert_truthvalue (TREE_OPERAND (arg, 0)),
2177 TREE_OPERAND (arg, 1));
/* De Morgan on the short-circuit forms, preserving evaluation order.  */
2179 case TRUTH_ANDIF_EXPR:
2180 return build (TRUTH_ORIF_EXPR, type,
2181 invert_truthvalue (TREE_OPERAND (arg, 0)),
2182 invert_truthvalue (TREE_OPERAND (arg, 1)));
2184 case TRUTH_ORIF_EXPR:
2185 return build (TRUTH_ANDIF_EXPR, type,
2186 invert_truthvalue (TREE_OPERAND (arg, 0)),
2187 invert_truthvalue (TREE_OPERAND (arg, 1)));
/* Double negation cancels.  */
2189 case TRUTH_NOT_EXPR:
2190 return TREE_OPERAND (arg, 0);
/* Push the negation into both arms of a conditional.  */
2193 return build (COND_EXPR, type, TREE_OPERAND (arg, 0),
2194 invert_truthvalue (TREE_OPERAND (arg, 1)),
2195 invert_truthvalue (TREE_OPERAND (arg, 2)));
/* Only the value operand of a COMPOUND_EXPR is negated.  */
2198 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
2199 invert_truthvalue (TREE_OPERAND (arg, 1)));
2201 case NON_LVALUE_EXPR:
2202 return invert_truthvalue (TREE_OPERAND (arg, 0));
/* Conversions etc.: rebuild the node around the negated operand.  */
2207 return build1 (TREE_CODE (arg), type,
2208 invert_truthvalue (TREE_OPERAND (arg, 0)));
/* Bit-AND with 1 acts as a truth value; negate by comparing to 0.  */
2211 if (!integer_onep (TREE_OPERAND (arg, 1)))
2213 return build (EQ_EXPR, type, arg, convert (type, integer_zero_node));
2216 return build1 (TRUTH_NOT_EXPR, type, arg);
/* Fallback: only boolean-typed values may reach here.  */
2218 if (TREE_CODE (TREE_TYPE (arg)) != BOOLEAN_TYPE)
2220 return build1 (TRUTH_NOT_EXPR, type, arg);
2223 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
2224 operands are another bit-wise operation with a common input. If so,
2225 distribute the bit operations to save an operation and possibly two if
2226 constants are involved. For example, convert
2227 (A | B) & (A | C) into A | (B & C)
2228 Further simplification will occur if B and C are constants.
2230 If this optimization cannot be done, 0 will be returned. */
/* NOTE(review): excerpt omits the function header, local declarations,
   braces and the failure returns.  */
2233 distribute_bit_expr (code, type, arg0, arg1)
2234 enum tree_code code;
/* Both operands must be the same kind of AND/IOR, different from CODE.  */
2241 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2242 || TREE_CODE (arg0) == code
2243 || (TREE_CODE (arg0) != BIT_AND_EXPR
2244 && TREE_CODE (arg0) != BIT_IOR_EXPR))
/* Find the shared operand; the other two become LEFT and RIGHT.
   All four operand arrangements are checked.  */
2247 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
2249 common = TREE_OPERAND (arg0, 0);
2250 left = TREE_OPERAND (arg0, 1);
2251 right = TREE_OPERAND (arg1, 1);
2253 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
2255 common = TREE_OPERAND (arg0, 0);
2256 left = TREE_OPERAND (arg0, 1);
2257 right = TREE_OPERAND (arg1, 0);
2259 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
2261 common = TREE_OPERAND (arg0, 1);
2262 left = TREE_OPERAND (arg0, 0);
2263 right = TREE_OPERAND (arg1, 1);
2265 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
2267 common = TREE_OPERAND (arg0, 1);
2268 left = TREE_OPERAND (arg0, 0);
2269 right = TREE_OPERAND (arg1, 0);
/* Rebuild as COMMON op (LEFT code RIGHT), folding the inner operation
   so constant LEFT/RIGHT collapse immediately.  */
2274 return fold (build (TREE_CODE (arg0), type, common,
2275 fold (build (code, type, left, right))));
2278 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
2279 starting at BITPOS. The field is unsigned if UNSIGNEDP is non-zero. */
2282 make_bit_field_ref (inner, type, bitsize, bitpos, unsignedp)
2285 int bitsize, bitpos;
/* Size and position are passed as sizetype constants on the node.  */
2288 tree result = build (BIT_FIELD_REF, type, inner,
2289 size_int (bitsize), size_int (bitpos));
2291 TREE_UNSIGNED (result) = unsignedp;
2296 /* Optimize a bit-field compare.
2298 There are two cases: First is a compare against a constant and the
2299 second is a comparison of two items where the fields are at the same
2300 bit position relative to the start of a chunk (byte, halfword, word)
2301 large enough to contain it. In these cases we can avoid the shift
2302 implicit in bitfield extractions.
2304 For constants, we emit a compare of the shifted constant with the
2305 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
2306 compared. For two fields at the same position, we do the ANDs with the
2307 similar mask and compare the result of the ANDs.
2309 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
2310 COMPARE_TYPE is the type of the comparison, and LHS and RHS
2311 are the left and right operands of the comparison, respectively.
2313 If the optimization described above can be done, we return the resulting
2314 tree. Otherwise we return zero. */
/* NOTE(review): excerpt omits the function header, some declarations,
   braces and early returns; comments describe visible code only.  */
2317 optimize_bit_field_compare (code, compare_type, lhs, rhs)
2318 enum tree_code code;
2322 int lbitpos, lbitsize, rbitpos, rbitsize;
2323 int lnbitpos, lnbitsize, rnbitpos, rnbitsize;
2324 tree type = TREE_TYPE (lhs);
2325 tree signed_type, unsigned_type;
2326 int const_p = TREE_CODE (rhs) == INTEGER_CST;
2327 enum machine_mode lmode, rmode, lnmode, rnmode;
2328 int lunsignedp, runsignedp;
2329 int lvolatilep = 0, rvolatilep = 0;
2330 tree linner, rinner;
2334 /* Get all the information about the extractions being done. If the bit size
2335 if the same as the size of the underlying object, we aren't doing an
2336 extraction at all and so can do nothing. */
2337 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
2338 &lunsignedp, &lvolatilep);
2339 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
2345 /* If this is not a constant, we can only do something if bit positions,
2346 sizes, and signedness are the same. */
2347 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset,
2348 &rmode, &runsignedp, &rvolatilep);
2350 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
2351 || lunsignedp != runsignedp || offset != 0)
2355 /* See if we can find a mode to refer to this field. We should be able to,
2356 but fail if we can't. */
2357 lnmode = get_best_mode (lbitsize, lbitpos,
2358 TYPE_ALIGN (TREE_TYPE (linner)), word_mode,
2360 if (lnmode == VOIDmode)
2363 /* Set signed and unsigned types of the precision of this mode for the
2365 signed_type = type_for_mode (lnmode, 0);
2366 unsigned_type = type_for_mode (lnmode, 1);
/* Likewise find a mode for the right-hand field (non-constant case).  */
2370 rnmode = get_best_mode (rbitsize, rbitpos,
2371 TYPE_ALIGN (TREE_TYPE (rinner)), word_mode,
2373 if (rnmode == VOIDmode)
2377 /* Compute the bit position and size for the new reference and our offset
2378 within it. If the new reference is the same size as the original, we
2379 won't optimize anything, so return zero. */
2380 lnbitsize = GET_MODE_BITSIZE (lnmode);
2381 lnbitpos = lbitpos & ~ (lnbitsize - 1);
2382 lbitpos -= lnbitpos;
2383 if (lnbitsize == lbitsize)
2388 rnbitsize = GET_MODE_BITSIZE (rnmode);
2389 rnbitpos = rbitpos & ~ (rnbitsize - 1);
2390 rbitpos -= rnbitpos;
2391 if (rnbitsize == rbitsize)
/* Bit numbering runs from the other end on big-endian targets.  */
2395 #if BYTES_BIG_ENDIAN
2396 lbitpos = lnbitsize - lbitsize - lbitpos;
2399 /* Make the mask to be used against the extracted field. */
/* Build an all-ones constant, then shift left and right to leave
   LBITSIZE one-bits positioned at LBITPOS within the chunk.  */
2400 mask = build_int_2 (~0, ~0);
2401 TREE_TYPE (mask) = unsigned_type;
2402 force_fit_type (mask, 0);
2403 mask = convert (unsigned_type, mask);
2404 mask = const_binop (LSHIFT_EXPR, mask, size_int (lnbitsize - lbitsize), 0);
2405 mask = const_binop (RSHIFT_EXPR, mask,
2406 size_int (lnbitsize - lbitsize - lbitpos), 0);
2409 /* If not comparing with constant, just rework the comparison
/* Field-vs-field case: compare (chunk & mask) on both sides.  */
2411 return build (code, compare_type,
2412 build (BIT_AND_EXPR, unsigned_type,
2413 make_bit_field_ref (linner, unsigned_type,
2414 lnbitsize, lnbitpos, 1),
2416 build (BIT_AND_EXPR, unsigned_type,
2417 make_bit_field_ref (rinner, unsigned_type,
2418 rnbitsize, rnbitpos, 1),
2421 /* Otherwise, we are handling the constant case. See if the constant is too
2422 big for the field. Warn and return a tree of for 0 (false) if so. We do
2423 this not only for its own sake, but to avoid having to test for this
2424 error case below. If we didn't, we might generate wrong code.
2426 For unsigned fields, the constant shifted right by the field length should
2427 be all zero. For signed fields, the high-order bits should agree with
/* Unsigned field: any bit above the field width makes the compare trivial.  */
2432 if (! integer_zerop (const_binop (RSHIFT_EXPR,
2433 convert (unsigned_type, rhs),
2434 size_int (lbitsize), 0)))
2436 warning ("comparison is always %s due to width of bitfield",
2437 code == NE_EXPR ? "one" : "zero");
2438 return convert (compare_type,
2440 ? integer_one_node : integer_zero_node));
/* Signed field: bits above the sign bit must all equal the sign bit.  */
2445 tree tem = const_binop (RSHIFT_EXPR, convert (signed_type, rhs),
2446 size_int (lbitsize - 1), 0);
2447 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
2449 warning ("comparison is always %s due to width of bitfield",
2450 code == NE_EXPR ? "one" : "zero");
2451 return convert (compare_type,
2453 ? integer_one_node : integer_zero_node));
2457 /* Single-bit compares should always be against zero. */
2458 if (lbitsize == 1 && ! integer_zerop (rhs))
2460 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
2461 rhs = convert (type, integer_zero_node);
2464 /* Make a new bitfield reference, shift the constant over the
2465 appropriate number of bits and mask it with the computed mask
2466 (in case this was a signed field). If we changed it, make a new one. */
2467 lhs = make_bit_field_ref (linner, unsigned_type, lnbitsize, lnbitpos, 1);
/* Preserve volatility semantics on the widened reference.  */
2470 TREE_SIDE_EFFECTS (lhs) = 1;
2471 TREE_THIS_VOLATILE (lhs) = 1;
/* Align the constant with the field's position and mask off excess bits.  */
2474 rhs = fold (const_binop (BIT_AND_EXPR,
2475 const_binop (LSHIFT_EXPR,
2476 convert (unsigned_type, rhs),
2477 size_int (lbitpos), 0),
/* Final form: (chunk & mask) CODE shifted-constant.  */
2480 return build (code, compare_type,
2481 build (BIT_AND_EXPR, unsigned_type, lhs, mask),
2485 /* Subroutine for fold_truthop: decode a field reference.
2487 If EXP is a comparison reference, we return the innermost reference.
2489 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
2490 set to the starting bit number.
2492 If the innermost field can be completely contained in a mode-sized
2493 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
2495 *PVOLATILEP is set to 1 if the any expression encountered is volatile;
2496 otherwise it is not changed.
2498 *PUNSIGNEDP is set to the signedness of the field.
2500 *PMASK is set to the mask used. This is either contained in a
2501 BIT_AND_EXPR or derived from the width of the field.
2503 Return 0 if this is not a component reference or is one that we can't
2504 do anything with. */
/* NOTE(review): excerpt omits the function header's tail, local
   declarations, braces and returns; comments describe visible code only.  */
2507 decode_field_reference (exp, pbitsize, pbitpos, pmode, punsignedp,
2510 int *pbitsize, *pbitpos;
2511 enum machine_mode *pmode;
2512 int *punsignedp, *pvolatilep;
2519 /* All the optimizations using this function assume integer fields.
2520 There are problems with FP fields since the type_for_size call
2521 below can fail for, e.g., XFmode. */
2522 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
/* Peel off an explicit mask, remembering it for the caller.  */
2527 if (TREE_CODE (exp) == BIT_AND_EXPR)
2529 mask = TREE_OPERAND (exp, 1);
2530 exp = TREE_OPERAND (exp, 0);
2531 STRIP_NOPS (exp); STRIP_NOPS (mask);
2532 if (TREE_CODE (mask) != INTEGER_CST)
/* Only actual field-like references are handled.  */
2536 if (TREE_CODE (exp) != COMPONENT_REF && TREE_CODE (exp) != ARRAY_REF
2537 && TREE_CODE (exp) != BIT_FIELD_REF)
2540 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
2541 punsignedp, pvolatilep);
2542 if (inner == exp || *pbitsize < 0 || offset != 0)
/* No explicit mask: derive one of *PBITSIZE low-order ones by shifting
   an all-ones constant left then right within the field's precision.  */
2547 tree unsigned_type = type_for_size (*pbitsize, 1);
2548 int precision = TYPE_PRECISION (unsigned_type);
2550 mask = build_int_2 (~0, ~0);
2551 TREE_TYPE (mask) = unsigned_type;
2552 force_fit_type (mask, 0);
2553 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
2554 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
2561 /* Return non-zero if MASK represents a mask of SIZE ones in the low-order
/* NOTE(review): the rest of this header comment and the function header
   are omitted from the excerpt.  */
2565 all_ones_mask_p (mask, size)
2569 tree type = TREE_TYPE (mask);
2570 int precision = TYPE_PRECISION (type);
/* Build the reference mask the same way decode_field_reference does:
   all ones, shifted to leave SIZE low-order one-bits.  */
2573 tmask = build_int_2 (~0, ~0);
2574 TREE_TYPE (tmask) = signed_type (type);
2575 force_fit_type (tmask, 0);
2577 operand_equal_p (mask,
2578 const_binop (RSHIFT_EXPR,
2579 const_binop (LSHIFT_EXPR, tmask,
2580 size_int (precision - size), 0),
2581 size_int (precision - size), 0),
2585 /* Subroutine for fold_truthop: determine if an operand is simple enough
2586 to be evaluated unconditionally. */
2589 simple_operand_p (exp)
2592 /* Strip any conversions that don't change the machine mode. */
2593 while ((TREE_CODE (exp) == NOP_EXPR
2594 || TREE_CODE (exp) == CONVERT_EXPR)
2595 && (TYPE_MODE (TREE_TYPE (exp))
2596 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
2597 exp = TREE_OPERAND (exp, 0);
/* Constants ('c' class) are always simple; declarations ('d' class)
   qualify only when cheap and safe to load unconditionally.  */
2599 return (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c'
2600 || (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd'
2601 && ! TREE_ADDRESSABLE (exp)
2602 && ! TREE_THIS_VOLATILE (exp)
2603 && ! DECL_NONLOCAL (exp)
2604 /* Don't regard global variables as simple. They may be
2605 allocated in ways unknown to the compiler (shared memory,
2606 #pragma weak, etc). */
2607 && ! TREE_PUBLIC (exp)
2608 && ! DECL_EXTERNAL (exp)
2609 /* Loading a static variable is unduly expensive, but global
2610 registers aren't expensive. */
2611 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
2614 /* Subroutine for fold_truthop: try to optimize a range test.
2616 For example, "i >= 2 && i <= 9" can be done as "(unsigned) (i - 2) <= 7".
2618 JCODE is the logical combination of the two terms. It is TRUTH_AND_EXPR
2619 (representing TRUTH_ANDIF_EXPR and TRUTH_AND_EXPR) or TRUTH_OR_EXPR
2620 (representing TRUTH_ORIF_EXPR and TRUTH_OR_EXPR). TYPE is the type of
2623 VAR is the value being tested. LO_CODE and HI_CODE are the comparison
2624 operators comparing VAR to LO_CST and HI_CST. LO_CST is known to be no
2625 larger than HI_CST (they may be equal).
2627 We return the simplified tree or 0 if no optimization is possible. */
/* NOTE(review): excerpt omits the function header's tail, some
   declarations, braces and returns; comments cover visible code only.  */
2630 range_test (jcode, type, lo_code, hi_code, var, lo_cst, hi_cst)
2631 enum tree_code jcode, lo_code, hi_code;
2632 tree type, var, lo_cst, hi_cst;
2635 enum tree_code rcode;
2637 /* See if this is a range test and normalize the constant terms. */
2639 if (jcode == TRUTH_AND_EXPR)
2644 /* See if we have VAR != CST && VAR != CST+1. */
2645 if (! (hi_code == NE_EXPR
2646 && TREE_INT_CST_LOW (hi_cst) - TREE_INT_CST_LOW (lo_cst) == 1
2647 && tree_int_cst_equal (integer_one_node,
2648 const_binop (MINUS_EXPR,
2649 hi_cst, lo_cst, 0))))
/* Normalize strict bounds to the inclusive form VAR >= LO && VAR <= HI.  */
2657 if (hi_code == LT_EXPR)
2658 hi_cst = const_binop (MINUS_EXPR, hi_cst, integer_one_node, 0);
2659 else if (hi_code != LE_EXPR)
2662 if (lo_code == GT_EXPR)
2663 lo_cst = const_binop (PLUS_EXPR, lo_cst, integer_one_node, 0);
2665 /* We now have VAR >= LO_CST && VAR <= HI_CST. */
2678 /* See if we have VAR == CST || VAR == CST+1. */
2679 if (! (hi_code == EQ_EXPR
2680 && TREE_INT_CST_LOW (hi_cst) - TREE_INT_CST_LOW (lo_cst) == 1
2681 && tree_int_cst_equal (integer_one_node,
2682 const_binop (MINUS_EXPR,
2683 hi_cst, lo_cst, 0))))
/* Normalize inclusive bounds to the exclusive form VAR < LO || VAR > HI.  */
2691 if (hi_code == GE_EXPR)
2692 hi_cst = const_binop (MINUS_EXPR, hi_cst, integer_one_node, 0);
2693 else if (hi_code != GT_EXPR)
2696 if (lo_code == LE_EXPR)
2697 lo_cst = const_binop (PLUS_EXPR, lo_cst, integer_one_node, 0);
2699 /* We now have VAR < LO_CST || VAR > HI_CST. */
2708 /* When normalizing, it is possible to both increment the smaller constant
2709 and decrement the larger constant. See if they are still ordered. */
2710 if (tree_int_cst_lt (hi_cst, lo_cst))
2713 /* Fail if VAR isn't an integer. */
2714 utype = TREE_TYPE (var);
2715 if (! INTEGRAL_TYPE_P (utype))
2718 /* The range test is invalid if subtracting the two constants results
2719 in overflow. This can happen in traditional mode. */
2720 if (! int_fits_type_p (hi_cst, TREE_TYPE (var))
2721 || ! int_fits_type_p (lo_cst, TREE_TYPE (var)))
/* The single-compare trick requires unsigned wraparound arithmetic.  */
2724 if (! TREE_UNSIGNED (utype))
2726 utype = unsigned_type (utype);
2727 var = convert (utype, var);
2728 lo_cst = convert (utype, lo_cst);
2729 hi_cst = convert (utype, hi_cst);
/* Emit (unsigned)(VAR - LO_CST) RCODE (HI_CST - LO_CST).  */
2732 return fold (convert (type,
2733 build (rcode, utype,
2734 build (MINUS_EXPR, utype, var, lo_cst),
2735 const_binop (MINUS_EXPR, hi_cst, lo_cst, 0))));
2738 /* Find ways of folding logical expressions of LHS and RHS:
2739 Try to merge two comparisons to the same innermost item.
2740 Look for range tests like "ch >= '0' && ch <= '9'".
2741 Look for combinations of simple terms on machines with expensive branches
2742 and evaluate the RHS unconditionally.
2744 For example, if we have p->a == 2 && p->b == 4 and we can make an
2745 object large enough to span both A and B, we can do this with a comparison
2746 against the object ANDed with the a mask.
2748 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
2749 operations to do this with one comparison.
2751 We check for both normal comparisons and the BIT_AND_EXPRs made by this
2752 function and the one above.
2754 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
2755 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
2757 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
2760 We return the simplified tree or 0 if no optimization is possible. */
/* NOTE(review): non-contiguous internal line numbers show that declarations
   (e.g. VOLATILEP, RESULT, TYPE), several `return 0' paths, `else' arms and
   braces are elided from this listing; check the full file before editing.  */
2763 fold_truthop (code, truth_type, lhs, rhs)
2764 enum tree_code code;
2765 tree truth_type, lhs, rhs;
2767 /* If this is the "or" of two comparisons, we can do something if
2768 the comparisons are NE_EXPR. If this is the "and", we can do something
2769 if the comparisons are EQ_EXPR. I.e.,
2770 (a->b == 2 && a->c == 4) can become (a->new == NEW).
2772 WANTED_CODE is this operation code. For single bit fields, we can
2773 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
2774 comparison for one-bit fields. */
2776 enum tree_code wanted_code;
2777 enum tree_code lcode, rcode;
/* Naming convention: first letter = left/right comparison, second letter =
   left/right operand of that comparison (so lr_* is the RHS of LHS).  */
2778 tree ll_arg, lr_arg, rl_arg, rr_arg;
2779 tree ll_inner, lr_inner, rl_inner, rr_inner;
2780 int ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
2781 int rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
2782 int xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
2783 int lnbitsize, lnbitpos, rnbitsize, rnbitpos;
2784 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
2785 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
2786 enum machine_mode lnmode, rnmode;
2787 tree ll_mask, lr_mask, rl_mask, rr_mask;
2788 tree l_const, r_const;
2790 int first_bit, end_bit;
2793 /* Start by getting the comparison codes and seeing if this looks like
2794 a range test. Fail if anything is volatile. If one operand is a
2795 BIT_AND_EXPR with the constant one, treat it as if it were surrounded
2798 if (TREE_SIDE_EFFECTS (lhs)
2799 || TREE_SIDE_EFFECTS (rhs))
2802 lcode = TREE_CODE (lhs);
2803 rcode = TREE_CODE (rhs);
/* (X & 1) is canonicalized to (X & 1) != 0 so it merges like a comparison.  */
2805 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
2806 lcode = NE_EXPR, lhs = build (NE_EXPR, truth_type, lhs, integer_zero_node);
2808 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
2809 rcode = NE_EXPR, rhs = build (NE_EXPR, truth_type, rhs, integer_zero_node);
2811 if (TREE_CODE_CLASS (lcode) != '<'
2812 || TREE_CODE_CLASS (rcode) != '<')
/* Collapse the short-circuit forms to their unconditional counterparts.  */
2815 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
2816 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
2818 ll_arg = TREE_OPERAND (lhs, 0);
2819 lr_arg = TREE_OPERAND (lhs, 1);
2820 rl_arg = TREE_OPERAND (rhs, 0);
2821 rr_arg = TREE_OPERAND (rhs, 1);
/* VAR <op> CST1 && VAR <op> CST2: try turning it into a range test,
   passing the smaller constant first.  */
2823 if (TREE_CODE (lr_arg) == INTEGER_CST
2824 && TREE_CODE (rr_arg) == INTEGER_CST
2825 && operand_equal_p (ll_arg, rl_arg, 0))
2827 if (tree_int_cst_lt (lr_arg, rr_arg))
2828 result = range_test (code, truth_type, lcode, rcode,
2829 ll_arg, lr_arg, rr_arg);
2831 result = range_test (code, truth_type, rcode, lcode,
2832 ll_arg, rr_arg, lr_arg);
2834 /* If this isn't a range test, it also isn't a comparison that
2835 can be merged. However, it wins to evaluate the RHS unconditionally
2836 on machines with expensive branches. */
2838 if (result == 0 && BRANCH_COST >= 2)
2840 if (TREE_CODE (ll_arg) != VAR_DECL
2841 && TREE_CODE (ll_arg) != PARM_DECL)
2843 /* Avoid evaluating the variable part twice. */
2844 ll_arg = save_expr (ll_arg);
2845 lhs = build (lcode, TREE_TYPE (lhs), ll_arg, lr_arg);
2846 rhs = build (rcode, TREE_TYPE (rhs), ll_arg, rr_arg);
2848 return build (code, truth_type, lhs, rhs);
2853 /* If the RHS can be evaluated unconditionally and its operands are
2854 simple, it wins to evaluate the RHS unconditionally on machines
2855 with expensive branches. In this case, this isn't a comparison
2856 that can be merged. */
2858 /* @@ I'm not sure it wins on the m88110 to do this if the comparisons
2859 are with zero (tmw). */
2861 if (BRANCH_COST >= 2
2862 && INTEGRAL_TYPE_P (TREE_TYPE (rhs))
2863 && simple_operand_p (rl_arg)
2864 && simple_operand_p (rr_arg))
2865 return build (code, truth_type, lhs, rhs);
2867 /* See if the comparisons can be merged. Then get all the parameters for
2870 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
2871 || (rcode != EQ_EXPR && rcode != NE_EXPR))
/* Decompose all four operands into inner object, bit position/size, mode,
   signedness and mask; VOLATILEP accumulates across all four calls.  */
2875 ll_inner = decode_field_reference (ll_arg,
2876 &ll_bitsize, &ll_bitpos, &ll_mode,
2877 &ll_unsignedp, &volatilep, &ll_mask);
2878 lr_inner = decode_field_reference (lr_arg,
2879 &lr_bitsize, &lr_bitpos, &lr_mode,
2880 &lr_unsignedp, &volatilep, &lr_mask);
2881 rl_inner = decode_field_reference (rl_arg,
2882 &rl_bitsize, &rl_bitpos, &rl_mode,
2883 &rl_unsignedp, &volatilep, &rl_mask);
2884 rr_inner = decode_field_reference (rr_arg,
2885 &rr_bitsize, &rr_bitpos, &rr_mode,
2886 &rr_unsignedp, &volatilep, &rr_mask);
2888 /* It must be true that the inner operation on the lhs of each
2889 comparison must be the same if we are to be able to do anything.
2890 Then see if we have constants. If not, the same must be true for
2892 if (volatilep || ll_inner == 0 || rl_inner == 0
2893 || ! operand_equal_p (ll_inner, rl_inner, 0))
2896 if (TREE_CODE (lr_arg) == INTEGER_CST
2897 && TREE_CODE (rr_arg) == INTEGER_CST)
2898 l_const = lr_arg, r_const = rr_arg;
2899 else if (lr_inner == 0 || rr_inner == 0
2900 || ! operand_equal_p (lr_inner, rr_inner, 0))
2903 l_const = r_const = 0;
2905 /* If either comparison code is not correct for our logical operation,
2906 fail. However, we can convert a one-bit comparison against zero into
2907 the opposite comparison against that bit being set in the field. */
2909 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
2910 if (lcode != wanted_code)
2912 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
2918 if (rcode != wanted_code)
2920 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
2926 /* See if we can find a mode that contains both fields being compared on
2927 the left. If we can't, fail. Otherwise, update all constants and masks
2928 to be relative to a field of that size. */
2929 first_bit = MIN (ll_bitpos, rl_bitpos);
2930 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
2931 lnmode = get_best_mode (end_bit - first_bit, first_bit,
2932 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
2934 if (lnmode == VOIDmode)
2937 lnbitsize = GET_MODE_BITSIZE (lnmode);
2938 lnbitpos = first_bit & ~ (lnbitsize - 1);
2939 type = type_for_size (lnbitsize, 1);
/* x*_bitpos are the field positions re-expressed within the new field.  */
2940 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
2942 #if BYTES_BIG_ENDIAN
2943 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
2944 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
/* Shift each mask into its position within the wider field.  */
2947 ll_mask = const_binop (LSHIFT_EXPR, convert (type, ll_mask),
2948 size_int (xll_bitpos), 0);
2949 rl_mask = const_binop (LSHIFT_EXPR, convert (type, rl_mask),
2950 size_int (xrl_bitpos), 0);
2952 /* Make sure the constants are interpreted as unsigned, so we
2953 don't have sign bits outside the range of their type. */
2957 l_const = convert (unsigned_type (TREE_TYPE (l_const)), l_const);
2958 l_const = const_binop (LSHIFT_EXPR, convert (type, l_const),
2959 size_int (xll_bitpos), 0);
2963 r_const = convert (unsigned_type (TREE_TYPE (r_const)), r_const);
2964 r_const = const_binop (LSHIFT_EXPR, convert (type, r_const),
2965 size_int (xrl_bitpos), 0);
2968 /* If the right sides are not constant, do the same for it. Also,
2969 disallow this optimization if a size or signedness mismatch occurs
2970 between the left and right sides. */
2973 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
2974 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
2975 /* Make sure the two fields on the right
2976 correspond to the left without being swapped. */
2977 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
2980 first_bit = MIN (lr_bitpos, rr_bitpos);
2981 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
2982 rnmode = get_best_mode (end_bit - first_bit, first_bit,
2983 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
2985 if (rnmode == VOIDmode)
2988 rnbitsize = GET_MODE_BITSIZE (rnmode);
2989 rnbitpos = first_bit & ~ (rnbitsize - 1);
2990 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
2992 #if BYTES_BIG_ENDIAN
2993 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
2994 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
2997 lr_mask = const_binop (LSHIFT_EXPR, convert (type, lr_mask),
2998 size_int (xlr_bitpos), 0);
2999 rr_mask = const_binop (LSHIFT_EXPR, convert (type, rr_mask),
3000 size_int (xrr_bitpos), 0);
3002 /* Make a mask that corresponds to both fields being compared.
3003 Do this for both items being compared. If the masks agree,
3004 we can do this by masking both and comparing the masked
3006 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
3007 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
3008 if (operand_equal_p (ll_mask, lr_mask, 0) && lnbitsize == rnbitsize)
3010 lhs = make_bit_field_ref (ll_inner, type, lnbitsize, lnbitpos,
3011 ll_unsignedp || rl_unsignedp);
3012 rhs = make_bit_field_ref (lr_inner, type, rnbitsize, rnbitpos,
3013 lr_unsignedp || rr_unsignedp);
3014 if (! all_ones_mask_p (ll_mask, lnbitsize))
3016 lhs = build (BIT_AND_EXPR, type, lhs, ll_mask);
3017 rhs = build (BIT_AND_EXPR, type, rhs, ll_mask);
3019 return build (wanted_code, truth_type, lhs, rhs);
3022 /* There is still another way we can do something: If both pairs of
3023 fields being compared are adjacent, we may be able to make a wider
3024 field containing them both. */
3025 if ((ll_bitsize + ll_bitpos == rl_bitpos
3026 && lr_bitsize + lr_bitpos == rr_bitpos)
3027 || (ll_bitpos == rl_bitpos + rl_bitsize
3028 && lr_bitpos == rr_bitpos + rr_bitsize))
3029 return build (wanted_code, truth_type,
3030 make_bit_field_ref (ll_inner, type,
3031 ll_bitsize + rl_bitsize,
3032 MIN (ll_bitpos, rl_bitpos),
3034 make_bit_field_ref (lr_inner, type,
3035 lr_bitsize + rr_bitsize,
3036 MIN (lr_bitpos, rr_bitpos),
3042 /* Handle the case of comparisons with constants. If there is something in
3043 common between the masks, those bits of the constants must be the same.
3044 If not, the condition is always false. Test for this to avoid generating
3045 incorrect code below. */
3046 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
3047 if (! integer_zerop (result)
3048 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
3049 const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
3051 if (wanted_code == NE_EXPR)
3053 warning ("`or' of unmatched not-equal tests is always 1");
3054 return convert (truth_type, integer_one_node);
3058 warning ("`and' of mutually exclusive equal-tests is always zero");
3059 return convert (truth_type, integer_zero_node);
3063 /* Construct the expression we will return. First get the component
3064 reference we will make. Unless the mask is all ones the width of
3065 that field, perform the mask operation. Then compare with the
3067 result = make_bit_field_ref (ll_inner, type, lnbitsize, lnbitpos,
3068 ll_unsignedp || rl_unsignedp);
3070 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
3071 if (! all_ones_mask_p (ll_mask, lnbitsize))
3072 result = build (BIT_AND_EXPR, type, result, ll_mask);
3074 return build (wanted_code, truth_type, result,
3075 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
3078 /* Perform constant folding and related simplification of EXPR.
3079 The related simplifications include x*1 => x, x*0 => 0, etc.,
3080 and application of the associative law.
3081 NOP_EXPR conversions may be removed freely (as long as we
3082 are careful not to change the C type of the overall expression)
3083 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
3084 but we can constant-fold them if they have constant operands. */
3090 register tree t = expr;
3091 tree t1 = NULL_TREE;
3093 tree type = TREE_TYPE (expr);
3094 register tree arg0, arg1;
3095 register enum tree_code code = TREE_CODE (t);
3099 /* WINS will be nonzero when the switch is done
3100 if all operands are constant. */
3104 /* Don't try to process an RTL_EXPR since its operands aren't trees. */
3105 if (code == RTL_EXPR)
3108 /* Return right away if already constant. */
3109 if (TREE_CONSTANT (t))
3111 if (code == CONST_DECL)
3112 return DECL_INITIAL (t);
3116 kind = TREE_CODE_CLASS (code);
3117 if (code == NOP_EXPR || code == FLOAT_EXPR || code == CONVERT_EXPR)
3121 /* Special case for conversion ops that can have fixed point args. */
3122 arg0 = TREE_OPERAND (t, 0);
3124 /* Don't use STRIP_NOPS, because signedness of argument type matters. */
3126 STRIP_TYPE_NOPS (arg0);
3128 if (arg0 != 0 && TREE_CODE (arg0) == COMPLEX_CST)
3129 subop = TREE_REALPART (arg0);
3133 if (subop != 0 && TREE_CODE (subop) != INTEGER_CST
3134 #if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
3135 && TREE_CODE (subop) != REAL_CST
3136 #endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
3138 /* Note that TREE_CONSTANT isn't enough:
3139 static var addresses are constant but we can't
3140 do arithmetic on them. */
3143 else if (kind == 'e' || kind == '<'
3144 || kind == '1' || kind == '2' || kind == 'r')
3146 register int len = tree_code_length[(int) code];
3148 for (i = 0; i < len; i++)
3150 tree op = TREE_OPERAND (t, i);
3154 continue; /* Valid for CALL_EXPR, at least. */
3156 if (kind == '<' || code == RSHIFT_EXPR)
3158 /* Signedness matters here. Perhaps we can refine this
3160 STRIP_TYPE_NOPS (op);
3164 /* Strip any conversions that don't change the mode. */
3168 if (TREE_CODE (op) == COMPLEX_CST)
3169 subop = TREE_REALPART (op);
3173 if (TREE_CODE (subop) != INTEGER_CST
3174 #if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
3175 && TREE_CODE (subop) != REAL_CST
3176 #endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
3178 /* Note that TREE_CONSTANT isn't enough:
3179 static var addresses are constant but we can't
3180 do arithmetic on them. */
3190 /* If this is a commutative operation, and ARG0 is a constant, move it
3191 to ARG1 to reduce the number of tests below. */
3192 if ((code == PLUS_EXPR || code == MULT_EXPR || code == MIN_EXPR
3193 || code == MAX_EXPR || code == BIT_IOR_EXPR || code == BIT_XOR_EXPR
3194 || code == BIT_AND_EXPR)
3195 && (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST))
3197 tem = arg0; arg0 = arg1; arg1 = tem;
3199 tem = TREE_OPERAND (t, 0); TREE_OPERAND (t, 0) = TREE_OPERAND (t, 1);
3200 TREE_OPERAND (t, 1) = tem;
3203 /* Now WINS is set as described above,
3204 ARG0 is the first operand of EXPR,
3205 and ARG1 is the second operand (if it has more than one operand).
3207 First check for cases where an arithmetic operation is applied to a
3208 compound, conditional, or comparison operation. Push the arithmetic
3209 operation inside the compound or conditional to see if any folding
3210 can then be done. Convert comparison to conditional for this purpose.
3211 This also optimizes non-constant cases that used to be done in
3214 Before we do that, see if this is a BIT_AND_EXPR or a BIT_OR_EXPR,
3215 one of the operands is a comparison and the other is a comparison, a
3216 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
3217 code below would make the expression more complex. Change it to a
3218 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
3219 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
3221 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
3222 || code == EQ_EXPR || code == NE_EXPR)
3223 && ((truth_value_p (TREE_CODE (arg0))
3224 && (truth_value_p (TREE_CODE (arg1))
3225 || (TREE_CODE (arg1) == BIT_AND_EXPR
3226 && integer_onep (TREE_OPERAND (arg1, 1)))))
3227 || (truth_value_p (TREE_CODE (arg1))
3228 && (truth_value_p (TREE_CODE (arg0))
3229 || (TREE_CODE (arg0) == BIT_AND_EXPR
3230 && integer_onep (TREE_OPERAND (arg0, 1)))))))
3232 t = fold (build (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
3233 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
3237 if (code == EQ_EXPR)
3238 t = invert_truthvalue (t);
3243 if (TREE_CODE_CLASS (code) == '1')
3245 if (TREE_CODE (arg0) == COMPOUND_EXPR)
3246 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
3247 fold (build1 (code, type, TREE_OPERAND (arg0, 1))));
3248 else if (TREE_CODE (arg0) == COND_EXPR)
3250 t = fold (build (COND_EXPR, type, TREE_OPERAND (arg0, 0),
3251 fold (build1 (code, type, TREE_OPERAND (arg0, 1))),
3252 fold (build1 (code, type, TREE_OPERAND (arg0, 2)))));
3254 /* If this was a conversion, and all we did was to move into
3255 inside the COND_EXPR, bring it back out. Then return so we
3256 don't get into an infinite recursion loop taking the conversion
3257 out and then back in. */
3259 if ((code == NOP_EXPR || code == CONVERT_EXPR
3260 || code == NON_LVALUE_EXPR)
3261 && TREE_CODE (t) == COND_EXPR
3262 && TREE_CODE (TREE_OPERAND (t, 1)) == code
3263 && TREE_CODE (TREE_OPERAND (t, 2)) == code
3264 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0))
3265 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 2), 0))))
3266 t = build1 (code, type,
3268 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0)),
3269 TREE_OPERAND (t, 0),
3270 TREE_OPERAND (TREE_OPERAND (t, 1), 0),
3271 TREE_OPERAND (TREE_OPERAND (t, 2), 0)));
3274 else if (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<')
3275 return fold (build (COND_EXPR, type, arg0,
3276 fold (build1 (code, type, integer_one_node)),
3277 fold (build1 (code, type, integer_zero_node))));
3279 else if (TREE_CODE_CLASS (code) == '2'
3280 || TREE_CODE_CLASS (code) == '<')
3282 if (TREE_CODE (arg1) == COMPOUND_EXPR)
3283 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
3284 fold (build (code, type,
3285 arg0, TREE_OPERAND (arg1, 1))));
3286 else if (TREE_CODE (arg1) == COND_EXPR
3287 || TREE_CODE_CLASS (TREE_CODE (arg1)) == '<')
3289 tree test, true_value, false_value;
3291 if (TREE_CODE (arg1) == COND_EXPR)
3293 test = TREE_OPERAND (arg1, 0);
3294 true_value = TREE_OPERAND (arg1, 1);
3295 false_value = TREE_OPERAND (arg1, 2);
3300 true_value = integer_one_node;
3301 false_value = integer_zero_node;
3304 /* If ARG0 is complex we want to make sure we only evaluate
3305 it once. Though this is only required if it is volatile, it
3306 might be more efficient even if it is not. However, if we
3307 succeed in folding one part to a constant, we do not need
3308 to make this SAVE_EXPR. Since we do this optimization
3309 primarily to see if we do end up with constant and this
3310 SAVE_EXPR interferes with later optimizations, suppressing
3311 it when we can is important. */
3313 if ((TREE_CODE (arg0) != VAR_DECL && TREE_CODE (arg0) != PARM_DECL)
3314 || TREE_SIDE_EFFECTS (arg0))
3316 tree lhs = fold (build (code, type, arg0, true_value));
3317 tree rhs = fold (build (code, type, arg0, false_value));
3319 if (TREE_CONSTANT (lhs) || TREE_CONSTANT (rhs))
3320 return fold (build (COND_EXPR, type, test, lhs, rhs));
3322 arg0 = save_expr (arg0);
3325 test = fold (build (COND_EXPR, type, test,
3326 fold (build (code, type, arg0, true_value)),
3327 fold (build (code, type, arg0, false_value))));
3328 if (TREE_CODE (arg0) == SAVE_EXPR)
3329 return build (COMPOUND_EXPR, type,
3330 convert (void_type_node, arg0), test);
3332 return convert (type, test);
3335 else if (TREE_CODE (arg0) == COMPOUND_EXPR)
3336 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
3337 fold (build (code, type, TREE_OPERAND (arg0, 1), arg1)));
3338 else if (TREE_CODE (arg0) == COND_EXPR
3339 || TREE_CODE_CLASS (TREE_CODE (arg0)) == '<')
3341 tree test, true_value, false_value;
3343 if (TREE_CODE (arg0) == COND_EXPR)
3345 test = TREE_OPERAND (arg0, 0);
3346 true_value = TREE_OPERAND (arg0, 1);
3347 false_value = TREE_OPERAND (arg0, 2);
3352 true_value = integer_one_node;
3353 false_value = integer_zero_node;
3356 if ((TREE_CODE (arg1) != VAR_DECL && TREE_CODE (arg1) != PARM_DECL)
3357 || TREE_SIDE_EFFECTS (arg1))
3359 tree lhs = fold (build (code, type, true_value, arg1));
3360 tree rhs = fold (build (code, type, false_value, arg1));
3362 if (TREE_CONSTANT (lhs) || TREE_CONSTANT (rhs))
3363 return fold (build (COND_EXPR, type, test, lhs, rhs));
3365 arg1 = save_expr (arg1);
3368 test = fold (build (COND_EXPR, type, test,
3369 fold (build (code, type, true_value, arg1)),
3370 fold (build (code, type, false_value, arg1))));
3371 if (TREE_CODE (arg1) == SAVE_EXPR)
3372 return build (COMPOUND_EXPR, type,
3373 convert (void_type_node, arg1), test);
3375 return convert (type, test);
3378 else if (TREE_CODE_CLASS (code) == '<'
3379 && TREE_CODE (arg0) == COMPOUND_EXPR)
3380 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
3381 fold (build (code, type, TREE_OPERAND (arg0, 1), arg1)));
3382 else if (TREE_CODE_CLASS (code) == '<'
3383 && TREE_CODE (arg1) == COMPOUND_EXPR)
3384 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
3385 fold (build (code, type, arg0, TREE_OPERAND (arg1, 1))));
3397 return fold (DECL_INITIAL (t));
3402 case FIX_TRUNC_EXPR:
3403 /* Other kinds of FIX are not handled properly by fold_convert. */
3405 /* In addition to the cases of two conversions in a row
3406 handled below, if we are converting something to its own
3407 type via an object of identical or wider precision, neither
3408 conversion is needed. */
3409 if ((TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
3410 || TREE_CODE (TREE_OPERAND (t, 0)) == CONVERT_EXPR)
3411 && TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0)) == TREE_TYPE (t)
3412 && ((INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 0)))
3413 && INTEGRAL_TYPE_P (TREE_TYPE (t)))
3414 || (FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 0)))
3415 && FLOAT_TYPE_P (TREE_TYPE (t))))
3416 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (t, 0)))
3417 >= TYPE_PRECISION (TREE_TYPE (t))))
3418 return TREE_OPERAND (TREE_OPERAND (t, 0), 0);
3420 /* Two conversions in a row are not needed unless:
3421 - the intermediate type is narrower than both initial and final, or
3422 - the intermediate type and innermost type differ in signedness,
3423 and the outermost type is wider than the intermediate, or
3424 - the initial type is a pointer type and the precisions of the
3425 intermediate and final types differ, or
3426 - the final type is a pointer type and the precisions of the
3427 initial and intermediate types differ. */
3428 if ((TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
3429 || TREE_CODE (TREE_OPERAND (t, 0)) == CONVERT_EXPR)
3430 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (t, 0)))
3431 > TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0)))
3433 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (t, 0)))
3434 > TYPE_PRECISION (TREE_TYPE (t)))
3435 && ! ((TREE_CODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0)))
3437 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (t, 0)))
3439 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (t, 0)))
3440 != TREE_UNSIGNED (TREE_OPERAND (TREE_OPERAND (t, 0), 0)))
3441 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (t, 0)))
3442 < TYPE_PRECISION (TREE_TYPE (t))))
3443 && ((TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (t, 0)))
3444 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (t, 0)))
3445 > TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0)))))
3447 (TREE_UNSIGNED (TREE_TYPE (t))
3448 && (TYPE_PRECISION (TREE_TYPE (t))
3449 > TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (t, 0))))))
3450 && ! ((TREE_CODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0)))
3452 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (t, 0)))
3453 != TYPE_PRECISION (TREE_TYPE (t))))
3454 && ! (TREE_CODE (TREE_TYPE (t)) == POINTER_TYPE
3455 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0)))
3456 != TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (t, 0))))))
3457 return convert (TREE_TYPE (t), TREE_OPERAND (TREE_OPERAND (t, 0), 0));
3459 if (TREE_CODE (TREE_OPERAND (t, 0)) == MODIFY_EXPR
3460 && TREE_CONSTANT (TREE_OPERAND (TREE_OPERAND (t, 0), 1))
3461 /* Detect assigning a bitfield. */
3462 && !(TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 0)) == COMPONENT_REF
3463 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t, 0), 0), 1))))
3465 /* Don't leave an assignment inside a conversion
3466 unless assigning a bitfield. */
3467 tree prev = TREE_OPERAND (t, 0);
3468 TREE_OPERAND (t, 0) = TREE_OPERAND (prev, 1);
3469 /* First do the assignment, then return converted constant. */
3470 t = build (COMPOUND_EXPR, TREE_TYPE (t), prev, fold (t));
3476 TREE_CONSTANT (t) = TREE_CONSTANT (arg0);
3479 return fold_convert (t, arg0);
3481 #if 0 /* This loses on &"foo"[0]. */
3486 /* Fold an expression like: "foo"[2] */
3487 if (TREE_CODE (arg0) == STRING_CST
3488 && TREE_CODE (arg1) == INTEGER_CST
3489 && !TREE_INT_CST_HIGH (arg1)
3490 && (i = TREE_INT_CST_LOW (arg1)) < TREE_STRING_LENGTH (arg0))
3492 t = build_int_2 (TREE_STRING_POINTER (arg0)[i], 0);
3493 TREE_TYPE (t) = TREE_TYPE (TREE_TYPE (arg0));
3494 force_fit_type (t, 0);
3501 TREE_CONSTANT (t) = wins;
3507 if (TREE_CODE (arg0) == INTEGER_CST)
3509 HOST_WIDE_INT low, high;
3510 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
3511 TREE_INT_CST_HIGH (arg0),
3513 t = build_int_2 (low, high);
3514 TREE_TYPE (t) = type;
3516 = (TREE_OVERFLOW (arg0)
3517 | force_fit_type (t, overflow));
3518 TREE_CONSTANT_OVERFLOW (t)
3519 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg0);
3521 else if (TREE_CODE (arg0) == REAL_CST)
3522 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
3523 TREE_TYPE (t) = type;
3525 else if (TREE_CODE (arg0) == NEGATE_EXPR)
3526 return TREE_OPERAND (arg0, 0);
3528 /* Convert - (a - b) to (b - a) for non-floating-point. */
3529 else if (TREE_CODE (arg0) == MINUS_EXPR && ! FLOAT_TYPE_P (type))
3530 return build (MINUS_EXPR, type, TREE_OPERAND (arg0, 1),
3531 TREE_OPERAND (arg0, 0));
3538 if (TREE_CODE (arg0) == INTEGER_CST)
3540 if (! TREE_UNSIGNED (type)
3541 && TREE_INT_CST_HIGH (arg0) < 0)
3543 HOST_WIDE_INT low, high;
3544 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
3545 TREE_INT_CST_HIGH (arg0),
3547 t = build_int_2 (low, high);
3548 TREE_TYPE (t) = type;
3550 = (TREE_OVERFLOW (arg0)
3551 | force_fit_type (t, overflow));
3552 TREE_CONSTANT_OVERFLOW (t)
3553 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg0);
3556 else if (TREE_CODE (arg0) == REAL_CST)
3558 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
3559 t = build_real (type,
3560 REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
3562 TREE_TYPE (t) = type;
3564 else if (TREE_CODE (arg0) == ABS_EXPR || TREE_CODE (arg0) == NEGATE_EXPR)
3565 return build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0));
3569 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
3571 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
3572 return build (COMPLEX_EXPR, TREE_TYPE (arg0),
3573 TREE_OPERAND (arg0, 0),
3574 fold (build1 (NEGATE_EXPR,
3575 TREE_TYPE (TREE_TYPE (arg0)),
3576 TREE_OPERAND (arg0, 1))));
3577 else if (TREE_CODE (arg0) == COMPLEX_CST)
3578 return build_complex (TREE_OPERAND (arg0, 0),
3579 fold (build1 (NEGATE_EXPR,
3580 TREE_TYPE (TREE_TYPE (arg0)),
3581 TREE_OPERAND (arg0, 1))));
3582 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
3583 return fold (build (TREE_CODE (arg0), type,
3584 fold (build1 (CONJ_EXPR, type,
3585 TREE_OPERAND (arg0, 0))),
3586 fold (build1 (CONJ_EXPR,
3587 type, TREE_OPERAND (arg0, 1)))));
3588 else if (TREE_CODE (arg0) == CONJ_EXPR)
3589 return TREE_OPERAND (arg0, 0);
3595 if (TREE_CODE (arg0) == INTEGER_CST)
3596 t = build_int_2 (~ TREE_INT_CST_LOW (arg0),
3597 ~ TREE_INT_CST_HIGH (arg0));
3598 TREE_TYPE (t) = type;
3599 force_fit_type (t, 0);
3600 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg0);
3601 TREE_CONSTANT_OVERFLOW (t) = TREE_CONSTANT_OVERFLOW (arg0);
3603 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
3604 return TREE_OPERAND (arg0, 0);
3608 /* A + (-B) -> A - B */
3609 if (TREE_CODE (arg1) == NEGATE_EXPR)
3610 return fold (build (MINUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
3611 else if (! FLOAT_TYPE_P (type))
3613 if (integer_zerop (arg1))
3614 return non_lvalue (convert (type, arg0));
3616 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
3617 with a constant, and the two constants have no bits in common,
3618 we should treat this as a BIT_IOR_EXPR since this may produce more
3620 if (TREE_CODE (arg0) == BIT_AND_EXPR
3621 && TREE_CODE (arg1) == BIT_AND_EXPR
3622 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
3623 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
3624 && integer_zerop (const_binop (BIT_AND_EXPR,
3625 TREE_OPERAND (arg0, 1),
3626 TREE_OPERAND (arg1, 1), 0)))
3628 code = BIT_IOR_EXPR;
3632 /* (A * C) + (B * C) -> (A+B) * C. Since we are most concerned
3633 about the case where C is a constant, just try one of the
3634 four possibilities. */
3636 if (TREE_CODE (arg0) == MULT_EXPR && TREE_CODE (arg1) == MULT_EXPR
3637 && operand_equal_p (TREE_OPERAND (arg0, 1),
3638 TREE_OPERAND (arg1, 1), 0))
3639 return fold (build (MULT_EXPR, type,
3640 fold (build (PLUS_EXPR, type,
3641 TREE_OPERAND (arg0, 0),
3642 TREE_OPERAND (arg1, 0))),
3643 TREE_OPERAND (arg0, 1)));
3645 /* In IEEE floating point, x+0 may not equal x. */
3646 else if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
3648 && real_zerop (arg1))
3649 return non_lvalue (convert (type, arg0));
3651 /* In most languages, can't associate operations on floats
3652 through parentheses. Rather than remember where the parentheses
3653 were, we don't associate floats at all. It shouldn't matter much. */
3654 if (FLOAT_TYPE_P (type))
3656 /* The varsign == -1 cases happen only for addition and subtraction.
3657 It says that the arg that was split was really CON minus VAR.
3658 The rest of the code applies to all associative operations. */
3664 if (split_tree (arg0, code, &var, &con, &varsign))
3668 /* EXPR is (CON-VAR) +- ARG1. */
3669 /* If it is + and VAR==ARG1, return just CONST. */
3670 if (code == PLUS_EXPR && operand_equal_p (var, arg1, 0))
3671 return convert (TREE_TYPE (t), con);
3673 /* If ARG0 is a constant, don't change things around;
3674 instead keep all the constant computations together. */
3676 if (TREE_CONSTANT (arg0))
3679 /* Otherwise return (CON +- ARG1) - VAR. */
3680 TREE_SET_CODE (t, MINUS_EXPR);
3681 TREE_OPERAND (t, 1) = var;
3683 = fold (build (code, TREE_TYPE (t), con, arg1));
3687 /* EXPR is (VAR+CON) +- ARG1. */
3688 /* If it is - and VAR==ARG1, return just CONST. */
3689 if (code == MINUS_EXPR && operand_equal_p (var, arg1, 0))
3690 return convert (TREE_TYPE (t), con);
3692 /* If ARG0 is a constant, don't change things around;
3693 instead keep all the constant computations together. */
3695 if (TREE_CONSTANT (arg0))
3698 /* Otherwise return VAR +- (ARG1 +- CON). */
3699 TREE_OPERAND (t, 1) = tem
3700 = fold (build (code, TREE_TYPE (t), arg1, con));
3701 TREE_OPERAND (t, 0) = var;
3702 if (integer_zerop (tem)
3703 && (code == PLUS_EXPR || code == MINUS_EXPR))
3704 return convert (type, var);
3705 /* If we have x +/- (c - d) [c an explicit integer]
3706 change it to x -/+ (d - c) since if d is relocatable
3707 then the latter can be a single immediate insn
3708 and the former cannot. */
3709 if (TREE_CODE (tem) == MINUS_EXPR
3710 && TREE_CODE (TREE_OPERAND (tem, 0)) == INTEGER_CST)
3712 tree tem1 = TREE_OPERAND (tem, 1);
3713 TREE_OPERAND (tem, 1) = TREE_OPERAND (tem, 0);
3714 TREE_OPERAND (tem, 0) = tem1;
3716 (code == PLUS_EXPR ? MINUS_EXPR : PLUS_EXPR));
3722 if (split_tree (arg1, code, &var, &con, &varsign))
3724 if (TREE_CONSTANT (arg1))
3729 (code == PLUS_EXPR ? MINUS_EXPR : PLUS_EXPR));
3731 /* EXPR is ARG0 +- (CON +- VAR). */
3732 if (TREE_CODE (t) == MINUS_EXPR
3733 && operand_equal_p (var, arg0, 0))
3735 /* If VAR and ARG0 cancel, return just CON or -CON. */
3736 if (code == PLUS_EXPR)
3737 return convert (TREE_TYPE (t), con);
3738 return fold (build1 (NEGATE_EXPR, TREE_TYPE (t),
3739 convert (TREE_TYPE (t), con)));
3743 = fold (build (code, TREE_TYPE (t), arg0, con));
3744 TREE_OPERAND (t, 1) = var;
3745 if (integer_zerop (TREE_OPERAND (t, 0))
3746 && TREE_CODE (t) == PLUS_EXPR)
3747 return convert (TREE_TYPE (t), var);
3752 #if defined (REAL_IS_NOT_DOUBLE) && ! defined (REAL_ARITHMETIC)
3753 if (TREE_CODE (arg1) == REAL_CST)
3755 #endif /* REAL_IS_NOT_DOUBLE, and no REAL_ARITHMETIC */
3757 t1 = const_binop (code, arg0, arg1, 0);
3758 if (t1 != NULL_TREE)
3760 /* The return value should always have
3761 the same type as the original expression. */
3762 TREE_TYPE (t1) = TREE_TYPE (t);
3768 if (! FLOAT_TYPE_P (type))
3770 if (! wins && integer_zerop (arg0))
3771 return build1 (NEGATE_EXPR, type, arg1);
3772 if (integer_zerop (arg1))
3773 return non_lvalue (convert (type, arg0));
3775 /* (A * C) - (B * C) -> (A-B) * C. Since we are most concerned
3776 about the case where C is a constant, just try one of the
3777 four possibilities. */
3779 if (TREE_CODE (arg0) == MULT_EXPR && TREE_CODE (arg1) == MULT_EXPR
3780 && operand_equal_p (TREE_OPERAND (arg0, 1),
3781 TREE_OPERAND (arg1, 1), 0))
3782 return fold (build (MULT_EXPR, type,
3783 fold (build (MINUS_EXPR, type,
3784 TREE_OPERAND (arg0, 0),
3785 TREE_OPERAND (arg1, 0))),
3786 TREE_OPERAND (arg0, 1)));
3788 /* Convert A - (-B) to A + B. */
3789 else if (TREE_CODE (arg1) == NEGATE_EXPR)
3790 return fold (build (PLUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
3792 else if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
3795 /* Except with IEEE floating point, 0-x equals -x. */
3796 if (! wins && real_zerop (arg0))
3797 return build1 (NEGATE_EXPR, type, arg1);
3798 /* Except with IEEE floating point, x-0 equals x. */
3799 if (real_zerop (arg1))
3800 return non_lvalue (convert (type, arg0));
3803 /* Fold &x - &x. This can happen from &x.foo - &x.
3804 This is unsafe for certain floats even in non-IEEE formats.
3805 In IEEE, it is unsafe because it does wrong for NaNs.
3806 Also note that operand_equal_p is always false if an operand
3809 if (operand_equal_p (arg0, arg1,
3810 FLOAT_TYPE_P (type) && ! flag_fast_math))
3811 return convert (type, integer_zero_node);
3816 if (! FLOAT_TYPE_P (type))
3818 if (integer_zerop (arg1))
3819 return omit_one_operand (type, arg1, arg0);
3820 if (integer_onep (arg1))
3821 return non_lvalue (convert (type, arg0));
3823 /* (a * (1 << b)) is (a << b) */
3824 if (TREE_CODE (arg1) == LSHIFT_EXPR
3825 && integer_onep (TREE_OPERAND (arg1, 0)))
3826 return fold (build (LSHIFT_EXPR, type, arg0,
3827 TREE_OPERAND (arg1, 1)));
3828 if (TREE_CODE (arg0) == LSHIFT_EXPR
3829 && integer_onep (TREE_OPERAND (arg0, 0)))
3830 return fold (build (LSHIFT_EXPR, type, arg1,
3831 TREE_OPERAND (arg0, 1)));
3835 /* x*0 is 0, except for IEEE floating point. */
3836 if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
3838 && real_zerop (arg1))
3839 return omit_one_operand (type, arg1, arg0);
3840 /* In IEEE floating point, x*1 is not equivalent to x for snans.
3841 However, ANSI says we can drop signals,
3842 so we can do this anyway. */
3843 if (real_onep (arg1))
3844 return non_lvalue (convert (type, arg0));
3846 if (! wins && real_twop (arg1))
3848 tree arg = save_expr (arg0);
3849 return build (PLUS_EXPR, type, arg, arg);
3856 if (integer_all_onesp (arg1))
3857 return omit_one_operand (type, arg1, arg0);
3858 if (integer_zerop (arg1))
3859 return non_lvalue (convert (type, arg0));
3860 t1 = distribute_bit_expr (code, type, arg0, arg1);
3861 if (t1 != NULL_TREE)
3864 /* (a << C1) | (a >> C2) if A is unsigned and C1+C2 is the size of A
3865 is a rotate of A by C1 bits. */
3867 if ((TREE_CODE (arg0) == RSHIFT_EXPR
3868 || TREE_CODE (arg0) == LSHIFT_EXPR)
3869 && (TREE_CODE (arg1) == RSHIFT_EXPR
3870 || TREE_CODE (arg1) == LSHIFT_EXPR)
3871 && TREE_CODE (arg0) != TREE_CODE (arg1)
3872 && operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1,0), 0)
3873 && TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0)))
3874 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
3875 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
3876 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
3877 && TREE_INT_CST_HIGH (TREE_OPERAND (arg1, 1)) == 0
3878 && ((TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
3879 + TREE_INT_CST_LOW (TREE_OPERAND (arg1, 1)))
3880 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
3881 return build (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
3882 TREE_CODE (arg0) == LSHIFT_EXPR
3883 ? TREE_OPERAND (arg0, 1) : TREE_OPERAND (arg1, 1));
3888 if (integer_zerop (arg1))
3889 return non_lvalue (convert (type, arg0));
3890 if (integer_all_onesp (arg1))
3891 return fold (build1 (BIT_NOT_EXPR, type, arg0));
3896 if (integer_all_onesp (arg1))
3897 return non_lvalue (convert (type, arg0));
3898 if (integer_zerop (arg1))
3899 return omit_one_operand (type, arg1, arg0);
3900 t1 = distribute_bit_expr (code, type, arg0, arg1);
3901 if (t1 != NULL_TREE)
3903 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
3904 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == NOP_EXPR
3905 && TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0))))
3907 int prec = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0)));
3908 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
3909 && (~TREE_INT_CST_LOW (arg0)
3910 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
3911 return build1 (NOP_EXPR, type, TREE_OPERAND (arg1, 0));
3913 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
3914 && TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
3916 int prec = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
3917 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
3918 && (~TREE_INT_CST_LOW (arg1)
3919 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
3920 return build1 (NOP_EXPR, type, TREE_OPERAND (arg0, 0));
3924 case BIT_ANDTC_EXPR:
3925 if (integer_all_onesp (arg0))
3926 return non_lvalue (convert (type, arg1));
3927 if (integer_zerop (arg0))
3928 return omit_one_operand (type, arg0, arg1);
3929 if (TREE_CODE (arg1) == INTEGER_CST)
3931 arg1 = fold (build1 (BIT_NOT_EXPR, type, arg1));
3932 code = BIT_AND_EXPR;
3937 case TRUNC_DIV_EXPR:
3938 case ROUND_DIV_EXPR:
3939 case FLOOR_DIV_EXPR:
3941 case EXACT_DIV_EXPR:
3943 if (integer_onep (arg1))
3944 return non_lvalue (convert (type, arg0));
3945 if (integer_zerop (arg1))
3948 /* Look for ((a * C1) / C3) or (((a * C1) + C2) / C3),
3949 where C1 % C3 == 0 or C3 % C1 == 0. We can simplify these
3950 expressions, which often appear in the offsets or sizes of
3951 objects with a varying size. Only deal with positive divisors
3952 and multiplicands. If C2 is negative, we must have C2 % C3 == 0.
3954 Look for NOPs and SAVE_EXPRs inside. */
3956 if (TREE_CODE (arg1) == INTEGER_CST
3957 && tree_int_cst_lt (integer_zero_node, arg1))
3959 int have_save_expr = 0;
3960 tree c2 = integer_zero_node;
3963 if (TREE_CODE (xarg0) == SAVE_EXPR)
3964 have_save_expr = 1, xarg0 = TREE_OPERAND (xarg0, 0);
3968 if (TREE_CODE (xarg0) == PLUS_EXPR
3969 && TREE_CODE (TREE_OPERAND (xarg0, 1)) == INTEGER_CST)
3970 c2 = TREE_OPERAND (xarg0, 1), xarg0 = TREE_OPERAND (xarg0, 0);
3971 else if (TREE_CODE (xarg0) == MINUS_EXPR
3972 && TREE_CODE (TREE_OPERAND (xarg0, 1)) == INTEGER_CST
3973 /* If we are doing this computation unsigned, the negate
3975 && ! TREE_UNSIGNED (type))
3977 c2 = fold (build1 (NEGATE_EXPR, type, TREE_OPERAND (xarg0, 1)));
3978 xarg0 = TREE_OPERAND (xarg0, 0);
3981 if (TREE_CODE (xarg0) == SAVE_EXPR)
3982 have_save_expr = 1, xarg0 = TREE_OPERAND (xarg0, 0);
3986 if (TREE_CODE (xarg0) == MULT_EXPR
3987 && TREE_CODE (TREE_OPERAND (xarg0, 1)) == INTEGER_CST
3988 && tree_int_cst_lt (integer_zero_node, TREE_OPERAND (xarg0, 1))
3989 && (integer_zerop (const_binop (TRUNC_MOD_EXPR,
3990 TREE_OPERAND (xarg0, 1), arg1, 1))
3991 || integer_zerop (const_binop (TRUNC_MOD_EXPR, arg1,
3992 TREE_OPERAND (xarg0, 1), 1)))
3993 && (tree_int_cst_lt (integer_zero_node, c2)
3994 || integer_zerop (const_binop (TRUNC_MOD_EXPR, c2,
3997 tree outer_div = integer_one_node;
3998 tree c1 = TREE_OPERAND (xarg0, 1);
4001 /* If C3 > C1, set them equal and do a divide by
4002 C3/C1 at the end of the operation. */
4003 if (tree_int_cst_lt (c1, c3))
4004 outer_div = const_binop (code, c3, c1, 0), c3 = c1;
4006 /* The result is A * (C1/C3) + (C2/C3). */
4007 t = fold (build (PLUS_EXPR, type,
4008 fold (build (MULT_EXPR, type,
4009 TREE_OPERAND (xarg0, 0),
4010 const_binop (code, c1, c3, 1))),
4011 const_binop (code, c2, c3, 1)));
4013 if (! integer_onep (outer_div))
4014 t = fold (build (code, type, t, outer_div));
4023 #if !defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
4024 #ifndef REAL_INFINITY
4025 if (TREE_CODE (arg1) == REAL_CST
4026 && real_zerop (arg1))
4029 #endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
4034 case FLOOR_MOD_EXPR:
4035 case ROUND_MOD_EXPR:
4036 case TRUNC_MOD_EXPR:
4037 if (integer_onep (arg1))
4038 return omit_one_operand (type, integer_zero_node, arg0);
4039 if (integer_zerop (arg1))
4042 /* Look for ((a * C1) % C3) or (((a * C1) + C2) % C3),
4043 where C1 % C3 == 0. Handle similarly to the division case,
4044 but don't bother with SAVE_EXPRs. */
4046 if (TREE_CODE (arg1) == INTEGER_CST
4047 && ! integer_zerop (arg1))
4049 tree c2 = integer_zero_node;
4052 if (TREE_CODE (xarg0) == PLUS_EXPR
4053 && TREE_CODE (TREE_OPERAND (xarg0, 1)) == INTEGER_CST)
4054 c2 = TREE_OPERAND (xarg0, 1), xarg0 = TREE_OPERAND (xarg0, 0);
4055 else if (TREE_CODE (xarg0) == MINUS_EXPR
4056 && TREE_CODE (TREE_OPERAND (xarg0, 1)) == INTEGER_CST
4057 && ! TREE_UNSIGNED (type))
4059 c2 = fold (build1 (NEGATE_EXPR, type, TREE_OPERAND (xarg0, 1)));
4060 xarg0 = TREE_OPERAND (xarg0, 0);
4065 if (TREE_CODE (xarg0) == MULT_EXPR
4066 && TREE_CODE (TREE_OPERAND (xarg0, 1)) == INTEGER_CST
4067 && integer_zerop (const_binop (TRUNC_MOD_EXPR,
4068 TREE_OPERAND (xarg0, 1),
4070 && tree_int_cst_lt (integer_zero_node, c2))
4071 /* The result is (C2%C3). */
4072 return omit_one_operand (type, const_binop (code, c2, arg1, 1),
4073 TREE_OPERAND (xarg0, 0));
4082 if (integer_zerop (arg1))
4083 return non_lvalue (convert (type, arg0));
4084 /* Since negative shift count is not well-defined,
4085 don't try to compute it in the compiler. */
4086 if (tree_int_cst_lt (arg1, integer_zero_node))
4091 if (operand_equal_p (arg0, arg1, 0))
4093 if (INTEGRAL_TYPE_P (type)
4094 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), 1))
4095 return omit_one_operand (type, arg1, arg0);
4099 if (operand_equal_p (arg0, arg1, 0))
4101 if (INTEGRAL_TYPE_P (type)
4102 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), 1))
4103 return omit_one_operand (type, arg1, arg0);
4106 case TRUTH_NOT_EXPR:
4107 /* Note that the operand of this must be an int
4108 and its values must be 0 or 1.
4109 ("true" is a fixed value perhaps depending on the language,
4110 but we don't handle values other than 1 correctly yet.) */
4111 return invert_truthvalue (arg0);
4113 case TRUTH_ANDIF_EXPR:
4114 /* Note that the operands of this must be ints
4115 and their values must be 0 or 1.
4116 ("true" is a fixed value perhaps depending on the language.) */
4117 /* If first arg is constant zero, return it. */
4118 if (integer_zerop (arg0))
4120 case TRUTH_AND_EXPR:
4121 /* If either arg is constant true, drop it. */
4122 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
4123 return non_lvalue (arg1);
4124 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
4125 return non_lvalue (arg0);
4126 /* If second arg is constant zero, result is zero, but first arg
4127 must be evaluated. */
4128 if (integer_zerop (arg1))
4129 return omit_one_operand (type, arg1, arg0);
4132 /* We only do these simplifications if we are optimizing. */
4136 /* Check for things like (A || B) && (A || C). We can convert this
4137 to A || (B && C). Note that either operator can be any of the four
4138 truth and/or operations and the transformation will still be
4139 valid. Also note that we only care about order for the
4140 ANDIF and ORIF operators. */
4141 if (TREE_CODE (arg0) == TREE_CODE (arg1)
4142 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
4143 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
4144 || TREE_CODE (arg0) == TRUTH_AND_EXPR
4145 || TREE_CODE (arg0) == TRUTH_OR_EXPR))
4147 tree a00 = TREE_OPERAND (arg0, 0);
4148 tree a01 = TREE_OPERAND (arg0, 1);
4149 tree a10 = TREE_OPERAND (arg1, 0);
4150 tree a11 = TREE_OPERAND (arg1, 1);
4151 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
4152 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
4153 && (code == TRUTH_AND_EXPR
4154 || code == TRUTH_OR_EXPR));
4156 if (operand_equal_p (a00, a10, 0))
4157 return fold (build (TREE_CODE (arg0), type, a00,
4158 fold (build (code, type, a01, a11))));
4159 else if (commutative && operand_equal_p (a00, a11, 0))
4160 return fold (build (TREE_CODE (arg0), type, a00,
4161 fold (build (code, type, a01, a10))));
4162 else if (commutative && operand_equal_p (a01, a10, 0))
4163 return fold (build (TREE_CODE (arg0), type, a01,
4164 fold (build (code, type, a00, a11))));
4166 /* This case is tricky because we must either have commutative
4167 operators or else A10 must not have side-effects. */
4169 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
4170 && operand_equal_p (a01, a11, 0))
4171 return fold (build (TREE_CODE (arg0), type,
4172 fold (build (code, type, a00, a10)),
4176 /* Check for the possibility of merging component references. If our
4177 lhs is another similar operation, try to merge its rhs with our
4178 rhs. Then try to merge our lhs and rhs. */
4179 if (TREE_CODE (arg0) == code
4180 && 0 != (tem = fold_truthop (code, type,
4181 TREE_OPERAND (arg0, 1), arg1)))
4182 return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));
4184 if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
4189 case TRUTH_ORIF_EXPR:
4190 /* Note that the operands of this must be ints
4191 and their values must be 0 or true.
4192 ("true" is a fixed value perhaps depending on the language.) */
4193 /* If first arg is constant true, return it. */
4194 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
4197 /* If either arg is constant zero, drop it. */
4198 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
4199 return non_lvalue (arg1);
4200 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1))
4201 return non_lvalue (arg0);
4202 /* If second arg is constant true, result is true, but we must
4203 evaluate first arg. */
4204 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
4205 return omit_one_operand (type, arg1, arg0);
4208 case TRUTH_XOR_EXPR:
4209 /* If either arg is constant zero, drop it. */
4210 if (integer_zerop (arg0))
4211 return non_lvalue (arg1);
4212 if (integer_zerop (arg1))
4213 return non_lvalue (arg0);
4214 /* If either arg is constant true, this is a logical inversion. */
4215 if (integer_onep (arg0))
4216 return non_lvalue (invert_truthvalue (arg1));
4217 if (integer_onep (arg1))
4218 return non_lvalue (invert_truthvalue (arg0));
4227 /* If one arg is a constant integer, put it last. */
4228 if (TREE_CODE (arg0) == INTEGER_CST
4229 && TREE_CODE (arg1) != INTEGER_CST)
4231 TREE_OPERAND (t, 0) = arg1;
4232 TREE_OPERAND (t, 1) = arg0;
4233 arg0 = TREE_OPERAND (t, 0);
4234 arg1 = TREE_OPERAND (t, 1);
4235 code = swap_tree_comparison (code);
4236 TREE_SET_CODE (t, code);
4239 /* Convert foo++ == CONST into ++foo == CONST + INCR.
4240 First, see if one arg is constant; find the constant arg
4241 and the other one. */
4243 tree constop = 0, varop;
4246 if (TREE_CONSTANT (arg1))
4247 constoploc = &TREE_OPERAND (t, 1), constop = arg1, varop = arg0;
4248 if (TREE_CONSTANT (arg0))
4249 constoploc = &TREE_OPERAND (t, 0), constop = arg0, varop = arg1;
4251 if (constop && TREE_CODE (varop) == POSTINCREMENT_EXPR)
4253 /* This optimization is invalid for ordered comparisons
4254 if CONST+INCR overflows or if foo+incr might overflow.
4255 This optimization is invalid for floating point due to rounding.
4256 For pointer types we assume overflow doesn't happen. */
4257 if (TREE_CODE (TREE_TYPE (varop)) == POINTER_TYPE
4258 || (! FLOAT_TYPE_P (TREE_TYPE (varop))
4259 && (code == EQ_EXPR || code == NE_EXPR)))
4262 = fold (build (PLUS_EXPR, TREE_TYPE (varop),
4263 constop, TREE_OPERAND (varop, 1)));
4264 TREE_SET_CODE (varop, PREINCREMENT_EXPR);
4265 *constoploc = newconst;
4269 else if (constop && TREE_CODE (varop) == POSTDECREMENT_EXPR)
4271 if (TREE_CODE (TREE_TYPE (varop)) == POINTER_TYPE
4272 || (! FLOAT_TYPE_P (TREE_TYPE (varop))
4273 && (code == EQ_EXPR || code == NE_EXPR)))
4276 = fold (build (MINUS_EXPR, TREE_TYPE (varop),
4277 constop, TREE_OPERAND (varop, 1)));
4278 TREE_SET_CODE (varop, PREDECREMENT_EXPR);
4279 *constoploc = newconst;
4285 /* Change X >= CST to X > (CST - 1) if CST is positive. */
4286 if (TREE_CODE (arg1) == INTEGER_CST
4287 && TREE_CODE (arg0) != INTEGER_CST
4288 && ! tree_int_cst_lt (arg1, integer_one_node))
4290 switch (TREE_CODE (t))
4294 TREE_SET_CODE (t, code);
4295 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
4296 TREE_OPERAND (t, 1) = arg1;
4301 TREE_SET_CODE (t, code);
4302 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
4303 TREE_OPERAND (t, 1) = arg1;
4307 /* If this is an EQ or NE comparison with zero and ARG0 is
4308 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
4309 two operations, but the latter can be done in one less insn
4310 on machines that have only two-operand insns or on which a
4311 constant cannot be the first operand. */
4312 if (integer_zerop (arg1) && (code == EQ_EXPR || code == NE_EXPR)
4313 && TREE_CODE (arg0) == BIT_AND_EXPR)
4315 if (TREE_CODE (TREE_OPERAND (arg0, 0)) == LSHIFT_EXPR
4316 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 0), 0)))
4318 fold (build (code, type,
4319 build (BIT_AND_EXPR, TREE_TYPE (arg0),
4321 TREE_TYPE (TREE_OPERAND (arg0, 0)),
4322 TREE_OPERAND (arg0, 1),
4323 TREE_OPERAND (TREE_OPERAND (arg0, 0), 1)),
4324 convert (TREE_TYPE (arg0),
4327 else if (TREE_CODE (TREE_OPERAND (arg0, 1)) == LSHIFT_EXPR
4328 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 1), 0)))
4330 fold (build (code, type,
4331 build (BIT_AND_EXPR, TREE_TYPE (arg0),
4333 TREE_TYPE (TREE_OPERAND (arg0, 1)),
4334 TREE_OPERAND (arg0, 0),
4335 TREE_OPERAND (TREE_OPERAND (arg0, 1), 1)),
4336 convert (TREE_TYPE (arg0),
4341 /* If this is an NE or EQ comparison of zero against the result of a
4342 signed MOD operation whose second operand is a power of 2, make
4343 the MOD operation unsigned since it is simpler and equivalent. */
4344 if ((code == NE_EXPR || code == EQ_EXPR)
4345 && integer_zerop (arg1)
4346 && ! TREE_UNSIGNED (TREE_TYPE (arg0))
4347 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
4348 || TREE_CODE (arg0) == CEIL_MOD_EXPR
4349 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
4350 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
4351 && integer_pow2p (TREE_OPERAND (arg0, 1)))
4353 tree newtype = unsigned_type (TREE_TYPE (arg0));
4354 tree newmod = build (TREE_CODE (arg0), newtype,
4355 convert (newtype, TREE_OPERAND (arg0, 0)),
4356 convert (newtype, TREE_OPERAND (arg0, 1)));
4358 return build (code, type, newmod, convert (newtype, arg1));
4361 /* If this is an NE comparison of zero with an AND of one, remove the
4362 comparison since the AND will give the correct value. */
4363 if (code == NE_EXPR && integer_zerop (arg1)
4364 && TREE_CODE (arg0) == BIT_AND_EXPR
4365 && integer_onep (TREE_OPERAND (arg0, 1)))
4366 return convert (type, arg0);
4368 /* If we have (A & C) == C where C is a power of 2, convert this into
4369 (A & C) != 0. Similarly for NE_EXPR. */
4370 if ((code == EQ_EXPR || code == NE_EXPR)
4371 && TREE_CODE (arg0) == BIT_AND_EXPR
4372 && integer_pow2p (TREE_OPERAND (arg0, 1))
4373 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
4374 return build (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
4375 arg0, integer_zero_node);
4377 /* Simplify comparison of something with itself. (For IEEE
4378 floating-point, we can only do some of these simplifications.) */
4379 if (operand_equal_p (arg0, arg1, 0))
4386 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
4388 t = build_int_2 (1, 0);
4389 TREE_TYPE (t) = type;
4393 TREE_SET_CODE (t, code);
4397 /* For NE, we can only do this simplification if integer. */
4398 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
4400 /* ... fall through ... */
4403 t = build_int_2 (0, 0);
4404 TREE_TYPE (t) = type;
4409 /* An unsigned comparison against 0 can be simplified. */
4410 if (integer_zerop (arg1)
4411 && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
4412 || TREE_CODE (TREE_TYPE (arg1)) == POINTER_TYPE)
4413 && TREE_UNSIGNED (TREE_TYPE (arg1)))
4415 switch (TREE_CODE (t))
4419 TREE_SET_CODE (t, NE_EXPR);
4423 TREE_SET_CODE (t, EQ_EXPR);
4426 return omit_one_operand (type,
4427 convert (type, integer_one_node),
4430 return omit_one_operand (type,
4431 convert (type, integer_zero_node),
4436 /* If we are comparing an expression that just has comparisons
4437 of two integer values, arithmetic expressions of those comparisons,
4438 and constants, we can simplify it. There are only three cases
4439 to check: the two values can either be equal, the first can be
4440 greater, or the second can be greater. Fold the expression for
4441 those three values. Since each value must be 0 or 1, we have
4442 eight possibilities, each of which corresponds to the constant 0
4443 or 1 or one of the six possible comparisons.
4445 This handles common cases like (a > b) == 0 but also handles
4446 expressions like ((x > y) - (y > x)) > 0, which supposedly
4447 occur in macroized code. */
4449 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
4451 tree cval1 = 0, cval2 = 0;
4454 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
4455 /* Don't handle degenerate cases here; they should already
4456 have been handled anyway. */
4457 && cval1 != 0 && cval2 != 0
4458 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
4459 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
4460 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
4461 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
4462 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
4464 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
4465 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
4467 /* We can't just pass T to eval_subst in case cval1 or cval2
4468 was the same as ARG1. */
4471 = fold (build (code, type,
4472 eval_subst (arg0, cval1, maxval, cval2, minval),
4475 = fold (build (code, type,
4476 eval_subst (arg0, cval1, maxval, cval2, maxval),
4479 = fold (build (code, type,
4480 eval_subst (arg0, cval1, minval, cval2, maxval),
4483 /* All three of these results should be 0 or 1. Confirm they
4484 are. Then use those values to select the proper code
4487 if ((integer_zerop (high_result)
4488 || integer_onep (high_result))
4489 && (integer_zerop (equal_result)
4490 || integer_onep (equal_result))
4491 && (integer_zerop (low_result)
4492 || integer_onep (low_result)))
4494 /* Make a 3-bit mask with the high-order bit being the
4495 value for `>', the next for '=', and the low for '<'. */
4496 switch ((integer_onep (high_result) * 4)
4497 + (integer_onep (equal_result) * 2)
4498 + integer_onep (low_result))
4502 return omit_one_operand (type, integer_zero_node, arg0);
4523 return omit_one_operand (type, integer_one_node, arg0);
4526 t = build (code, type, cval1, cval2);
4528 return save_expr (t);
4535 /* If this is a comparison of a field, we may be able to simplify it. */
4536 if ((TREE_CODE (arg0) == COMPONENT_REF
4537 || TREE_CODE (arg0) == BIT_FIELD_REF)
4538 && (code == EQ_EXPR || code == NE_EXPR)
4539 /* Handle the constant case even without -O
4540 to make sure the warnings are given. */
4541 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
4543 t1 = optimize_bit_field_compare (code, type, arg0, arg1);
4547 /* If this is a comparison of complex values and either or both
4548 sides are a COMPLEX_EXPR, it is best to split up the comparisons
4549 and join them with a TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR. This
4550 may prevent needless evaluations. */
4551 if ((code == EQ_EXPR || code == NE_EXPR)
4552 && TREE_CODE (TREE_TYPE (arg0)) == COMPLEX_TYPE
4553 && (TREE_CODE (arg0) == COMPLEX_EXPR
4554 || TREE_CODE (arg1) == COMPLEX_EXPR))
4556 tree subtype = TREE_TYPE (TREE_TYPE (arg0));
4557 tree real0 = fold (build1 (REALPART_EXPR, subtype, arg0));
4558 tree imag0 = fold (build1 (IMAGPART_EXPR, subtype, arg0));
4559 tree real1 = fold (build1 (REALPART_EXPR, subtype, arg1));
4560 tree imag1 = fold (build1 (IMAGPART_EXPR, subtype, arg1));
4562 return fold (build ((code == EQ_EXPR ? TRUTH_ANDIF_EXPR
4565 fold (build (code, type, real0, real1)),
4566 fold (build (code, type, imag0, imag1))));
4569 /* From here on, the only cases we handle are when the result is
4570 known to be a constant.
4572 To compute GT, swap the arguments and do LT.
4573 To compute GE, do LT and invert the result.
4574 To compute LE, swap the arguments, do LT and invert the result.
4575 To compute NE, do EQ and invert the result.
4577 Therefore, the code below must handle only EQ and LT. */
4579 if (code == LE_EXPR || code == GT_EXPR)
4581 tem = arg0, arg0 = arg1, arg1 = tem;
4582 code = swap_tree_comparison (code);
4585 /* Note that it is safe to invert for real values here because we
4586 will check below in the one case that it matters. */
4589 if (code == NE_EXPR || code == GE_EXPR)
4592 code = invert_tree_comparison (code);
4595 /* Compute a result for LT or EQ if args permit;
4596 otherwise return T. */
4597 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
4599 if (code == EQ_EXPR)
4600 t1 = build_int_2 ((TREE_INT_CST_LOW (arg0)
4601 == TREE_INT_CST_LOW (arg1))
4602 && (TREE_INT_CST_HIGH (arg0)
4603 == TREE_INT_CST_HIGH (arg1)),
4606 t1 = build_int_2 ((TREE_UNSIGNED (TREE_TYPE (arg0))
4607 ? INT_CST_LT_UNSIGNED (arg0, arg1)
4608 : INT_CST_LT (arg0, arg1)),
4612 /* Assume a nonexplicit constant cannot equal an explicit one,
4613 since such code would be undefined anyway.
4614 Exception: on sysvr4, using #pragma weak,
4615 a label can come out as 0. */
4616 else if (TREE_CODE (arg1) == INTEGER_CST
4617 && !integer_zerop (arg1)
4618 && TREE_CONSTANT (arg0)
4619 && TREE_CODE (arg0) == ADDR_EXPR
4621 t1 = build_int_2 (0, 0);
4623 /* Two real constants can be compared explicitly. */
4624 else if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
4626 /* If either operand is a NaN, the result is false with two
4627 exceptions: First, an NE_EXPR is true on NaNs, but that case
4628 is already handled correctly since we will be inverting the
4629 result for NE_EXPR. Second, if we had inverted a LE_EXPR
4630 or a GE_EXPR into a LT_EXPR, we must return true so that it
4631 will be inverted into false. */
4633 if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
4634 || REAL_VALUE_ISNAN (TREE_REAL_CST (arg1)))
4635 t1 = build_int_2 (invert && code == LT_EXPR, 0);
4637 else if (code == EQ_EXPR)
4638 t1 = build_int_2 (REAL_VALUES_EQUAL (TREE_REAL_CST (arg0),
4639 TREE_REAL_CST (arg1)),
4642 t1 = build_int_2 (REAL_VALUES_LESS (TREE_REAL_CST (arg0),
4643 TREE_REAL_CST (arg1)),
4647 if (t1 == NULL_TREE)
4651 TREE_INT_CST_LOW (t1) ^= 1;
4653 TREE_TYPE (t1) = type;
4657 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
4658 so all simple results must be passed through pedantic_non_lvalue. */
4659 if (TREE_CODE (arg0) == INTEGER_CST)
4660 return pedantic_non_lvalue
4661 (TREE_OPERAND (t, (integer_zerop (arg0) ? 2 : 1)));
4662 else if (operand_equal_p (arg1, TREE_OPERAND (expr, 2), 0))
4663 return pedantic_non_lvalue (omit_one_operand (type, arg1, arg0));
4665 /* If the second operand is zero, invert the comparison and swap
4666 the second and third operands. Likewise if the second operand
4667 is constant and the third is not or if the third operand is
4668 equivalent to the first operand of the comparison. */
4670 if (integer_zerop (arg1)
4671 || (TREE_CONSTANT (arg1) && ! TREE_CONSTANT (TREE_OPERAND (t, 2)))
4672 || (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
4673 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
4674 TREE_OPERAND (t, 2),
4675 TREE_OPERAND (arg0, 1))))
4677 /* See if this can be inverted. If it can't, possibly because
4678 it was a floating-point inequality comparison, don't do
4680 tem = invert_truthvalue (arg0);
4682 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
4684 arg0 = TREE_OPERAND (t, 0) = tem;
4685 TREE_OPERAND (t, 1) = TREE_OPERAND (t, 2);
4686 TREE_OPERAND (t, 2) = arg1;
4687 arg1 = TREE_OPERAND (t, 1);
4691 /* If we have A op B ? A : C, we may be able to convert this to a
4692 simpler expression, depending on the operation and the values
4693 of B and C. IEEE floating point prevents this though,
4694 because A or B might be -0.0 or a NaN. */
4696 if (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
4697 && (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
4698 || ! FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 0)))
4700 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
4701 arg1, TREE_OPERAND (arg0, 1)))
4703 tree arg2 = TREE_OPERAND (t, 2);
4704 enum tree_code comp_code = TREE_CODE (arg0);
4706 /* If we have A op 0 ? A : -A, this is A, -A, abs (A), or abs (-A),
4707 depending on the comparison operation. */
4708 if (integer_zerop (TREE_OPERAND (arg0, 1))
4709 && TREE_CODE (arg2) == NEGATE_EXPR
4710 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4714 return pedantic_non_lvalue
4715 (fold (build1 (NEGATE_EXPR, type, arg1)));
4717 return pedantic_non_lvalue (convert (type, arg1));
4720 return pedantic_non_lvalue
4721 (fold (build1 (ABS_EXPR, type, arg1)));
4724 return pedantic_non_lvalue
4725 (fold (build1 (NEGATE_EXPR, type,
4726 fold (build1 (ABS_EXPR, type, arg1)))));
4729 /* If this is A != 0 ? A : 0, this is simply A. For ==, it is
4732 if (integer_zerop (TREE_OPERAND (arg0, 1)) && integer_zerop (arg2))
4734 if (comp_code == NE_EXPR)
4735 return pedantic_non_lvalue (convert (type, arg1));
4736 else if (comp_code == EQ_EXPR)
4737 return pedantic_non_lvalue (convert (type, integer_zero_node));
4740 /* If this is A op B ? A : B, this is either A, B, min (A, B),
4741 or max (A, B), depending on the operation. */
4743 if (operand_equal_for_comparison_p (TREE_OPERAND (arg0, 1),
4744 arg2, TREE_OPERAND (arg0, 0)))
4748 return pedantic_non_lvalue (convert (type, arg2));
4750 return pedantic_non_lvalue (convert (type, arg1));
4753 return pedantic_non_lvalue
4754 (fold (build (MIN_EXPR, type, arg1, arg2)));
4757 return pedantic_non_lvalue
4758 (fold (build (MAX_EXPR, type, arg1, arg2)));
4761 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4762 we might still be able to simplify this. For example,
4763 if C1 is one less or one more than C2, this might have started
4764 out as a MIN or MAX and been transformed by this function.
4765 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
4767 if (INTEGRAL_TYPE_P (type)
4768 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
4769 && TREE_CODE (arg2) == INTEGER_CST)
4773 /* We can replace A with C1 in this case. */
4774 arg1 = TREE_OPERAND (t, 1)
4775 = convert (type, TREE_OPERAND (arg0, 1));
4779 /* If C1 is C2 + 1, this is min(A, C2). */
4780 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type), 1)
4781 && operand_equal_p (TREE_OPERAND (arg0, 1),
4782 const_binop (PLUS_EXPR, arg2,
4783 integer_one_node, 0), 1))
4784 return pedantic_non_lvalue
4785 (fold (build (MIN_EXPR, type, arg1, arg2)));
4789 /* If C1 is C2 - 1, this is min(A, C2). */
4790 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type), 1)
4791 && operand_equal_p (TREE_OPERAND (arg0, 1),
4792 const_binop (MINUS_EXPR, arg2,
4793 integer_one_node, 0), 1))
4794 return pedantic_non_lvalue
4795 (fold (build (MIN_EXPR, type, arg1, arg2)));
4799 /* If C1 is C2 - 1, this is max(A, C2). */
4800 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type), 1)
4801 && operand_equal_p (TREE_OPERAND (arg0, 1),
4802 const_binop (MINUS_EXPR, arg2,
4803 integer_one_node, 0), 1))
4804 return pedantic_non_lvalue
4805 (fold (build (MAX_EXPR, type, arg1, arg2)));
4809 /* If C1 is C2 + 1, this is max(A, C2). */
4810 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type), 1)
4811 && operand_equal_p (TREE_OPERAND (arg0, 1),
4812 const_binop (PLUS_EXPR, arg2,
4813 integer_one_node, 0), 1))
4814 return pedantic_non_lvalue
4815 (fold (build (MAX_EXPR, type, arg1, arg2)));
4820 /* Convert A ? 1 : 0 to simply A. */
4821 if (integer_onep (TREE_OPERAND (t, 1))
4822 && integer_zerop (TREE_OPERAND (t, 2))
4823 /* If we try to convert TREE_OPERAND (t, 0) to our type, the
4824 call to fold will try to move the conversion inside
4825 a COND, which will recurse. In that case, the COND_EXPR
4826 is probably the best choice, so leave it alone. */
4827 && type == TREE_TYPE (arg0))
4828 return pedantic_non_lvalue (arg0);
4831 /* Look for expressions of the form A & 2 ? 2 : 0. The result of this
4832 operation is simply A & 2. */
4834 if (integer_zerop (TREE_OPERAND (t, 2))
4835 && TREE_CODE (arg0) == NE_EXPR
4836 && integer_zerop (TREE_OPERAND (arg0, 1))
4837 && integer_pow2p (arg1)
4838 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
4839 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
4841 return pedantic_non_lvalue (convert (type, TREE_OPERAND (arg0, 0)));
4846 /* When pedantic, a compound expression can be neither an lvalue
4847 nor an integer constant expression. */
4848 if (TREE_SIDE_EFFECTS (arg0) || pedantic)
4850 /* Don't let (0, 0) be null pointer constant. */
4851 if (integer_zerop (arg1))
4852 return non_lvalue (arg1);
4857 return build_complex (arg0, arg1);
4861 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
4863 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
4864 return omit_one_operand (type, TREE_OPERAND (arg0, 0),
4865 TREE_OPERAND (arg0, 1));
4866 else if (TREE_CODE (arg0) == COMPLEX_CST)
4867 return TREE_REALPART (arg0);
4868 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
4869 return fold (build (TREE_CODE (arg0), type,
4870 fold (build1 (REALPART_EXPR, type,
4871 TREE_OPERAND (arg0, 0))),
4872 fold (build1 (REALPART_EXPR,
4873 type, TREE_OPERAND (arg0, 1)))));
4877 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
4878 return convert (type, integer_zero_node);
4879 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
4880 return omit_one_operand (type, TREE_OPERAND (arg0, 1),
4881 TREE_OPERAND (arg0, 0));
4882 else if (TREE_CODE (arg0) == COMPLEX_CST)
4883 return TREE_IMAGPART (arg0);
4884 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
4885 return fold (build (TREE_CODE (arg0), type,
4886 fold (build1 (IMAGPART_EXPR, type,
4887 TREE_OPERAND (arg0, 0))),
4888 fold (build1 (IMAGPART_EXPR, type,
4889 TREE_OPERAND (arg0, 1)))));
4894 } /* switch (code) */