1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987, 1988, 1992, 1993, 1994 Free Software Foundation, Inc.
4 This file is part of GNU CC.
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA. */
20 /*@@ Fix lossage on folding division of big integers. */
22 /*@@ This file should be rewritten to use an arbitrary precision
23 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
24 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
25 @@ The routines that translate from the ap rep should
26 @@ warn if precision et. al. is lost.
27 @@ This would also make life easier when this technology is used
28 @@ for cross-compilers. */
31 /* The entry points in this file are fold, size_int and size_binop.
33 fold takes a tree as argument and returns a simplified tree.
35 size_binop takes a tree code for an arithmetic operation
36 and two operands that are trees, and produces a tree for the
37 result, assuming the type comes from `sizetype'.
39 size_int takes an integer value, and creates a tree constant
40 with type from `sizetype'. */
48 /* Handle floating overflow for `const_binop'. */
49 static jmp_buf float_error;
51 static void encode PROTO((short *, HOST_WIDE_INT, HOST_WIDE_INT));
52 static void decode PROTO((short *, HOST_WIDE_INT *, HOST_WIDE_INT *));
53 static int div_and_round_double PROTO((enum tree_code, int, HOST_WIDE_INT,
54 HOST_WIDE_INT, HOST_WIDE_INT,
55 HOST_WIDE_INT, HOST_WIDE_INT *,
56 HOST_WIDE_INT *, HOST_WIDE_INT *,
58 static int split_tree PROTO((tree, enum tree_code, tree *, tree *, int *));
59 static tree const_binop PROTO((enum tree_code, tree, tree, int));
60 static tree fold_convert PROTO((tree, tree));
61 static enum tree_code invert_tree_comparison PROTO((enum tree_code));
62 static enum tree_code swap_tree_comparison PROTO((enum tree_code));
63 static int truth_value_p PROTO((enum tree_code));
64 static int operand_equal_for_comparison_p PROTO((tree, tree, tree));
65 static int twoval_comparison_p PROTO((tree, tree *, tree *, int *));
66 static tree eval_subst PROTO((tree, tree, tree, tree, tree));
67 static tree omit_one_operand PROTO((tree, tree, tree));
68 static tree distribute_bit_expr PROTO((enum tree_code, tree, tree, tree));
69 static tree make_bit_field_ref PROTO((tree, tree, int, int, int));
70 static tree optimize_bit_field_compare PROTO((enum tree_code, tree,
72 static tree decode_field_reference PROTO((tree, int *, int *,
73 enum machine_mode *, int *,
75 static int all_ones_mask_p PROTO((tree, int));
76 static int simple_operand_p PROTO((tree));
77 static tree range_test PROTO((enum tree_code, tree, enum tree_code,
78 enum tree_code, tree, tree, tree));
79 static tree fold_truthop PROTO((enum tree_code, tree, tree, tree));
85 /* Yield nonzero if a signed left shift of A by B bits overflows. */
86 #define left_shift_overflows(a, b) ((a) != ((a) << (b)) >> (b))
88 /* Suppose A1 + B1 = SUM1, using 2's complement arithmetic ignoring overflow.
89 Suppose A, B and SUM have the same respective signs as A1, B1, and SUM1.
90 Then this yields nonzero if overflow occurred during the addition.
91 Overflow occurs if A and B have the same sign, but A and SUM differ in sign.
92 Use `^' to test whether signs differ, and `< 0' to isolate the sign. */
93 #define overflow_sum_sign(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
95 /* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
96 We do that by representing the two-word integer as MAX_SHORTS shorts,
97 with only 8 bits stored in each short, as a positive number. */
99 /* Unpack a two-word integer into MAX_SHORTS shorts.
100 LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
101 SHORTS points to the array of shorts. */
104 encode (shorts, low, hi)
106 HOST_WIDE_INT low, hi;
110 for (i = 0; i < MAX_SHORTS / 2; i++)
112 shorts[i] = (low >> (i * 8)) & 0xff;
113 shorts[i + MAX_SHORTS / 2] = (hi >> (i * 8) & 0xff);
117 /* Pack an array of MAX_SHORTS shorts into a two-word integer.
118 SHORTS points to the array of shorts.
119 The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces. */
122 decode (shorts, low, hi)
124 HOST_WIDE_INT *low, *hi;
127 HOST_WIDE_INT lv = 0, hv = 0;
129 for (i = 0; i < MAX_SHORTS / 2; i++)
131 lv |= (HOST_WIDE_INT) shorts[i] << (i * 8);
132 hv |= (HOST_WIDE_INT) shorts[i + MAX_SHORTS / 2] << (i * 8);
138 /* Make the integer constant T valid for its type
139 by setting to 0 or 1 all the bits in the constant
140 that don't belong in the type.
141 Yield 1 if a signed overflow occurs, 0 otherwise.
142 If OVERFLOW is nonzero, a signed overflow has already occurred
143 in calculating T, so propagate it.
145 Make the real constant T valid for its type by calling CHECK_FLOAT_VALUE,
149 force_fit_type (t, overflow)
153 HOST_WIDE_INT low, high;
156 if (TREE_CODE (t) == REAL_CST)
158 #ifdef CHECK_FLOAT_VALUE
159 CHECK_FLOAT_VALUE (TYPE_MODE (TREE_TYPE (t)), TREE_REAL_CST (t),
165 else if (TREE_CODE (t) != INTEGER_CST)
168 low = TREE_INT_CST_LOW (t);
169 high = TREE_INT_CST_HIGH (t);
171 if (TREE_CODE (TREE_TYPE (t)) == POINTER_TYPE)
174 prec = TYPE_PRECISION (TREE_TYPE (t));
176 /* First clear all bits that are beyond the type's precision. */
178 if (prec == 2 * HOST_BITS_PER_WIDE_INT)
180 else if (prec > HOST_BITS_PER_WIDE_INT)
182 TREE_INT_CST_HIGH (t)
183 &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
187 TREE_INT_CST_HIGH (t) = 0;
188 if (prec < HOST_BITS_PER_WIDE_INT)
189 TREE_INT_CST_LOW (t) &= ~((HOST_WIDE_INT) (-1) << prec);
192 /* Unsigned types do not suffer sign extension or overflow. */
193 if (TREE_UNSIGNED (TREE_TYPE (t)))
196 /* If the value's sign bit is set, extend the sign. */
197 if (prec != 2 * HOST_BITS_PER_WIDE_INT
198 && (prec > HOST_BITS_PER_WIDE_INT
199 ? (TREE_INT_CST_HIGH (t)
200 & ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)))
201 : TREE_INT_CST_LOW (t) & ((HOST_WIDE_INT) 1 << (prec - 1))))
203 /* Value is negative:
204 set to 1 all the bits that are outside this type's precision. */
205 if (prec > HOST_BITS_PER_WIDE_INT)
207 TREE_INT_CST_HIGH (t)
208 |= ((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
212 TREE_INT_CST_HIGH (t) = -1;
213 if (prec < HOST_BITS_PER_WIDE_INT)
214 TREE_INT_CST_LOW (t) |= ((HOST_WIDE_INT) (-1) << prec);
218 /* Yield nonzero if signed overflow occurred. */
220 ((overflow | (low ^ TREE_INT_CST_LOW (t)) | (high ^ TREE_INT_CST_HIGH (t)))
224 /* Add two doubleword integers with doubleword result.
225 Each argument is given as two `HOST_WIDE_INT' pieces.
226 One argument is L1 and H1; the other, L2 and H2.
227 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.
228 We use the 8-shorts representation internally. */
231 add_double (l1, h1, l2, h2, lv, hv)
232 HOST_WIDE_INT l1, h1, l2, h2;
233 HOST_WIDE_INT *lv, *hv;
235 short arg1[MAX_SHORTS];
236 short arg2[MAX_SHORTS];
237 register int carry = 0;
240 encode (arg1, l1, h1);
241 encode (arg2, l2, h2);
243 for (i = 0; i < MAX_SHORTS; i++)
245 carry += arg1[i] + arg2[i];
246 arg1[i] = carry & 0xff;
250 decode (arg1, lv, hv);
251 return overflow_sum_sign (h1, h2, *hv);
254 /* Negate a doubleword integer with doubleword result.
255 Return nonzero if the operation overflows, assuming it's signed.
256 The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
257 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.
258 We use the 8-shorts representation internally. */
261 neg_double (l1, h1, lv, hv)
262 HOST_WIDE_INT l1, h1;
263 HOST_WIDE_INT *lv, *hv;
269 return (*hv & h1) < 0;
279 /* Multiply two doubleword integers with doubleword result.
280 Return nonzero if the operation overflows, assuming it's signed.
281 Each argument is given as two `HOST_WIDE_INT' pieces.
282 One argument is L1 and H1; the other, L2 and H2.
283 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.
284 We use the 8-shorts representation internally. */
287 mul_double (l1, h1, l2, h2, lv, hv)
288 HOST_WIDE_INT l1, h1, l2, h2;
289 HOST_WIDE_INT *lv, *hv;
291 short arg1[MAX_SHORTS];
292 short arg2[MAX_SHORTS];
293 short prod[MAX_SHORTS * 2];
294 register int carry = 0;
295 register int i, j, k;
296 HOST_WIDE_INT toplow, tophigh, neglow, neghigh;
298 /* These cases are used extensively, arising from pointer combinations. */
303 int overflow = left_shift_overflows (h1, 1);
304 unsigned HOST_WIDE_INT temp = l1 + l1;
305 *hv = (h1 << 1) + (temp < l1);
311 int overflow = left_shift_overflows (h1, 2);
312 unsigned HOST_WIDE_INT temp = l1 + l1;
313 h1 = (h1 << 2) + ((temp < l1) << 1);
323 int overflow = left_shift_overflows (h1, 3);
324 unsigned HOST_WIDE_INT temp = l1 + l1;
325 h1 = (h1 << 3) + ((temp < l1) << 2);
328 h1 += (temp < l1) << 1;
338 encode (arg1, l1, h1);
339 encode (arg2, l2, h2);
341 bzero (prod, sizeof prod);
343 for (i = 0; i < MAX_SHORTS; i++)
344 for (j = 0; j < MAX_SHORTS; j++)
347 carry = arg1[i] * arg2[j];
351 prod[k] = carry & 0xff;
357 decode (prod, lv, hv); /* This ignores
358 prod[MAX_SHORTS] -> prod[MAX_SHORTS*2-1] */
360 /* Check for overflow by calculating the top half of the answer in full;
361 it should agree with the low half's sign bit. */
362 decode (prod+MAX_SHORTS, &toplow, &tophigh);
365 neg_double (l2, h2, &neglow, &neghigh);
366 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
370 neg_double (l1, h1, &neglow, &neghigh);
371 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
373 return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
376 /* Shift the doubleword integer in L1, H1 left by COUNT places
377 keeping only PREC bits of result.
378 Shift right if COUNT is negative.
379 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
380 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
383 lshift_double (l1, h1, count, prec, lv, hv, arith)
384 HOST_WIDE_INT l1, h1, count;
386 HOST_WIDE_INT *lv, *hv;
389 short arg1[MAX_SHORTS];
395 rshift_double (l1, h1, - count, prec, lv, hv, arith);
399 encode (arg1, l1, h1);
407 for (i = 0; i < MAX_SHORTS; i++)
409 carry += arg1[i] << 1;
410 arg1[i] = carry & 0xff;
416 decode (arg1, lv, hv);
419 /* Shift the doubleword integer in L1, H1 right by COUNT places
420 keeping only PREC bits of result. COUNT must be positive.
421 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
422 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
425 rshift_double (l1, h1, count, prec, lv, hv, arith)
426 HOST_WIDE_INT l1, h1, count;
428 HOST_WIDE_INT *lv, *hv;
431 short arg1[MAX_SHORTS];
435 encode (arg1, l1, h1);
442 carry = arith && arg1[7] >> 7;
443 for (i = MAX_SHORTS - 1; i >= 0; i--)
447 arg1[i] = (carry >> 1) & 0xff;
452 decode (arg1, lv, hv);
/* Rotate the doubleword integer in L1, H1 left by COUNT places
456 keeping only PREC bits of result.
457 Rotate right if COUNT is negative.
458 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
461 lrotate_double (l1, h1, count, prec, lv, hv)
462 HOST_WIDE_INT l1, h1, count;
464 HOST_WIDE_INT *lv, *hv;
466 short arg1[MAX_SHORTS];
472 rrotate_double (l1, h1, - count, prec, lv, hv);
476 encode (arg1, l1, h1);
481 carry = arg1[MAX_SHORTS - 1] >> 7;
484 for (i = 0; i < MAX_SHORTS; i++)
486 carry += arg1[i] << 1;
487 arg1[i] = carry & 0xff;
493 decode (arg1, lv, hv);
/* Rotate the doubleword integer in L1, H1 right by COUNT places
497 keeping only PREC bits of result. COUNT must be positive.
498 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
501 rrotate_double (l1, h1, count, prec, lv, hv)
502 HOST_WIDE_INT l1, h1, count;
504 HOST_WIDE_INT *lv, *hv;
506 short arg1[MAX_SHORTS];
510 encode (arg1, l1, h1);
518 for (i = MAX_SHORTS - 1; i >= 0; i--)
522 arg1[i] = (carry >> 1) & 0xff;
527 decode (arg1, lv, hv);
530 /* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
531 for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
532 CODE is a tree code for a kind of division, one of
533 TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
   It controls how the quotient is rounded to an integer.
536 Return nonzero if the operation overflows.
537 UNS nonzero says do unsigned division. */
/* NOTE(review): a number of lines are missing from this excerpt
   (braces, the `switch (code)' head, the declarations of `quo_neg'
   and `digit', and several returns); the comments below annotate
   only the code that is visible.  */
div_and_round_double (code, uns,
		      lnum_orig, hnum_orig, lden_orig, hden_orig,
		      lquo, hquo, lrem, hrem)
     HOST_WIDE_INT lnum_orig, hnum_orig;	/* num == numerator == dividend */
     HOST_WIDE_INT lden_orig, hden_orig;	/* den == denominator == divisor */
     HOST_WIDE_INT *lquo, *hquo, *lrem, *hrem;
  short num[MAX_SHORTS + 1];	/* extra element for scaling.  */
  short den[MAX_SHORTS], quo[MAX_SHORTS];
  register int i, j, work;
  register int carry = 0;
  /* Work on copies so the originals survive for remainder computation.  */
  HOST_WIDE_INT lnum = lnum_orig;
  HOST_WIDE_INT hnum = hnum_orig;
  HOST_WIDE_INT lden = lden_orig;
  HOST_WIDE_INT hden = hden_orig;

  /* Division by zero is rejected up front.  */
  if ((hden == 0) && (lden == 0))

  /* calculate quotient sign and convert operands to unsigned.  */
      /* (minimum integer) / (-1) is the only overflow case.  */
      if (neg_double (lnum, hnum, &lnum, &hnum) && (lden & hden) == -1)
      neg_double (lden, hden, &lden, &hden);

  if (hnum == 0 && hden == 0)
    {				/* single precision */
      /* This unsigned division rounds toward zero.  */
      *lquo = lnum / (unsigned HOST_WIDE_INT) lden;

    {				/* trivial case: dividend < divisor */
      /* hden != 0 already checked.  */

  bzero (quo, sizeof quo);

  bzero (num, sizeof num);	/* to zero 9th element */
  bzero (den, sizeof den);

  encode (num, lnum, hnum);
  encode (den, lden, hden);

  /* This code requires more than just hden == 0.
     We also have to require that we don't need more than three bytes
     to hold CARRY.  If we ever did need four bytes to hold it, we
     would lose part of it when computing WORK on the next round.  */
  if (hden == 0 && (((unsigned HOST_WIDE_INT) lden << 8) >> 8) == lden)
    {				/* simpler algorithm */
      /* hnum != 0 already checked.  */
      /* Long division by a single-precision divisor, one 8-bit digit
	 at a time, high digit first.  */
      for (i = MAX_SHORTS - 1; i >= 0; i--)
	  work = num[i] + (carry << 8);
	  quo[i] = work / (unsigned HOST_WIDE_INT) lden;
	  carry = work % (unsigned HOST_WIDE_INT) lden;
  else {			/* full double precision,
				   with thanks to Don Knuth's
				   "Seminumerical Algorithms".  */
    int quo_est, scale, num_hi_sig, den_hi_sig, quo_hi_sig;

    /* Find the highest non-zero divisor digit.  */
    for (i = MAX_SHORTS - 1; ; i--)
    /* ... and the highest non-zero dividend digit.  */
    for (i = MAX_SHORTS - 1; ; i--)
    quo_hi_sig = num_hi_sig - den_hi_sig + 1;

    /* Insure that the first digit of the divisor is at least BASE/2.
       This is required by the quotient digit estimation algorithm.  */

    scale = BASE / (den[den_hi_sig] + 1);
    if (scale > 1) {		/* scale divisor and dividend */
      for (i = 0; i <= MAX_SHORTS - 1; i++) {
	work = (num[i] * scale) + carry;
	num[i] = work & 0xff;
	if (num[i] != 0) num_hi_sig = i;
      for (i = 0; i <= MAX_SHORTS - 1; i++) {
	work = (den[i] * scale) + carry;
	den[i] = work & 0xff;
	if (den[i] != 0) den_hi_sig = i;

    /* Main loop: one quotient digit per iteration, high digit first.  */
    for (i = quo_hi_sig; i > 0; i--) {
      /* guess the next quotient digit, quo_est, by dividing the first
	 two remaining dividend digits by the high order quotient digit.
	 quo_est is never low and is at most 2 high.  */

      int num_hi;		/* index of highest remaining dividend digit */

      num_hi = i + den_hi_sig;

      work = (num[num_hi] * BASE) + (num_hi > 0 ? num[num_hi - 1] : 0);
      if (num[num_hi] != den[den_hi_sig]) {
	quo_est = work / den[den_hi_sig];

      /* refine quo_est so it's usually correct, and at most one high.  */
      while ((den[den_hi_sig - 1] * quo_est)
	     > (((work - (quo_est * den[den_hi_sig])) * BASE)
		+ ((num_hi - 1) > 0 ? num[num_hi - 2] : 0)))

      /* Try QUO_EST as the quotient digit, by multiplying the
	 divisor by QUO_EST and subtracting from the remaining dividend.
	 Keep in mind that QUO_EST is the I - 1st digit.  */

      for (j = 0; j <= den_hi_sig; j++)
	  work = num[i + j - 1] - (quo_est * den[j]) + carry;
	  num[i + j - 1] = digit;

      /* if quo_est was high by one, then num[i] went negative and
	 we need to correct things.  */

	carry = 0;		/* add divisor back in */
	for (j = 0; j <= den_hi_sig; j++)
	    work = num[i + j - 1] + den[j] + carry;
	    num[i + j - 1] = work;
	num [num_hi] += carry;

      /* store the quotient digit.  */
      quo[i - 1] = quo_est;

  decode (quo, lquo, hquo);

  /* if result is negative, make it so.  */
    neg_double (*lquo, *hquo, lquo, hquo);

  /* compute trial remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);

    case TRUNC_MOD_EXPR:	/* round toward zero */
    case EXACT_DIV_EXPR:	/* for this one, it shouldn't matter */

    case FLOOR_MOD_EXPR:	/* round toward negative infinity */
      if (quo_neg && (*lrem != 0 || *hrem != 0))   /* ratio < 0 && rem != 0 */
	  add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
      else return overflow;

    case CEIL_MOD_EXPR:		/* round toward positive infinity */
      if (!quo_neg && (*lrem != 0 || *hrem != 0))  /* ratio > 0 && rem != 0 */
	  add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
      else return overflow;

    case ROUND_MOD_EXPR:	/* round to closest integer */
	HOST_WIDE_INT labs_rem = *lrem, habs_rem = *hrem;
	HOST_WIDE_INT labs_den = lden, habs_den = hden, ltwice, htwice;

	/* get absolute values */
	if (*hrem < 0) neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
	if (hden < 0) neg_double (lden, hden, &labs_den, &habs_den);

	/* if (2 * abs (lrem) >= abs (lden)) */
	/* NOTE(review): "<wice" below looks like "&ltwice" with its
	   "&lt" eaten by an HTML-entity pass — restore the argument
	   to "&ltwice" before compiling.  */
	mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
		    labs_rem, habs_rem, <wice, &htwice);
	if (((unsigned HOST_WIDE_INT) habs_den
	     < (unsigned HOST_WIDE_INT) htwice)
	    || (((unsigned HOST_WIDE_INT) habs_den
		 == (unsigned HOST_WIDE_INT) htwice)
		/* NOTE(review): inconsistent cast spelling — every
		   other occurrence writes (unsigned HOST_WIDE_INT);
		   normalize for readability.  */
		&& ((HOST_WIDE_INT unsigned) labs_den
		    < (unsigned HOST_WIDE_INT) ltwice)))
	    add_double (*lquo, *hquo,
			(HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
	    add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
	else return overflow;

  /* compute true remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
817 #ifndef REAL_ARITHMETIC
818 /* Effectively truncate a real value to represent the nearest possible value
819 in a narrower mode. The result is actually represented in the same data
820 type as the argument, but its value is usually different.
822 A trap may occur during the FP operations and it is the responsibility
823 of the calling function to have a handler established. */
826 real_value_truncate (mode, arg)
827 enum machine_mode mode;
830 return REAL_VALUE_TRUNCATE (mode, arg);
833 #if TARGET_FLOAT_FORMAT == IEEE_FLOAT_FORMAT
835 /* Check for infinity in an IEEE double precision number. */
841 /* The IEEE 64-bit double format. */
846 unsigned exponent : 11;
847 unsigned mantissa1 : 20;
852 unsigned mantissa1 : 20;
853 unsigned exponent : 11;
859 if (u.big_endian.sign == 1)
862 return (u.big_endian.exponent == 2047
863 && u.big_endian.mantissa1 == 0
864 && u.big_endian.mantissa2 == 0);
869 return (u.little_endian.exponent == 2047
870 && u.little_endian.mantissa1 == 0
871 && u.little_endian.mantissa2 == 0);
875 /* Check whether an IEEE double precision number is a NaN. */
881 /* The IEEE 64-bit double format. */
886 unsigned exponent : 11;
887 unsigned mantissa1 : 20;
892 unsigned mantissa1 : 20;
893 unsigned exponent : 11;
899 if (u.big_endian.sign == 1)
902 return (u.big_endian.exponent == 2047
903 && (u.big_endian.mantissa1 != 0
904 || u.big_endian.mantissa2 != 0));
909 return (u.little_endian.exponent == 2047
910 && (u.little_endian.mantissa1 != 0
911 || u.little_endian.mantissa2 != 0));
915 /* Check for a negative IEEE double precision number. */
921 /* The IEEE 64-bit double format. */
926 unsigned exponent : 11;
927 unsigned mantissa1 : 20;
932 unsigned mantissa1 : 20;
933 unsigned exponent : 11;
939 if (u.big_endian.sign == 1)
942 return u.big_endian.sign;
947 return u.little_endian.sign;
950 #else /* Target not IEEE */
952 /* Let's assume other float formats don't have infinity.
953 (This can be overridden by redefining REAL_VALUE_ISINF.) */
961 /* Let's assume other float formats don't have NaNs.
962 (This can be overridden by redefining REAL_VALUE_ISNAN.) */
970 /* Let's assume other float formats don't have minus zero.
971 (This can be overridden by redefining REAL_VALUE_NEGATIVE.) */
978 #endif /* Target not IEEE */
979 #endif /* no REAL_ARITHMETIC */
981 /* Split a tree IN into a constant and a variable part
982 that could be combined with CODE to make IN.
983 CODE must be a commutative arithmetic operation.
984 Store the constant part into *CONP and the variable in &VARP.
985 Return 1 if this was done; zero means the tree IN did not decompose
988 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.
989 Therefore, we must tell the caller whether the variable part
990 was subtracted. We do this by storing 1 or -1 into *VARSIGNP.
991 The value stored is the coefficient for the variable term.
992 The constant term we return should always be added;
993 we negate it if necessary. */
/* NOTE(review): the enclosing braces, the remaining parameter
   declarations, and the return statements of this function are
   missing from this excerpt; comments annotate the visible code.  */
split_tree (in, code, varp, conp, varsignp)
  register tree outtype = TREE_TYPE (in);

  /* Strip any conversions that don't change the machine mode.  */
  while ((TREE_CODE (in) == NOP_EXPR
	  || TREE_CODE (in) == CONVERT_EXPR)
	 && (TYPE_MODE (TREE_TYPE (in))
	     == TYPE_MODE (TREE_TYPE (TREE_OPERAND (in, 0)))))
    in = TREE_OPERAND (in, 0);

  if (TREE_CODE (in) == code
      || (! FLOAT_TYPE_P (TREE_TYPE (in))
	  /* We can associate addition and subtraction together
	     (even though the C standard doesn't say so)
	     for integers because the value is not affected.
	     For reals, the value might be affected, so we can't.  */
	  && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
	      || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
      /* NOTE(review): this local `code' shadows the parameter `code'
	 tested just above — confusing; consider renaming.  */
      enum tree_code code = TREE_CODE (TREE_OPERAND (in, 0));
      /* Case 1: first operand is an integer constant.  The variable
	 part is operand 1; its sign is negative for MINUS_EXPR.  */
      if (code == INTEGER_CST)
	  *conp = TREE_OPERAND (in, 0);
	  *varp = TREE_OPERAND (in, 1);
	  if (TYPE_MODE (TREE_TYPE (*varp)) != TYPE_MODE (outtype)
	      && TREE_TYPE (*varp) != outtype)
	    *varp = convert (outtype, *varp);
	  *varsignp = (TREE_CODE (in) == MINUS_EXPR) ? -1 : 1;
      /* Case 2: second operand is the constant part.  */
      if (TREE_CONSTANT (TREE_OPERAND (in, 1)))
	  *conp = TREE_OPERAND (in, 1);
	  *varp = TREE_OPERAND (in, 0);
	  if (TYPE_MODE (TREE_TYPE (*varp)) != TYPE_MODE (outtype)
	      && TREE_TYPE (*varp) != outtype)
	    *varp = convert (outtype, *varp);
	  if (TREE_CODE (in) == MINUS_EXPR)
	      /* If operation is subtraction and constant is second,
		 must negate it to get an additive constant.
		 And this cannot be done unless it is a manifest constant.
		 It could also be the address of a static variable.
		 We cannot negate that, so give up.  */
	      if (TREE_CODE (*conp) == INTEGER_CST)
		/* Subtracting from integer_zero_node loses for long long.  */
		*conp = fold (build1 (NEGATE_EXPR, TREE_TYPE (*conp), *conp));
      /* Case 3: first operand is a (non-INTEGER_CST) constant.  */
      if (TREE_CONSTANT (TREE_OPERAND (in, 0)))
	  *conp = TREE_OPERAND (in, 0);
	  *varp = TREE_OPERAND (in, 1);
	  if (TYPE_MODE (TREE_TYPE (*varp)) != TYPE_MODE (outtype)
	      && TREE_TYPE (*varp) != outtype)
	    *varp = convert (outtype, *varp);
	  *varsignp = (TREE_CODE (in) == MINUS_EXPR) ? -1 : 1;
1070 /* Combine two constants NUM and ARG2 under operation CODE
1071 to produce a new constant.
1072 We assume ARG1 and ARG2 have the same data type,
1073 or at least are the same kind of constant and the same machine mode.
1075 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
/* NOTE(review): several lines of this function are missing from this
   excerpt (the `switch (code)' heads, the declarations of `t' and
   `overflow', braces and break statements); the comments below
   annotate only the visible code.  */
const_binop (code, arg1, arg2, notrunc)
     enum tree_code code;
     register tree arg1, arg2;
  if (TREE_CODE (arg1) == INTEGER_CST)
      /* Two-word integer arithmetic: split each operand into its low
	 and high `HOST_WIDE_INT' halves.  */
      register HOST_WIDE_INT int1l = TREE_INT_CST_LOW (arg1);
      register HOST_WIDE_INT int1h = TREE_INT_CST_HIGH (arg1);
      HOST_WIDE_INT int2l = TREE_INT_CST_LOW (arg2);
      HOST_WIDE_INT int2h = TREE_INT_CST_HIGH (arg2);
      HOST_WIDE_INT low, hi;
      HOST_WIDE_INT garbagel, garbageh;	/* unwanted half of div/mod result */
      int uns = TREE_UNSIGNED (TREE_TYPE (arg1));

      /* Bitwise operations work on the two words independently.  */
	  t = build_int_2 (int1l | int2l, int1h | int2h);

	  t = build_int_2 (int1l ^ int2l, int1h ^ int2h);

	  t = build_int_2 (int1l & int2l, int1h & int2h);

	case BIT_ANDTC_EXPR:	/* and with complement of second operand */
	  t = build_int_2 (int1l & ~int2l, int1h & ~int2h);

	  /* It's unclear from the C standard whether shifts can overflow.
	     The following code ignores overflow; perhaps a C standard
	     interpretation ruling is needed.  */
	  lshift_double (int1l, int1h, int2l,
			 TYPE_PRECISION (TREE_TYPE (arg1)),
	  t = build_int_2 (low, hi);
	  TREE_TYPE (t) = TREE_TYPE (arg1);
	    force_fit_type (t, 0);
	  TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
	  TREE_CONSTANT_OVERFLOW (t)
	    = TREE_CONSTANT_OVERFLOW (arg1) | TREE_CONSTANT_OVERFLOW (arg2);

	  lrotate_double (int1l, int1h, int2l,
			  TYPE_PRECISION (TREE_TYPE (arg1)),
	  t = build_int_2 (low, hi);

	      /* Unsigned addition: overflow iff the sum wrapped.  */
	      if ((unsigned HOST_WIDE_INT) int2l < int1l)
		  overflow = int2h < hi;
	      t = build_int_2 (int2l, int2h);

	      if ((unsigned HOST_WIDE_INT) int1l < int2l)
		  overflow = int1h < hi;
	      t = build_int_2 (int1l, int1h);

	  overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
	  t = build_int_2 (low, hi);

	  /* Subtraction: x - 0 is just x.  */
	  if (int2h == 0 && int2l == 0)
	      t = build_int_2 (int1l, int1h);
	  /* Otherwise compute x + (-y), checking for overflow by signs.  */
	  neg_double (int2l, int2h, &low, &hi);
	  add_double (int1l, int1h, low, hi, &low, &hi);
	  overflow = overflow_sum_sign (hi, int2h, int1h);
	  t = build_int_2 (low, hi);

	  /* Optimize simple cases.  */
	      unsigned HOST_WIDE_INT temp;

		  /* x * 0 == 0.  */
		  t = build_int_2 (0, 0);

		  /* x * 1 == x.  */
		  t = build_int_2 (int2l, int2h);

		  /* x * 2: one doubling; (temp < int2l) is the carry.  */
		  overflow = left_shift_overflows (int2h, 1);
		  temp = int2l + int2l;
		  int2h = (int2h << 1) + (temp < int2l);
		  t = build_int_2 (temp, int2h);

#if 0 /* This code can lose carries.  */
		  temp = int2l + int2l + int2l;
		  int2h = int2h * 3 + (temp < int2l);
		  t = build_int_2 (temp, int2h);

		  /* x * 4: two doublings.  */
		  overflow = left_shift_overflows (int2h, 2);
		  temp = int2l + int2l;
		  int2h = (int2h << 2) + ((temp < int2l) << 1);
		  int2h += (temp < int2l);
		  t = build_int_2 (temp, int2h);

		  /* x * 8: three doublings.  */
		  overflow = left_shift_overflows (int2h, 3);
		  temp = int2l + int2l;
		  int2h = (int2h << 3) + ((temp < int2l) << 2);
		  int2h += (temp < int2l) << 1;
		  int2h += (temp < int2l);
		  t = build_int_2 (temp, int2h);

	      /* 0 * y == 0.  */
	      t = build_int_2 (0, 0);

	      /* 1 * y == y.  */
	      t = build_int_2 (int1l, int1h);

	  overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
	  t = build_int_2 (low, hi);

	case TRUNC_DIV_EXPR:
	case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
	case EXACT_DIV_EXPR:
	  /* This is a shortcut for a common special case.
	     It reduces the number of tree nodes generated
	  if (int2h == 0 && int2l > 0
	      && TREE_TYPE (arg1) == sizetype
	      && int1h == 0 && int1l >= 0)
	      if (code == CEIL_DIV_EXPR)
	      return size_int (int1l / int2l);

	case ROUND_DIV_EXPR:
	  /* x / 1 == x.  */
	  if (int2h == 0 && int2l == 1)
	      t = build_int_2 (int1l, int1h);
	  /* x / x == 1 (and 0 / 0 handled separately).  */
	  if (int1l == int2l && int1h == int2h)
	      if ((int1l | int1h) == 0)
	      t = build_int_2 (1, 0);
	  /* General case: keep the quotient, discard the remainder.  */
	  overflow = div_and_round_double (code, uns,
					   int1l, int1h, int2l, int2h,
					   &low, &hi, &garbagel, &garbageh);
	  t = build_int_2 (low, hi);

	case TRUNC_MOD_EXPR: case ROUND_MOD_EXPR:
	case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
	  /* Same routine, but keep the remainder instead.  */
	  overflow = div_and_round_double (code, uns,
					   int1l, int1h, int2l, int2h,
					   &garbagel, &garbageh, &low, &hi);
	  t = build_int_2 (low, hi);

	    /* Unsigned double-word comparison for MIN/MAX.  */
	    low = (((unsigned HOST_WIDE_INT) int1h
		    < (unsigned HOST_WIDE_INT) int2h)
		   || (((unsigned HOST_WIDE_INT) int1h
			== (unsigned HOST_WIDE_INT) int2h)
		       && ((unsigned HOST_WIDE_INT) int1l
			   < (unsigned HOST_WIDE_INT) int2l)));

	    /* Signed comparison: high words signed, low words unsigned.  */
	    low = ((int1h < int2h)
		   || ((int1h == int2h)
		       && ((unsigned HOST_WIDE_INT) int1l
			   < (unsigned HOST_WIDE_INT) int2l)));

	  /* LOW nonzero means arg1 < arg2: pick accordingly.  */
	  if (low == (code == MIN_EXPR))
	    t = build_int_2 (int1l, int1h);
	    t = build_int_2 (int2l, int2h);

      TREE_TYPE (t) = TREE_TYPE (arg1);
	/* With NOTRUNC, report signed overflow directly; otherwise let
	   force_fit_type mask the value and detect it.  */
	= ((notrunc ? !uns && overflow : force_fit_type (t, overflow))
	   | TREE_OVERFLOW (arg1)
	   | TREE_OVERFLOW (arg2));
      TREE_CONSTANT_OVERFLOW (t) = (TREE_OVERFLOW (t)
				    | TREE_CONSTANT_OVERFLOW (arg1)
				    | TREE_CONSTANT_OVERFLOW (arg2));

#if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
  if (TREE_CODE (arg1) == REAL_CST)
      REAL_VALUE_TYPE d1 = TREE_REAL_CST (arg1);
      REAL_VALUE_TYPE d2 = TREE_REAL_CST (arg2);
      REAL_VALUE_TYPE value;

      /* Floating overflow traps longjmp back here (see float_error).  */
      if (setjmp (float_error))
	  t = copy_node (arg1);
      set_float_handler (float_error);

#ifdef REAL_ARITHMETIC
      REAL_ARITHMETIC (value, code, d1, d2);
#ifndef REAL_INFINITY
	  value = MIN (d1, d2);
	  value = MAX (d1, d2);
#endif /* no REAL_ARITHMETIC */
      /* Narrow the host-precision result to the constant's mode.  */
      t = build_real (TREE_TYPE (arg1),
		      real_value_truncate (TYPE_MODE (TREE_TYPE (arg1)), value));
      set_float_handler (NULL_PTR);
	= (force_fit_type (t, overflow)
	   | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
      TREE_CONSTANT_OVERFLOW (t)
	   | TREE_CONSTANT_OVERFLOW (arg1)
	   | TREE_CONSTANT_OVERFLOW (arg2);
#endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */

  if (TREE_CODE (arg1) == COMPLEX_CST)
      register tree r1 = TREE_REALPART (arg1);
      register tree i1 = TREE_IMAGPART (arg1);
      register tree r2 = TREE_REALPART (arg2);
      register tree i2 = TREE_IMAGPART (arg2);

	  /* Complex addition: componentwise.  */
	  t = build_complex (const_binop (PLUS_EXPR, r1, r2, notrunc),
			     const_binop (PLUS_EXPR, i1, i2, notrunc));

	  /* Complex subtraction: componentwise.  */
	  t = build_complex (const_binop (MINUS_EXPR, r1, r2, notrunc),
			     const_binop (MINUS_EXPR, i1, i2, notrunc));

	  /* Complex multiplication:
	     (r1 + i1*i)(r2 + i2*i) = (r1*r2 - i1*i2) + (r1*i2 + i1*r2)*i.  */
	  t = build_complex (const_binop (MINUS_EXPR,
					  const_binop (MULT_EXPR,
					  const_binop (MULT_EXPR,
			     const_binop (PLUS_EXPR,
					  const_binop (MULT_EXPR,
					  const_binop (MULT_EXPR,

	    /* Complex division: multiply by the conjugate and divide
	       each part by |r2 + i2*i|^2.  */
	    register tree magsquared
	      = const_binop (PLUS_EXPR,
			     const_binop (MULT_EXPR, r2, r2, notrunc),
			     const_binop (MULT_EXPR, i2, i2, notrunc),

	      (const_binop (INTEGRAL_TYPE_P (TREE_TYPE (r1))
			    ? TRUNC_DIV_EXPR : RDIV_EXPR,
			    const_binop (PLUS_EXPR,
					 const_binop (MULT_EXPR, r1, r2,
					 const_binop (MULT_EXPR, i1, i2,
			    magsquared, notrunc),
	       const_binop (INTEGRAL_TYPE_P (TREE_TYPE (r1))
			    ? TRUNC_DIV_EXPR : RDIV_EXPR,
			    const_binop (MINUS_EXPR,
					 const_binop (MULT_EXPR, i1, r2,
					 const_binop (MULT_EXPR, r1, i2,
			    magsquared, notrunc));

  TREE_TYPE (t) = TREE_TYPE (arg1);
1475 /* Return an INTEGER_CST with value V and type from `sizetype'. */
/* NOTE(review): the function's name line and the declaration of `t'
   are missing from this excerpt; comments annotate the visible code.  */
     unsigned int number;
  /* Type-size nodes already made for small sizes.  */
  static tree size_table[2*HOST_BITS_PER_WIDE_INT + 1];

  /* Return the cached node if one was already built for this value.  */
  if (number < 2*HOST_BITS_PER_WIDE_INT + 1
      && size_table[number] != 0)
    return size_table[number];
  if (number < 2*HOST_BITS_PER_WIDE_INT + 1)
      /* Cacheable value: build it on the permanent obstack so the
	 cached node outlives the current function's allocation.  */
      push_obstacks_nochange ();
      /* Make this a permanent node.  */
      end_temporary_allocation ();
      t = build_int_2 (number, 0);
      TREE_TYPE (t) = sizetype;
      size_table[number] = t;
      /* Large values get a fresh, uncached node each time.  */
      t = build_int_2 (number, 0);
      TREE_TYPE (t) = sizetype;
1506 /* Combine operands OP1 and OP2 with arithmetic operation CODE.
1507 CODE is a tree code. Data type is taken from `sizetype'.
1508 If the operands are constant, so is the result. */
1511 size_binop (code, arg0, arg1)
1512 enum tree_code code;
1515 /* Handle the special case of two integer constants faster. */
1516 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
1518 /* And some specific cases even faster than that. */
/* Identities 0 + X, X - 0 and 1 * X need no arithmetic at all. */
1519 if (code == PLUS_EXPR
1520 && TREE_INT_CST_LOW (arg0) == 0
1521 && TREE_INT_CST_HIGH (arg0) == 0)
1523 if (code == MINUS_EXPR
1524 && TREE_INT_CST_LOW (arg1) == 0
1525 && TREE_INT_CST_HIGH (arg1) == 0)
1527 if (code == MULT_EXPR
1528 && TREE_INT_CST_LOW (arg0) == 1
1529 && TREE_INT_CST_HIGH (arg0) == 0)
1531 /* Handle general case of two integer constants. */
1532 return const_binop (code, arg0, arg1, 1);
1535 if (arg0 == error_mark_node || arg1 == error_mark_node)
1536 return error_mark_node;
/* Non-constant operands: build the expression and let fold simplify it. */
1538 return fold (build (code, sizetype, arg0, arg1));
1541 /* Given T, a tree representing type conversion of ARG1, a constant,
1542 return a constant tree representing the result of conversion. */
1545 fold_convert (t, arg1)
1549 register tree type = TREE_TYPE (t);
/* Integer/pointer targets: truncate or sign-extend integer constants,
   or truncate real constants toward zero. */
1552 if (TREE_CODE (type) == POINTER_TYPE || INTEGRAL_TYPE_P (type))
1554 if (TREE_CODE (arg1) == INTEGER_CST)
1556 /* Given an integer constant, make new constant with new type,
1557 appropriately sign-extended or truncated. */
1558 t = build_int_2 (TREE_INT_CST_LOW (arg1),
1559 TREE_INT_CST_HIGH (arg1));
1560 TREE_TYPE (t) = type;
1561 /* Indicate an overflow if (1) ARG1 already overflowed,
1562 or (2) force_fit_type indicates an overflow.
1563 Tell force_fit_type that an overflow has already occurred
1564 if ARG1 is a too-large unsigned value and T is signed. */
1566 = (TREE_OVERFLOW (arg1)
1567 | force_fit_type (t,
1568 (TREE_INT_CST_HIGH (arg1) < 0
1569 & (TREE_UNSIGNED (type)
1570 < TREE_UNSIGNED (TREE_TYPE (arg1))))));
1571 TREE_CONSTANT_OVERFLOW (t)
1572 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
1574 #if !defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
1575 else if (TREE_CODE (arg1) == REAL_CST)
1577 REAL_VALUE_TYPE l, x, u;
1579 l = real_value_from_int_cst (TYPE_MIN_VALUE (type));
1580 x = TREE_REAL_CST (arg1);
1581 u = real_value_from_int_cst (TYPE_MAX_VALUE (type));
1583 /* See if X will be in range after truncation towards 0.
1584 To compensate for truncation, move the bounds away from 0,
1585 but reject if X exactly equals the adjusted bounds. */
1586 #ifdef REAL_ARITHMETIC
1587 REAL_ARITHMETIC (l, MINUS_EXPR, l, dconst1);
1588 REAL_ARITHMETIC (u, PLUS_EXPR, u, dconst1);
1593 if (! (REAL_VALUES_LESS (l, x) && REAL_VALUES_LESS (x, u)))
1596 #ifndef REAL_ARITHMETIC
/* Host-double fallback: split the value into two half-words so each
   piece fits a HOST_WIDE_INT without overflowing during the cast. */
1599 HOST_WIDE_INT low, high;
1600 HOST_WIDE_INT half_word
1601 = (HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2);
1603 d = TREE_REAL_CST (arg1);
1607 high = (HOST_WIDE_INT) (d / half_word / half_word);
1608 d -= (REAL_VALUE_TYPE) high * half_word * half_word;
1609 if (d >= (REAL_VALUE_TYPE) half_word * half_word / 2)
1611 low = d - (REAL_VALUE_TYPE) half_word * half_word / 2;
1612 low |= (HOST_WIDE_INT) -1 << (HOST_BITS_PER_WIDE_INT - 1);
1615 low = (HOST_WIDE_INT) d;
/* The magnitude was converted above; negate the two-word result if
   the original constant was negative. */
1616 if (TREE_REAL_CST (arg1) < 0)
1617 neg_double (low, high, &low, &high);
1618 t = build_int_2 (low, high);
1622 HOST_WIDE_INT low, high;
1623 REAL_VALUE_TO_INT (&low, &high, (TREE_REAL_CST (arg1)));
1624 t = build_int_2 (low, high);
1627 TREE_TYPE (t) = type;
1629 = TREE_OVERFLOW (arg1) | force_fit_type (t, overflow);
1630 TREE_CONSTANT_OVERFLOW (t)
1631 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
1633 #endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
1634 TREE_TYPE (t) = type;
1636 else if (TREE_CODE (type) == REAL_TYPE)
1638 #if !defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
1639 if (TREE_CODE (arg1) == INTEGER_CST)
1640 return build_real_from_int_cst (type, arg1);
1641 #endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
1642 if (TREE_CODE (arg1) == REAL_CST)
/* Guard the real-to-real truncation with the float trap handler:
   if the target-format conversion overflows, control returns here
   via longjmp and the original constant is kept. */
1644 if (setjmp (float_error))
1647 t = copy_node (arg1);
1650 set_float_handler (float_error);
1652 t = build_real (type, real_value_truncate (TYPE_MODE (type),
1653 TREE_REAL_CST (arg1)));
1654 set_float_handler (NULL_PTR);
1658 = TREE_OVERFLOW (arg1) | force_fit_type (t, overflow);
1659 TREE_CONSTANT_OVERFLOW (t)
1660 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
1664 TREE_CONSTANT (t) = 1;
1668 /* Return an expr equal to X but certainly not valid as an lvalue.
1669 Also make sure it is not valid as a null pointer constant. */
1670
/* Literal constants and addresses are already non-lvalues, so X can be
   returned unchanged except for the literal-zero case below. */
1677 /* These things are certainly not lvalues. */
1678 if (TREE_CODE (x) == NON_LVALUE_EXPR
1679 || TREE_CODE (x) == INTEGER_CST
1680 || TREE_CODE (x) == REAL_CST
1681 || TREE_CODE (x) == STRING_CST
1682 || TREE_CODE (x) == ADDR_EXPR)
1684 if (TREE_CODE (x) == INTEGER_CST && integer_zerop (x))
1686 /* Use NOP_EXPR instead of NON_LVALUE_EXPR
1687 so convert_for_assignment won't strip it.
1688 This is so this 0 won't be treated as a null pointer constant. */
1689 result = build1 (NOP_EXPR, TREE_TYPE (x), x);
1690 TREE_CONSTANT (result) = TREE_CONSTANT (x);
1696 result = build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
1697 TREE_CONSTANT (result) = TREE_CONSTANT (x);
1701 /* When pedantic, return an expr equal to X but certainly not valid as a
1702 pedantic lvalue. Otherwise, return X. */
1705 pedantic_non_lvalue (x)
/* Delegates to non_lvalue; only wraps when -pedantic is in effect. */
1709 return non_lvalue (x);
1714 /* Given a tree comparison code, return the code that is the logical inverse
1715 of the given code. It is not safe to do this for floating-point
1716 comparisons, except for NE_EXPR and EQ_EXPR. */
/* NOTE(review): e.g. LT inverts to GE; presumably unsafe for IEEE floats
   because unordered (NaN) operands make both LT and GE false — confirm. */
1718 static enum tree_code
1719 invert_tree_comparison (code)
1720 enum tree_code code;
1741 /* Similar, but return the comparison that results if the operands are
1742 swapped. This is safe for floating-point. */
/* E.g. A < B and B > A are the same test, so operand order never matters. */
1744 static enum tree_code
1745 swap_tree_comparison (code)
1746 enum tree_code code;
1766 /* Return nonzero if CODE is a tree code that represents a truth value. */
1769 truth_value_p (code)
1770 enum tree_code code;
/* True for all comparison codes (class '<') and the boolean connectives. */
1772 return (TREE_CODE_CLASS (code) == '<'
1773 || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
1774 || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
1775 || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
1778 /* Return nonzero if two operands are necessarily equal.
1779 If ONLY_CONST is non-zero, only return non-zero for constants.
1780 This function tests whether the operands are indistinguishable;
1781 it does not test whether they are equal using C's == operation.
1782 The distinction is important for IEEE floating point, because
1783 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
1784 (2) two NaNs may be indistinguishable, but NaN!=NaN. */
1787 operand_equal_p (arg0, arg1, only_const)
1791 /* If both types don't have the same signedness, then we can't consider
1792 them equal. We must check this before the STRIP_NOPS calls
1793 because they may change the signedness of the arguments. */
1794 if (TREE_UNSIGNED (TREE_TYPE (arg0)) != TREE_UNSIGNED (TREE_TYPE (arg1)))
1800 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
1801 We don't care about side effects in that case because the SAVE_EXPR
1802 takes care of that for us. */
1803 if (TREE_CODE (arg0) == SAVE_EXPR && arg0 == arg1)
1804 return ! only_const;
/* Apart from the SAVE_EXPR case above, operands with side effects can
   never be proven equal. */
1806 if (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1))
1809 if (TREE_CODE (arg0) == TREE_CODE (arg1)
1810 && TREE_CODE (arg0) == ADDR_EXPR
1811 && TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0))
/* Integer constants compare equal iff both 64-bit halves match. */
1814 if (TREE_CODE (arg0) == TREE_CODE (arg1)
1815 && TREE_CODE (arg0) == INTEGER_CST
1816 && TREE_INT_CST_LOW (arg0) == TREE_INT_CST_LOW (arg1)
1817 && TREE_INT_CST_HIGH (arg0) == TREE_INT_CST_HIGH (arg1))
1820 /* Detect when real constants are equal. */
1821 if (TREE_CODE (arg0) == TREE_CODE (arg1)
1822 && TREE_CODE (arg0) == REAL_CST)
/* Bitwise comparison: distinguishes -0.0 from 0.0 and treats identical
   NaN bit patterns as equal, matching the contract stated above. */
1823 return !bcmp (&TREE_REAL_CST (arg0), &TREE_REAL_CST (arg1),
1824 sizeof (REAL_VALUE_TYPE));
1832 if (TREE_CODE (arg0) != TREE_CODE (arg1))
1834 /* This is needed for conversions and for COMPONENT_REF.
1835 Might as well play it safe and always test this. */
1836 if (TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
1839 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
1842 /* Two conversions are equal only if signedness and modes match. */
1843 if ((TREE_CODE (arg0) == NOP_EXPR || TREE_CODE (arg0) == CONVERT_EXPR)
1844 && (TREE_UNSIGNED (TREE_TYPE (arg0))
1845 != TREE_UNSIGNED (TREE_TYPE (arg1))))
1848 return operand_equal_p (TREE_OPERAND (arg0, 0),
1849 TREE_OPERAND (arg1, 0), 0);
/* Binary expressions: recurse on both operand positions. */
1853 return (operand_equal_p (TREE_OPERAND (arg0, 0),
1854 TREE_OPERAND (arg1, 0), 0)
1855 && operand_equal_p (TREE_OPERAND (arg0, 1),
1856 TREE_OPERAND (arg1, 1), 0));
1859 switch (TREE_CODE (arg0))
1862 return operand_equal_p (TREE_OPERAND (arg0, 0),
1863 TREE_OPERAND (arg1, 0), 0);
1867 return (operand_equal_p (TREE_OPERAND (arg0, 0),
1868 TREE_OPERAND (arg1, 0), 0)
1869 && operand_equal_p (TREE_OPERAND (arg0, 1),
1870 TREE_OPERAND (arg1, 1), 0));
/* Three-operand references: all three operands must match. */
1873 return (operand_equal_p (TREE_OPERAND (arg0, 0),
1874 TREE_OPERAND (arg1, 0), 0)
1875 && operand_equal_p (TREE_OPERAND (arg0, 1),
1876 TREE_OPERAND (arg1, 1), 0)
1877 && operand_equal_p (TREE_OPERAND (arg0, 2),
1878 TREE_OPERAND (arg1, 2), 0));
1886 /* Similar to operand_equal_p, but see if ARG0 might have been made by
1887 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
1889 When in doubt, return 0. */
1892 operand_equal_for_comparison_p (arg0, arg1, other)
1896 int unsignedp1, unsignedpo;
1897 tree primarg1, primother;
1898 unsigned correct_width;
/* Trivially true if the operands are already identical. */
1900 if (operand_equal_p (arg0, arg1, 0))
/* shorten_compare only applies to integral comparisons. */
1903 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
1906 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
1907 actual comparison operand, ARG0.
1909 First throw away any conversions to wider types
1910 already present in the operands. */
1912 primarg1 = get_narrower (arg1, &unsignedp1);
1913 primother = get_narrower (other, &unsignedpo);
1915 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
1916 if (unsignedp1 == unsignedpo
1917 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
1918 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
1920 tree type = TREE_TYPE (arg0);
1922 /* Make sure shorter operand is extended the right way
1923 to match the longer operand. */
1924 primarg1 = convert (signed_or_unsigned_type (unsignedp1,
1925 TREE_TYPE (primarg1)),
/* Compare the reconstructed shortened operand against ARG0. */
1928 if (operand_equal_p (arg0, convert (type, primarg1), 0))
1935 /* See if ARG is an expression that is either a comparison or is performing
1936 arithmetic on comparisons. The comparisons must only be comparing
1937 two different values, which will be stored in *CVAL1 and *CVAL2; if
1938 they are non-zero it means that some operands have already been found.
1939 No variables may be used anywhere else in the expression except in the
1940 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
1941 the expression and save_expr needs to be called with CVAL1 and CVAL2.
1943 If this is true, return 1. Otherwise, return zero. */
1946 twoval_comparison_p (arg, cval1, cval2, save_p)
1948 tree *cval1, *cval2;
1951 enum tree_code code = TREE_CODE (arg);
1952 char class = TREE_CODE_CLASS (code);
1954 /* We can handle some of the 'e' cases here. */
1955 if (class == 'e' && code == TRUTH_NOT_EXPR)
1957 else if (class == 'e'
1958 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
1959 || code == COMPOUND_EXPR))
1962 /* ??? Disable this since the SAVE_EXPR might already be in use outside
1963 the expression. There may be no way to make this work, but it needs
1964 to be looked at again for 2.6. */
1966 else if (class == 'e' && code == SAVE_EXPR && SAVE_EXPR_RTL (arg) == 0)
1968 /* If we've already found a CVAL1 or CVAL2, this expression is
1969 too complex to handle. */
1970 if (*cval1 || *cval2)
/* Unary case: recurse into the single operand. */
1981 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
/* Binary case: both operands must satisfy the property. */
1984 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
1985 && twoval_comparison_p (TREE_OPERAND (arg, 1),
1986 cval1, cval2, save_p));
1992 if (code == COND_EXPR)
1993 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
1994 cval1, cval2, save_p)
1995 && twoval_comparison_p (TREE_OPERAND (arg, 1),
1996 cval1, cval2, save_p)
1997 && twoval_comparison_p (TREE_OPERAND (arg, 2),
1998 cval1, cval2, save_p));
2002 /* First see if we can handle the first operand, then the second. For
2003 the second operand, we know *CVAL1 can't be zero. It must be that
2004 one side of the comparison is each of the values; test for the
2005 case where this isn't true by failing if the two operands
2008 if (operand_equal_p (TREE_OPERAND (arg, 0),
2009 TREE_OPERAND (arg, 1), 0))
/* Record or match operand 0 against the two tracked values. */
2013 *cval1 = TREE_OPERAND (arg, 0);
2014 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2016 else if (*cval2 == 0)
2017 *cval2 = TREE_OPERAND (arg, 0);
2018 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
/* Likewise for operand 1; *CVAL1 is known non-zero at this point. */
2023 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2025 else if (*cval2 == 0)
2026 *cval2 = TREE_OPERAND (arg, 1);
2027 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2038 /* ARG is a tree that is known to contain just arithmetic operations and
2039 comparisons. Evaluate the operations in the tree substituting NEW0 for
2040 any occurrence of OLD0 as an operand of a comparison and likewise for
2044 eval_subst (arg, old0, new0, old1, new1)
2046 tree old0, new0, old1, new1;
2048 tree type = TREE_TYPE (arg);
2049 enum tree_code code = TREE_CODE (arg);
2050 char class = TREE_CODE_CLASS (code);
2052 /* We can handle some of the 'e' cases here. */
2053 if (class == 'e' && code == TRUTH_NOT_EXPR)
2055 else if (class == 'e'
2056 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
/* Unary node: rebuild it around the substituted operand and re-fold. */
2062 return fold (build1 (code, type,
2063 eval_subst (TREE_OPERAND (arg, 0),
2064 old0, new0, old1, new1)));
/* Binary node: substitute in both operands, then re-fold. */
2067 return fold (build (code, type,
2068 eval_subst (TREE_OPERAND (arg, 0),
2069 old0, new0, old1, new1),
2070 eval_subst (TREE_OPERAND (arg, 1),
2071 old0, new0, old1, new1)));
2077 return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);
2080 return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);
/* Ternary node (e.g. COND_EXPR): substitute in all three operands. */
2083 return fold (build (code, type,
2084 eval_subst (TREE_OPERAND (arg, 0),
2085 old0, new0, old1, new1),
2086 eval_subst (TREE_OPERAND (arg, 1),
2087 old0, new0, old1, new1),
2088 eval_subst (TREE_OPERAND (arg, 2),
2089 old0, new0, old1, new1)));
2094 tree arg0 = TREE_OPERAND (arg, 0);
2095 tree arg1 = TREE_OPERAND (arg, 1);
2097 /* We need to check both for exact equality and tree equality. The
2098 former will be true if the operand has a side-effect. In that
2099 case, we know the operand occurred exactly once. */
2101 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
2103 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
2106 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
2108 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
2111 return fold (build (code, type, arg0, arg1));
2118 /* Return a tree for the case when the result of an expression is RESULT
2119 converted to TYPE and OMITTED was previously an operand of the expression
2120 but is now not needed (e.g., we folded OMITTED * 0).
2122 If OMITTED has side effects, we must evaluate it. Otherwise, just do
2123 the conversion of RESULT to TYPE. */
2126 omit_one_operand (type, result, omitted)
2127 tree type, result, omitted;
2129 tree t = convert (type, result);
/* Keep OMITTED's side effects by sequencing it before the result. */
2131 if (TREE_SIDE_EFFECTS (omitted))
2132 return build (COMPOUND_EXPR, type, omitted, t);
/* Wrap in non_lvalue so the folded result cannot be assigned to. */
2134 return non_lvalue (t);
2137 /* Return a simplified tree node for the truth-negation of ARG. This
2138 never alters ARG itself. We assume that ARG is an operation that
2139 returns a truth value (0 or 1). */
2142 invert_truthvalue (arg)
2145 tree type = TREE_TYPE (arg);
2146 enum tree_code code = TREE_CODE (arg);
2148 if (code == ERROR_MARK)
2151 /* If this is a comparison, we can simply invert it, except for
2152 floating-point non-equality comparisons, in which case we just
2153 enclose a TRUTH_NOT_EXPR around what we have. */
2155 if (TREE_CODE_CLASS (code) == '<')
2157 if (FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (arg, 0)))
2158 && code != NE_EXPR && code != EQ_EXPR)
2159 return build1 (TRUTH_NOT_EXPR, type, arg);
2161 return build (invert_tree_comparison (code), type,
2162 TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
/* Constant: negation of a constant is the constant 0-or-1 result. */
2168 return convert (type, build_int_2 (TREE_INT_CST_LOW (arg) == 0
2169 && TREE_INT_CST_HIGH (arg) == 0, 0));
/* De Morgan: !(a && b) == !a || !b, and dually for OR below. */
2171 case TRUTH_AND_EXPR:
2172 return build (TRUTH_OR_EXPR, type,
2173 invert_truthvalue (TREE_OPERAND (arg, 0)),
2174 invert_truthvalue (TREE_OPERAND (arg, 1)));
2177 return build (TRUTH_AND_EXPR, type,
2178 invert_truthvalue (TREE_OPERAND (arg, 0)),
2179 invert_truthvalue (TREE_OPERAND (arg, 1)));
2181 case TRUTH_XOR_EXPR:
2182 /* Here we can invert either operand. We invert the first operand
2183 unless the second operand is a TRUTH_NOT_EXPR in which case our
2184 result is the XOR of the first operand with the inside of the
2185 negation of the second operand. */
2187 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
2188 return build (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
2189 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
2191 return build (TRUTH_XOR_EXPR, type,
2192 invert_truthvalue (TREE_OPERAND (arg, 0)),
2193 TREE_OPERAND (arg, 1));
/* Short-circuit forms invert the same way as AND/OR. */
2195 case TRUTH_ANDIF_EXPR:
2196 return build (TRUTH_ORIF_EXPR, type,
2197 invert_truthvalue (TREE_OPERAND (arg, 0)),
2198 invert_truthvalue (TREE_OPERAND (arg, 1)));
2200 case TRUTH_ORIF_EXPR:
2201 return build (TRUTH_ANDIF_EXPR, type,
2202 invert_truthvalue (TREE_OPERAND (arg, 0)),
2203 invert_truthvalue (TREE_OPERAND (arg, 1)));
/* Double negation cancels. */
2205 case TRUTH_NOT_EXPR:
2206 return TREE_OPERAND (arg, 0);
/* Conditional: push the negation into both arms. */
2209 return build (COND_EXPR, type, TREE_OPERAND (arg, 0),
2210 invert_truthvalue (TREE_OPERAND (arg, 1)),
2211 invert_truthvalue (TREE_OPERAND (arg, 2)));
2214 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
2215 invert_truthvalue (TREE_OPERAND (arg, 1)));
2217 case NON_LVALUE_EXPR:
2218 return invert_truthvalue (TREE_OPERAND (arg, 0));
2223 return build1 (TREE_CODE (arg), type,
2224 invert_truthvalue (TREE_OPERAND (arg, 0)));
/* BIT_AND with 1: only !(x & 1) == (x & 1) == 0 can be simplified. */
2227 if (!integer_onep (TREE_OPERAND (arg, 1)))
2229 return build (EQ_EXPR, type, arg, convert (type, integer_zero_node));
2232 return build1 (TRUTH_NOT_EXPR, type, arg);
2234 if (TREE_CODE (TREE_TYPE (arg)) != BOOLEAN_TYPE)
2236 return build1 (TRUTH_NOT_EXPR, type, arg);
2239 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
2240 operands are another bit-wise operation with a common input. If so,
2241 distribute the bit operations to save an operation and possibly two if
2242 constants are involved. For example, convert
2243 (A | B) & (A | C) into A | (B & C)
2244 Further simplification will occur if B and C are constants.
2246 If this optimization cannot be done, 0 will be returned. */
2249 distribute_bit_expr (code, type, arg0, arg1)
2250 enum tree_code code;
/* Both operands must be the same kind of AND/IOR node, and distinct
   from CODE itself (distributing CODE over CODE gains nothing). */
2257 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2258 || TREE_CODE (arg0) == code
2259 || (TREE_CODE (arg0) != BIT_AND_EXPR
2260 && TREE_CODE (arg0) != BIT_IOR_EXPR))
/* Try all four operand pairings to locate the shared subexpression. */
2263 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
2265 common = TREE_OPERAND (arg0, 0);
2266 left = TREE_OPERAND (arg0, 1);
2267 right = TREE_OPERAND (arg1, 1);
2269 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
2271 common = TREE_OPERAND (arg0, 0);
2272 left = TREE_OPERAND (arg0, 1);
2273 right = TREE_OPERAND (arg1, 0);
2275 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
2277 common = TREE_OPERAND (arg0, 1);
2278 left = TREE_OPERAND (arg0, 0);
2279 right = TREE_OPERAND (arg1, 1);
2281 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
2283 common = TREE_OPERAND (arg0, 1);
2284 left = TREE_OPERAND (arg0, 0);
2285 right = TREE_OPERAND (arg1, 0);
/* Build COMMON op' (LEFT op RIGHT), where op' is the inner operation. */
2290 return fold (build (TREE_CODE (arg0), type, common,
2291 fold (build (code, type, left, right))));
2294 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
2295 starting at BITPOS. The field is unsigned if UNSIGNEDP is non-zero. */
2298 make_bit_field_ref (inner, type, bitsize, bitpos, unsignedp)
2301 int bitsize, bitpos;
/* Operands 1 and 2 of a BIT_FIELD_REF are the size and position trees. */
2304 tree result = build (BIT_FIELD_REF, type, inner,
2305 size_int (bitsize), size_int (bitpos));
2307 TREE_UNSIGNED (result) = unsignedp;
2312 /* Optimize a bit-field compare.
2314 There are two cases: First is a compare against a constant and the
2315 second is a comparison of two items where the fields are at the same
2316 bit position relative to the start of a chunk (byte, halfword, word)
2317 large enough to contain it. In these cases we can avoid the shift
2318 implicit in bitfield extractions.
2320 For constants, we emit a compare of the shifted constant with the
2321 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
2322 compared. For two fields at the same position, we do the ANDs with the
2323 similar mask and compare the result of the ANDs.
2325 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
2326 COMPARE_TYPE is the type of the comparison, and LHS and RHS
2327 are the left and right operands of the comparison, respectively.
2329 If the optimization described above can be done, we return the resulting
2330 tree. Otherwise we return zero. */
2333 optimize_bit_field_compare (code, compare_type, lhs, rhs)
2334 enum tree_code code;
2338 int lbitpos, lbitsize, rbitpos, rbitsize;
2339 int lnbitpos, lnbitsize, rnbitpos, rnbitsize;
2340 tree type = TREE_TYPE (lhs);
2341 tree signed_type, unsigned_type;
2342 int const_p = TREE_CODE (rhs) == INTEGER_CST;
2343 enum machine_mode lmode, rmode, lnmode, rnmode;
2344 int lunsignedp, runsignedp;
2345 int lvolatilep = 0, rvolatilep = 0;
2346 tree linner, rinner;
2350 /* Get all the information about the extractions being done. If the bit size
2351 is the same as the size of the underlying object, we aren't doing an
2352 extraction at all and so can do nothing. */
2353 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
2354 &lunsignedp, &lvolatilep);
2355 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
2361 /* If this is not a constant, we can only do something if bit positions,
2362 sizes, and signedness are the same. */
2363 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset,
2364 &rmode, &runsignedp, &rvolatilep);
2366 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
2367 || lunsignedp != runsignedp || offset != 0)
2371 /* See if we can find a mode to refer to this field. We should be able to,
2372 but fail if we can't. */
2373 lnmode = get_best_mode (lbitsize, lbitpos,
2374 TYPE_ALIGN (TREE_TYPE (linner)), word_mode,
2376 if (lnmode == VOIDmode)
2379 /* Set signed and unsigned types of the precision of this mode for the
2381 signed_type = type_for_mode (lnmode, 0);
2382 unsigned_type = type_for_mode (lnmode, 1);
/* Likewise find the best containing mode for the right-hand field. */
2386 rnmode = get_best_mode (rbitsize, rbitpos,
2387 TYPE_ALIGN (TREE_TYPE (rinner)), word_mode,
2389 if (rnmode == VOIDmode)
2393 /* Compute the bit position and size for the new reference and our offset
2394 within it. If the new reference is the same size as the original, we
2395 won't optimize anything, so return zero. */
2396 lnbitsize = GET_MODE_BITSIZE (lnmode);
2397 lnbitpos = lbitpos & ~ (lnbitsize - 1);
2398 lbitpos -= lnbitpos;
2399 if (lnbitsize == lbitsize)
2404 rnbitsize = GET_MODE_BITSIZE (rnmode);
2405 rnbitpos = rbitpos & ~ (rnbitsize - 1);
2406 rbitpos -= rnbitpos;
2407 if (rnbitsize == rbitsize)
/* Bit numbering runs the other way on big-endian targets. */
2411 #if BYTES_BIG_ENDIAN
2412 lbitpos = lnbitsize - lbitsize - lbitpos;
2415 /* Make the mask to be used against the extracted field. */
2416 mask = build_int_2 (~0, ~0);
2417 TREE_TYPE (mask) = unsigned_type;
2418 force_fit_type (mask, 0);
2419 mask = convert (unsigned_type, mask);
/* Shift left then right to leave LBITSIZE ones at bit position LBITPOS. */
2420 mask = const_binop (LSHIFT_EXPR, mask, size_int (lnbitsize - lbitsize), 0);
2421 mask = const_binop (RSHIFT_EXPR, mask,
2422 size_int (lnbitsize - lbitsize - lbitpos), 0);
2425 /* If not comparing with constant, just rework the comparison
2427 return build (code, compare_type,
2428 build (BIT_AND_EXPR, unsigned_type,
2429 make_bit_field_ref (linner, unsigned_type,
2430 lnbitsize, lnbitpos, 1),
2432 build (BIT_AND_EXPR, unsigned_type,
2433 make_bit_field_ref (rinner, unsigned_type,
2434 rnbitsize, rnbitpos, 1),
2437 /* Otherwise, we are handling the constant case. See if the constant is too
2438 big for the field. Warn and return a tree for 0 (false) if so. We do
2439 this not only for its own sake, but to avoid having to test for this
2440 error case below. If we didn't, we might generate wrong code.
2442 For unsigned fields, the constant shifted right by the field length should
2443 be all zero. For signed fields, the high-order bits should agree with
2448 if (! integer_zerop (const_binop (RSHIFT_EXPR,
2449 convert (unsigned_type, rhs),
2450 size_int (lbitsize), 0)))
2452 warning ("comparison is always %s due to width of bitfield",
2453 code == NE_EXPR ? "one" : "zero");
2454 return convert (compare_type,
2456 ? integer_one_node : integer_zero_node));
/* Signed field: all bits above the sign bit must be 0 or all 1. */
2461 tree tem = const_binop (RSHIFT_EXPR, convert (signed_type, rhs),
2462 size_int (lbitsize - 1), 0);
2463 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
2465 warning ("comparison is always %s due to width of bitfield",
2466 code == NE_EXPR ? "one" : "zero");
2467 return convert (compare_type,
2469 ? integer_one_node : integer_zero_node));
2473 /* Single-bit compares should always be against zero. */
2474 if (lbitsize == 1 && ! integer_zerop (rhs))
2476 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
2477 rhs = convert (type, integer_zero_node);
2480 /* Make a new bitfield reference, shift the constant over the
2481 appropriate number of bits and mask it with the computed mask
2482 (in case this was a signed field). If we changed it, make a new one. */
2483 lhs = make_bit_field_ref (linner, unsigned_type, lnbitsize, lnbitpos, 1);
/* Preserve volatility of the original reference on the replacement. */
2486 TREE_SIDE_EFFECTS (lhs) = 1;
2487 TREE_THIS_VOLATILE (lhs) = 1;
2490 rhs = fold (const_binop (BIT_AND_EXPR,
2491 const_binop (LSHIFT_EXPR,
2492 convert (unsigned_type, rhs),
2493 size_int (lbitpos), 0),
2496 return build (code, compare_type,
2497 build (BIT_AND_EXPR, unsigned_type, lhs, mask),
2501 /* Subroutine for fold_truthop: decode a field reference.
2503 If EXP is a comparison reference, we return the innermost reference.
2505 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
2506 set to the starting bit number.
2508 If the innermost field can be completely contained in a mode-sized
2509 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
2511 *PVOLATILEP is set to 1 if any expression encountered is volatile;
2512 otherwise it is not changed.
2514 *PUNSIGNEDP is set to the signedness of the field.
2516 *PMASK is set to the mask used. This is either contained in a
2517 BIT_AND_EXPR or derived from the width of the field.
2519 Return 0 if this is not a component reference or is one that we can't
2520 do anything with. */
2523 decode_field_reference (exp, pbitsize, pbitpos, pmode, punsignedp,
2526 int *pbitsize, *pbitpos;
2527 enum machine_mode *pmode;
2528 int *punsignedp, *pvolatilep;
2535 /* All the optimizations using this function assume integer fields.
2536 There are problems with FP fields since the type_for_size call
2537 below can fail for, e.g., XFmode. */
2538 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
/* Peel off an explicit mask: (EXP & CST) decodes to EXP with mask CST. */
2543 if (TREE_CODE (exp) == BIT_AND_EXPR)
2545 mask = TREE_OPERAND (exp, 1);
2546 exp = TREE_OPERAND (exp, 0);
2547 STRIP_NOPS (exp); STRIP_NOPS (mask);
2548 if (TREE_CODE (mask) != INTEGER_CST)
2552 if (TREE_CODE (exp) != COMPONENT_REF && TREE_CODE (exp) != ARRAY_REF
2553 && TREE_CODE (exp) != BIT_FIELD_REF)
2556 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
2557 punsignedp, pvolatilep);
2558 if (inner == exp || *pbitsize < 0 || offset != 0)
/* No explicit mask: synthesize one of *PBITSIZE low-order ones. */
2563 tree unsigned_type = type_for_size (*pbitsize, 1);
2564 int precision = TYPE_PRECISION (unsigned_type);
2566 mask = build_int_2 (~0, ~0);
2567 TREE_TYPE (mask) = unsigned_type;
2568 force_fit_type (mask, 0);
2569 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
2570 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
2577 /* Return non-zero if MASK represents a mask of SIZE ones in the low-order
2581 all_ones_mask_p (mask, size)
2585 tree type = TREE_TYPE (mask);
2586 int precision = TYPE_PRECISION (type);
/* Build an all-ones constant of MASK's precision, ... */
2589 tmask = build_int_2 (~0, ~0);
2590 TREE_TYPE (tmask) = signed_type (type);
2591 force_fit_type (tmask, 0);
/* ... reduce it to SIZE low-order ones by a shift pair, and compare. */
2593 operand_equal_p (mask,
2594 const_binop (RSHIFT_EXPR,
2595 const_binop (LSHIFT_EXPR, tmask,
2596 size_int (precision - size), 0),
2597 size_int (precision - size), 0),
2601 /* Subroutine for fold_truthop: determine if an operand is simple enough
2602 to be evaluated unconditionally. */
2605 simple_operand_p (exp)
2608 /* Strip any conversions that don't change the machine mode. */
2609 while ((TREE_CODE (exp) == NOP_EXPR
2610 || TREE_CODE (exp) == CONVERT_EXPR)
2611 && (TYPE_MODE (TREE_TYPE (exp))
2612 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
2613 exp = TREE_OPERAND (exp, 0);
/* Simple means: any constant ('c'), or a declaration ('d') whose load
   is cheap and cannot trap or be observed. */
2615 return (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c'
2616 || (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd'
2617 && ! TREE_ADDRESSABLE (exp)
2618 && ! TREE_THIS_VOLATILE (exp)
2619 && ! DECL_NONLOCAL (exp)
2620 /* Don't regard global variables as simple. They may be
2621 allocated in ways unknown to the compiler (shared memory,
2622 #pragma weak, etc). */
2623 && ! TREE_PUBLIC (exp)
2624 && ! DECL_EXTERNAL (exp)
2625 /* Loading a static variable is unduly expensive, but global
2626 registers aren't expensive. */
2627 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
2630 /* Subroutine for fold_truthop: try to optimize a range test.
2632 For example, "i >= 2 && i <= 9" can be done as "(unsigned) (i - 2) <= 7".
2634 JCODE is the logical combination of the two terms. It is TRUTH_AND_EXPR
2635 (representing TRUTH_ANDIF_EXPR and TRUTH_AND_EXPR) or TRUTH_OR_EXPR
2636 (representing TRUTH_ORIF_EXPR and TRUTH_OR_EXPR). TYPE is the type of
2639 VAR is the value being tested. LO_CODE and HI_CODE are the comparison
2640 operators comparing VAR to LO_CST and HI_CST. LO_CST is known to be no
2641 larger than HI_CST (they may be equal).
2643 We return the simplified tree or 0 if no optimization is possible. */
2646 range_test (jcode, type, lo_code, hi_code, var, lo_cst, hi_cst)
2647 enum tree_code jcode, lo_code, hi_code;
2648 tree type, var, lo_cst, hi_cst;
2651 enum tree_code rcode;
2653 /* See if this is a range test and normalize the constant terms. */
2655 if (jcode == TRUTH_AND_EXPR)
2660 /* See if we have VAR != CST && VAR != CST+1. */
2661 if (! (hi_code == NE_EXPR
2662 && TREE_INT_CST_LOW (hi_cst) - TREE_INT_CST_LOW (lo_cst) == 1
2663 && tree_int_cst_equal (integer_one_node,
2664 const_binop (MINUS_EXPR,
2665 hi_cst, lo_cst, 0))))
/* Normalize strict bounds to inclusive ones: <HI becomes <=HI-1. */
2673 if (hi_code == LT_EXPR)
2674 hi_cst = const_binop (MINUS_EXPR, hi_cst, integer_one_node, 0);
2675 else if (hi_code != LE_EXPR)
2678 if (lo_code == GT_EXPR)
2679 lo_cst = const_binop (PLUS_EXPR, lo_cst, integer_one_node, 0);
2681 /* We now have VAR >= LO_CST && VAR <= HI_CST. */
2694 /* See if we have VAR == CST || VAR == CST+1. */
2695 if (! (hi_code == EQ_EXPR
2696 && TREE_INT_CST_LOW (hi_cst) - TREE_INT_CST_LOW (lo_cst) == 1
2697 && tree_int_cst_equal (integer_one_node,
2698 const_binop (MINUS_EXPR,
2699 hi_cst, lo_cst, 0))))
/* Normalize the OR form similarly: >=HI becomes >HI-1. */
2707 if (hi_code == GE_EXPR)
2708 hi_cst = const_binop (MINUS_EXPR, hi_cst, integer_one_node, 0);
2709 else if (hi_code != GT_EXPR)
2712 if (lo_code == LE_EXPR)
2713 lo_cst = const_binop (PLUS_EXPR, lo_cst, integer_one_node, 0);
2715 /* We now have VAR < LO_CST || VAR > HI_CST. */
2724 /* When normalizing, it is possible to both increment the smaller constant
2725 and decrement the larger constant. See if they are still ordered. */
2726 if (tree_int_cst_lt (hi_cst, lo_cst))
2729 /* Fail if VAR isn't an integer. */
2730 utype = TREE_TYPE (var);
2731 if (! INTEGRAL_TYPE_P (utype))
2734 /* The range test is invalid if subtracting the two constants results
2735 in overflow. This can happen in traditional mode. */
2736 if (! int_fits_type_p (hi_cst, TREE_TYPE (var))
2737 || ! int_fits_type_p (lo_cst, TREE_TYPE (var)))
/* The subtract-and-compare trick needs unsigned (wraparound) math. */
2740 if (! TREE_UNSIGNED (utype))
2742 utype = unsigned_type (utype);
2743 var = convert (utype, var);
2744 lo_cst = convert (utype, lo_cst);
2745 hi_cst = convert (utype, hi_cst);
/* Emit (unsigned) (VAR - LO_CST) RCODE (HI_CST - LO_CST). */
2748 return fold (convert (type,
2749 build (rcode, utype,
2750 build (MINUS_EXPR, utype, var, lo_cst),
2751 const_binop (MINUS_EXPR, hi_cst, lo_cst, 0))));
2754 /* Find ways of folding logical expressions of LHS and RHS:
2755 Try to merge two comparisons to the same innermost item.
2756 Look for range tests like "ch >= '0' && ch <= '9'".
2757 Look for combinations of simple terms on machines with expensive branches
2758 and evaluate the RHS unconditionally.
2760 For example, if we have p->a == 2 && p->b == 4 and we can make an
2761 object large enough to span both A and B, we can do this with a comparison
2762 against the object ANDed with the a mask.
2764 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
2765 operations to do this with one comparison.
2767 We check for both normal comparisons and the BIT_AND_EXPRs made by this
2768 function and the one above.
2770 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
2771 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
2773 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
2776 We return the simplified tree or 0 if no optimization is possible. */
/* NOTE(review): as in range_test above, this excerpt is missing lines
   (embedded original numbers jump, e.g. 2781 -> 2783, 2909 -> 2912);
   braces, `return 0;' failure paths, and parts of some statements fall
   in the gaps.  Comments below describe only the visible code.  */
2779 fold_truthop (code, truth_type, lhs, rhs)
2780 enum tree_code code;
2781 tree truth_type, lhs, rhs;
2783 /* If this is the "or" of two comparisons, we can do something if
2784 the comparisons are NE_EXPR. If this is the "and", we can do something
2785 if the comparisons are EQ_EXPR. I.e.,
2786 (a->b == 2 && a->c == 4) can become (a->new == NEW).
2788 WANTED_CODE is this operation code. For single bit fields, we can
2789 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
2790 comparison for one-bit fields. */
/* Naming convention for the locals: the first letter is l/r for the
   left/right operand of the logical operation, the second letter is
   l/r for the left/right operand of that comparison.  */
2792 enum tree_code wanted_code;
2793 enum tree_code lcode, rcode;
2794 tree ll_arg, lr_arg, rl_arg, rr_arg;
2795 tree ll_inner, lr_inner, rl_inner, rr_inner;
2796 int ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
2797 int rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
2798 int xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
2799 int lnbitsize, lnbitpos, rnbitsize, rnbitpos;
2800 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
2801 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
2802 enum machine_mode lnmode, rnmode;
2803 tree ll_mask, lr_mask, rl_mask, rr_mask;
2804 tree l_const, r_const;
2806 int first_bit, end_bit;
2809 /* Start by getting the comparison codes and seeing if this looks like
2810 a range test. Fail if anything is volatile. If one operand is a
2811 BIT_AND_EXPR with the constant one, treat it as if it were surrounded
2814 if (TREE_SIDE_EFFECTS (lhs)
2815 || TREE_SIDE_EFFECTS (rhs))
2818 lcode = TREE_CODE (lhs);
2819 rcode = TREE_CODE (rhs);
/* Canonicalize (X & 1) as (X & 1) != 0 so both operands look like
   comparisons.  */
2821 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
2822 lcode = NE_EXPR, lhs = build (NE_EXPR, truth_type, lhs, integer_zero_node);
2824 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
2825 rcode = NE_EXPR, rhs = build (NE_EXPR, truth_type, rhs, integer_zero_node);
/* Both sides must be comparisons ('<' is the tree-code class for
   comparison operators).  */
2827 if (TREE_CODE_CLASS (lcode) != '<'
2828 || TREE_CODE_CLASS (rcode) != '<')
/* Collapse the short-circuit variants into their plain forms; from here
   on CODE is exactly TRUTH_AND_EXPR or TRUTH_OR_EXPR.  */
2831 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
2832 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
2834 ll_arg = TREE_OPERAND (lhs, 0);
2835 lr_arg = TREE_OPERAND (lhs, 1);
2836 rl_arg = TREE_OPERAND (rhs, 0);
2837 rr_arg = TREE_OPERAND (rhs, 1);
/* If both comparisons test the same value against constants, try to
   treat the pair as a range test, passing the smaller constant first.  */
2839 if (TREE_CODE (lr_arg) == INTEGER_CST
2840 && TREE_CODE (rr_arg) == INTEGER_CST
2841 && operand_equal_p (ll_arg, rl_arg, 0))
2843 if (tree_int_cst_lt (lr_arg, rr_arg))
2844 result = range_test (code, truth_type, lcode, rcode,
2845 ll_arg, lr_arg, rr_arg);
2847 result = range_test (code, truth_type, rcode, lcode,
2848 ll_arg, rr_arg, lr_arg);
2850 /* If this isn't a range test, it also isn't a comparison that
2851 can be merged. However, it wins to evaluate the RHS unconditionally
2852 on machines with expensive branches. */
2854 if (result == 0 && BRANCH_COST >= 2)
2856 if (TREE_CODE (ll_arg) != VAR_DECL
2857 && TREE_CODE (ll_arg) != PARM_DECL)
2859 /* Avoid evaluating the variable part twice. */
2860 ll_arg = save_expr (ll_arg);
2861 lhs = build (lcode, TREE_TYPE (lhs), ll_arg, lr_arg);
2862 rhs = build (rcode, TREE_TYPE (rhs), ll_arg, rr_arg);
2864 return build (code, truth_type, lhs, rhs);
2869 /* If the RHS can be evaluated unconditionally and its operands are
2870 simple, it wins to evaluate the RHS unconditionally on machines
2871 with expensive branches. In this case, this isn't a comparison
2872 that can be merged. */
2874 /* @@ I'm not sure it wins on the m88110 to do this if the comparisons
2875 are with zero (tmw). */
2877 if (BRANCH_COST >= 2
2878 && INTEGRAL_TYPE_P (TREE_TYPE (rhs))
2879 && simple_operand_p (rl_arg)
2880 && simple_operand_p (rr_arg))
2881 return build (code, truth_type, lhs, rhs);
2883 /* See if the comparisons can be merged. Then get all the parameters for
/* Only equality/inequality comparisons can be merged into a masked
   compare.  */
2886 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
2887 || (rcode != EQ_EXPR && rcode != NE_EXPR))
/* Decompose each comparison operand into (inner object, bit position,
   bit size, mode, signedness, mask); VOLATILEP accumulates across all
   four calls.  */
2891 ll_inner = decode_field_reference (ll_arg,
2892 &ll_bitsize, &ll_bitpos, &ll_mode,
2893 &ll_unsignedp, &volatilep, &ll_mask);
2894 lr_inner = decode_field_reference (lr_arg,
2895 &lr_bitsize, &lr_bitpos, &lr_mode,
2896 &lr_unsignedp, &volatilep, &lr_mask);
2897 rl_inner = decode_field_reference (rl_arg,
2898 &rl_bitsize, &rl_bitpos, &rl_mode,
2899 &rl_unsignedp, &volatilep, &rl_mask);
2900 rr_inner = decode_field_reference (rr_arg,
2901 &rr_bitsize, &rr_bitpos, &rr_mode,
2902 &rr_unsignedp, &volatilep, &rr_mask);
2904 /* It must be true that the inner operation on the lhs of each
2905 comparison must be the same if we are to be able to do anything.
2906 Then see if we have constants. If not, the same must be true for
2908 if (volatilep || ll_inner == 0 || rl_inner == 0
2909 || ! operand_equal_p (ll_inner, rl_inner, 0))
/* L_CONST/R_CONST are the comparison constants, or both zero when we
   are comparing two field references against each other.  */
2912 if (TREE_CODE (lr_arg) == INTEGER_CST
2913 && TREE_CODE (rr_arg) == INTEGER_CST)
2914 l_const = lr_arg, r_const = rr_arg;
2915 else if (lr_inner == 0 || rr_inner == 0
2916 || ! operand_equal_p (lr_inner, rr_inner, 0))
2919 l_const = r_const = 0;
2921 /* If either comparison code is not correct for our logical operation,
2922 fail. However, we can convert a one-bit comparison against zero into
2923 the opposite comparison against that bit being set in the field. */
2925 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
2926 if (lcode != wanted_code)
2928 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
2934 if (rcode != wanted_code)
2936 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
2942 /* See if we can find a mode that contains both fields being compared on
2943 the left. If we can't, fail. Otherwise, update all constants and masks
2944 to be relative to a field of that size. */
2945 first_bit = MIN (ll_bitpos, rl_bitpos);
2946 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
2947 lnmode = get_best_mode (end_bit - first_bit, first_bit,
2948 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
2950 if (lnmode == VOIDmode)
2953 lnbitsize = GET_MODE_BITSIZE (lnmode);
2954 lnbitpos = first_bit & ~ (lnbitsize - 1);
2955 type = type_for_size (lnbitsize, 1);
/* X*_BITPOS are the field positions relative to the start of the
   combined field.  */
2956 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
/* On big-endian byte order, bit positions count from the other end of
   the combined field, so mirror them.  */
2958 #if BYTES_BIG_ENDIAN
2959 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
2960 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
/* Shift each field's mask into its position within the combined field.  */
2963 ll_mask = const_binop (LSHIFT_EXPR, convert (type, ll_mask),
2964 size_int (xll_bitpos), 0);
2965 rl_mask = const_binop (LSHIFT_EXPR, convert (type, rl_mask),
2966 size_int (xrl_bitpos), 0);
2968 /* Make sure the constants are interpreted as unsigned, so we
2969 don't have sign bits outside the range of their type. */
2973 l_const = convert (unsigned_type (TREE_TYPE (l_const)), l_const);
2974 l_const = const_binop (LSHIFT_EXPR, convert (type, l_const),
2975 size_int (xll_bitpos), 0);
2979 r_const = convert (unsigned_type (TREE_TYPE (r_const)), r_const);
2980 r_const = const_binop (LSHIFT_EXPR, convert (type, r_const),
2981 size_int (xrl_bitpos), 0);
2984 /* If the right sides are not constant, do the same for it. Also,
2985 disallow this optimization if a size or signedness mismatch occurs
2986 between the left and right sides. */
2989 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
2990 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
2991 /* Make sure the two fields on the right
2992 correspond to the left without being swapped. */
2993 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
/* Mirror of the left-side computation for the right-side fields.  */
2996 first_bit = MIN (lr_bitpos, rr_bitpos);
2997 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
2998 rnmode = get_best_mode (end_bit - first_bit, first_bit,
2999 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
3001 if (rnmode == VOIDmode)
3004 rnbitsize = GET_MODE_BITSIZE (rnmode);
3005 rnbitpos = first_bit & ~ (rnbitsize - 1);
3006 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
3008 #if BYTES_BIG_ENDIAN
3009 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
3010 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
3013 lr_mask = const_binop (LSHIFT_EXPR, convert (type, lr_mask),
3014 size_int (xlr_bitpos), 0);
3015 rr_mask = const_binop (LSHIFT_EXPR, convert (type, rr_mask),
3016 size_int (xrr_bitpos), 0);
3018 /* Make a mask that corresponds to both fields being compared.
3019 Do this for both items being compared. If the masks agree,
3020 we can do this by masking both and comparing the masked
3022 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
3023 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
3024 if (operand_equal_p (ll_mask, lr_mask, 0) && lnbitsize == rnbitsize)
3026 lhs = make_bit_field_ref (ll_inner, type, lnbitsize, lnbitpos,
3027 ll_unsignedp || rl_unsignedp);
3028 rhs = make_bit_field_ref (lr_inner, type, rnbitsize, rnbitpos,
3029 lr_unsignedp || rr_unsignedp);
/* No BIT_AND needed if the mask covers the whole combined field.  */
3030 if (! all_ones_mask_p (ll_mask, lnbitsize))
3032 lhs = build (BIT_AND_EXPR, type, lhs, ll_mask);
3033 rhs = build (BIT_AND_EXPR, type, rhs, ll_mask);
3035 return build (wanted_code, truth_type, lhs, rhs);
3038 /* There is still another way we can do something: If both pairs of
3039 fields being compared are adjacent, we may be able to make a wider
3040 field containing them both. */
3041 if ((ll_bitsize + ll_bitpos == rl_bitpos
3042 && lr_bitsize + lr_bitpos == rr_bitpos)
3043 || (ll_bitpos == rl_bitpos + rl_bitsize
3044 && lr_bitpos == rr_bitpos + rr_bitsize))
3045 return build (wanted_code, truth_type,
3046 make_bit_field_ref (ll_inner, type,
3047 ll_bitsize + rl_bitsize,
3048 MIN (ll_bitpos, rl_bitpos),
3050 make_bit_field_ref (lr_inner, type,
3051 lr_bitsize + rr_bitsize,
3052 MIN (lr_bitpos, rr_bitpos),
3058 /* Handle the case of comparisons with constants. If there is something in
3059 common between the masks, those bits of the constants must be the same.
3060 If not, the condition is always false. Test for this to avoid generating
3061 incorrect code below. */
3062 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
3063 if (! integer_zerop (result)
3064 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
3065 const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
/* The tests can never both hold (AND) or both fail (OR): warn and fold
   to the constant truth value.  */
3067 if (wanted_code == NE_EXPR)
3069 warning ("`or' of unmatched not-equal tests is always 1");
3070 return convert (truth_type, integer_one_node);
3074 warning ("`and' of mutually exclusive equal-tests is always zero");
3075 return convert (truth_type, integer_zero_node);
3079 /* Construct the expression we will return. First get the component
3080 reference we will make. Unless the mask is all ones the width of
3081 that field, perform the mask operation. Then compare with the
3083 result = make_bit_field_ref (ll_inner, type, lnbitsize, lnbitpos,
3084 ll_unsignedp || rl_unsignedp);
3086 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
3087 if (! all_ones_mask_p (ll_mask, lnbitsize))
3088 result = build (BIT_AND_EXPR, type, result, ll_mask);
3090 return build (wanted_code, truth_type, result,
3091 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
3094 /* Perform constant folding and related simplification of EXPR.
3095 The related simplifications include x*1 => x, x*0 => 0, etc.,
3096 and application of the associative law.
3097 NOP_EXPR conversions may be removed freely (as long as we
3098 are careful not to change the C type of the overall expression)
3099 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
3100 but we can constant-fold them if they have constant operands. */
3106 register tree t = expr;
3107 tree t1 = NULL_TREE;
3109 tree type = TREE_TYPE (expr);
3110 register tree arg0, arg1;
3111 register enum tree_code code = TREE_CODE (t);
3115 /* WINS will be nonzero when the switch is done
3116 if all operands are constant. */
3120 /* Don't try to process an RTL_EXPR since its operands aren't trees. */
3121 if (code == RTL_EXPR)
3124 /* Return right away if already constant. */
3125 if (TREE_CONSTANT (t))
3127 if (code == CONST_DECL)
3128 return DECL_INITIAL (t);
3132 kind = TREE_CODE_CLASS (code);
3133 if (code == NOP_EXPR || code == FLOAT_EXPR || code == CONVERT_EXPR)
3137 /* Special case for conversion ops that can have fixed point args. */
3138 arg0 = TREE_OPERAND (t, 0);
3140 /* Don't use STRIP_NOPS, because signedness of argument type matters. */
3142 STRIP_TYPE_NOPS (arg0);
3144 if (arg0 != 0 && TREE_CODE (arg0) == COMPLEX_CST)
3145 subop = TREE_REALPART (arg0);
3149 if (subop != 0 && TREE_CODE (subop) != INTEGER_CST
3150 #if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
3151 && TREE_CODE (subop) != REAL_CST
3152 #endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
3154 /* Note that TREE_CONSTANT isn't enough:
3155 static var addresses are constant but we can't
3156 do arithmetic on them. */
3159 else if (kind == 'e' || kind == '<'
3160 || kind == '1' || kind == '2' || kind == 'r')
3162 register int len = tree_code_length[(int) code];
3164 for (i = 0; i < len; i++)
3166 tree op = TREE_OPERAND (t, i);
3170 continue; /* Valid for CALL_EXPR, at least. */
3172 if (kind == '<' || code == RSHIFT_EXPR)
3174 /* Signedness matters here. Perhaps we can refine this
3176 STRIP_TYPE_NOPS (op);
3180 /* Strip any conversions that don't change the mode. */
3184 if (TREE_CODE (op) == COMPLEX_CST)
3185 subop = TREE_REALPART (op);
3189 if (TREE_CODE (subop) != INTEGER_CST
3190 #if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
3191 && TREE_CODE (subop) != REAL_CST
3192 #endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
3194 /* Note that TREE_CONSTANT isn't enough:
3195 static var addresses are constant but we can't
3196 do arithmetic on them. */
3206 /* If this is a commutative operation, and ARG0 is a constant, move it
3207 to ARG1 to reduce the number of tests below. */
3208 if ((code == PLUS_EXPR || code == MULT_EXPR || code == MIN_EXPR
3209 || code == MAX_EXPR || code == BIT_IOR_EXPR || code == BIT_XOR_EXPR
3210 || code == BIT_AND_EXPR)
3211 && (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST))
3213 tem = arg0; arg0 = arg1; arg1 = tem;
3215 tem = TREE_OPERAND (t, 0); TREE_OPERAND (t, 0) = TREE_OPERAND (t, 1);
3216 TREE_OPERAND (t, 1) = tem;
3219 /* Now WINS is set as described above,
3220 ARG0 is the first operand of EXPR,
3221 and ARG1 is the second operand (if it has more than one operand).
3223 First check for cases where an arithmetic operation is applied to a
3224 compound, conditional, or comparison operation. Push the arithmetic
3225 operation inside the compound or conditional to see if any folding
3226 can then be done. Convert comparison to conditional for this purpose.
3227 The also optimizes non-constant cases that used to be done in
3230 Before we do that, see if this is a BIT_AND_EXPR or a BIT_OR_EXPR,
3231 one of the operands is a comparison and the other is a comparison, a
3232 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
3233 code below would make the expression more complex. Change it to a
3234 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
3235 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
3237 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
3238 || code == EQ_EXPR || code == NE_EXPR)
3239 && ((truth_value_p (TREE_CODE (arg0))
3240 && (truth_value_p (TREE_CODE (arg1))
3241 || (TREE_CODE (arg1) == BIT_AND_EXPR
3242 && integer_onep (TREE_OPERAND (arg1, 1)))))
3243 || (truth_value_p (TREE_CODE (arg1))
3244 && (truth_value_p (TREE_CODE (arg0))
3245 || (TREE_CODE (arg0) == BIT_AND_EXPR
3246 && integer_onep (TREE_OPERAND (arg0, 1)))))))
3248 t = fold (build (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
3249 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
3253 if (code == EQ_EXPR)
3254 t = invert_truthvalue (t);
3259 if (TREE_CODE_CLASS (code) == '1')
3261 if (TREE_CODE (arg0) == COMPOUND_EXPR)
3262 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
3263 fold (build1 (code, type, TREE_OPERAND (arg0, 1))));
3264 else if (TREE_CODE (arg0) == COND_EXPR)
3266 t = fold (build (COND_EXPR, type, TREE_OPERAND (arg0, 0),
3267 fold (build1 (code, type, TREE_OPERAND (arg0, 1))),
3268 fold (build1 (code, type, TREE_OPERAND (arg0, 2)))));
3270 /* If this was a conversion, and all we did was to move into
3271 inside the COND_EXPR, bring it back out. Then return so we
3272 don't get into an infinite recursion loop taking the conversion
3273 out and then back in. */
3275 if ((code == NOP_EXPR || code == CONVERT_EXPR
3276 || code == NON_LVALUE_EXPR)
3277 && TREE_CODE (t) == COND_EXPR
3278 && TREE_CODE (TREE_OPERAND (t, 1)) == code
3279 && TREE_CODE (TREE_OPERAND (t, 2)) == code
3280 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0))
3281 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 2), 0))))
3282 t = build1 (code, type,
3284 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0)),
3285 TREE_OPERAND (t, 0),
3286 TREE_OPERAND (TREE_OPERAND (t, 1), 0),
3287 TREE_OPERAND (TREE_OPERAND (t, 2), 0)));
3290 else if (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<')
3291 return fold (build (COND_EXPR, type, arg0,
3292 fold (build1 (code, type, integer_one_node)),
3293 fold (build1 (code, type, integer_zero_node))));
3295 else if (TREE_CODE_CLASS (code) == '2'
3296 || TREE_CODE_CLASS (code) == '<')
3298 if (TREE_CODE (arg1) == COMPOUND_EXPR)
3299 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
3300 fold (build (code, type,
3301 arg0, TREE_OPERAND (arg1, 1))));
3302 else if (TREE_CODE (arg1) == COND_EXPR
3303 || TREE_CODE_CLASS (TREE_CODE (arg1)) == '<')
3305 tree test, true_value, false_value;
3307 if (TREE_CODE (arg1) == COND_EXPR)
3309 test = TREE_OPERAND (arg1, 0);
3310 true_value = TREE_OPERAND (arg1, 1);
3311 false_value = TREE_OPERAND (arg1, 2);
3316 true_value = integer_one_node;
3317 false_value = integer_zero_node;
3320 /* If ARG0 is complex we want to make sure we only evaluate
3321 it once. Though this is only required if it is volatile, it
3322 might be more efficient even if it is not. However, if we
3323 succeed in folding one part to a constant, we do not need
3324 to make this SAVE_EXPR. Since we do this optimization
3325 primarily to see if we do end up with constant and this
3326 SAVE_EXPR interfers with later optimizations, suppressing
3327 it when we can is important. */
3329 if ((TREE_CODE (arg0) != VAR_DECL && TREE_CODE (arg0) != PARM_DECL)
3330 || TREE_SIDE_EFFECTS (arg0))
3332 tree lhs = fold (build (code, type, arg0, true_value));
3333 tree rhs = fold (build (code, type, arg0, false_value));
3335 if (TREE_CONSTANT (lhs) || TREE_CONSTANT (rhs))
3336 return fold (build (COND_EXPR, type, test, lhs, rhs));
3338 arg0 = save_expr (arg0);
3341 test = fold (build (COND_EXPR, type, test,
3342 fold (build (code, type, arg0, true_value)),
3343 fold (build (code, type, arg0, false_value))));
3344 if (TREE_CODE (arg0) == SAVE_EXPR)
3345 return build (COMPOUND_EXPR, type,
3346 convert (void_type_node, arg0), test);
3348 return convert (type, test);
3351 else if (TREE_CODE (arg0) == COMPOUND_EXPR)
3352 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
3353 fold (build (code, type, TREE_OPERAND (arg0, 1), arg1)));
3354 else if (TREE_CODE (arg0) == COND_EXPR
3355 || TREE_CODE_CLASS (TREE_CODE (arg0)) == '<')
3357 tree test, true_value, false_value;
3359 if (TREE_CODE (arg0) == COND_EXPR)
3361 test = TREE_OPERAND (arg0, 0);
3362 true_value = TREE_OPERAND (arg0, 1);
3363 false_value = TREE_OPERAND (arg0, 2);
3368 true_value = integer_one_node;
3369 false_value = integer_zero_node;
3372 if ((TREE_CODE (arg1) != VAR_DECL && TREE_CODE (arg1) != PARM_DECL)
3373 || TREE_SIDE_EFFECTS (arg1))
3375 tree lhs = fold (build (code, type, true_value, arg1));
3376 tree rhs = fold (build (code, type, false_value, arg1));
3378 if (TREE_CONSTANT (lhs) || TREE_CONSTANT (rhs))
3379 return fold (build (COND_EXPR, type, test, lhs, rhs));
3381 arg1 = save_expr (arg1);
3384 test = fold (build (COND_EXPR, type, test,
3385 fold (build (code, type, true_value, arg1)),
3386 fold (build (code, type, false_value, arg1))));
3387 if (TREE_CODE (arg1) == SAVE_EXPR)
3388 return build (COMPOUND_EXPR, type,
3389 convert (void_type_node, arg1), test);
3391 return convert (type, test);
3394 else if (TREE_CODE_CLASS (code) == '<'
3395 && TREE_CODE (arg0) == COMPOUND_EXPR)
3396 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
3397 fold (build (code, type, TREE_OPERAND (arg0, 1), arg1)));
3398 else if (TREE_CODE_CLASS (code) == '<'
3399 && TREE_CODE (arg1) == COMPOUND_EXPR)
3400 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
3401 fold (build (code, type, arg0, TREE_OPERAND (arg1, 1))));
3413 return fold (DECL_INITIAL (t));
3418 case FIX_TRUNC_EXPR:
3419 /* Other kinds of FIX are not handled properly by fold_convert. */
3421 /* In addition to the cases of two conversions in a row
3422 handled below, if we are converting something to its own
3423 type via an object of identical or wider precision, neither
3424 conversion is needed. */
3425 if ((TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
3426 || TREE_CODE (TREE_OPERAND (t, 0)) == CONVERT_EXPR)
3427 && TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0)) == TREE_TYPE (t)
3428 && ((INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 0)))
3429 && INTEGRAL_TYPE_P (TREE_TYPE (t)))
3430 || (FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 0)))
3431 && FLOAT_TYPE_P (TREE_TYPE (t))))
3432 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (t, 0)))
3433 >= TYPE_PRECISION (TREE_TYPE (t))))
3434 return TREE_OPERAND (TREE_OPERAND (t, 0), 0);
3436 /* Two conversions in a row are not needed unless:
3437 - the intermediate type is narrower than both initial and final, or
3438 - the intermediate type and innermost type differ in signedness,
3439 and the outermost type is wider than the intermediate, or
3440 - the initial type is a pointer type and the precisions of the
3441 intermediate and final types differ, or
3442 - the final type is a pointer type and the precisions of the
3443 initial and intermediate types differ. */
3444 if ((TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
3445 || TREE_CODE (TREE_OPERAND (t, 0)) == CONVERT_EXPR)
3446 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (t, 0)))
3447 > TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0)))
3449 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (t, 0)))
3450 > TYPE_PRECISION (TREE_TYPE (t)))
3451 && ! ((TREE_CODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0)))
3453 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (t, 0)))
3455 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (t, 0)))
3456 != TREE_UNSIGNED (TREE_OPERAND (TREE_OPERAND (t, 0), 0)))
3457 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (t, 0)))
3458 < TYPE_PRECISION (TREE_TYPE (t))))
3459 && ((TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (t, 0)))
3460 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (t, 0)))
3461 > TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0)))))
3463 (TREE_UNSIGNED (TREE_TYPE (t))
3464 && (TYPE_PRECISION (TREE_TYPE (t))
3465 > TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (t, 0))))))
3466 && ! ((TREE_CODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0)))
3468 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (t, 0)))
3469 != TYPE_PRECISION (TREE_TYPE (t))))
3470 && ! (TREE_CODE (TREE_TYPE (t)) == POINTER_TYPE
3471 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0)))
3472 != TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (t, 0))))))
3473 return convert (TREE_TYPE (t), TREE_OPERAND (TREE_OPERAND (t, 0), 0));
3475 if (TREE_CODE (TREE_OPERAND (t, 0)) == MODIFY_EXPR
3476 && TREE_CONSTANT (TREE_OPERAND (TREE_OPERAND (t, 0), 1))
3477 /* Detect assigning a bitfield. */
3478 && !(TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 0)) == COMPONENT_REF
3479 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t, 0), 0), 1))))
3481 /* Don't leave an assignment inside a conversion
3482 unless assigning a bitfield. */
3483 tree prev = TREE_OPERAND (t, 0);
3484 TREE_OPERAND (t, 0) = TREE_OPERAND (prev, 1);
3485 /* First do the assignment, then return converted constant. */
3486 t = build (COMPOUND_EXPR, TREE_TYPE (t), prev, fold (t));
3492 TREE_CONSTANT (t) = TREE_CONSTANT (arg0);
3495 return fold_convert (t, arg0);
3497 #if 0 /* This loses on &"foo"[0]. */
3502 /* Fold an expression like: "foo"[2] */
3503 if (TREE_CODE (arg0) == STRING_CST
3504 && TREE_CODE (arg1) == INTEGER_CST
3505 && !TREE_INT_CST_HIGH (arg1)
3506 && (i = TREE_INT_CST_LOW (arg1)) < TREE_STRING_LENGTH (arg0))
3508 t = build_int_2 (TREE_STRING_POINTER (arg0)[i], 0);
3509 TREE_TYPE (t) = TREE_TYPE (TREE_TYPE (arg0));
3510 force_fit_type (t, 0);
3517 TREE_CONSTANT (t) = wins;
3523 if (TREE_CODE (arg0) == INTEGER_CST)
3525 HOST_WIDE_INT low, high;
3526 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
3527 TREE_INT_CST_HIGH (arg0),
3529 t = build_int_2 (low, high);
3530 TREE_TYPE (t) = type;
3532 = (TREE_OVERFLOW (arg0)
3533 | force_fit_type (t, overflow));
3534 TREE_CONSTANT_OVERFLOW (t)
3535 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg0);
3537 else if (TREE_CODE (arg0) == REAL_CST)
3538 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
3539 TREE_TYPE (t) = type;
3541 else if (TREE_CODE (arg0) == NEGATE_EXPR)
3542 return TREE_OPERAND (arg0, 0);
3544 /* Convert - (a - b) to (b - a) for non-floating-point. */
3545 else if (TREE_CODE (arg0) == MINUS_EXPR && ! FLOAT_TYPE_P (type))
3546 return build (MINUS_EXPR, type, TREE_OPERAND (arg0, 1),
3547 TREE_OPERAND (arg0, 0));
3554 if (TREE_CODE (arg0) == INTEGER_CST)
3556 if (! TREE_UNSIGNED (type)
3557 && TREE_INT_CST_HIGH (arg0) < 0)
3559 HOST_WIDE_INT low, high;
3560 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
3561 TREE_INT_CST_HIGH (arg0),
3563 t = build_int_2 (low, high);
3564 TREE_TYPE (t) = type;
3566 = (TREE_OVERFLOW (arg0)
3567 | force_fit_type (t, overflow));
3568 TREE_CONSTANT_OVERFLOW (t)
3569 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg0);
3572 else if (TREE_CODE (arg0) == REAL_CST)
3574 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
3575 t = build_real (type,
3576 REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
3578 TREE_TYPE (t) = type;
3580 else if (TREE_CODE (arg0) == ABS_EXPR || TREE_CODE (arg0) == NEGATE_EXPR)
3581 return build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0));
3585 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
3587 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
3588 return build (COMPLEX_EXPR, TREE_TYPE (arg0),
3589 TREE_OPERAND (arg0, 0),
3590 fold (build1 (NEGATE_EXPR,
3591 TREE_TYPE (TREE_TYPE (arg0)),
3592 TREE_OPERAND (arg0, 1))));
3593 else if (TREE_CODE (arg0) == COMPLEX_CST)
3594 return build_complex (TREE_OPERAND (arg0, 0),
3595 fold (build1 (NEGATE_EXPR,
3596 TREE_TYPE (TREE_TYPE (arg0)),
3597 TREE_OPERAND (arg0, 1))));
3598 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
3599 return fold (build (TREE_CODE (arg0), type,
3600 fold (build1 (CONJ_EXPR, type,
3601 TREE_OPERAND (arg0, 0))),
3602 fold (build1 (CONJ_EXPR,
3603 type, TREE_OPERAND (arg0, 1)))));
3604 else if (TREE_CODE (arg0) == CONJ_EXPR)
3605 return TREE_OPERAND (arg0, 0);
3611 if (TREE_CODE (arg0) == INTEGER_CST)
3612 t = build_int_2 (~ TREE_INT_CST_LOW (arg0),
3613 ~ TREE_INT_CST_HIGH (arg0));
3614 TREE_TYPE (t) = type;
3615 force_fit_type (t, 0);
3616 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg0);
3617 TREE_CONSTANT_OVERFLOW (t) = TREE_CONSTANT_OVERFLOW (arg0);
3619 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
3620 return TREE_OPERAND (arg0, 0);
3624 /* A + (-B) -> A - B */
3625 if (TREE_CODE (arg1) == NEGATE_EXPR)
3626 return fold (build (MINUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
3627 else if (! FLOAT_TYPE_P (type))
3629 if (integer_zerop (arg1))
3630 return non_lvalue (convert (type, arg0));
3632 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
3633 with a constant, and the two constants have no bits in common,
3634 we should treat this as a BIT_IOR_EXPR since this may produce more
3636 if (TREE_CODE (arg0) == BIT_AND_EXPR
3637 && TREE_CODE (arg1) == BIT_AND_EXPR
3638 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
3639 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
3640 && integer_zerop (const_binop (BIT_AND_EXPR,
3641 TREE_OPERAND (arg0, 1),
3642 TREE_OPERAND (arg1, 1), 0)))
3644 code = BIT_IOR_EXPR;
3648 /* (A * C) + (B * C) -> (A+B) * C. Since we are most concerned
3649 about the case where C is a constant, just try one of the
3650 four possibilities. */
3652 if (TREE_CODE (arg0) == MULT_EXPR && TREE_CODE (arg1) == MULT_EXPR
3653 && operand_equal_p (TREE_OPERAND (arg0, 1),
3654 TREE_OPERAND (arg1, 1), 0))
3655 return fold (build (MULT_EXPR, type,
3656 fold (build (PLUS_EXPR, type,
3657 TREE_OPERAND (arg0, 0),
3658 TREE_OPERAND (arg1, 0))),
3659 TREE_OPERAND (arg0, 1)));
3661 /* In IEEE floating point, x+0 may not equal x. */
3662 else if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
3664 && real_zerop (arg1))
3665 return non_lvalue (convert (type, arg0));
3667 /* In most languages, can't associate operations on floats
3668 through parentheses. Rather than remember where the parentheses
3669 were, we don't associate floats at all. It shouldn't matter much. */
3670 if (FLOAT_TYPE_P (type))
3672 /* The varsign == -1 cases happen only for addition and subtraction.
3673 It says that the arg that was split was really CON minus VAR.
3674 The rest of the code applies to all associative operations. */
3680 if (split_tree (arg0, code, &var, &con, &varsign))
3684 /* EXPR is (CON-VAR) +- ARG1. */
3685 /* If it is + and VAR==ARG1, return just CONST. */
3686 if (code == PLUS_EXPR && operand_equal_p (var, arg1, 0))
3687 return convert (TREE_TYPE (t), con);
3689 /* If ARG0 is a constant, don't change things around;
3690 instead keep all the constant computations together. */
3692 if (TREE_CONSTANT (arg0))
3695 /* Otherwise return (CON +- ARG1) - VAR. */
3696 TREE_SET_CODE (t, MINUS_EXPR);
3697 TREE_OPERAND (t, 1) = var;
3699 = fold (build (code, TREE_TYPE (t), con, arg1));
3703 /* EXPR is (VAR+CON) +- ARG1. */
3704 /* If it is - and VAR==ARG1, return just CONST. */
3705 if (code == MINUS_EXPR && operand_equal_p (var, arg1, 0))
3706 return convert (TREE_TYPE (t), con);
3708 /* If ARG0 is a constant, don't change things around;
3709 instead keep all the constant computations together. */
3711 if (TREE_CONSTANT (arg0))
3714 /* Otherwise return VAR +- (ARG1 +- CON). */
3715 TREE_OPERAND (t, 1) = tem
3716 = fold (build (code, TREE_TYPE (t), arg1, con));
3717 TREE_OPERAND (t, 0) = var;
3718 if (integer_zerop (tem)
3719 && (code == PLUS_EXPR || code == MINUS_EXPR))
3720 return convert (type, var);
3721 /* If we have x +/- (c - d) [c an explicit integer]
3722 change it to x -/+ (d - c) since if d is relocatable
3723 then the latter can be a single immediate insn
3724 and the former cannot. */
3725 if (TREE_CODE (tem) == MINUS_EXPR
3726 && TREE_CODE (TREE_OPERAND (tem, 0)) == INTEGER_CST)
3728 tree tem1 = TREE_OPERAND (tem, 1);
3729 TREE_OPERAND (tem, 1) = TREE_OPERAND (tem, 0);
3730 TREE_OPERAND (tem, 0) = tem1;
3732 (code == PLUS_EXPR ? MINUS_EXPR : PLUS_EXPR));
3738 if (split_tree (arg1, code, &var, &con, &varsign))
3740 if (TREE_CONSTANT (arg1))
3745 (code == PLUS_EXPR ? MINUS_EXPR : PLUS_EXPR));
3747 /* EXPR is ARG0 +- (CON +- VAR). */
3748 if (TREE_CODE (t) == MINUS_EXPR
3749 && operand_equal_p (var, arg0, 0))
3751 /* If VAR and ARG0 cancel, return just CON or -CON. */
3752 if (code == PLUS_EXPR)
3753 return convert (TREE_TYPE (t), con);
3754 return fold (build1 (NEGATE_EXPR, TREE_TYPE (t),
3755 convert (TREE_TYPE (t), con)));
3759 = fold (build (code, TREE_TYPE (t), arg0, con));
3760 TREE_OPERAND (t, 1) = var;
3761 if (integer_zerop (TREE_OPERAND (t, 0))
3762 && TREE_CODE (t) == PLUS_EXPR)
3763 return convert (TREE_TYPE (t), var);
3768 #if defined (REAL_IS_NOT_DOUBLE) && ! defined (REAL_ARITHMETIC)
3769 if (TREE_CODE (arg1) == REAL_CST)
3771 #endif /* REAL_IS_NOT_DOUBLE, and no REAL_ARITHMETIC */
3773 t1 = const_binop (code, arg0, arg1, 0);
3774 if (t1 != NULL_TREE)
3776 /* The return value should always have
3777 the same type as the original expression. */
3778 TREE_TYPE (t1) = TREE_TYPE (t);
3784 if (! FLOAT_TYPE_P (type))
3786 if (! wins && integer_zerop (arg0))
3787 return build1 (NEGATE_EXPR, type, arg1);
3788 if (integer_zerop (arg1))
3789 return non_lvalue (convert (type, arg0));
3791 /* (A * C) - (B * C) -> (A-B) * C. Since we are most concerned
3792 about the case where C is a constant, just try one of the
3793 four possibilities. */
3795 if (TREE_CODE (arg0) == MULT_EXPR && TREE_CODE (arg1) == MULT_EXPR
3796 && operand_equal_p (TREE_OPERAND (arg0, 1),
3797 TREE_OPERAND (arg1, 1), 0))
3798 return fold (build (MULT_EXPR, type,
3799 fold (build (MINUS_EXPR, type,
3800 TREE_OPERAND (arg0, 0),
3801 TREE_OPERAND (arg1, 0))),
3802 TREE_OPERAND (arg0, 1)));
3804 /* Convert A - (-B) to A + B. */
3805 else if (TREE_CODE (arg1) == NEGATE_EXPR)
3806 return fold (build (PLUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
3808 else if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
3811 /* Except with IEEE floating point, 0-x equals -x. */
3812 if (! wins && real_zerop (arg0))
3813 return build1 (NEGATE_EXPR, type, arg1);
3814 /* Except with IEEE floating point, x-0 equals x. */
3815 if (real_zerop (arg1))
3816 return non_lvalue (convert (type, arg0));
3819 /* Fold &x - &x. This can happen from &x.foo - &x.
3820 This is unsafe for certain floats even in non-IEEE formats.
3821 In IEEE, it is unsafe because it does wrong for NaNs.
3822 Also note that operand_equal_p is always false if an operand
3825 if (operand_equal_p (arg0, arg1,
3826 FLOAT_TYPE_P (type) && ! flag_fast_math))
3827 return convert (type, integer_zero_node);
3832 if (! FLOAT_TYPE_P (type))
3834 if (integer_zerop (arg1))
3835 return omit_one_operand (type, arg1, arg0);
3836 if (integer_onep (arg1))
3837 return non_lvalue (convert (type, arg0));
3839 /* ((A / C) * C) is A if the division is an
3840 EXACT_DIV_EXPR. Since C is normally a constant,
3841 just check for one of the four possibilities. */
3843 if (TREE_CODE (arg0) == EXACT_DIV_EXPR
3844 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
3845 return TREE_OPERAND (arg0, 0);
3847 /* (a * (1 << b)) is (a << b) */
3848 if (TREE_CODE (arg1) == LSHIFT_EXPR
3849 && integer_onep (TREE_OPERAND (arg1, 0)))
3850 return fold (build (LSHIFT_EXPR, type, arg0,
3851 TREE_OPERAND (arg1, 1)));
3852 if (TREE_CODE (arg0) == LSHIFT_EXPR
3853 && integer_onep (TREE_OPERAND (arg0, 0)))
3854 return fold (build (LSHIFT_EXPR, type, arg1,
3855 TREE_OPERAND (arg0, 1)));
3859 /* x*0 is 0, except for IEEE floating point. */
3860 if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
3862 && real_zerop (arg1))
3863 return omit_one_operand (type, arg1, arg0);
3864 /* In IEEE floating point, x*1 is not equivalent to x for snans.
3865 However, ANSI says we can drop signals,
3866 so we can do this anyway. */
3867 if (real_onep (arg1))
3868 return non_lvalue (convert (type, arg0));
3870 if (! wins && real_twop (arg1))
3872 tree arg = save_expr (arg0);
3873 return build (PLUS_EXPR, type, arg, arg);
3880 if (integer_all_onesp (arg1))
3881 return omit_one_operand (type, arg1, arg0);
3882 if (integer_zerop (arg1))
3883 return non_lvalue (convert (type, arg0));
3884 t1 = distribute_bit_expr (code, type, arg0, arg1);
3885 if (t1 != NULL_TREE)
3888 /* (a << C1) | (a >> C2) if A is unsigned and C1+C2 is the size of A
3889 is a rotate of A by C1 bits. */
3891 if ((TREE_CODE (arg0) == RSHIFT_EXPR
3892 || TREE_CODE (arg0) == LSHIFT_EXPR)
3893 && (TREE_CODE (arg1) == RSHIFT_EXPR
3894 || TREE_CODE (arg1) == LSHIFT_EXPR)
3895 && TREE_CODE (arg0) != TREE_CODE (arg1)
3896 && operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1,0), 0)
3897 && TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0)))
3898 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
3899 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
3900 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
3901 && TREE_INT_CST_HIGH (TREE_OPERAND (arg1, 1)) == 0
3902 && ((TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
3903 + TREE_INT_CST_LOW (TREE_OPERAND (arg1, 1)))
3904 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
3905 return build (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
3906 TREE_CODE (arg0) == LSHIFT_EXPR
3907 ? TREE_OPERAND (arg0, 1) : TREE_OPERAND (arg1, 1));
3912 if (integer_zerop (arg1))
3913 return non_lvalue (convert (type, arg0));
3914 if (integer_all_onesp (arg1))
3915 return fold (build1 (BIT_NOT_EXPR, type, arg0));
3920 if (integer_all_onesp (arg1))
3921 return non_lvalue (convert (type, arg0));
3922 if (integer_zerop (arg1))
3923 return omit_one_operand (type, arg1, arg0);
3924 t1 = distribute_bit_expr (code, type, arg0, arg1);
3925 if (t1 != NULL_TREE)
3927 /* Simplify ((int)c & 0x377) into (int)c, if c is unsigned char. */
3928 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == NOP_EXPR
3929 && TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0))))
3931 int prec = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0)));
3932 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
3933 && (~TREE_INT_CST_LOW (arg0)
3934 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
3935 return build1 (NOP_EXPR, type, TREE_OPERAND (arg1, 0));
3937 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
3938 && TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
3940 int prec = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
3941 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
3942 && (~TREE_INT_CST_LOW (arg1)
3943 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
3944 return build1 (NOP_EXPR, type, TREE_OPERAND (arg0, 0));
3948 case BIT_ANDTC_EXPR:
3949 if (integer_all_onesp (arg0))
3950 return non_lvalue (convert (type, arg1));
3951 if (integer_zerop (arg0))
3952 return omit_one_operand (type, arg0, arg1);
3953 if (TREE_CODE (arg1) == INTEGER_CST)
3955 arg1 = fold (build1 (BIT_NOT_EXPR, type, arg1));
3956 code = BIT_AND_EXPR;
3961 case TRUNC_DIV_EXPR:
3962 case ROUND_DIV_EXPR:
3963 case FLOOR_DIV_EXPR:
3965 case EXACT_DIV_EXPR:
3967 if (integer_onep (arg1))
3968 return non_lvalue (convert (type, arg0));
3969 if (integer_zerop (arg1))
3972 /* Look for ((a * C1) / C3) or (((a * C1) + C2) / C3),
3973 where C1 % C3 == 0 or C3 % C1 == 0. We can simplify these
3974 expressions, which often appear in the offsets or sizes of
3975 objects with a varying size. Only deal with positive divisors
3976 and multiplicands. If C2 is negative, we must have C2 % C3 == 0.
3978 Look for NOPs and SAVE_EXPRs inside. */
3980 if (TREE_CODE (arg1) == INTEGER_CST
3981 && tree_int_cst_sgn (arg1) >= 0)
3983 int have_save_expr = 0;
3984 tree c2 = integer_zero_node;
3987 if (TREE_CODE (xarg0) == SAVE_EXPR)
3988 have_save_expr = 1, xarg0 = TREE_OPERAND (xarg0, 0);
3992 if (TREE_CODE (xarg0) == PLUS_EXPR
3993 && TREE_CODE (TREE_OPERAND (xarg0, 1)) == INTEGER_CST)
3994 c2 = TREE_OPERAND (xarg0, 1), xarg0 = TREE_OPERAND (xarg0, 0);
3995 else if (TREE_CODE (xarg0) == MINUS_EXPR
3996 && TREE_CODE (TREE_OPERAND (xarg0, 1)) == INTEGER_CST
3997 /* If we are doing this computation unsigned, the negate
3999 && ! TREE_UNSIGNED (type))
4001 c2 = fold (build1 (NEGATE_EXPR, type, TREE_OPERAND (xarg0, 1)));
4002 xarg0 = TREE_OPERAND (xarg0, 0);
4005 if (TREE_CODE (xarg0) == SAVE_EXPR)
4006 have_save_expr = 1, xarg0 = TREE_OPERAND (xarg0, 0);
4010 if (TREE_CODE (xarg0) == MULT_EXPR
4011 && TREE_CODE (TREE_OPERAND (xarg0, 1)) == INTEGER_CST
4012 && tree_int_cst_sgn (TREE_OPERAND (xarg0, 1)) >= 0
4013 && (integer_zerop (const_binop (TRUNC_MOD_EXPR,
4014 TREE_OPERAND (xarg0, 1), arg1, 1))
4015 || integer_zerop (const_binop (TRUNC_MOD_EXPR, arg1,
4016 TREE_OPERAND (xarg0, 1), 1)))
4017 && (tree_int_cst_sgn (c2) >= 0
4018 || integer_zerop (const_binop (TRUNC_MOD_EXPR, c2,
4021 tree outer_div = integer_one_node;
4022 tree c1 = TREE_OPERAND (xarg0, 1);
4025 /* If C3 > C1, set them equal and do a divide by
4026 C3/C1 at the end of the operation. */
4027 if (tree_int_cst_lt (c1, c3))
4028 outer_div = const_binop (code, c3, c1, 0), c3 = c1;
4030 /* The result is A * (C1/C3) + (C2/C3). */
4031 t = fold (build (PLUS_EXPR, type,
4032 fold (build (MULT_EXPR, type,
4033 TREE_OPERAND (xarg0, 0),
4034 const_binop (code, c1, c3, 1))),
4035 const_binop (code, c2, c3, 1)));
4037 if (! integer_onep (outer_div))
4038 t = fold (build (code, type, t, convert (type, outer_div)));
4047 #if !defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
4048 #ifndef REAL_INFINITY
4049 if (TREE_CODE (arg1) == REAL_CST
4050 && real_zerop (arg1))
4053 #endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
4058 case FLOOR_MOD_EXPR:
4059 case ROUND_MOD_EXPR:
4060 case TRUNC_MOD_EXPR:
4061 if (integer_onep (arg1))
4062 return omit_one_operand (type, integer_zero_node, arg0);
4063 if (integer_zerop (arg1))
4066 /* Look for ((a * C1) % C3) or (((a * C1) + C2) % C3),
4067 where C1 % C3 == 0. Handle similarly to the division case,
4068 but don't bother with SAVE_EXPRs. */
4070 if (TREE_CODE (arg1) == INTEGER_CST
4071 && ! integer_zerop (arg1))
4073 tree c2 = integer_zero_node;
4076 if (TREE_CODE (xarg0) == PLUS_EXPR
4077 && TREE_CODE (TREE_OPERAND (xarg0, 1)) == INTEGER_CST)
4078 c2 = TREE_OPERAND (xarg0, 1), xarg0 = TREE_OPERAND (xarg0, 0);
4079 else if (TREE_CODE (xarg0) == MINUS_EXPR
4080 && TREE_CODE (TREE_OPERAND (xarg0, 1)) == INTEGER_CST
4081 && ! TREE_UNSIGNED (type))
4083 c2 = fold (build1 (NEGATE_EXPR, type, TREE_OPERAND (xarg0, 1)));
4084 xarg0 = TREE_OPERAND (xarg0, 0);
4089 if (TREE_CODE (xarg0) == MULT_EXPR
4090 && TREE_CODE (TREE_OPERAND (xarg0, 1)) == INTEGER_CST
4091 && integer_zerop (const_binop (TRUNC_MOD_EXPR,
4092 TREE_OPERAND (xarg0, 1),
4094 && tree_int_cst_sgn (c2) >= 0)
4095 /* The result is (C2%C3). */
4096 return omit_one_operand (type, const_binop (code, c2, arg1, 1),
4097 TREE_OPERAND (xarg0, 0));
4106 if (integer_zerop (arg1))
4107 return non_lvalue (convert (type, arg0));
4108 /* Since negative shift count is not well-defined,
4109 don't try to compute it in the compiler. */
4110 if (tree_int_cst_sgn (arg1) < 0)
4115 if (operand_equal_p (arg0, arg1, 0))
4117 if (INTEGRAL_TYPE_P (type)
4118 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), 1))
4119 return omit_one_operand (type, arg1, arg0);
4123 if (operand_equal_p (arg0, arg1, 0))
4125 if (INTEGRAL_TYPE_P (type)
4126 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), 1))
4127 return omit_one_operand (type, arg1, arg0);
4130 case TRUTH_NOT_EXPR:
4131 /* Note that the operand of this must be an int
4132 and its values must be 0 or 1.
4133 ("true" is a fixed value perhaps depending on the language,
4134 but we don't handle values other than 1 correctly yet.) */
4135 return invert_truthvalue (arg0);
4137 case TRUTH_ANDIF_EXPR:
4138 /* Note that the operands of this must be ints
4139 and their values must be 0 or 1.
4140 ("true" is a fixed value perhaps depending on the language.) */
4141 /* If first arg is constant zero, return it. */
4142 if (integer_zerop (arg0))
4144 case TRUTH_AND_EXPR:
4145 /* If either arg is constant true, drop it. */
4146 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
4147 return non_lvalue (arg1);
4148 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
4149 return non_lvalue (arg0);
4150 /* If second arg is constant zero, result is zero, but first arg
4151 must be evaluated. */
4152 if (integer_zerop (arg1))
4153 return omit_one_operand (type, arg1, arg0);
4156 /* We only do these simplifications if we are optimizing. */
4160 /* Check for things like (A || B) && (A || C). We can convert this
4161 to A || (B && C). Note that either operator can be any of the four
4162 truth and/or operations and the transformation will still be
4163 valid. Also note that we only care about order for the
4164 ANDIF and ORIF operators. */
4165 if (TREE_CODE (arg0) == TREE_CODE (arg1)
4166 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
4167 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
4168 || TREE_CODE (arg0) == TRUTH_AND_EXPR
4169 || TREE_CODE (arg0) == TRUTH_OR_EXPR))
4171 tree a00 = TREE_OPERAND (arg0, 0);
4172 tree a01 = TREE_OPERAND (arg0, 1);
4173 tree a10 = TREE_OPERAND (arg1, 0);
4174 tree a11 = TREE_OPERAND (arg1, 1);
4175 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
4176 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
4177 && (code == TRUTH_AND_EXPR
4178 || code == TRUTH_OR_EXPR));
4180 if (operand_equal_p (a00, a10, 0))
4181 return fold (build (TREE_CODE (arg0), type, a00,
4182 fold (build (code, type, a01, a11))));
4183 else if (commutative && operand_equal_p (a00, a11, 0))
4184 return fold (build (TREE_CODE (arg0), type, a00,
4185 fold (build (code, type, a01, a10))));
4186 else if (commutative && operand_equal_p (a01, a10, 0))
4187 return fold (build (TREE_CODE (arg0), type, a01,
4188 fold (build (code, type, a00, a11))));
4190 /* This case is tricky because we must either have commutative
4191 operators or else A10 must not have side-effects. */
4193 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
4194 && operand_equal_p (a01, a11, 0))
4195 return fold (build (TREE_CODE (arg0), type,
4196 fold (build (code, type, a00, a10)),
4200 /* Check for the possibility of merging component references. If our
4201 lhs is another similar operation, try to merge its rhs with our
4202 rhs. Then try to merge our lhs and rhs. */
4203 if (TREE_CODE (arg0) == code
4204 && 0 != (tem = fold_truthop (code, type,
4205 TREE_OPERAND (arg0, 1), arg1)))
4206 return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));
4208 if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
4213 case TRUTH_ORIF_EXPR:
4214 /* Note that the operands of this must be ints
4215 and their values must be 0 or true.
4216 ("true" is a fixed value perhaps depending on the language.) */
4217 /* If first arg is constant true, return it. */
4218 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
4221 /* If either arg is constant zero, drop it. */
4222 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
4223 return non_lvalue (arg1);
4224 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1))
4225 return non_lvalue (arg0);
4226 /* If second arg is constant true, result is true, but we must
4227 evaluate first arg. */
4228 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
4229 return omit_one_operand (type, arg1, arg0);
4232 case TRUTH_XOR_EXPR:
4233 /* If either arg is constant zero, drop it. */
4234 if (integer_zerop (arg0))
4235 return non_lvalue (arg1);
4236 if (integer_zerop (arg1))
4237 return non_lvalue (arg0);
4238 /* If either arg is constant true, this is a logical inversion. */
4239 if (integer_onep (arg0))
4240 return non_lvalue (invert_truthvalue (arg1));
4241 if (integer_onep (arg1))
4242 return non_lvalue (invert_truthvalue (arg0));
4251 /* If one arg is a constant integer, put it last. */
4252 if (TREE_CODE (arg0) == INTEGER_CST
4253 && TREE_CODE (arg1) != INTEGER_CST)
4255 TREE_OPERAND (t, 0) = arg1;
4256 TREE_OPERAND (t, 1) = arg0;
4257 arg0 = TREE_OPERAND (t, 0);
4258 arg1 = TREE_OPERAND (t, 1);
4259 code = swap_tree_comparison (code);
4260 TREE_SET_CODE (t, code);
4263 /* Convert foo++ == CONST into ++foo == CONST + INCR.
4264 First, see if one arg is constant; find the constant arg
4265 and the other one. */
4267 tree constop = 0, varop;
4270 if (TREE_CONSTANT (arg1))
4271 constoploc = &TREE_OPERAND (t, 1), constop = arg1, varop = arg0;
4272 if (TREE_CONSTANT (arg0))
4273 constoploc = &TREE_OPERAND (t, 0), constop = arg0, varop = arg1;
4275 if (constop && TREE_CODE (varop) == POSTINCREMENT_EXPR)
4277 /* This optimization is invalid for ordered comparisons
4278 if CONST+INCR overflows or if foo+incr might overflow.
4279 This optimization is invalid for floating point due to rounding.
4280 For pointer types we assume overflow doesn't happen. */
4281 if (TREE_CODE (TREE_TYPE (varop)) == POINTER_TYPE
4282 || (! FLOAT_TYPE_P (TREE_TYPE (varop))
4283 && (code == EQ_EXPR || code == NE_EXPR)))
4286 = fold (build (PLUS_EXPR, TREE_TYPE (varop),
4287 constop, TREE_OPERAND (varop, 1)));
4288 TREE_SET_CODE (varop, PREINCREMENT_EXPR);
4289 *constoploc = newconst;
4293 else if (constop && TREE_CODE (varop) == POSTDECREMENT_EXPR)
4295 if (TREE_CODE (TREE_TYPE (varop)) == POINTER_TYPE
4296 || (! FLOAT_TYPE_P (TREE_TYPE (varop))
4297 && (code == EQ_EXPR || code == NE_EXPR)))
4300 = fold (build (MINUS_EXPR, TREE_TYPE (varop),
4301 constop, TREE_OPERAND (varop, 1)));
4302 TREE_SET_CODE (varop, PREDECREMENT_EXPR);
4303 *constoploc = newconst;
4309 /* Change X >= CST to X > (CST - 1) if CST is positive. */
4310 if (TREE_CODE (arg1) == INTEGER_CST
4311 && TREE_CODE (arg0) != INTEGER_CST
4312 && tree_int_cst_sgn (arg1) > 0)
4314 switch (TREE_CODE (t))
4318 TREE_SET_CODE (t, code);
4319 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
4320 TREE_OPERAND (t, 1) = arg1;
4325 TREE_SET_CODE (t, code);
4326 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
4327 TREE_OPERAND (t, 1) = arg1;
4331 /* If this is an EQ or NE comparison with zero and ARG0 is
4332 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
4333 two operations, but the latter can be done in one less insn
4334 on machines that have only two-operand insns or on which a
4335 constant cannot be the first operand. */
4336 if (integer_zerop (arg1) && (code == EQ_EXPR || code == NE_EXPR)
4337 && TREE_CODE (arg0) == BIT_AND_EXPR)
4339 if (TREE_CODE (TREE_OPERAND (arg0, 0)) == LSHIFT_EXPR
4340 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 0), 0)))
4342 fold (build (code, type,
4343 build (BIT_AND_EXPR, TREE_TYPE (arg0),
4345 TREE_TYPE (TREE_OPERAND (arg0, 0)),
4346 TREE_OPERAND (arg0, 1),
4347 TREE_OPERAND (TREE_OPERAND (arg0, 0), 1)),
4348 convert (TREE_TYPE (arg0),
4351 else if (TREE_CODE (TREE_OPERAND (arg0, 1)) == LSHIFT_EXPR
4352 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 1), 0)))
4354 fold (build (code, type,
4355 build (BIT_AND_EXPR, TREE_TYPE (arg0),
4357 TREE_TYPE (TREE_OPERAND (arg0, 1)),
4358 TREE_OPERAND (arg0, 0),
4359 TREE_OPERAND (TREE_OPERAND (arg0, 1), 1)),
4360 convert (TREE_TYPE (arg0),
4365 /* If this is an NE or EQ comparison of zero against the result of a
4366 signed MOD operation whose second operand is a power of 2, make
4367 the MOD operation unsigned since it is simpler and equivalent. */
4368 if ((code == NE_EXPR || code == EQ_EXPR)
4369 && integer_zerop (arg1)
4370 && ! TREE_UNSIGNED (TREE_TYPE (arg0))
4371 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
4372 || TREE_CODE (arg0) == CEIL_MOD_EXPR
4373 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
4374 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
4375 && integer_pow2p (TREE_OPERAND (arg0, 1)))
4377 tree newtype = unsigned_type (TREE_TYPE (arg0));
4378 tree newmod = build (TREE_CODE (arg0), newtype,
4379 convert (newtype, TREE_OPERAND (arg0, 0)),
4380 convert (newtype, TREE_OPERAND (arg0, 1)));
4382 return build (code, type, newmod, convert (newtype, arg1));
4385 /* If this is an NE comparison of zero with an AND of one, remove the
4386 comparison since the AND will give the correct value. */
4387 if (code == NE_EXPR && integer_zerop (arg1)
4388 && TREE_CODE (arg0) == BIT_AND_EXPR
4389 && integer_onep (TREE_OPERAND (arg0, 1)))
4390 return convert (type, arg0);
4392 /* If we have (A & C) == C where C is a power of 2, convert this into
4393 (A & C) != 0. Similarly for NE_EXPR. */
4394 if ((code == EQ_EXPR || code == NE_EXPR)
4395 && TREE_CODE (arg0) == BIT_AND_EXPR
4396 && integer_pow2p (TREE_OPERAND (arg0, 1))
4397 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
4398 return build (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
4399 arg0, integer_zero_node);
4401 /* Simplify comparison of something with itself. (For IEEE
4402 floating-point, we can only do some of these simplifications.) */
4403 if (operand_equal_p (arg0, arg1, 0))
4410 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
4412 t = build_int_2 (1, 0);
4413 TREE_TYPE (t) = type;
4417 TREE_SET_CODE (t, code);
4421 /* For NE, we can only do this simplification if integer. */
4422 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
4424 /* ... fall through ... */
4427 t = build_int_2 (0, 0);
4428 TREE_TYPE (t) = type;
4433 /* An unsigned comparison against 0 can be simplified. */
4434 if (integer_zerop (arg1)
4435 && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
4436 || TREE_CODE (TREE_TYPE (arg1)) == POINTER_TYPE)
4437 && TREE_UNSIGNED (TREE_TYPE (arg1)))
4439 switch (TREE_CODE (t))
4443 TREE_SET_CODE (t, NE_EXPR);
4447 TREE_SET_CODE (t, EQ_EXPR);
4450 return omit_one_operand (type,
4451 convert (type, integer_one_node),
4454 return omit_one_operand (type,
4455 convert (type, integer_zero_node),
4460 /* If we are comparing an expression that just has comparisons
4461 of two integer values, arithmetic expressions of those comparisons,
4462 and constants, we can simplify it. There are only three cases
4463 to check: the two values can either be equal, the first can be
4464 greater, or the second can be greater. Fold the expression for
4465 those three values. Since each value must be 0 or 1, we have
4466 eight possibilities, each of which corresponds to the constant 0
4467 or 1 or one of the six possible comparisons.
4469 This handles common cases like (a > b) == 0 but also handles
4470 expressions like ((x > y) - (y > x)) > 0, which supposedly
4471 occur in macroized code. */
4473 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
4475 tree cval1 = 0, cval2 = 0;
4478 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
4479 /* Don't handle degenerate cases here; they should already
4480 have been handled anyway. */
4481 && cval1 != 0 && cval2 != 0
4482 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
4483 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
4484 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
4485 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
4486 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
4488 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
4489 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
4491 /* We can't just pass T to eval_subst in case cval1 or cval2
4492 was the same as ARG1. */
4495 = fold (build (code, type,
4496 eval_subst (arg0, cval1, maxval, cval2, minval),
4499 = fold (build (code, type,
4500 eval_subst (arg0, cval1, maxval, cval2, maxval),
4503 = fold (build (code, type,
4504 eval_subst (arg0, cval1, minval, cval2, maxval),
4507 /* All three of these results should be 0 or 1. Confirm they
4508 are. Then use those values to select the proper code
4511 if ((integer_zerop (high_result)
4512 || integer_onep (high_result))
4513 && (integer_zerop (equal_result)
4514 || integer_onep (equal_result))
4515 && (integer_zerop (low_result)
4516 || integer_onep (low_result)))
4518 /* Make a 3-bit mask with the high-order bit being the
4519 value for `>', the next for '=', and the low for '<'. */
4520 switch ((integer_onep (high_result) * 4)
4521 + (integer_onep (equal_result) * 2)
4522 + integer_onep (low_result))
4526 return omit_one_operand (type, integer_zero_node, arg0);
4547 return omit_one_operand (type, integer_one_node, arg0);
4550 t = build (code, type, cval1, cval2);
4552 return save_expr (t);
4559 /* If this is a comparison of a field, we may be able to simplify it. */
4560 if ((TREE_CODE (arg0) == COMPONENT_REF
4561 || TREE_CODE (arg0) == BIT_FIELD_REF)
4562 && (code == EQ_EXPR || code == NE_EXPR)
4563 /* Handle the constant case even without -O
4564 to make sure the warnings are given. */
4565 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
4567 t1 = optimize_bit_field_compare (code, type, arg0, arg1);
4571 /* If this is a comparison of complex values and either or both
4572 sides are a COMPLEX_EXPR, it is best to split up the comparisons
4573 and join them with a TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR. This
4574 may prevent needless evaluations. */
4575 if ((code == EQ_EXPR || code == NE_EXPR)
4576 && TREE_CODE (TREE_TYPE (arg0)) == COMPLEX_TYPE
4577 && (TREE_CODE (arg0) == COMPLEX_EXPR
4578 || TREE_CODE (arg1) == COMPLEX_EXPR))
4580 tree subtype = TREE_TYPE (TREE_TYPE (arg0));
4581 tree real0 = fold (build1 (REALPART_EXPR, subtype, arg0));
4582 tree imag0 = fold (build1 (IMAGPART_EXPR, subtype, arg0));
4583 tree real1 = fold (build1 (REALPART_EXPR, subtype, arg1));
4584 tree imag1 = fold (build1 (IMAGPART_EXPR, subtype, arg1));
4586 return fold (build ((code == EQ_EXPR ? TRUTH_ANDIF_EXPR
4589 fold (build (code, type, real0, real1)),
4590 fold (build (code, type, imag0, imag1))));
4593 /* From here on, the only cases we handle are when the result is
4594 known to be a constant.
4596 To compute GT, swap the arguments and do LT.
4597 To compute GE, do LT and invert the result.
4598 To compute LE, swap the arguments, do LT and invert the result.
4599 To compute NE, do EQ and invert the result.
4601 Therefore, the code below must handle only EQ and LT. */
4603 if (code == LE_EXPR || code == GT_EXPR)
4605 tem = arg0, arg0 = arg1, arg1 = tem;
4606 code = swap_tree_comparison (code);
4609 /* Note that it is safe to invert for real values here because we
4610 will check below in the one case that it matters. */
4613 if (code == NE_EXPR || code == GE_EXPR)
4616 code = invert_tree_comparison (code);
4619 /* Compute a result for LT or EQ if args permit;
4620 otherwise return T. */
4621 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
4623 if (code == EQ_EXPR)
4624 t1 = build_int_2 ((TREE_INT_CST_LOW (arg0)
4625 == TREE_INT_CST_LOW (arg1))
4626 && (TREE_INT_CST_HIGH (arg0)
4627 == TREE_INT_CST_HIGH (arg1)),
4630 t1 = build_int_2 ((TREE_UNSIGNED (TREE_TYPE (arg0))
4631 ? INT_CST_LT_UNSIGNED (arg0, arg1)
4632 : INT_CST_LT (arg0, arg1)),
4636 /* Assume a nonexplicit constant cannot equal an explicit one,
4637 since such code would be undefined anyway.
4638 Exception: on sysvr4, using #pragma weak,
4639 a label can come out as 0. */
4640 else if (TREE_CODE (arg1) == INTEGER_CST
4641 && !integer_zerop (arg1)
4642 && TREE_CONSTANT (arg0)
4643 && TREE_CODE (arg0) == ADDR_EXPR
4645 t1 = build_int_2 (0, 0);
4647 /* Two real constants can be compared explicitly. */
4648 else if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
4650 /* If either operand is a NaN, the result is false with two
4651 exceptions: First, an NE_EXPR is true on NaNs, but that case
4652 is already handled correctly since we will be inverting the
4653 result for NE_EXPR. Second, if we had inverted a LE_EXPR
4654 or a GE_EXPR into a LT_EXPR, we must return true so that it
4655 will be inverted into false. */
4657 if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
4658 || REAL_VALUE_ISNAN (TREE_REAL_CST (arg1)))
4659 t1 = build_int_2 (invert && code == LT_EXPR, 0);
4661 else if (code == EQ_EXPR)
4662 t1 = build_int_2 (REAL_VALUES_EQUAL (TREE_REAL_CST (arg0),
4663 TREE_REAL_CST (arg1)),
4666 t1 = build_int_2 (REAL_VALUES_LESS (TREE_REAL_CST (arg0),
4667 TREE_REAL_CST (arg1)),
4671 if (t1 == NULL_TREE)
4675 TREE_INT_CST_LOW (t1) ^= 1;
4677 TREE_TYPE (t1) = type;
4681 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
4682 so all simple results must be passed through pedantic_non_lvalue. */
4683 if (TREE_CODE (arg0) == INTEGER_CST)
4684 return pedantic_non_lvalue
4685 (TREE_OPERAND (t, (integer_zerop (arg0) ? 2 : 1)));
4686 else if (operand_equal_p (arg1, TREE_OPERAND (expr, 2), 0))
4687 return pedantic_non_lvalue (omit_one_operand (type, arg1, arg0));
4689 /* If the second operand is zero, invert the comparison and swap
4690 the second and third operands. Likewise if the second operand
4691 is constant and the third is not or if the third operand is
4692 equivalent to the first operand of the comparison. */
4694 if (integer_zerop (arg1)
4695 || (TREE_CONSTANT (arg1) && ! TREE_CONSTANT (TREE_OPERAND (t, 2)))
4696 || (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
4697 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
4698 TREE_OPERAND (t, 2),
4699 TREE_OPERAND (arg0, 1))))
4701 /* See if this can be inverted. If it can't, possibly because
4702 it was a floating-point inequality comparison, don't do
4704 tem = invert_truthvalue (arg0);
4706 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
4708 arg0 = TREE_OPERAND (t, 0) = tem;
4709 TREE_OPERAND (t, 1) = TREE_OPERAND (t, 2);
4710 TREE_OPERAND (t, 2) = arg1;
4711 arg1 = TREE_OPERAND (t, 1);
4715 /* If we have A op B ? A : C, we may be able to convert this to a
4716 simpler expression, depending on the operation and the values
4717 of B and C. IEEE floating point prevents this though,
4718 because A or B might be -0.0 or a NaN. */
4720 if (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
4721 && (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
4722 || ! FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 0)))
/* NOTE(review): this span is an elided listing of the tail of `fold'
   (fold-const.c).  The embedded source-line numbers (4724, 4727, ...)
   are not consecutive, so the control-flow glue between the fragments
   below -- braces, `break's, and the `case' labels of a dispatch on
   `comp_code' -- is missing from this view.  The comments added here
   describe only the statements that are visible; anything that depends
   on the elided lines is explicitly hedged.  */
/* Entered when arg0 is a comparison whose left-hand operand matches
   arg1 (the COND_EXPR's then-arm), per the test below.  */
4724 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
4725 arg1, TREE_OPERAND (arg0, 1)))
/* arg2 is the COND_EXPR's else-arm; comp_code is the comparison's
   tree code (EQ_EXPR, NE_EXPR, LT_EXPR, ... -- the dispatch on it is
   among the elided lines).  */
4727 tree arg2 = TREE_OPERAND (t, 2);
4728 enum tree_code comp_code = TREE_CODE (arg0);
4730 /* If we have A op 0 ? A : -A, this is A, -A, abs (A), or abs (-A),
4731 depending on the comparison operation. */
4732 if (integer_zerop (TREE_OPERAND (arg0, 1))
4733 && TREE_CODE (arg2) == NEGATE_EXPR
4734 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
/* The four `return's below are mutually exclusive alternatives whose
   selecting `case' labels are elided: -A, A, abs (A), and -abs (A)
   respectively.  */
4738 return pedantic_non_lvalue
4739 (fold (build1 (NEGATE_EXPR, type, arg1)));
4741 return pedantic_non_lvalue (convert (type, arg1));
4744 return pedantic_non_lvalue
4745 (fold (build1 (ABS_EXPR, type, arg1)));
4748 return pedantic_non_lvalue
4749 (fold (build1 (NEGATE_EXPR, type,
4750 fold (build1 (ABS_EXPR, type, arg1)))));
4753 /* If this is A != 0 ? A : 0, this is simply A. For ==, it is
   [comment continuation elided in this listing -- presumably "zero";
   closing delimiter restored here] */
4756 if (integer_zerop (TREE_OPERAND (arg0, 1)) && integer_zerop (arg2))
4758 if (comp_code == NE_EXPR)
4759 return pedantic_non_lvalue (convert (type, arg1));
4760 else if (comp_code == EQ_EXPR)
4761 return pedantic_non_lvalue (convert (type, integer_zero_node));
4764 /* If this is A op B ? A : B, this is either A, B, min (A, B),
4765 or max (A, B), depending on the operation. */
4767 if (operand_equal_for_comparison_p (TREE_OPERAND (arg0, 1),
4768 arg2, TREE_OPERAND (arg0, 0)))
/* Again four alternative returns whose selecting `case' labels are
   elided: B, A, min (A, B), and max (A, B) respectively.  */
4772 return pedantic_non_lvalue (convert (type, arg2));
4774 return pedantic_non_lvalue (convert (type, arg1));
4777 return pedantic_non_lvalue
4778 (fold (build (MIN_EXPR, type, arg1, arg2)));
4781 return pedantic_non_lvalue
4782 (fold (build (MAX_EXPR, type, arg1, arg2)));
4785 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4786 we might still be able to simplify this. For example,
4787 if C1 is one less or one more than C2, this might have started
4788 out as a MIN or MAX and been transformed by this function.
4789 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
4791 if (INTEGRAL_TYPE_P (type)
4792 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
4793 && TREE_CODE (arg2) == INTEGER_CST)
/* Equality case, presumably (label elided): when the comparison is
   A == C1, A may be replaced by C1 -- operand 1 of t is rewritten
   in place.  */
4797 /* We can replace A with C1 in this case. */
4798 arg1 = TREE_OPERAND (t, 1)
4799 = convert (type, TREE_OPERAND (arg0, 1));
/* In each of the four checks below, the initial operand_equal_p
   against TYPE_MAX_VALUE / TYPE_MIN_VALUE guards against C2 being
   the extreme value of the type, so the const_binop computing
   C2 + 1 or C2 - 1 cannot wrap.  */
4803 /* If C1 is C2 + 1, this is min(A, C2). */
4804 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type), 1)
4805 && operand_equal_p (TREE_OPERAND (arg0, 1),
4806 const_binop (PLUS_EXPR, arg2,
4807 integer_one_node, 0), 1))
4808 return pedantic_non_lvalue
4809 (fold (build (MIN_EXPR, type, arg1, arg2)));
4813 /* If C1 is C2 - 1, this is min(A, C2). */
4814 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type), 1)
4815 && operand_equal_p (TREE_OPERAND (arg0, 1),
4816 const_binop (MINUS_EXPR, arg2,
4817 integer_one_node, 0), 1))
4818 return pedantic_non_lvalue
4819 (fold (build (MIN_EXPR, type, arg1, arg2)));
/* NOTE(review): the test below is textually identical to the one
   just above yet returns MAX_EXPR instead of MIN_EXPR.  They are
   distinguished by elided `case' labels (different comparison
   codes), so this is NOT a duplicated-condition bug in the visible
   text alone -- confirm against the full source.  */
4823 /* If C1 is C2 - 1, this is max(A, C2). */
4824 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type), 1)
4825 && operand_equal_p (TREE_OPERAND (arg0, 1),
4826 const_binop (MINUS_EXPR, arg2,
4827 integer_one_node, 0), 1))
4828 return pedantic_non_lvalue
4829 (fold (build (MAX_EXPR, type, arg1, arg2)));
4833 /* If C1 is C2 + 1, this is max(A, C2). */
4834 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type), 1)
4835 && operand_equal_p (TREE_OPERAND (arg0, 1),
4836 const_binop (PLUS_EXPR, arg2,
4837 integer_one_node, 0), 1))
4838 return pedantic_non_lvalue
4839 (fold (build (MAX_EXPR, type, arg1, arg2)));
4844 /* Convert A ? 1 : 0 to simply A. */
4845 if (integer_onep (TREE_OPERAND (t, 1))
4846 && integer_zerop (TREE_OPERAND (t, 2))
4847 /* If we try to convert TREE_OPERAND (t, 0) to our type, the
4848 call to fold will try to move the conversion inside
4849 a COND, which will recurse. In that case, the COND_EXPR
4850 is probably the best choice, so leave it alone. */
4851 && type == TREE_TYPE (arg0))
4852 return pedantic_non_lvalue (arg0);
4855 /* Look for expressions of the form A & 2 ? 2 : 0. The result of this
4856 operation is simply A & 2. */
4858 if (integer_zerop (TREE_OPERAND (t, 2))
4859 && TREE_CODE (arg0) == NE_EXPR
4860 && integer_zerop (TREE_OPERAND (arg0, 1))
4861 && integer_pow2p (arg1)
4862 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
4863 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
/* (second argument of this operand_equal_p call is on elided line
   4864 -- presumably arg1, completing the A & C == 0 ? 0 : C match) */
4865 return pedantic_non_lvalue (convert (type, TREE_OPERAND (arg0, 0)));
/* COMPOUND_EXPR handling, presumably (case label elided).  */
4870 /* When pedantic, a compound expression can be neither an lvalue
4871 nor an integer constant expression. */
4872 if (TREE_SIDE_EFFECTS (arg0) || pedantic)
4874 /* Don't let (0, 0) be null pointer constant. */
4875 if (integer_zerop (arg1))
4876 return non_lvalue (arg1);
/* COMPLEX_EXPR constant folding, presumably (case label and guard on
   elided lines 4877-4880): both parts constant, so build a
   COMPLEX_CST.  */
4881 return build_complex (arg0, arg1);
/* REALPART_EXPR handling, presumably (case label elided).  The
   then-clause for the non-complex test below is on elided line
   4886 -- likely a plain `return' of the operand, since the real
   part of a non-complex value is the value itself.  */
4885 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
4887 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
4888 return omit_one_operand (type, TREE_OPERAND (arg0, 0),
4889 TREE_OPERAND (arg0, 1));
4890 else if (TREE_CODE (arg0) == COMPLEX_CST)
4891 return TREE_REALPART (arg0);
/* realpart distributes over complex addition/subtraction: apply it
   to each operand and rebuild the PLUS/MINUS over the parts.  */
4892 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
4893 return fold (build (TREE_CODE (arg0), type,
4894 fold (build1 (REALPART_EXPR, type,
4895 TREE_OPERAND (arg0, 0))),
4896 fold (build1 (REALPART_EXPR,
4897 type, TREE_OPERAND (arg0, 1)))));
/* IMAGPART_EXPR handling, presumably (case label elided): the
   imaginary part of a non-complex value is zero.  */
4901 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
4902 return convert (type, integer_zero_node);
4903 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
4904 return omit_one_operand (type, TREE_OPERAND (arg0, 1),
4905 TREE_OPERAND (arg0, 0));
4906 else if (TREE_CODE (arg0) == COMPLEX_CST)
4907 return TREE_IMAGPART (arg0);
/* imagpart likewise distributes over complex addition/subtraction.  */
4908 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
4909 return fold (build (TREE_CODE (arg0), type,
4910 fold (build1 (IMAGPART_EXPR, type,
4911 TREE_OPERAND (arg0, 0))),
4912 fold (build1 (IMAGPART_EXPR, type,
4913 TREE_OPERAND (arg0, 1)))));
4918 } /* switch (code) */