1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000 Free Software Foundation, Inc.
5 This file is part of GNU CC.
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
22 /*@@ This file should be rewritten to use an arbitrary precision
23 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
24 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
25 @@ The routines that translate from the ap rep should
26 @@ warn if precision et. al. is lost.
27 @@ This would also make life easier when this technology is used
28 @@ for cross-compilers. */
31 /* The entry points in this file are fold, size_int_wide, size_binop
34 fold takes a tree as argument and returns a simplified tree.
36 size_binop takes a tree code for an arithmetic operation
37 and two operands that are trees, and produces a tree for the
38 result, assuming the type comes from `sizetype'.
40 size_int takes an integer value, and creates a tree constant
41 with type from `sizetype'.
43 force_fit_type takes a constant and prior overflow indicator, and
44 forces the value to fit the type. It returns an overflow indicator. */
56 static void encode PARAMS ((HOST_WIDE_INT *,
57 unsigned HOST_WIDE_INT,
59 static void decode PARAMS ((HOST_WIDE_INT *,
60 unsigned HOST_WIDE_INT *,
62 static tree negate_expr PARAMS ((tree));
63 static tree split_tree PARAMS ((tree, enum tree_code, tree *, tree *,
65 static tree associate_trees PARAMS ((tree, tree, enum tree_code, tree));
66 static tree int_const_binop PARAMS ((enum tree_code, tree, tree, int, int));
67 static void const_binop_1 PARAMS ((PTR));
68 static tree const_binop PARAMS ((enum tree_code, tree, tree, int));
69 static void fold_convert_1 PARAMS ((PTR));
70 static tree fold_convert PARAMS ((tree, tree));
71 static enum tree_code invert_tree_comparison PARAMS ((enum tree_code));
72 static enum tree_code swap_tree_comparison PARAMS ((enum tree_code));
73 static int truth_value_p PARAMS ((enum tree_code));
74 static int operand_equal_for_comparison_p PARAMS ((tree, tree, tree));
75 static int twoval_comparison_p PARAMS ((tree, tree *, tree *, int *));
76 static tree eval_subst PARAMS ((tree, tree, tree, tree, tree));
77 static tree omit_one_operand PARAMS ((tree, tree, tree));
78 static tree pedantic_omit_one_operand PARAMS ((tree, tree, tree));
79 static tree distribute_bit_expr PARAMS ((enum tree_code, tree, tree, tree));
80 static tree make_bit_field_ref PARAMS ((tree, tree, int, int, int));
81 static tree optimize_bit_field_compare PARAMS ((enum tree_code, tree,
83 static tree decode_field_reference PARAMS ((tree, HOST_WIDE_INT *,
85 enum machine_mode *, int *,
86 int *, tree *, tree *));
87 static int all_ones_mask_p PARAMS ((tree, int));
88 static int simple_operand_p PARAMS ((tree));
89 static tree range_binop PARAMS ((enum tree_code, tree, tree, int,
91 static tree make_range PARAMS ((tree, int *, tree *, tree *));
92 static tree build_range_check PARAMS ((tree, tree, int, tree, tree));
93 static int merge_ranges PARAMS ((int *, tree *, tree *, int, tree, tree,
95 static tree fold_range_test PARAMS ((tree));
96 static tree unextend PARAMS ((tree, int, int, tree));
97 static tree fold_truthop PARAMS ((enum tree_code, tree, tree, tree));
98 static tree optimize_minmax_comparison PARAMS ((tree));
99 static tree extract_muldiv PARAMS ((tree, tree, enum tree_code, tree));
100 static tree strip_compound_expr PARAMS ((tree, tree));
101 static int multiple_of_p PARAMS ((tree, tree, tree));
102 static tree constant_boolean_node PARAMS ((int, tree));
103 static int count_cond PARAMS ((tree, int));
106 #define BRANCH_COST 1
109 /* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
110 overflow. Suppose A, B and SUM have the same respective signs as A1, B1,
111 and SUM1. Then this yields nonzero if overflow occurred during the
114 Overflow occurs if A and B have the same sign, but A and SUM differ in
115 sign. Use `^' to test whether signs differ, and `< 0' to isolate the
117 #define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
119 /* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
120 We do that by representing the two-word integer in 4 words, with only
121 HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
122 number. The value of the word is LOWPART + HIGHPART * BASE. */
125 ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
126 #define HIGHPART(x) \
127 ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
128 #define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)
130 /* Unpack a two-word integer into 4 words.
131 LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
132 WORDS points to the array of HOST_WIDE_INTs. */
135 encode (words, low, hi)
136 HOST_WIDE_INT *words;
137 unsigned HOST_WIDE_INT low;
140 words[0] = LOWPART (low);
141 words[1] = HIGHPART (low);
142 words[2] = LOWPART (hi);
143 words[3] = HIGHPART (hi);
146 /* Pack an array of 4 words into a two-word integer.
147 WORDS points to the array of words.
148 The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces. */
151 decode (words, low, hi)
152 HOST_WIDE_INT *words;
153 unsigned HOST_WIDE_INT *low;
156 *low = words[0] + words[1] * BASE;
157 *hi = words[2] + words[3] * BASE;
160 /* Make the integer constant T valid for its type by setting to 0 or 1 all
161 the bits in the constant that don't belong in the type.
163 Return 1 if a signed overflow occurs, 0 otherwise. If OVERFLOW is
164 nonzero, a signed overflow has already occurred in calculating T, so
167 Make the real constant T valid for its type by calling CHECK_FLOAT_VALUE,
171 force_fit_type (t, overflow)
175 unsigned HOST_WIDE_INT low;
179 if (TREE_CODE (t) == REAL_CST)
181 #ifdef CHECK_FLOAT_VALUE
182 CHECK_FLOAT_VALUE (TYPE_MODE (TREE_TYPE (t)), TREE_REAL_CST (t),
188 else if (TREE_CODE (t) != INTEGER_CST)
191 low = TREE_INT_CST_LOW (t);
192 high = TREE_INT_CST_HIGH (t);
194 if (POINTER_TYPE_P (TREE_TYPE (t)))
197 prec = TYPE_PRECISION (TREE_TYPE (t));
199 /* First clear all bits that are beyond the type's precision. */
201 if (prec == 2 * HOST_BITS_PER_WIDE_INT)
203 else if (prec > HOST_BITS_PER_WIDE_INT)
204 TREE_INT_CST_HIGH (t)
205 &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
208 TREE_INT_CST_HIGH (t) = 0;
209 if (prec < HOST_BITS_PER_WIDE_INT)
210 TREE_INT_CST_LOW (t) &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
213 /* Unsigned types do not suffer sign extension or overflow. */
214 if (TREE_UNSIGNED (TREE_TYPE (t)))
217 /* If the value's sign bit is set, extend the sign. */
218 if (prec != 2 * HOST_BITS_PER_WIDE_INT
219 && (prec > HOST_BITS_PER_WIDE_INT
220 ? 0 != (TREE_INT_CST_HIGH (t)
222 << (prec - HOST_BITS_PER_WIDE_INT - 1)))
223 : 0 != (TREE_INT_CST_LOW (t)
224 & ((unsigned HOST_WIDE_INT) 1 << (prec - 1)))))
226 /* Value is negative:
227 set to 1 all the bits that are outside this type's precision. */
228 if (prec > HOST_BITS_PER_WIDE_INT)
229 TREE_INT_CST_HIGH (t)
230 |= ((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
233 TREE_INT_CST_HIGH (t) = -1;
234 if (prec < HOST_BITS_PER_WIDE_INT)
235 TREE_INT_CST_LOW (t) |= ((unsigned HOST_WIDE_INT) (-1) << prec);
239 /* Return nonzero if signed overflow occurred. */
241 ((overflow | (low ^ TREE_INT_CST_LOW (t)) | (high ^ TREE_INT_CST_HIGH (t)))
245 /* Add two doubleword integers with doubleword result.
246 Each argument is given as two `HOST_WIDE_INT' pieces.
247 One argument is L1 and H1; the other, L2 and H2.
248 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
251 add_double (l1, h1, l2, h2, lv, hv)
252 unsigned HOST_WIDE_INT l1, l2;
253 HOST_WIDE_INT h1, h2;
254 unsigned HOST_WIDE_INT *lv;
257 unsigned HOST_WIDE_INT l;
261 h = h1 + h2 + (l < l1);
265 return OVERFLOW_SUM_SIGN (h1, h2, h);
268 /* Negate a doubleword integer with doubleword result.
269 Return nonzero if the operation overflows, assuming it's signed.
270 The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
271 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
274 neg_double (l1, h1, lv, hv)
275 unsigned HOST_WIDE_INT l1;
277 unsigned HOST_WIDE_INT *lv;
284 return (*hv & h1) < 0;
294 /* Multiply two doubleword integers with doubleword result.
295 Return nonzero if the operation overflows, assuming it's signed.
296 Each argument is given as two `HOST_WIDE_INT' pieces.
297 One argument is L1 and H1; the other, L2 and H2.
298 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
301 mul_double (l1, h1, l2, h2, lv, hv)
302 unsigned HOST_WIDE_INT l1, l2;
303 HOST_WIDE_INT h1, h2;
304 unsigned HOST_WIDE_INT *lv;
307 HOST_WIDE_INT arg1[4];
308 HOST_WIDE_INT arg2[4];
309 HOST_WIDE_INT prod[4 * 2];
310 register unsigned HOST_WIDE_INT carry;
311 register int i, j, k;
312 unsigned HOST_WIDE_INT toplow, neglow;
313 HOST_WIDE_INT tophigh, neghigh;
315 encode (arg1, l1, h1);
316 encode (arg2, l2, h2);
318 bzero ((char *) prod, sizeof prod);
320 for (i = 0; i < 4; i++)
323 for (j = 0; j < 4; j++)
326 /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000. */
327 carry += arg1[i] * arg2[j];
328 /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF. */
330 prod[k] = LOWPART (carry);
331 carry = HIGHPART (carry);
336 decode (prod, lv, hv); /* This ignores prod[4] through prod[4*2-1] */
338 /* Check for overflow by calculating the top half of the answer in full;
339 it should agree with the low half's sign bit. */
340 decode (prod+4, &toplow, &tophigh);
343 neg_double (l2, h2, &neglow, &neghigh);
344 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
348 neg_double (l1, h1, &neglow, &neghigh);
349 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
351 return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
354 /* Shift the doubleword integer in L1, H1 left by COUNT places
355 keeping only PREC bits of result.
356 Shift right if COUNT is negative.
357 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
358 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
361 lshift_double (l1, h1, count, prec, lv, hv, arith)
362 unsigned HOST_WIDE_INT l1;
363 HOST_WIDE_INT h1, count;
365 unsigned HOST_WIDE_INT *lv;
371 rshift_double (l1, h1, - count, prec, lv, hv, arith);
375 #ifdef SHIFT_COUNT_TRUNCATED
376 if (SHIFT_COUNT_TRUNCATED)
380 if (count >= 2 * HOST_BITS_PER_WIDE_INT)
382 /* Shifting by the host word size is undefined according to the
383 ANSI standard, so we must handle this as a special case. */
387 else if (count >= HOST_BITS_PER_WIDE_INT)
389 *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
394 *hv = (((unsigned HOST_WIDE_INT) h1 << count)
395 | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
400 /* Shift the doubleword integer in L1, H1 right by COUNT places
401 keeping only PREC bits of result. COUNT must be positive.
402 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
403 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
406 rshift_double (l1, h1, count, prec, lv, hv, arith)
407 unsigned HOST_WIDE_INT l1;
408 HOST_WIDE_INT h1, count;
409 unsigned int prec ATTRIBUTE_UNUSED;
410 unsigned HOST_WIDE_INT *lv;
414 unsigned HOST_WIDE_INT signmask;
417 ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
420 #ifdef SHIFT_COUNT_TRUNCATED
421 if (SHIFT_COUNT_TRUNCATED)
425 if (count >= 2 * HOST_BITS_PER_WIDE_INT)
427 /* Shifting by the host word size is undefined according to the
428 ANSI standard, so we must handle this as a special case. */
432 else if (count >= HOST_BITS_PER_WIDE_INT)
435 *lv = ((signmask << (2 * HOST_BITS_PER_WIDE_INT - count - 1) << 1)
436 | ((unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT)));
441 | ((unsigned HOST_WIDE_INT) h1 << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
442 *hv = ((signmask << (HOST_BITS_PER_WIDE_INT - count))
443 | ((unsigned HOST_WIDE_INT) h1 >> count));
447 /* Rotate the doubleword integer in L1, H1 left by COUNT places
448 keeping only PREC bits of result.
449 Rotate right if COUNT is negative.
450 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
453 lrotate_double (l1, h1, count, prec, lv, hv)
454 unsigned HOST_WIDE_INT l1;
455 HOST_WIDE_INT h1, count;
457 unsigned HOST_WIDE_INT *lv;
460 unsigned HOST_WIDE_INT s1l, s2l;
461 HOST_WIDE_INT s1h, s2h;
467 lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
468 rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
473 /* Rotate the doubleword integer in L1, H1 left by COUNT places
474 keeping only PREC bits of result. COUNT must be positive.
475 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
478 rrotate_double (l1, h1, count, prec, lv, hv)
479 unsigned HOST_WIDE_INT l1;
480 HOST_WIDE_INT h1, count;
482 unsigned HOST_WIDE_INT *lv;
485 unsigned HOST_WIDE_INT s1l, s2l;
486 HOST_WIDE_INT s1h, s2h;
492 rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
493 lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
498 /* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
499 for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
500 CODE is a tree code for a kind of division, one of
501 TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
503 It controls how the quotient is rounded to a integer.
504 Return nonzero if the operation overflows.
505 UNS nonzero says do unsigned division. */
508 div_and_round_double (code, uns,
509 lnum_orig, hnum_orig, lden_orig, hden_orig,
510 lquo, hquo, lrem, hrem)
513 unsigned HOST_WIDE_INT lnum_orig; /* num == numerator == dividend */
514 HOST_WIDE_INT hnum_orig;
515 unsigned HOST_WIDE_INT lden_orig; /* den == denominator == divisor */
516 HOST_WIDE_INT hden_orig;
517 unsigned HOST_WIDE_INT *lquo, *lrem;
518 HOST_WIDE_INT *hquo, *hrem;
521 HOST_WIDE_INT num[4 + 1]; /* extra element for scaling. */
522 HOST_WIDE_INT den[4], quo[4];
524 unsigned HOST_WIDE_INT work;
525 unsigned HOST_WIDE_INT carry = 0;
526 unsigned HOST_WIDE_INT lnum = lnum_orig;
527 HOST_WIDE_INT hnum = hnum_orig;
528 unsigned HOST_WIDE_INT lden = lden_orig;
529 HOST_WIDE_INT hden = hden_orig;
532 if (hden == 0 && lden == 0)
533 overflow = 1, lden = 1;
535 /* calculate quotient sign and convert operands to unsigned. */
541 /* (minimum integer) / (-1) is the only overflow case. */
542 if (neg_double (lnum, hnum, &lnum, &hnum)
543 && ((HOST_WIDE_INT) lden & hden) == -1)
549 neg_double (lden, hden, &lden, &hden);
553 if (hnum == 0 && hden == 0)
554 { /* single precision */
556 /* This unsigned division rounds toward zero. */
562 { /* trivial case: dividend < divisor */
563 /* hden != 0 already checked. */
570 bzero ((char *) quo, sizeof quo);
572 bzero ((char *) num, sizeof num); /* to zero 9th element */
573 bzero ((char *) den, sizeof den);
575 encode (num, lnum, hnum);
576 encode (den, lden, hden);
578 /* Special code for when the divisor < BASE. */
579 if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
581 /* hnum != 0 already checked. */
582 for (i = 4 - 1; i >= 0; i--)
584 work = num[i] + carry * BASE;
585 quo[i] = work / lden;
591 /* Full double precision division,
592 with thanks to Don Knuth's "Seminumerical Algorithms". */
593 int num_hi_sig, den_hi_sig;
594 unsigned HOST_WIDE_INT quo_est, scale;
596 /* Find the highest non-zero divisor digit. */
597 for (i = 4 - 1; ; i--)
603 /* Insure that the first digit of the divisor is at least BASE/2.
604 This is required by the quotient digit estimation algorithm. */
606 scale = BASE / (den[den_hi_sig] + 1);
608 { /* scale divisor and dividend */
610 for (i = 0; i <= 4 - 1; i++)
612 work = (num[i] * scale) + carry;
613 num[i] = LOWPART (work);
614 carry = HIGHPART (work);
619 for (i = 0; i <= 4 - 1; i++)
621 work = (den[i] * scale) + carry;
622 den[i] = LOWPART (work);
623 carry = HIGHPART (work);
624 if (den[i] != 0) den_hi_sig = i;
631 for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
633 /* Guess the next quotient digit, quo_est, by dividing the first
634 two remaining dividend digits by the high order quotient digit.
635 quo_est is never low and is at most 2 high. */
636 unsigned HOST_WIDE_INT tmp;
638 num_hi_sig = i + den_hi_sig + 1;
639 work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
640 if (num[num_hi_sig] != den[den_hi_sig])
641 quo_est = work / den[den_hi_sig];
645 /* Refine quo_est so it's usually correct, and at most one high. */
646 tmp = work - quo_est * den[den_hi_sig];
648 && (den[den_hi_sig - 1] * quo_est
649 > (tmp * BASE + num[num_hi_sig - 2])))
652 /* Try QUO_EST as the quotient digit, by multiplying the
653 divisor by QUO_EST and subtracting from the remaining dividend.
654 Keep in mind that QUO_EST is the I - 1st digit. */
657 for (j = 0; j <= den_hi_sig; j++)
659 work = quo_est * den[j] + carry;
660 carry = HIGHPART (work);
661 work = num[i + j] - LOWPART (work);
662 num[i + j] = LOWPART (work);
663 carry += HIGHPART (work) != 0;
666 /* If quo_est was high by one, then num[i] went negative and
667 we need to correct things. */
668 if (num[num_hi_sig] < carry)
671 carry = 0; /* add divisor back in */
672 for (j = 0; j <= den_hi_sig; j++)
674 work = num[i + j] + den[j] + carry;
675 carry = HIGHPART (work);
676 num[i + j] = LOWPART (work);
679 num [num_hi_sig] += carry;
682 /* Store the quotient digit. */
687 decode (quo, lquo, hquo);
690 /* if result is negative, make it so. */
692 neg_double (*lquo, *hquo, lquo, hquo);
694 /* compute trial remainder: rem = num - (quo * den) */
695 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
696 neg_double (*lrem, *hrem, lrem, hrem);
697 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
702 case TRUNC_MOD_EXPR: /* round toward zero */
703 case EXACT_DIV_EXPR: /* for this one, it shouldn't matter */
707 case FLOOR_MOD_EXPR: /* round toward negative infinity */
708 if (quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio < 0 && rem != 0 */
711 add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
719 case CEIL_MOD_EXPR: /* round toward positive infinity */
720 if (!quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio > 0 && rem != 0 */
722 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
730 case ROUND_MOD_EXPR: /* round to closest integer */
732 unsigned HOST_WIDE_INT labs_rem = *lrem;
733 HOST_WIDE_INT habs_rem = *hrem;
734 unsigned HOST_WIDE_INT labs_den = lden, ltwice;
735 HOST_WIDE_INT habs_den = hden, htwice;
737 /* Get absolute values */
739 neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
741 neg_double (lden, hden, &labs_den, &habs_den);
743 /* If (2 * abs (lrem) >= abs (lden)) */
744 mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
745 labs_rem, habs_rem, <wice, &htwice);
747 if (((unsigned HOST_WIDE_INT) habs_den
748 < (unsigned HOST_WIDE_INT) htwice)
749 || (((unsigned HOST_WIDE_INT) habs_den
750 == (unsigned HOST_WIDE_INT) htwice)
751 && (labs_den < ltwice)))
755 add_double (*lquo, *hquo,
756 (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
759 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
771 /* compute true remainder: rem = num - (quo * den) */
772 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
773 neg_double (*lrem, *hrem, lrem, hrem);
774 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
778 #ifndef REAL_ARITHMETIC
779 /* Effectively truncate a real value to represent the nearest possible value
780 in a narrower mode. The result is actually represented in the same data
781 type as the argument, but its value is usually different.
783 A trap may occur during the FP operations and it is the responsibility
784 of the calling function to have a handler established. */
787 real_value_truncate (mode, arg)
788 enum machine_mode mode;
791 return REAL_VALUE_TRUNCATE (mode, arg);
794 #if TARGET_FLOAT_FORMAT == IEEE_FLOAT_FORMAT
796 /* Check for infinity in an IEEE double precision number. */
802 /* The IEEE 64-bit double format. */
807 unsigned exponent : 11;
808 unsigned mantissa1 : 20;
813 unsigned mantissa1 : 20;
814 unsigned exponent : 11;
820 if (u.big_endian.sign == 1)
823 return (u.big_endian.exponent == 2047
824 && u.big_endian.mantissa1 == 0
825 && u.big_endian.mantissa2 == 0);
830 return (u.little_endian.exponent == 2047
831 && u.little_endian.mantissa1 == 0
832 && u.little_endian.mantissa2 == 0);
836 /* Check whether an IEEE double precision number is a NaN. */
842 /* The IEEE 64-bit double format. */
847 unsigned exponent : 11;
848 unsigned mantissa1 : 20;
853 unsigned mantissa1 : 20;
854 unsigned exponent : 11;
860 if (u.big_endian.sign == 1)
863 return (u.big_endian.exponent == 2047
864 && (u.big_endian.mantissa1 != 0
865 || u.big_endian.mantissa2 != 0));
870 return (u.little_endian.exponent == 2047
871 && (u.little_endian.mantissa1 != 0
872 || u.little_endian.mantissa2 != 0));
876 /* Check for a negative IEEE double precision number. */
882 /* The IEEE 64-bit double format. */
887 unsigned exponent : 11;
888 unsigned mantissa1 : 20;
893 unsigned mantissa1 : 20;
894 unsigned exponent : 11;
900 if (u.big_endian.sign == 1)
903 return u.big_endian.sign;
908 return u.little_endian.sign;
911 #else /* Target not IEEE */
913 /* Let's assume other float formats don't have infinity.
914 (This can be overridden by redefining REAL_VALUE_ISINF.) */
918 REAL_VALUE_TYPE x ATTRIBUTE_UNUSED;
923 /* Let's assume other float formats don't have NaNs.
924 (This can be overridden by redefining REAL_VALUE_ISNAN.) */
928 REAL_VALUE_TYPE x ATTRIBUTE_UNUSED;
933 /* Let's assume other float formats don't have minus zero.
934 (This can be overridden by redefining REAL_VALUE_NEGATIVE.) */
942 #endif /* Target not IEEE */
944 /* Try to change R into its exact multiplicative inverse in machine mode
945 MODE. Return nonzero function value if successful. */
948 exact_real_inverse (mode, r)
949 enum machine_mode mode;
958 #ifdef CHECK_FLOAT_VALUE
962 /* Usually disable if bounds checks are not reliable. */
963 if ((HOST_FLOAT_FORMAT != TARGET_FLOAT_FORMAT) && !flag_pretend_float)
966 /* Set array index to the less significant bits in the unions, depending
967 on the endian-ness of the host doubles.
968 Disable if insufficient information on the data structure. */
969 #if HOST_FLOAT_FORMAT == UNKNOWN_FLOAT_FORMAT
972 #if HOST_FLOAT_FORMAT == VAX_FLOAT_FORMAT
975 #if HOST_FLOAT_FORMAT == IBM_FLOAT_FORMAT
978 #define K (2 * HOST_FLOAT_WORDS_BIG_ENDIAN)
983 if (setjmp (float_error))
985 /* Don't do the optimization if there was an arithmetic error. */
987 set_float_handler (NULL_PTR);
990 set_float_handler (float_error);
992 /* Domain check the argument. */
998 if (REAL_VALUE_ISINF (x.d) || REAL_VALUE_ISNAN (x.d))
1002 /* Compute the reciprocal and check for numerical exactness.
1003 It is unnecessary to check all the significand bits to determine
1004 whether X is a power of 2. If X is not, then it is impossible for
1005 the bottom half significand of both X and 1/X to be all zero bits.
1006 Hence we ignore the data structure of the top half and examine only
1007 the low order bits of the two significands. */
1009 if (x.i[K] != 0 || x.i[K + 1] != 0 || t.i[K] != 0 || t.i[K + 1] != 0)
1012 /* Truncate to the required mode and range-check the result. */
1013 y.d = REAL_VALUE_TRUNCATE (mode, t.d);
1014 #ifdef CHECK_FLOAT_VALUE
1016 if (CHECK_FLOAT_VALUE (mode, y.d, i))
1020 /* Fail if truncation changed the value. */
1021 if (y.d != t.d || y.d == 0.0)
1024 #ifdef REAL_INFINITY
1025 if (REAL_VALUE_ISINF (y.d) || REAL_VALUE_ISNAN (y.d))
1029 /* Output the reciprocal and return success flag. */
1030 set_float_handler (NULL_PTR);
1035 /* Convert C9X hexadecimal floating point string constant S. Return
1036 real value type in mode MODE. This function uses the host computer's
1037 floating point arithmetic when there is no REAL_ARITHMETIC. */
1040 real_hex_to_f (s, mode)
1042 enum machine_mode mode;
1046 unsigned HOST_WIDE_INT low, high;
1047 int shcount, nrmcount, k;
1048 int sign, expsign, isfloat;
1049 int lost = 0;/* Nonzero low order bits shifted out and discarded. */
1050 int frexpon = 0; /* Bits after the decimal point. */
1051 int expon = 0; /* Value of exponent. */
1052 int decpt = 0; /* How many decimal points. */
1053 int gotp = 0; /* How many P's. */
1060 while (*p == ' ' || *p == '\t')
1063 /* Sign, if any, comes first. */
1071 /* The string is supposed to start with 0x or 0X . */
1075 if (*p == 'x' || *p == 'X')
1089 while ((c = *p) != '\0')
1091 if ((c >= '0' && c <= '9') || (c >= 'A' && c <= 'F')
1092 || (c >= 'a' && c <= 'f'))
1102 if ((high & 0xf0000000) == 0)
1104 high = (high << 4) + ((low >> 28) & 15);
1105 low = (low << 4) + k;
1112 /* Record nonzero lost bits. */
1125 else if (c == 'p' || c == 'P')
1129 /* Sign of exponent. */
1136 /* Value of exponent.
1137 The exponent field is a decimal integer. */
1140 k = (*p++ & 0x7f) - '0';
1141 expon = 10 * expon + k;
1145 /* F suffix is ambiguous in the significand part
1146 so it must appear after the decimal exponent field. */
1147 if (*p == 'f' || *p == 'F')
1155 else if (c == 'l' || c == 'L')
1164 /* Abort if last character read was not legitimate. */
1166 if ((c != '\0' && c != ' ' && c != '\n' && c != '\r') || (decpt > 1))
1169 /* There must be either one decimal point or one p. */
1170 if (decpt == 0 && gotp == 0)
1174 if (high == 0 && low == 0)
1186 /* Leave a high guard bit for carry-out. */
1187 if ((high & 0x80000000) != 0)
1190 low = (low >> 1) | (high << 31);
1195 if ((high & 0xffff8000) == 0)
1197 high = (high << 16) + ((low >> 16) & 0xffff);
1202 while ((high & 0xc0000000) == 0)
1204 high = (high << 1) + ((low >> 31) & 1);
1209 if (isfloat || GET_MODE_SIZE(mode) == UNITS_PER_WORD)
1211 /* Keep 24 bits precision, bits 0x7fffff80.
1212 Rounding bit is 0x40. */
1213 lost = lost | low | (high & 0x3f);
1217 if ((high & 0x80) || lost)
1224 /* We need real.c to do long double formats, so here default
1225 to double precision. */
1226 #if HOST_FLOAT_FORMAT == IEEE_FLOAT_FORMAT
1228 Keep 53 bits precision, bits 0x7fffffff fffffc00.
1229 Rounding bit is low word 0x200. */
1230 lost = lost | (low & 0x1ff);
1233 if ((low & 0x400) || lost)
1235 low = (low + 0x200) & 0xfffffc00;
1242 /* Assume it's a VAX with 56-bit significand,
1243 bits 0x7fffffff ffffff80. */
1244 lost = lost | (low & 0x7f);
1247 if ((low & 0x80) || lost)
1249 low = (low + 0x40) & 0xffffff80;
1259 ip = REAL_VALUE_LDEXP (ip, 32) + (double) low;
1260 /* Apply shifts and exponent value as power of 2. */
1261 ip = REAL_VALUE_LDEXP (ip, expon - (nrmcount + frexpon));
1268 #endif /* no REAL_ARITHMETIC */
1270 /* Given T, an expression, return the negation of T. Allow for T to be
1271 null, in which case return null. */
1283 type = TREE_TYPE (t);
1284 STRIP_SIGN_NOPS (t);
1286 switch (TREE_CODE (t))
1290 if (! TREE_UNSIGNED (type)
1291 && 0 != (tem = fold (build1 (NEGATE_EXPR, type, t)))
1292 && ! TREE_OVERFLOW (tem))
1297 return convert (type, TREE_OPERAND (t, 0));
1300 /* - (A - B) -> B - A */
1301 if (! FLOAT_TYPE_P (type) || flag_fast_math)
1302 return convert (type,
1303 fold (build (MINUS_EXPR, TREE_TYPE (t),
1304 TREE_OPERAND (t, 1),
1305 TREE_OPERAND (t, 0))));
1312 return convert (type, build1 (NEGATE_EXPR, TREE_TYPE (t), t));
1315 /* Split a tree IN into a constant, literal and variable parts that could be
1316 combined with CODE to make IN. "constant" means an expression with
1317 TREE_CONSTANT but that isn't an actual constant. CODE must be a
1318 commutative arithmetic operation. Store the constant part into *CONP,
1319 the literal in &LITP and return the variable part. If a part isn't
1320 present, set it to null. If the tree does not decompose in this way,
1321 return the entire tree as the variable part and the other parts as null.
1323 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR. In that
1324 case, we negate an operand that was subtracted. If NEGATE_P is true, we
1325 are negating all of IN.
1327 If IN is itself a literal or constant, return it as appropriate.
1329 Note that we do not guarantee that any of the three values will be the
1330 same type as IN, but they will have the same signedness and mode. */
1333 split_tree (in, code, conp, litp, negate_p)
1335 enum tree_code code;
1344 /* Strip any conversions that don't change the machine mode or signedness. */
1345 STRIP_SIGN_NOPS (in);
1347 if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST)
1349 else if (TREE_CONSTANT (in))
1352 else if (TREE_CODE (in) == code
1353 || (! FLOAT_TYPE_P (TREE_TYPE (in))
1354 /* We can associate addition and subtraction together (even
1355 though the C standard doesn't say so) for integers because
1356 the value is not affected. For reals, the value might be
1357 affected, so we can't. */
1358 && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
1359 || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
1361 tree op0 = TREE_OPERAND (in, 0);
1362 tree op1 = TREE_OPERAND (in, 1);
1363 int neg1_p = TREE_CODE (in) == MINUS_EXPR;
1364 int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;
1366 /* First see if either of the operands is a literal, then a constant. */
1367 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
1368 *litp = op0, op0 = 0;
1369 else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST)
1370 *litp = op1, neg_litp_p = neg1_p, op1 = 0;
1372 if (op0 != 0 && TREE_CONSTANT (op0))
1373 *conp = op0, op0 = 0;
1374 else if (op1 != 0 && TREE_CONSTANT (op1))
1375 *conp = op1, neg_conp_p = neg1_p, op1 = 0;
1377 /* If we haven't dealt with either operand, this is not a case we can
1378 decompose. Otherwise, VAR is either of the ones remaining, if any. */
1379 if (op0 != 0 && op1 != 0)
1384 var = op1, neg_var_p = neg1_p;
1386 /* Now do any needed negations. */
1387 if (neg_litp_p) *litp = negate_expr (*litp);
1388 if (neg_conp_p) *conp = negate_expr (*conp);
1389 if (neg_var_p) var = negate_expr (var);
1396 var = negate_expr (var);
1397 *conp = negate_expr (*conp);
1398 *litp = negate_expr (*litp);
1404 /* Re-associate trees split by the above function. T1 and T2 are either
1405 expressions to associate or null. Return the new expression, if any. If
1406 we build an operation, do it in TYPE and with CODE, except if CODE is a
1407 MINUS_EXPR, in which case we use PLUS_EXPR since split_tree will already
1408 have taken care of the negations. */
1411 associate_trees (t1, t2, code, type)
1413 enum tree_code code;
1421 if (code == MINUS_EXPR)
1424 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
1425 try to fold this since we will have infinite recursion. But do
1426 deal with any NEGATE_EXPRs. */
1427 if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
1428 || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
1430 if (TREE_CODE (t1) == NEGATE_EXPR)
1431 return build (MINUS_EXPR, type, convert (type, t2),
1432 convert (type, TREE_OPERAND (t1, 0)));
1433 else if (TREE_CODE (t2) == NEGATE_EXPR)
1434 return build (MINUS_EXPR, type, convert (type, t1),
1435 convert (type, TREE_OPERAND (t2, 0)));
1437 return build (code, type, convert (type, t1), convert (type, t2));
1440 return fold (build (code, type, convert (type, t1), convert (type, t2)));
1443 /* Combine two integer constants ARG1 and ARG2 under operation CODE
1444 to produce a new constant.
1446 If NOTRUNC is nonzero, do not truncate the result to fit the data type.
1447 If FORSIZE is nonzero, compute overflow for unsigned types. */
1450 int_const_binop (code, arg1, arg2, notrunc, forsize)
1451 enum tree_code code;
1452 register tree arg1, arg2;
1453 int notrunc, forsize;
1455 unsigned HOST_WIDE_INT int1l, int2l;
1456 HOST_WIDE_INT int1h, int2h;
1457 unsigned HOST_WIDE_INT low;
1459 unsigned HOST_WIDE_INT garbagel;
1460 HOST_WIDE_INT garbageh;
1462 int uns = TREE_UNSIGNED (TREE_TYPE (arg1));
1464 int no_overflow = 0;
1466 int1l = TREE_INT_CST_LOW (arg1);
1467 int1h = TREE_INT_CST_HIGH (arg1);
1468 int2l = TREE_INT_CST_LOW (arg2);
1469 int2h = TREE_INT_CST_HIGH (arg2);
1474 low = int1l | int2l, hi = int1h | int2h;
1478 low = int1l ^ int2l, hi = int1h ^ int2h;
1482 low = int1l & int2l, hi = int1h & int2h;
1485 case BIT_ANDTC_EXPR:
1486 low = int1l & ~int2l, hi = int1h & ~int2h;
1492 /* It's unclear from the C standard whether shifts can overflow.
1493 The following code ignores overflow; perhaps a C standard
1494 interpretation ruling is needed. */
1495 lshift_double (int1l, int1h, int2l, TYPE_PRECISION (TREE_TYPE (arg1)),
1503 lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (TREE_TYPE (arg1)),
1508 overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
1512 neg_double (int2l, int2h, &low, &hi);
1513 add_double (int1l, int1h, low, hi, &low, &hi);
1514 overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
1518 overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
1521 case TRUNC_DIV_EXPR:
1522 case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
1523 case EXACT_DIV_EXPR:
1524 /* This is a shortcut for a common special case. */
1525 if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1526 && ! TREE_CONSTANT_OVERFLOW (arg1)
1527 && ! TREE_CONSTANT_OVERFLOW (arg2)
1528 && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1530 if (code == CEIL_DIV_EXPR)
1533 low = int1l / int2l, hi = 0;
1537 /* ... fall through ... */
1539 case ROUND_DIV_EXPR:
1540 if (int2h == 0 && int2l == 1)
1542 low = int1l, hi = int1h;
1545 if (int1l == int2l && int1h == int2h
1546 && ! (int1l == 0 && int1h == 0))
1551 overflow = div_and_round_double (code, uns,
1552 int1l, int1h, int2l, int2h,
1553 &low, &hi, &garbagel, &garbageh);
1556 case TRUNC_MOD_EXPR:
1557 case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
1558 /* This is a shortcut for a common special case. */
1559 if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1560 && ! TREE_CONSTANT_OVERFLOW (arg1)
1561 && ! TREE_CONSTANT_OVERFLOW (arg2)
1562 && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1564 if (code == CEIL_MOD_EXPR)
1566 low = int1l % int2l, hi = 0;
1570 /* ... fall through ... */
1572 case ROUND_MOD_EXPR:
1573 overflow = div_and_round_double (code, uns,
1574 int1l, int1h, int2l, int2h,
1575 &garbagel, &garbageh, &low, &hi);
1581 low = (((unsigned HOST_WIDE_INT) int1h
1582 < (unsigned HOST_WIDE_INT) int2h)
1583 || (((unsigned HOST_WIDE_INT) int1h
1584 == (unsigned HOST_WIDE_INT) int2h)
1587 low = (int1h < int2h
1588 || (int1h == int2h && int1l < int2l));
1590 if (low == (code == MIN_EXPR))
1591 low = int1l, hi = int1h;
1593 low = int2l, hi = int2h;
1600 if (forsize && hi == 0 && low < 10000)
1601 return size_int_type_wide (low, TREE_TYPE (arg1));
1604 t = build_int_2 (low, hi);
1605 TREE_TYPE (t) = TREE_TYPE (arg1);
1609 = ((notrunc ? (!uns || forsize) && overflow
1610 : force_fit_type (t, (!uns || forsize) && overflow) && ! no_overflow)
1611 | TREE_OVERFLOW (arg1)
1612 | TREE_OVERFLOW (arg2));
1614 /* If we're doing a size calculation, unsigned arithmetic does overflow.
1615 So check if force_fit_type truncated the value. */
1617 && ! TREE_OVERFLOW (t)
1618 && (TREE_INT_CST_HIGH (t) != hi
1619 || TREE_INT_CST_LOW (t) != low))
1620 TREE_OVERFLOW (t) = 1;
1622 TREE_CONSTANT_OVERFLOW (t) = (TREE_OVERFLOW (t)
1623 | TREE_CONSTANT_OVERFLOW (arg1)
1624 | TREE_CONSTANT_OVERFLOW (arg2));
1628 /* Define input and output argument for const_binop_1. */
1631 enum tree_code code; /* Input: tree code for operation*/
1632 tree type; /* Input: tree type for operation. */
1633 REAL_VALUE_TYPE d1, d2; /* Input: floating point operands. */
1634 tree t; /* Output: constant for result. */
1637 /* Do the real arithmetic for const_binop while protected by a
1638 float overflow handler. */
1641 const_binop_1 (data)
1644 struct cb_args *args = (struct cb_args *) data;
1645 REAL_VALUE_TYPE value;
1647 #ifdef REAL_ARITHMETIC
1648 REAL_ARITHMETIC (value, args->code, args->d1, args->d2);
1653 value = args->d1 + args->d2;
1657 value = args->d1 - args->d2;
1661 value = args->d1 * args->d2;
1665 #ifndef REAL_INFINITY
1670 value = args->d1 / args->d2;
1674 value = MIN (args->d1, args->d2);
1678 value = MAX (args->d1, args->d2);
1684 #endif /* no REAL_ARITHMETIC */
1687 = build_real (args->type,
1688 real_value_truncate (TYPE_MODE (args->type), value));
1691 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1692 constant. We assume ARG1 and ARG2 have the same data type, or at least
1693 are the same kind of constant and the same machine mode.
1695 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
1698 const_binop (code, arg1, arg2, notrunc)
1699 enum tree_code code;
1700 register tree arg1, arg2;
1703 STRIP_NOPS (arg1); STRIP_NOPS (arg2);
1705 if (TREE_CODE (arg1) == INTEGER_CST)
1706 return int_const_binop (code, arg1, arg2, notrunc, 0);
1708 #if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
1709 if (TREE_CODE (arg1) == REAL_CST)
1715 struct cb_args args;
1717 d1 = TREE_REAL_CST (arg1);
1718 d2 = TREE_REAL_CST (arg2);
1720 /* If either operand is a NaN, just return it. Otherwise, set up
1721 for floating-point trap; we return an overflow. */
1722 if (REAL_VALUE_ISNAN (d1))
1724 else if (REAL_VALUE_ISNAN (d2))
1727 /* Setup input for const_binop_1() */
1728 args.type = TREE_TYPE (arg1);
1733 if (do_float_handler (const_binop_1, (PTR) &args))
1734 /* Receive output from const_binop_1. */
1738 /* We got an exception from const_binop_1. */
1739 t = copy_node (arg1);
1744 = (force_fit_type (t, overflow)
1745 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
1746 TREE_CONSTANT_OVERFLOW (t)
1748 | TREE_CONSTANT_OVERFLOW (arg1)
1749 | TREE_CONSTANT_OVERFLOW (arg2);
1752 #endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
1753 if (TREE_CODE (arg1) == COMPLEX_CST)
1755 register tree type = TREE_TYPE (arg1);
1756 register tree r1 = TREE_REALPART (arg1);
1757 register tree i1 = TREE_IMAGPART (arg1);
1758 register tree r2 = TREE_REALPART (arg2);
1759 register tree i2 = TREE_IMAGPART (arg2);
1765 t = build_complex (type,
1766 const_binop (PLUS_EXPR, r1, r2, notrunc),
1767 const_binop (PLUS_EXPR, i1, i2, notrunc));
1771 t = build_complex (type,
1772 const_binop (MINUS_EXPR, r1, r2, notrunc),
1773 const_binop (MINUS_EXPR, i1, i2, notrunc));
1777 t = build_complex (type,
1778 const_binop (MINUS_EXPR,
1779 const_binop (MULT_EXPR,
1781 const_binop (MULT_EXPR,
1784 const_binop (PLUS_EXPR,
1785 const_binop (MULT_EXPR,
1787 const_binop (MULT_EXPR,
1794 register tree magsquared
1795 = const_binop (PLUS_EXPR,
1796 const_binop (MULT_EXPR, r2, r2, notrunc),
1797 const_binop (MULT_EXPR, i2, i2, notrunc),
1800 t = build_complex (type,
1802 (INTEGRAL_TYPE_P (TREE_TYPE (r1))
1803 ? TRUNC_DIV_EXPR : RDIV_EXPR,
1804 const_binop (PLUS_EXPR,
1805 const_binop (MULT_EXPR, r1, r2,
1807 const_binop (MULT_EXPR, i1, i2,
1810 magsquared, notrunc),
1812 (INTEGRAL_TYPE_P (TREE_TYPE (r1))
1813 ? TRUNC_DIV_EXPR : RDIV_EXPR,
1814 const_binop (MINUS_EXPR,
1815 const_binop (MULT_EXPR, i1, r2,
1817 const_binop (MULT_EXPR, r1, i2,
1820 magsquared, notrunc));
1832 /* Return an INTEGER_CST with value whose low-order HOST_BITS_PER_WIDE_INT
1833 bits are given by NUMBER and of the sizetype represented by KIND. */
1836 size_int_wide (number, kind)
1837 HOST_WIDE_INT number;
1838 enum size_type_kind kind;
1840 return size_int_type_wide (number, sizetype_tab[(int) kind]);
1843 /* Likewise, but the desired type is specified explicitly. */
1846 size_int_type_wide (number, type)
1847 HOST_WIDE_INT number;
1850 /* Type-size nodes already made for small sizes. */
1851 static tree size_table[2048 + 1];
1852 static int init_p = 0;
1855 if (ggc_p && ! init_p)
1857 ggc_add_tree_root ((tree *) size_table,
1858 sizeof size_table / sizeof (tree));
1862 /* If this is a positive number that fits in the table we use to hold
1863 cached entries, see if it is already in the table and put it there
1865 if (number >= 0 && number < (int) (sizeof size_table / sizeof size_table[0]))
1867 if (size_table[number] != 0)
1868 for (t = size_table[number]; t != 0; t = TREE_CHAIN (t))
1869 if (TREE_TYPE (t) == type)
1874 /* Make this a permanent node. */
1875 push_obstacks_nochange ();
1876 end_temporary_allocation ();
1879 t = build_int_2 (number, 0);
1880 TREE_TYPE (t) = type;
1881 TREE_CHAIN (t) = size_table[number];
1882 size_table[number] = t;
1890 t = build_int_2 (number, number < 0 ? -1 : 0);
1891 TREE_TYPE (t) = type;
1892 TREE_OVERFLOW (t) = TREE_CONSTANT_OVERFLOW (t) = force_fit_type (t, 0);
1896 /* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE
1897 is a tree code. The type of the result is taken from the operands.
1898 Both must be the same type integer type and it must be a size type.
1899 If the operands are constant, so is the result. */
1902 size_binop (code, arg0, arg1)
1903 enum tree_code code;
1906 tree type = TREE_TYPE (arg0);
1908 if (TREE_CODE (type) != INTEGER_TYPE || ! TYPE_IS_SIZETYPE (type)
1909 || type != TREE_TYPE (arg1))
1912 /* Handle the special case of two integer constants faster. */
1913 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
1915 /* And some specific cases even faster than that. */
1916 if (code == PLUS_EXPR && integer_zerop (arg0))
1918 else if ((code == MINUS_EXPR || code == PLUS_EXPR)
1919 && integer_zerop (arg1))
1921 else if (code == MULT_EXPR && integer_onep (arg0))
1924 /* Handle general case of two integer constants. */
1925 return int_const_binop (code, arg0, arg1, 0, 1);
1928 if (arg0 == error_mark_node || arg1 == error_mark_node)
1929 return error_mark_node;
1931 return fold (build (code, type, arg0, arg1));
1934 /* Given two values, either both of sizetype or both of bitsizetype,
1935 compute the difference between the two values. Return the value
1936 in signed type corresponding to the type of the operands. */
1939 size_diffop (arg0, arg1)
1942 tree type = TREE_TYPE (arg0);
1945 if (TREE_CODE (type) != INTEGER_TYPE || ! TYPE_IS_SIZETYPE (type)
1946 || type != TREE_TYPE (arg1))
1949 /* If the type is already signed, just do the simple thing. */
1950 if (! TREE_UNSIGNED (type))
1951 return size_binop (MINUS_EXPR, arg0, arg1);
1953 ctype = (type == bitsizetype || type == ubitsizetype
1954 ? sbitsizetype : ssizetype);
1956 /* If either operand is not a constant, do the conversions to the signed
1957 type and subtract. The hardware will do the right thing with any
1958 overflow in the subtraction. */
1959 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
1960 return size_binop (MINUS_EXPR, convert (ctype, arg0),
1961 convert (ctype, arg1));
1963 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
1964 Otherwise, subtract the other way, convert to CTYPE (we know that can't
1965 overflow) and negate (which can't either). Special-case a result
1966 of zero while we're here. */
1967 if (tree_int_cst_equal (arg0, arg1))
1968 return convert (ctype, integer_zero_node);
1969 else if (tree_int_cst_lt (arg1, arg0))
1970 return convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
1972 return size_binop (MINUS_EXPR, convert (ctype, integer_zero_node),
1973 convert (ctype, size_binop (MINUS_EXPR, arg1, arg0)));
1976 /* This structure is used to communicate arguments to fold_convert_1. */
1979 tree arg1; /* Input: value to convert. */
1980 tree type; /* Input: type to convert value to. */
1981 tree t; /* Ouput: result of conversion. */
1984 /* Function to convert floating-point constants, protected by floating
1985 point exception handler. */
1988 fold_convert_1 (data)
1991 struct fc_args * args = (struct fc_args *) data;
1993 args->t = build_real (args->type,
1994 real_value_truncate (TYPE_MODE (args->type),
1995 TREE_REAL_CST (args->arg1)));
1998 /* Given T, a tree representing type conversion of ARG1, a constant,
1999 return a constant tree representing the result of conversion. */
2002 fold_convert (t, arg1)
2006 register tree type = TREE_TYPE (t);
2009 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
2011 if (TREE_CODE (arg1) == INTEGER_CST)
2013 /* If we would build a constant wider than GCC supports,
2014 leave the conversion unfolded. */
2015 if (TYPE_PRECISION (type) > 2 * HOST_BITS_PER_WIDE_INT)
2018 /* If we are trying to make a sizetype for a small integer, use
2019 size_int to pick up cached types to reduce duplicate nodes. */
2020 if (TREE_CODE (type) == INTEGER_CST && TYPE_IS_SIZETYPE (type)
2021 && compare_tree_int (arg1, 10000) < 0)
2022 return size_int_type_wide (TREE_INT_CST_LOW (arg1), type);
2024 /* Given an integer constant, make new constant with new type,
2025 appropriately sign-extended or truncated. */
2026 t = build_int_2 (TREE_INT_CST_LOW (arg1),
2027 TREE_INT_CST_HIGH (arg1));
2028 TREE_TYPE (t) = type;
2029 /* Indicate an overflow if (1) ARG1 already overflowed,
2030 or (2) force_fit_type indicates an overflow.
2031 Tell force_fit_type that an overflow has already occurred
2032 if ARG1 is a too-large unsigned value and T is signed.
2033 But don't indicate an overflow if converting a pointer. */
2035 = ((force_fit_type (t,
2036 (TREE_INT_CST_HIGH (arg1) < 0
2037 && (TREE_UNSIGNED (type)
2038 < TREE_UNSIGNED (TREE_TYPE (arg1)))))
2039 && ! POINTER_TYPE_P (TREE_TYPE (arg1)))
2040 || TREE_OVERFLOW (arg1));
2041 TREE_CONSTANT_OVERFLOW (t)
2042 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
2044 #if !defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
2045 else if (TREE_CODE (arg1) == REAL_CST)
2047 /* Don't initialize these, use assignments.
2048 Initialized local aggregates don't work on old compilers. */
2052 tree type1 = TREE_TYPE (arg1);
2055 x = TREE_REAL_CST (arg1);
2056 l = real_value_from_int_cst (type1, TYPE_MIN_VALUE (type));
2058 no_upper_bound = (TYPE_MAX_VALUE (type) == NULL);
2059 if (!no_upper_bound)
2060 u = real_value_from_int_cst (type1, TYPE_MAX_VALUE (type));
2062 /* See if X will be in range after truncation towards 0.
2063 To compensate for truncation, move the bounds away from 0,
2064 but reject if X exactly equals the adjusted bounds. */
2065 #ifdef REAL_ARITHMETIC
2066 REAL_ARITHMETIC (l, MINUS_EXPR, l, dconst1);
2067 if (!no_upper_bound)
2068 REAL_ARITHMETIC (u, PLUS_EXPR, u, dconst1);
2071 if (!no_upper_bound)
2074 /* If X is a NaN, use zero instead and show we have an overflow.
2075 Otherwise, range check. */
2076 if (REAL_VALUE_ISNAN (x))
2077 overflow = 1, x = dconst0;
2078 else if (! (REAL_VALUES_LESS (l, x)
2080 && REAL_VALUES_LESS (x, u)))
2083 #ifndef REAL_ARITHMETIC
2085 HOST_WIDE_INT low, high;
2086 HOST_WIDE_INT half_word
2087 = (HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2);
2092 high = (HOST_WIDE_INT) (x / half_word / half_word);
2093 x -= (REAL_VALUE_TYPE) high * half_word * half_word;
2094 if (x >= (REAL_VALUE_TYPE) half_word * half_word / 2)
2096 low = x - (REAL_VALUE_TYPE) half_word * half_word / 2;
2097 low |= (HOST_WIDE_INT) -1 << (HOST_BITS_PER_WIDE_INT - 1);
2100 low = (HOST_WIDE_INT) x;
2101 if (TREE_REAL_CST (arg1) < 0)
2102 neg_double (low, high, &low, &high);
2103 t = build_int_2 (low, high);
2107 HOST_WIDE_INT low, high;
2108 REAL_VALUE_TO_INT (&low, &high, x);
2109 t = build_int_2 (low, high);
2112 TREE_TYPE (t) = type;
2114 = TREE_OVERFLOW (arg1) | force_fit_type (t, overflow);
2115 TREE_CONSTANT_OVERFLOW (t)
2116 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
2118 #endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
2119 TREE_TYPE (t) = type;
2121 else if (TREE_CODE (type) == REAL_TYPE)
2123 #if !defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
2124 if (TREE_CODE (arg1) == INTEGER_CST)
2125 return build_real_from_int_cst (type, arg1);
2126 #endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
2127 if (TREE_CODE (arg1) == REAL_CST)
2129 struct fc_args args;
2131 if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1)))
2134 TREE_TYPE (arg1) = type;
2138 /* Setup input for fold_convert_1() */
2142 if (do_float_handler (fold_convert_1, (PTR) &args))
2144 /* Receive output from fold_convert_1() */
2149 /* We got an exception from fold_convert_1() */
2151 t = copy_node (arg1);
2155 = TREE_OVERFLOW (arg1) | force_fit_type (t, overflow);
2156 TREE_CONSTANT_OVERFLOW (t)
2157 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
2161 TREE_CONSTANT (t) = 1;
2165 /* Return an expr equal to X but certainly not valid as an lvalue. */
2173 /* These things are certainly not lvalues. */
2174 if (TREE_CODE (x) == NON_LVALUE_EXPR
2175 || TREE_CODE (x) == INTEGER_CST
2176 || TREE_CODE (x) == REAL_CST
2177 || TREE_CODE (x) == STRING_CST
2178 || TREE_CODE (x) == ADDR_EXPR)
2181 result = build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
2182 TREE_CONSTANT (result) = TREE_CONSTANT (x);
2186 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2187 Zero means allow extended lvalues. */
2189 int pedantic_lvalues;
2191 /* When pedantic, return an expr equal to X but certainly not valid as a
2192 pedantic lvalue. Otherwise, return X. */
2195 pedantic_non_lvalue (x)
2198 if (pedantic_lvalues)
2199 return non_lvalue (x);
2204 /* Given a tree comparison code, return the code that is the logical inverse
2205 of the given code. It is not safe to do this for floating-point
2206 comparisons, except for NE_EXPR and EQ_EXPR. */
2208 static enum tree_code
2209 invert_tree_comparison (code)
2210 enum tree_code code;
2231 /* Similar, but return the comparison that results if the operands are
2232 swapped. This is safe for floating-point. */
2234 static enum tree_code
2235 swap_tree_comparison (code)
2236 enum tree_code code;
2256 /* Return nonzero if CODE is a tree code that represents a truth value. */
2259 truth_value_p (code)
2260 enum tree_code code;
2262 return (TREE_CODE_CLASS (code) == '<'
2263 || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
2264 || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
2265 || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
2268 /* Return nonzero if two operands are necessarily equal.
2269 If ONLY_CONST is non-zero, only return non-zero for constants.
2270 This function tests whether the operands are indistinguishable;
2271 it does not test whether they are equal using C's == operation.
2272 The distinction is important for IEEE floating point, because
2273 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2274 (2) two NaNs may be indistinguishable, but NaN!=NaN. */
2277 operand_equal_p (arg0, arg1, only_const)
2281 /* If both types don't have the same signedness, then we can't consider
2282 them equal. We must check this before the STRIP_NOPS calls
2283 because they may change the signedness of the arguments. */
2284 if (TREE_UNSIGNED (TREE_TYPE (arg0)) != TREE_UNSIGNED (TREE_TYPE (arg1)))
2290 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2291 /* This is needed for conversions and for COMPONENT_REF.
2292 Might as well play it safe and always test this. */
2293 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2294 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2295 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2298 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2299 We don't care about side effects in that case because the SAVE_EXPR
2300 takes care of that for us. In all other cases, two expressions are
2301 equal if they have no side effects. If we have two identical
2302 expressions with side effects that should be treated the same due
2303 to the only side effects being identical SAVE_EXPR's, that will
2304 be detected in the recursive calls below. */
2305 if (arg0 == arg1 && ! only_const
2306 && (TREE_CODE (arg0) == SAVE_EXPR
2307 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2310 /* Next handle constant cases, those for which we can return 1 even
2311 if ONLY_CONST is set. */
2312 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2313 switch (TREE_CODE (arg0))
2316 return (! TREE_CONSTANT_OVERFLOW (arg0)
2317 && ! TREE_CONSTANT_OVERFLOW (arg1)
2318 && tree_int_cst_equal (arg0, arg1));
2321 return (! TREE_CONSTANT_OVERFLOW (arg0)
2322 && ! TREE_CONSTANT_OVERFLOW (arg1)
2323 && REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2324 TREE_REAL_CST (arg1)));
2327 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2329 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2333 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2334 && ! memcmp (TREE_STRING_POINTER (arg0),
2335 TREE_STRING_POINTER (arg1),
2336 TREE_STRING_LENGTH (arg0)));
2339 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2348 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2351 /* Two conversions are equal only if signedness and modes match. */
2352 if ((TREE_CODE (arg0) == NOP_EXPR || TREE_CODE (arg0) == CONVERT_EXPR)
2353 && (TREE_UNSIGNED (TREE_TYPE (arg0))
2354 != TREE_UNSIGNED (TREE_TYPE (arg1))))
2357 return operand_equal_p (TREE_OPERAND (arg0, 0),
2358 TREE_OPERAND (arg1, 0), 0);
2362 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0)
2363 && operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1),
2367 /* For commutative ops, allow the other order. */
2368 return ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MULT_EXPR
2369 || TREE_CODE (arg0) == MIN_EXPR || TREE_CODE (arg0) == MAX_EXPR
2370 || TREE_CODE (arg0) == BIT_IOR_EXPR
2371 || TREE_CODE (arg0) == BIT_XOR_EXPR
2372 || TREE_CODE (arg0) == BIT_AND_EXPR
2373 || TREE_CODE (arg0) == NE_EXPR || TREE_CODE (arg0) == EQ_EXPR)
2374 && operand_equal_p (TREE_OPERAND (arg0, 0),
2375 TREE_OPERAND (arg1, 1), 0)
2376 && operand_equal_p (TREE_OPERAND (arg0, 1),
2377 TREE_OPERAND (arg1, 0), 0));
2380 /* If either of the pointer (or reference) expressions we are dereferencing
2381 contain a side effect, these cannot be equal. */
2382 if (TREE_SIDE_EFFECTS (arg0)
2383 || TREE_SIDE_EFFECTS (arg1))
2386 switch (TREE_CODE (arg0))
2389 return operand_equal_p (TREE_OPERAND (arg0, 0),
2390 TREE_OPERAND (arg1, 0), 0);
2394 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2395 TREE_OPERAND (arg1, 0), 0)
2396 && operand_equal_p (TREE_OPERAND (arg0, 1),
2397 TREE_OPERAND (arg1, 1), 0));
2400 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2401 TREE_OPERAND (arg1, 0), 0)
2402 && operand_equal_p (TREE_OPERAND (arg0, 1),
2403 TREE_OPERAND (arg1, 1), 0)
2404 && operand_equal_p (TREE_OPERAND (arg0, 2),
2405 TREE_OPERAND (arg1, 2), 0));
2411 if (TREE_CODE (arg0) == RTL_EXPR)
2412 return rtx_equal_p (RTL_EXPR_RTL (arg0), RTL_EXPR_RTL (arg1));
2420 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2421 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2423 When in doubt, return 0. */
2426 operand_equal_for_comparison_p (arg0, arg1, other)
2430 int unsignedp1, unsignedpo;
2431 tree primarg0, primarg1, primother;
2432 unsigned int correct_width;
2434 if (operand_equal_p (arg0, arg1, 0))
2437 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2438 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2441 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2442 and see if the inner values are the same. This removes any
2443 signedness comparison, which doesn't matter here. */
2444 primarg0 = arg0, primarg1 = arg1;
2445 STRIP_NOPS (primarg0); STRIP_NOPS (primarg1);
2446 if (operand_equal_p (primarg0, primarg1, 0))
2449 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2450 actual comparison operand, ARG0.
2452 First throw away any conversions to wider types
2453 already present in the operands. */
2455 primarg1 = get_narrower (arg1, &unsignedp1);
2456 primother = get_narrower (other, &unsignedpo);
2458 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2459 if (unsignedp1 == unsignedpo
2460 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2461 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2463 tree type = TREE_TYPE (arg0);
2465 /* Make sure shorter operand is extended the right way
2466 to match the longer operand. */
2467 primarg1 = convert (signed_or_unsigned_type (unsignedp1,
2468 TREE_TYPE (primarg1)),
2471 if (operand_equal_p (arg0, convert (type, primarg1), 0))
2478 /* See if ARG is an expression that is either a comparison or is performing
2479 arithmetic on comparisons. The comparisons must only be comparing
2480 two different values, which will be stored in *CVAL1 and *CVAL2; if
2481 they are non-zero it means that some operands have already been found.
2482 No variables may be used anywhere else in the expression except in the
2483 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2484 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2486 If this is true, return 1. Otherwise, return zero. */
2489 twoval_comparison_p (arg, cval1, cval2, save_p)
2491 tree *cval1, *cval2;
2494 enum tree_code code = TREE_CODE (arg);
2495 char class = TREE_CODE_CLASS (code);
2497 /* We can handle some of the 'e' cases here. */
2498 if (class == 'e' && code == TRUTH_NOT_EXPR)
2500 else if (class == 'e'
2501 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2502 || code == COMPOUND_EXPR))
2505 else if (class == 'e' && code == SAVE_EXPR && SAVE_EXPR_RTL (arg) == 0
2506 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2508 /* If we've already found a CVAL1 or CVAL2, this expression is
2509 two complex to handle. */
2510 if (*cval1 || *cval2)
2520 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2523 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2524 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2525 cval1, cval2, save_p));
2531 if (code == COND_EXPR)
2532 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2533 cval1, cval2, save_p)
2534 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2535 cval1, cval2, save_p)
2536 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2537 cval1, cval2, save_p));
2541 /* First see if we can handle the first operand, then the second. For
2542 the second operand, we know *CVAL1 can't be zero. It must be that
2543 one side of the comparison is each of the values; test for the
2544 case where this isn't true by failing if the two operands
2547 if (operand_equal_p (TREE_OPERAND (arg, 0),
2548 TREE_OPERAND (arg, 1), 0))
2552 *cval1 = TREE_OPERAND (arg, 0);
2553 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2555 else if (*cval2 == 0)
2556 *cval2 = TREE_OPERAND (arg, 0);
2557 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2562 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2564 else if (*cval2 == 0)
2565 *cval2 = TREE_OPERAND (arg, 1);
2566 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2578 /* ARG is a tree that is known to contain just arithmetic operations and
2579 comparisons. Evaluate the operations in the tree substituting NEW0 for
2580 any occurrence of OLD0 as an operand of a comparison and likewise for
2584 eval_subst (arg, old0, new0, old1, new1)
2586 tree old0, new0, old1, new1;
2588 tree type = TREE_TYPE (arg);
2589 enum tree_code code = TREE_CODE (arg);
2590 char class = TREE_CODE_CLASS (code);
2592 /* We can handle some of the 'e' cases here. */
2593 if (class == 'e' && code == TRUTH_NOT_EXPR)
2595 else if (class == 'e'
2596 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2602 return fold (build1 (code, type,
2603 eval_subst (TREE_OPERAND (arg, 0),
2604 old0, new0, old1, new1)));
2607 return fold (build (code, type,
2608 eval_subst (TREE_OPERAND (arg, 0),
2609 old0, new0, old1, new1),
2610 eval_subst (TREE_OPERAND (arg, 1),
2611 old0, new0, old1, new1)));
2617 return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);
2620 return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);
2623 return fold (build (code, type,
2624 eval_subst (TREE_OPERAND (arg, 0),
2625 old0, new0, old1, new1),
2626 eval_subst (TREE_OPERAND (arg, 1),
2627 old0, new0, old1, new1),
2628 eval_subst (TREE_OPERAND (arg, 2),
2629 old0, new0, old1, new1)));
2633 /* fall through - ??? */
2637 tree arg0 = TREE_OPERAND (arg, 0);
2638 tree arg1 = TREE_OPERAND (arg, 1);
2640 /* We need to check both for exact equality and tree equality. The
2641 former will be true if the operand has a side-effect. In that
2642 case, we know the operand occurred exactly once. */
2644 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
2646 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
2649 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
2651 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
2654 return fold (build (code, type, arg0, arg1));
2662 /* Return a tree for the case when the result of an expression is RESULT
2663 converted to TYPE and OMITTED was previously an operand of the expression
2664 but is now not needed (e.g., we folded OMITTED * 0).
2666 If OMITTED has side effects, we must evaluate it. Otherwise, just do
2667 the conversion of RESULT to TYPE. */
2670 omit_one_operand (type, result, omitted)
2671 tree type, result, omitted;
2673 tree t = convert (type, result);
2675 if (TREE_SIDE_EFFECTS (omitted))
2676 return build (COMPOUND_EXPR, type, omitted, t);
2678 return non_lvalue (t);
2681 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
2684 pedantic_omit_one_operand (type, result, omitted)
2685 tree type, result, omitted;
2687 tree t = convert (type, result);
2689 if (TREE_SIDE_EFFECTS (omitted))
2690 return build (COMPOUND_EXPR, type, omitted, t);
2692 return pedantic_non_lvalue (t);
2697 /* Return a simplified tree node for the truth-negation of ARG. This
2698 never alters ARG itself. We assume that ARG is an operation that
2699 returns a truth value (0 or 1). */
2702 invert_truthvalue (arg)
2705 tree type = TREE_TYPE (arg);
2706 enum tree_code code = TREE_CODE (arg);
2708 if (code == ERROR_MARK)
2711 /* If this is a comparison, we can simply invert it, except for
2712 floating-point non-equality comparisons, in which case we just
2713 enclose a TRUTH_NOT_EXPR around what we have. */
2715 if (TREE_CODE_CLASS (code) == '<')
2717 if (FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (arg, 0)))
2718 && !flag_fast_math && code != NE_EXPR && code != EQ_EXPR)
2719 return build1 (TRUTH_NOT_EXPR, type, arg);
2721 return build (invert_tree_comparison (code), type,
2722 TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
2728 return convert (type, build_int_2 (integer_zerop (arg), 0));
2730 case TRUTH_AND_EXPR:
2731 return build (TRUTH_OR_EXPR, type,
2732 invert_truthvalue (TREE_OPERAND (arg, 0)),
2733 invert_truthvalue (TREE_OPERAND (arg, 1)));
2736 return build (TRUTH_AND_EXPR, type,
2737 invert_truthvalue (TREE_OPERAND (arg, 0)),
2738 invert_truthvalue (TREE_OPERAND (arg, 1)));
2740 case TRUTH_XOR_EXPR:
2741 /* Here we can invert either operand. We invert the first operand
2742 unless the second operand is a TRUTH_NOT_EXPR in which case our
2743 result is the XOR of the first operand with the inside of the
2744 negation of the second operand. */
2746 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
2747 return build (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
2748 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
2750 return build (TRUTH_XOR_EXPR, type,
2751 invert_truthvalue (TREE_OPERAND (arg, 0)),
2752 TREE_OPERAND (arg, 1));
2754 case TRUTH_ANDIF_EXPR:
2755 return build (TRUTH_ORIF_EXPR, type,
2756 invert_truthvalue (TREE_OPERAND (arg, 0)),
2757 invert_truthvalue (TREE_OPERAND (arg, 1)));
2759 case TRUTH_ORIF_EXPR:
2760 return build (TRUTH_ANDIF_EXPR, type,
2761 invert_truthvalue (TREE_OPERAND (arg, 0)),
2762 invert_truthvalue (TREE_OPERAND (arg, 1)));
2764 case TRUTH_NOT_EXPR:
2765 return TREE_OPERAND (arg, 0);
2768 return build (COND_EXPR, type, TREE_OPERAND (arg, 0),
2769 invert_truthvalue (TREE_OPERAND (arg, 1)),
2770 invert_truthvalue (TREE_OPERAND (arg, 2)));
2773 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
2774 invert_truthvalue (TREE_OPERAND (arg, 1)));
2776 case WITH_RECORD_EXPR:
2777 return build (WITH_RECORD_EXPR, type,
2778 invert_truthvalue (TREE_OPERAND (arg, 0)),
2779 TREE_OPERAND (arg, 1));
2781 case NON_LVALUE_EXPR:
2782 return invert_truthvalue (TREE_OPERAND (arg, 0));
2787 return build1 (TREE_CODE (arg), type,
2788 invert_truthvalue (TREE_OPERAND (arg, 0)));
2791 if (!integer_onep (TREE_OPERAND (arg, 1)))
2793 return build (EQ_EXPR, type, arg, convert (type, integer_zero_node));
2796 return build1 (TRUTH_NOT_EXPR, type, arg);
2798 case CLEANUP_POINT_EXPR:
2799 return build1 (CLEANUP_POINT_EXPR, type,
2800 invert_truthvalue (TREE_OPERAND (arg, 0)));
2805 if (TREE_CODE (TREE_TYPE (arg)) != BOOLEAN_TYPE)
2807 return build1 (TRUTH_NOT_EXPR, type, arg);
2810 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
2811 operands are another bit-wise operation with a common input. If so,
2812 distribute the bit operations to save an operation and possibly two if
2813 constants are involved. For example, convert
2814 (A | B) & (A | C) into A | (B & C)
2815 Further simplification will occur if B and C are constants.
2817 If this optimization cannot be done, 0 will be returned. */
2820 distribute_bit_expr (code, type, arg0, arg1)
2821 enum tree_code code;
2828 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2829 || TREE_CODE (arg0) == code
2830 || (TREE_CODE (arg0) != BIT_AND_EXPR
2831 && TREE_CODE (arg0) != BIT_IOR_EXPR))
2834 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
2836 common = TREE_OPERAND (arg0, 0);
2837 left = TREE_OPERAND (arg0, 1);
2838 right = TREE_OPERAND (arg1, 1);
2840 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
2842 common = TREE_OPERAND (arg0, 0);
2843 left = TREE_OPERAND (arg0, 1);
2844 right = TREE_OPERAND (arg1, 0);
2846 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
2848 common = TREE_OPERAND (arg0, 1);
2849 left = TREE_OPERAND (arg0, 0);
2850 right = TREE_OPERAND (arg1, 1);
2852 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
2854 common = TREE_OPERAND (arg0, 1);
2855 left = TREE_OPERAND (arg0, 0);
2856 right = TREE_OPERAND (arg1, 0);
2861 return fold (build (TREE_CODE (arg0), type, common,
2862 fold (build (code, type, left, right))));
2865 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
2866 starting at BITPOS. The field is unsigned if UNSIGNEDP is non-zero. */
2869 make_bit_field_ref (inner, type, bitsize, bitpos, unsignedp)
2872 int bitsize, bitpos;
2875 tree result = build (BIT_FIELD_REF, type, inner,
2876 size_int (bitsize), bitsize_int (bitpos));
2878 TREE_UNSIGNED (result) = unsignedp;
2883 /* Optimize a bit-field compare.
2885 There are two cases: First is a compare against a constant and the
2886 second is a comparison of two items where the fields are at the same
2887 bit position relative to the start of a chunk (byte, halfword, word)
2888 large enough to contain it. In these cases we can avoid the shift
2889 implicit in bitfield extractions.
2891 For constants, we emit a compare of the shifted constant with the
2892 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
2893 compared. For two fields at the same position, we do the ANDs with the
2894 similar mask and compare the result of the ANDs.
2896 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
2897 COMPARE_TYPE is the type of the comparison, and LHS and RHS
2898 are the left and right operands of the comparison, respectively.
2900 If the optimization described above can be done, we return the resulting
2901 tree. Otherwise we return zero. */
2904 optimize_bit_field_compare (code, compare_type, lhs, rhs)
2905 enum tree_code code;
2909 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
2910 tree type = TREE_TYPE (lhs);
2911 tree signed_type, unsigned_type;
2912 int const_p = TREE_CODE (rhs) == INTEGER_CST;
2913 enum machine_mode lmode, rmode, nmode;
2914 int lunsignedp, runsignedp;
2915 int lvolatilep = 0, rvolatilep = 0;
2916 unsigned int alignment;
2917 tree linner, rinner = NULL_TREE;
2921 /* Get all the information about the extractions being done. If the bit size
2922 if the same as the size of the underlying object, we aren't doing an
2923 extraction at all and so can do nothing. We also don't want to
2924 do anything if the inner expression is a PLACEHOLDER_EXPR since we
2925 then will no longer be able to replace it. */
2926 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
2927 &lunsignedp, &lvolatilep, &alignment);
2928 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
2929 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
2934 /* If this is not a constant, we can only do something if bit positions,
2935 sizes, and signedness are the same. */
2936 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
2937 &runsignedp, &rvolatilep, &alignment);
2939 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
2940 || lunsignedp != runsignedp || offset != 0
2941 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
2945 /* See if we can find a mode to refer to this field. We should be able to,
2946 but fail if we can't. */
2947 nmode = get_best_mode (lbitsize, lbitpos,
2948 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
2949 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
2950 TYPE_ALIGN (TREE_TYPE (rinner))),
2951 word_mode, lvolatilep || rvolatilep);
2952 if (nmode == VOIDmode)
2955 /* Set signed and unsigned types of the precision of this mode for the
2957 signed_type = type_for_mode (nmode, 0);
2958 unsigned_type = type_for_mode (nmode, 1);
2960 /* Compute the bit position and size for the new reference and our offset
2961 within it. If the new reference is the same size as the original, we
2962 won't optimize anything, so return zero. */
2963 nbitsize = GET_MODE_BITSIZE (nmode);
2964 nbitpos = lbitpos & ~ (nbitsize - 1);
2966 if (nbitsize == lbitsize)
2969 if (BYTES_BIG_ENDIAN)
2970 lbitpos = nbitsize - lbitsize - lbitpos;
2972 /* Make the mask to be used against the extracted field. */
2973 mask = build_int_2 (~0, ~0);
2974 TREE_TYPE (mask) = unsigned_type;
2975 force_fit_type (mask, 0);
2976 mask = convert (unsigned_type, mask);
2977 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
2978 mask = const_binop (RSHIFT_EXPR, mask,
2979 size_int (nbitsize - lbitsize - lbitpos), 0);
2982 /* If not comparing with constant, just rework the comparison
2984 return build (code, compare_type,
2985 build (BIT_AND_EXPR, unsigned_type,
2986 make_bit_field_ref (linner, unsigned_type,
2987 nbitsize, nbitpos, 1),
2989 build (BIT_AND_EXPR, unsigned_type,
2990 make_bit_field_ref (rinner, unsigned_type,
2991 nbitsize, nbitpos, 1),
2994 /* Otherwise, we are handling the constant case. See if the constant is too
2995 big for the field. Warn and return a tree of for 0 (false) if so. We do
2996 this not only for its own sake, but to avoid having to test for this
2997 error case below. If we didn't, we might generate wrong code.
2999 For unsigned fields, the constant shifted right by the field length should
3000 be all zero. For signed fields, the high-order bits should agree with
3005 if (! integer_zerop (const_binop (RSHIFT_EXPR,
3006 convert (unsigned_type, rhs),
3007 size_int (lbitsize), 0)))
3009 warning ("comparison is always %d due to width of bitfield",
3011 return convert (compare_type,
3013 ? integer_one_node : integer_zero_node));
3018 tree tem = const_binop (RSHIFT_EXPR, convert (signed_type, rhs),
3019 size_int (lbitsize - 1), 0);
3020 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
3022 warning ("comparison is always %d due to width of bitfield",
3024 return convert (compare_type,
3026 ? integer_one_node : integer_zero_node));
3030 /* Single-bit compares should always be against zero. */
3031 if (lbitsize == 1 && ! integer_zerop (rhs))
3033 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3034 rhs = convert (type, integer_zero_node);
3037 /* Make a new bitfield reference, shift the constant over the
3038 appropriate number of bits and mask it with the computed mask
3039 (in case this was a signed field). If we changed it, make a new one. */
3040 lhs = make_bit_field_ref (linner, unsigned_type, nbitsize, nbitpos, 1);
3043 TREE_SIDE_EFFECTS (lhs) = 1;
3044 TREE_THIS_VOLATILE (lhs) = 1;
3047 rhs = fold (const_binop (BIT_AND_EXPR,
3048 const_binop (LSHIFT_EXPR,
3049 convert (unsigned_type, rhs),
3050 size_int (lbitpos), 0),
3053 return build (code, compare_type,
3054 build (BIT_AND_EXPR, unsigned_type, lhs, mask),
3058 /* Subroutine for fold_truthop: decode a field reference.
3060 If EXP is a comparison reference, we return the innermost reference.
3062 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3063 set to the starting bit number.
3065 If the innermost field can be completely contained in a mode-sized
3066 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3068 *PVOLATILEP is set to 1 if the any expression encountered is volatile;
3069 otherwise it is not changed.
3071 *PUNSIGNEDP is set to the signedness of the field.
3073 *PMASK is set to the mask used. This is either contained in a
3074 BIT_AND_EXPR or derived from the width of the field.
3076 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3078 Return 0 if this is not a component reference or is one that we can't
3079 do anything with. */
3082 decode_field_reference (exp, pbitsize, pbitpos, pmode, punsignedp,
3083 pvolatilep, pmask, pand_mask)
3085 HOST_WIDE_INT *pbitsize, *pbitpos;
3086 enum machine_mode *pmode;
3087 int *punsignedp, *pvolatilep;
3092 tree mask, inner, offset;
3094 unsigned int precision;
3095 unsigned int alignment;
3097 /* All the optimizations using this function assume integer fields.
3098 There are problems with FP fields since the type_for_size call
3099 below can fail for, e.g., XFmode. */
3100 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3105 if (TREE_CODE (exp) == BIT_AND_EXPR)
3107 and_mask = TREE_OPERAND (exp, 1);
3108 exp = TREE_OPERAND (exp, 0);
3109 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3110 if (TREE_CODE (and_mask) != INTEGER_CST)
3115 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3116 punsignedp, pvolatilep, &alignment);
3117 if ((inner == exp && and_mask == 0)
3118 || *pbitsize < 0 || offset != 0
3119 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3122 /* Compute the mask to access the bitfield. */
3123 unsigned_type = type_for_size (*pbitsize, 1);
3124 precision = TYPE_PRECISION (unsigned_type);
3126 mask = build_int_2 (~0, ~0);
3127 TREE_TYPE (mask) = unsigned_type;
3128 force_fit_type (mask, 0);
3129 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3130 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3132 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3134 mask = fold (build (BIT_AND_EXPR, unsigned_type,
3135 convert (unsigned_type, and_mask), mask));
3138 *pand_mask = and_mask;
3142 /* Return non-zero if MASK represents a mask of SIZE ones in the low-order
3146 all_ones_mask_p (mask, size)
3150 tree type = TREE_TYPE (mask);
3151 unsigned int precision = TYPE_PRECISION (type);
3154 tmask = build_int_2 (~0, ~0);
3155 TREE_TYPE (tmask) = signed_type (type);
3156 force_fit_type (tmask, 0);
3158 tree_int_cst_equal (mask,
3159 const_binop (RSHIFT_EXPR,
3160 const_binop (LSHIFT_EXPR, tmask,
3161 size_int (precision - size),
3163 size_int (precision - size), 0));
3166 /* Subroutine for fold_truthop: determine if an operand is simple enough
3167 to be evaluated unconditionally. */
3170 simple_operand_p (exp)
3173 /* Strip any conversions that don't change the machine mode. */
3174 while ((TREE_CODE (exp) == NOP_EXPR
3175 || TREE_CODE (exp) == CONVERT_EXPR)
3176 && (TYPE_MODE (TREE_TYPE (exp))
3177 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
3178 exp = TREE_OPERAND (exp, 0);
3180 return (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c'
3182 && ! TREE_ADDRESSABLE (exp)
3183 && ! TREE_THIS_VOLATILE (exp)
3184 && ! DECL_NONLOCAL (exp)
3185 /* Don't regard global variables as simple. They may be
3186 allocated in ways unknown to the compiler (shared memory,
3187 #pragma weak, etc). */
3188 && ! TREE_PUBLIC (exp)
3189 && ! DECL_EXTERNAL (exp)
3190 /* Loading a static variable is unduly expensive, but global
3191 registers aren't expensive. */
3192 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3195 /* The following functions are subroutines to fold_range_test and allow it to
3196 try to change a logical combination of comparisons into a range test.
3199 X == 2 && X == 3 && X == 4 && X == 5
3203 (unsigned) (X - 2) <= 3
3205 We describe each set of comparisons as being either inside or outside
3206 a range, using a variable named like IN_P, and then describe the
3207 range with a lower and upper bound. If one of the bounds is omitted,
3208 it represents either the highest or lowest value of the type.
3210 In the comments below, we represent a range by two numbers in brackets
3211 preceded by a "+" to designate being inside that range, or a "-" to
3212 designate being outside that range, so the condition can be inverted by
3213 flipping the prefix. An omitted bound is represented by a "-". For
3214 example, "- [-, 10]" means being outside the range starting at the lowest
3215 possible value and ending at 10, in other words, being greater than 10.
3216 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3219 We set up things so that the missing bounds are handled in a consistent
3220 manner so neither a missing bound nor "true" and "false" need to be
3221 handled using a special case. */
3223 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3224 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3225 and UPPER1_P are nonzero if the respective argument is an upper bound
3226 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3227 must be specified for a comparison. ARG1 will be converted to ARG0's
3228 type if both are specified. */
3231 range_binop (code, type, arg0, upper0_p, arg1, upper1_p)
3232 enum tree_code code;
3235 int upper0_p, upper1_p;
3241 /* If neither arg represents infinity, do the normal operation.
3242 Else, if not a comparison, return infinity. Else handle the special
3243 comparison rules. Note that most of the cases below won't occur, but
3244 are handled for consistency. */
3246 if (arg0 != 0 && arg1 != 0)
3248 tem = fold (build (code, type != 0 ? type : TREE_TYPE (arg0),
3249 arg0, convert (TREE_TYPE (arg0), arg1)));
3251 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3254 if (TREE_CODE_CLASS (code) != '<')
3257 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3258 for neither. In real maths, we cannot assume open ended ranges are
3259 the same. But, this is computer arithmetic, where numbers are finite.
3260 We can therefore make the transformation of any unbounded range with
3261 the value Z, Z being greater than any representable number. This permits
3262 us to treat unbounded ranges as equal. */
3263 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3264 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3268 result = sgn0 == sgn1;
3271 result = sgn0 != sgn1;
3274 result = sgn0 < sgn1;
3277 result = sgn0 <= sgn1;
3280 result = sgn0 > sgn1;
3283 result = sgn0 >= sgn1;
3289 return convert (type, result ? integer_one_node : integer_zero_node);
3292 /* Given EXP, a logical expression, set the range it is testing into
3293 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
3294 actually being tested. *PLOW and *PHIGH will have be made the same type
3295 as the returned expression. If EXP is not a comparison, we will most
3296 likely not be returning a useful value and range. */
3299 make_range (exp, pin_p, plow, phigh)
3304 enum tree_code code;
3305 tree arg0 = NULL_TREE, arg1 = NULL_TREE, type = NULL_TREE;
3306 tree orig_type = NULL_TREE;
3308 tree low, high, n_low, n_high;
3310 /* Start with simply saying "EXP != 0" and then look at the code of EXP
3311 and see if we can refine the range. Some of the cases below may not
3312 happen, but it doesn't seem worth worrying about this. We "continue"
3313 the outer loop when we've changed something; otherwise we "break"
3314 the switch, which will "break" the while. */
3316 in_p = 0, low = high = convert (TREE_TYPE (exp), integer_zero_node);
3320 code = TREE_CODE (exp);
3322 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
3324 arg0 = TREE_OPERAND (exp, 0);
3325 if (TREE_CODE_CLASS (code) == '<'
3326 || TREE_CODE_CLASS (code) == '1'
3327 || TREE_CODE_CLASS (code) == '2')
3328 type = TREE_TYPE (arg0);
3329 if (TREE_CODE_CLASS (code) == '2'
3330 || TREE_CODE_CLASS (code) == '<'
3331 || (TREE_CODE_CLASS (code) == 'e'
3332 && tree_code_length[(int) code] > 1))
3333 arg1 = TREE_OPERAND (exp, 1);
3336 /* Set ORIG_TYPE as soon as TYPE is non-null so that we do not
3337 lose a cast by accident. */
3338 if (type != NULL_TREE && orig_type == NULL_TREE)
3343 case TRUTH_NOT_EXPR:
3344 in_p = ! in_p, exp = arg0;
3347 case EQ_EXPR: case NE_EXPR:
3348 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3349 /* We can only do something if the range is testing for zero
3350 and if the second operand is an integer constant. Note that
3351 saying something is "in" the range we make is done by
3352 complementing IN_P since it will set in the initial case of
3353 being not equal to zero; "out" is leaving it alone. */
3354 if (low == 0 || high == 0
3355 || ! integer_zerop (low) || ! integer_zerop (high)
3356 || TREE_CODE (arg1) != INTEGER_CST)
3361 case NE_EXPR: /* - [c, c] */
3364 case EQ_EXPR: /* + [c, c] */
3365 in_p = ! in_p, low = high = arg1;
3367 case GT_EXPR: /* - [-, c] */
3368 low = 0, high = arg1;
3370 case GE_EXPR: /* + [c, -] */
3371 in_p = ! in_p, low = arg1, high = 0;
3373 case LT_EXPR: /* - [c, -] */
3374 low = arg1, high = 0;
3376 case LE_EXPR: /* + [-, c] */
3377 in_p = ! in_p, low = 0, high = arg1;
3385 /* If this is an unsigned comparison, we also know that EXP is
3386 greater than or equal to zero. We base the range tests we make
3387 on that fact, so we record it here so we can parse existing
3389 if (TREE_UNSIGNED (type) && (low == 0 || high == 0))
3391 if (! merge_ranges (&n_in_p, &n_low, &n_high, in_p, low, high,
3392 1, convert (type, integer_zero_node),
3396 in_p = n_in_p, low = n_low, high = n_high;
3398 /* If the high bound is missing, but we
3399 have a low bound, reverse the range so
3400 it goes from zero to the low bound minus 1. */
3401 if (high == 0 && low)
3404 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
3405 integer_one_node, 0);
3406 low = convert (type, integer_zero_node);
3412 /* (-x) IN [a,b] -> x in [-b, -a] */
3413 n_low = range_binop (MINUS_EXPR, type,
3414 convert (type, integer_zero_node), 0, high, 1);
3415 n_high = range_binop (MINUS_EXPR, type,
3416 convert (type, integer_zero_node), 0, low, 0);
3417 low = n_low, high = n_high;
3423 exp = build (MINUS_EXPR, type, negate_expr (arg0),
3424 convert (type, integer_one_node));
3427 case PLUS_EXPR: case MINUS_EXPR:
3428 if (TREE_CODE (arg1) != INTEGER_CST)
3431 /* If EXP is signed, any overflow in the computation is undefined,
3432 so we don't worry about it so long as our computations on
3433 the bounds don't overflow. For unsigned, overflow is defined
3434 and this is exactly the right thing. */
3435 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3436 type, low, 0, arg1, 0);
3437 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3438 type, high, 1, arg1, 0);
3439 if ((n_low != 0 && TREE_OVERFLOW (n_low))
3440 || (n_high != 0 && TREE_OVERFLOW (n_high)))
3443 /* Check for an unsigned range which has wrapped around the maximum
3444 value thus making n_high < n_low, and normalize it. */
3445 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
3447 low = range_binop (PLUS_EXPR, type, n_high, 0,
3448 integer_one_node, 0);
3449 high = range_binop (MINUS_EXPR, type, n_low, 0,
3450 integer_one_node, 0);
3454 low = n_low, high = n_high;
3459 case NOP_EXPR: case NON_LVALUE_EXPR: case CONVERT_EXPR:
3460 if (TYPE_PRECISION (type) > TYPE_PRECISION (orig_type))
3463 if (! INTEGRAL_TYPE_P (type)
3464 || (low != 0 && ! int_fits_type_p (low, type))
3465 || (high != 0 && ! int_fits_type_p (high, type)))
3468 n_low = low, n_high = high;
3471 n_low = convert (type, n_low);
3474 n_high = convert (type, n_high);
3476 /* If we're converting from an unsigned to a signed type,
3477 we will be doing the comparison as unsigned. The tests above
3478 have already verified that LOW and HIGH are both positive.
3480 So we have to make sure that the original unsigned value will
3481 be interpreted as positive. */
3482 if (TREE_UNSIGNED (type) && ! TREE_UNSIGNED (TREE_TYPE (exp)))
3484 tree equiv_type = type_for_mode (TYPE_MODE (type), 1);
3487 /* A range without an upper bound is, naturally, unbounded.
3488 Since convert would have cropped a very large value, use
3489 the max value for the destination type. */
3491 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
3492 : TYPE_MAX_VALUE (type);
3494 high_positive = fold (build (RSHIFT_EXPR, type,
3495 convert (type, high_positive),
3496 convert (type, integer_one_node)));
3498 /* If the low bound is specified, "and" the range with the
3499 range for which the original unsigned value will be
3503 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3505 1, convert (type, integer_zero_node),
3509 in_p = (n_in_p == in_p);
3513 /* Otherwise, "or" the range with the range of the input
3514 that will be interpreted as negative. */
3515 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3517 1, convert (type, integer_zero_node),
3521 in_p = (in_p != n_in_p);
3526 low = n_low, high = n_high;
3536 /* If EXP is a constant, we can evaluate whether this is true or false. */
3537 if (TREE_CODE (exp) == INTEGER_CST)
3539 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
3541 && integer_onep (range_binop (LE_EXPR, integer_type_node,
3547 *pin_p = in_p, *plow = low, *phigh = high;
3551 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
3552 type, TYPE, return an expression to test if EXP is in (or out of, depending
3553 on IN_P) the range. */
3556 build_range_check (type, exp, in_p, low, high)
3562 tree etype = TREE_TYPE (exp);
3566 && (0 != (value = build_range_check (type, exp, 1, low, high))))
3567 return invert_truthvalue (value);
3569 else if (low == 0 && high == 0)
3570 return convert (type, integer_one_node);
3573 return fold (build (LE_EXPR, type, exp, high));
3576 return fold (build (GE_EXPR, type, exp, low));
3578 else if (operand_equal_p (low, high, 0))
3579 return fold (build (EQ_EXPR, type, exp, low));
3581 else if (TREE_UNSIGNED (etype) && integer_zerop (low))
3582 return build_range_check (type, exp, 1, 0, high);
3584 else if (integer_zerop (low))
3586 utype = unsigned_type (etype);
3587 return build_range_check (type, convert (utype, exp), 1, 0,
3588 convert (utype, high));
3591 else if (0 != (value = const_binop (MINUS_EXPR, high, low, 0))
3592 && ! TREE_OVERFLOW (value))
3593 return build_range_check (type,
3594 fold (build (MINUS_EXPR, etype, exp, low)),
3595 1, convert (etype, integer_zero_node), value);
3600 /* Given two ranges, see if we can merge them into one. Return 1 if we
3601 can, 0 if we can't. Set the output range into the specified parameters. */
3604 merge_ranges (pin_p, plow, phigh, in0_p, low0, high0, in1_p, low1, high1)
3608 tree low0, high0, low1, high1;
3616 int lowequal = ((low0 == 0 && low1 == 0)
3617 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
3618 low0, 0, low1, 0)));
3619 int highequal = ((high0 == 0 && high1 == 0)
3620 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
3621 high0, 1, high1, 1)));
3623 /* Make range 0 be the range that starts first, or ends last if they
3624 start at the same value. Swap them if it isn't. */
3625 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
3628 && integer_onep (range_binop (GT_EXPR, integer_type_node,
3629 high1, 1, high0, 1))))
3631 temp = in0_p, in0_p = in1_p, in1_p = temp;
3632 tem = low0, low0 = low1, low1 = tem;
3633 tem = high0, high0 = high1, high1 = tem;
3636 /* Now flag two cases, whether the ranges are disjoint or whether the
3637 second range is totally subsumed in the first. Note that the tests
3638 below are simplified by the ones above. */
3639 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
3640 high0, 1, low1, 0));
3641 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
3642 high1, 1, high0, 1));
3644 /* We now have four cases, depending on whether we are including or
3645 excluding the two ranges. */
3648 /* If they don't overlap, the result is false. If the second range
3649 is a subset it is the result. Otherwise, the range is from the start
3650 of the second to the end of the first. */
3652 in_p = 0, low = high = 0;
3654 in_p = 1, low = low1, high = high1;
3656 in_p = 1, low = low1, high = high0;
3659 else if (in0_p && ! in1_p)
3661 /* If they don't overlap, the result is the first range. If they are
3662 equal, the result is false. If the second range is a subset of the
3663 first, and the ranges begin at the same place, we go from just after
3664 the end of the first range to the end of the second. If the second
3665 range is not a subset of the first, or if it is a subset and both
3666 ranges end at the same place, the range starts at the start of the
3667 first range and ends just before the second range.
3668 Otherwise, we can't describe this as a single range. */
3670 in_p = 1, low = low0, high = high0;
3671 else if (lowequal && highequal)
3672 in_p = 0, low = high = 0;
3673 else if (subset && lowequal)
3675 in_p = 1, high = high0;
3676 low = range_binop (PLUS_EXPR, NULL_TREE, high1, 0,
3677 integer_one_node, 0);
3679 else if (! subset || highequal)
3681 in_p = 1, low = low0;
3682 high = range_binop (MINUS_EXPR, NULL_TREE, low1, 0,
3683 integer_one_node, 0);
3689 else if (! in0_p && in1_p)
3691 /* If they don't overlap, the result is the second range. If the second
3692 is a subset of the first, the result is false. Otherwise,
3693 the range starts just after the first range and ends at the
3694 end of the second. */
3696 in_p = 1, low = low1, high = high1;
3697 else if (subset || highequal)
3698 in_p = 0, low = high = 0;
3701 in_p = 1, high = high1;
3702 low = range_binop (PLUS_EXPR, NULL_TREE, high0, 1,
3703 integer_one_node, 0);
3709 /* The case where we are excluding both ranges. Here the complex case
3710 is if they don't overlap. In that case, the only time we have a
3711 range is if they are adjacent. If the second is a subset of the
3712 first, the result is the first. Otherwise, the range to exclude
3713 starts at the beginning of the first range and ends at the end of the
3717 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
3718 range_binop (PLUS_EXPR, NULL_TREE,
3720 integer_one_node, 1),
3722 in_p = 0, low = low0, high = high1;
3727 in_p = 0, low = low0, high = high0;
3729 in_p = 0, low = low0, high = high1;
3732 *pin_p = in_p, *plow = low, *phigh = high;
3736 /* EXP is some logical combination of boolean tests. See if we can
3737 merge it into some range test. Return the new tree if so. */
3740 fold_range_test (exp)
3743 int or_op = (TREE_CODE (exp) == TRUTH_ORIF_EXPR
3744 || TREE_CODE (exp) == TRUTH_OR_EXPR);
3745 int in0_p, in1_p, in_p;
3746 tree low0, low1, low, high0, high1, high;
3747 tree lhs = make_range (TREE_OPERAND (exp, 0), &in0_p, &low0, &high0);
3748 tree rhs = make_range (TREE_OPERAND (exp, 1), &in1_p, &low1, &high1);
3751 /* If this is an OR operation, invert both sides; we will invert
3752 again at the end. */
3754 in0_p = ! in0_p, in1_p = ! in1_p;
3756 /* If both expressions are the same, if we can merge the ranges, and we
3757 can build the range test, return it or it inverted. If one of the
3758 ranges is always true or always false, consider it to be the same
3759 expression as the other. */
3760 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
3761 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
3763 && 0 != (tem = (build_range_check (TREE_TYPE (exp),
3765 : rhs != 0 ? rhs : integer_zero_node,
3767 return or_op ? invert_truthvalue (tem) : tem;
3769 /* On machines where the branch cost is expensive, if this is a
3770 short-circuited branch and the underlying object on both sides
3771 is the same, make a non-short-circuit operation. */
3772 else if (BRANCH_COST >= 2
3773 && (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
3774 || TREE_CODE (exp) == TRUTH_ORIF_EXPR)
3775 && operand_equal_p (lhs, rhs, 0))
3777 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
3778 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
3779 which cases we can't do this. */
3780 if (simple_operand_p (lhs))
3781 return build (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
3782 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
3783 TREE_TYPE (exp), TREE_OPERAND (exp, 0),
3784 TREE_OPERAND (exp, 1));
3786 else if (global_bindings_p () == 0
3787 && ! contains_placeholder_p (lhs))
3789 tree common = save_expr (lhs);
3791 if (0 != (lhs = build_range_check (TREE_TYPE (exp), common,
3792 or_op ? ! in0_p : in0_p,
3794 && (0 != (rhs = build_range_check (TREE_TYPE (exp), common,
3795 or_op ? ! in1_p : in1_p,
3797 return build (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
3798 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
3799 TREE_TYPE (exp), lhs, rhs);
3806 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
3807 bit value. Arrange things so the extra bits will be set to zero if and
3808 only if C is signed-extended to its full width. If MASK is nonzero,
3809 it is an INTEGER_CST that should be AND'ed with the extra bits. */
3812 unextend (c, p, unsignedp, mask)
3818 tree type = TREE_TYPE (c);
3819 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
3822 if (p == modesize || unsignedp)
3825 /* We work by getting just the sign bit into the low-order bit, then
3826 into the high-order bit, then sign-extend. We then XOR that value
3828 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
3829 temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
3831 /* We must use a signed type in order to get an arithmetic right shift.
3832 However, we must also avoid introducing accidental overflows, so that
3833 a subsequent call to integer_zerop will work. Hence we must
3834 do the type conversion here. At this point, the constant is either
3835 zero or one, and the conversion to a signed type can never overflow.
3836 We could get an overflow if this conversion is done anywhere else. */
3837 if (TREE_UNSIGNED (type))
3838 temp = convert (signed_type (type), temp);
3840 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
3841 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
3843 temp = const_binop (BIT_AND_EXPR, temp, convert (TREE_TYPE (c), mask), 0);
3844 /* If necessary, convert the type back to match the type of C. */
3845 if (TREE_UNSIGNED (type))
3846 temp = convert (type, temp);
3848 return convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
3851 /* Find ways of folding logical expressions of LHS and RHS:
3852 Try to merge two comparisons to the same innermost item.
3853 Look for range tests like "ch >= '0' && ch <= '9'".
3854 Look for combinations of simple terms on machines with expensive branches
3855 and evaluate the RHS unconditionally.
3857 For example, if we have p->a == 2 && p->b == 4 and we can make an
3858 object large enough to span both A and B, we can do this with a comparison
3859 against the object ANDed with the a mask.
3861 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
3862 operations to do this with one comparison.
3864 We check for both normal comparisons and the BIT_AND_EXPRs made this by
3865 function and the one above.
3867 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
3868 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
3870 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
3873 We return the simplified tree or 0 if no optimization is possible. */
3876 fold_truthop (code, truth_type, lhs, rhs)
3877 enum tree_code code;
3878 tree truth_type, lhs, rhs;
3880 /* If this is the "or" of two comparisons, we can do something if we
3881 the comparisons are NE_EXPR. If this is the "and", we can do something
3882 if the comparisons are EQ_EXPR. I.e.,
3883 (a->b == 2 && a->c == 4) can become (a->new == NEW).
3885 WANTED_CODE is this operation code. For single bit fields, we can
3886 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
3887 comparison for one-bit fields. */
3889 enum tree_code wanted_code;
3890 enum tree_code lcode, rcode;
3891 tree ll_arg, lr_arg, rl_arg, rr_arg;
3892 tree ll_inner, lr_inner, rl_inner, rr_inner;
3893 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
3894 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
3895 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
3896 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
3897 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
3898 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
3899 enum machine_mode lnmode, rnmode;
3900 tree ll_mask, lr_mask, rl_mask, rr_mask;
3901 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
3902 tree l_const, r_const;
3903 tree lntype, rntype, result;
3904 int first_bit, end_bit;
3907 /* Start by getting the comparison codes. Fail if anything is volatile.
3908 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
3909 it were surrounded with a NE_EXPR. */
3911 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
3914 lcode = TREE_CODE (lhs);
3915 rcode = TREE_CODE (rhs);
3917 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
3918 lcode = NE_EXPR, lhs = build (NE_EXPR, truth_type, lhs, integer_zero_node);
3920 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
3921 rcode = NE_EXPR, rhs = build (NE_EXPR, truth_type, rhs, integer_zero_node);
3923 if (TREE_CODE_CLASS (lcode) != '<' || TREE_CODE_CLASS (rcode) != '<')
3926 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
3927 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
3929 ll_arg = TREE_OPERAND (lhs, 0);
3930 lr_arg = TREE_OPERAND (lhs, 1);
3931 rl_arg = TREE_OPERAND (rhs, 0);
3932 rr_arg = TREE_OPERAND (rhs, 1);
3934 /* If the RHS can be evaluated unconditionally and its operands are
3935 simple, it wins to evaluate the RHS unconditionally on machines
3936 with expensive branches. In this case, this isn't a comparison
3937 that can be merged. Avoid doing this if the RHS is a floating-point
3938 comparison since those can trap. */
3940 if (BRANCH_COST >= 2
3941 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
3942 && simple_operand_p (rl_arg)
3943 && simple_operand_p (rr_arg))
3944 return build (code, truth_type, lhs, rhs);
3946 /* See if the comparisons can be merged. Then get all the parameters for
3949 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
3950 || (rcode != EQ_EXPR && rcode != NE_EXPR))
3954 ll_inner = decode_field_reference (ll_arg,
3955 &ll_bitsize, &ll_bitpos, &ll_mode,
3956 &ll_unsignedp, &volatilep, &ll_mask,
3958 lr_inner = decode_field_reference (lr_arg,
3959 &lr_bitsize, &lr_bitpos, &lr_mode,
3960 &lr_unsignedp, &volatilep, &lr_mask,
3962 rl_inner = decode_field_reference (rl_arg,
3963 &rl_bitsize, &rl_bitpos, &rl_mode,
3964 &rl_unsignedp, &volatilep, &rl_mask,
3966 rr_inner = decode_field_reference (rr_arg,
3967 &rr_bitsize, &rr_bitpos, &rr_mode,
3968 &rr_unsignedp, &volatilep, &rr_mask,
3971 /* It must be true that the inner operation on the lhs of each
3972 comparison must be the same if we are to be able to do anything.
3973 Then see if we have constants. If not, the same must be true for
3975 if (volatilep || ll_inner == 0 || rl_inner == 0
3976 || ! operand_equal_p (ll_inner, rl_inner, 0))
3979 if (TREE_CODE (lr_arg) == INTEGER_CST
3980 && TREE_CODE (rr_arg) == INTEGER_CST)
3981 l_const = lr_arg, r_const = rr_arg;
3982 else if (lr_inner == 0 || rr_inner == 0
3983 || ! operand_equal_p (lr_inner, rr_inner, 0))
3986 l_const = r_const = 0;
3988 /* If either comparison code is not correct for our logical operation,
3989 fail. However, we can convert a one-bit comparison against zero into
3990 the opposite comparison against that bit being set in the field. */
3992 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
3993 if (lcode != wanted_code)
3995 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
3997 /* Make the left operand unsigned, since we are only interested
3998 in the value of one bit. Otherwise we are doing the wrong
4007 /* This is analogous to the code for l_const above. */
4008 if (rcode != wanted_code)
4010 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
4019 /* See if we can find a mode that contains both fields being compared on
4020 the left. If we can't, fail. Otherwise, update all constants and masks
4021 to be relative to a field of that size. */
4022 first_bit = MIN (ll_bitpos, rl_bitpos);
4023 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
4024 lnmode = get_best_mode (end_bit - first_bit, first_bit,
4025 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
4027 if (lnmode == VOIDmode)
4030 lnbitsize = GET_MODE_BITSIZE (lnmode);
4031 lnbitpos = first_bit & ~ (lnbitsize - 1);
4032 lntype = type_for_size (lnbitsize, 1);
4033 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
4035 if (BYTES_BIG_ENDIAN)
4037 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
4038 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
4041 ll_mask = const_binop (LSHIFT_EXPR, convert (lntype, ll_mask),
4042 size_int (xll_bitpos), 0);
4043 rl_mask = const_binop (LSHIFT_EXPR, convert (lntype, rl_mask),
4044 size_int (xrl_bitpos), 0);
4048 l_const = convert (lntype, l_const);
4049 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
4050 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
4051 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
4052 fold (build1 (BIT_NOT_EXPR,
4056 warning ("comparison is always %d", wanted_code == NE_EXPR);
4058 return convert (truth_type,
4059 wanted_code == NE_EXPR
4060 ? integer_one_node : integer_zero_node);
4065 r_const = convert (lntype, r_const);
4066 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
4067 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
4068 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
4069 fold (build1 (BIT_NOT_EXPR,
4073 warning ("comparison is always %d", wanted_code == NE_EXPR);
4075 return convert (truth_type,
4076 wanted_code == NE_EXPR
4077 ? integer_one_node : integer_zero_node);
4081 /* If the right sides are not constant, do the same for it. Also,
4082 disallow this optimization if a size or signedness mismatch occurs
4083 between the left and right sides. */
4086 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
4087 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
4088 /* Make sure the two fields on the right
4089 correspond to the left without being swapped. */
4090 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
4093 first_bit = MIN (lr_bitpos, rr_bitpos);
4094 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
4095 rnmode = get_best_mode (end_bit - first_bit, first_bit,
4096 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
4098 if (rnmode == VOIDmode)
4101 rnbitsize = GET_MODE_BITSIZE (rnmode);
4102 rnbitpos = first_bit & ~ (rnbitsize - 1);
4103 rntype = type_for_size (rnbitsize, 1);
4104 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
4106 if (BYTES_BIG_ENDIAN)
4108 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
4109 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
4112 lr_mask = const_binop (LSHIFT_EXPR, convert (rntype, lr_mask),
4113 size_int (xlr_bitpos), 0);
4114 rr_mask = const_binop (LSHIFT_EXPR, convert (rntype, rr_mask),
4115 size_int (xrr_bitpos), 0);
4117 /* Make a mask that corresponds to both fields being compared.
4118 Do this for both items being compared. If the operands are the
4119 same size and the bits being compared are in the same position
4120 then we can do this by masking both and comparing the masked
4122 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
4123 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
4124 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
4126 lhs = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
4127 ll_unsignedp || rl_unsignedp);
4128 if (! all_ones_mask_p (ll_mask, lnbitsize))
4129 lhs = build (BIT_AND_EXPR, lntype, lhs, ll_mask);
4131 rhs = make_bit_field_ref (lr_inner, rntype, rnbitsize, rnbitpos,
4132 lr_unsignedp || rr_unsignedp);
4133 if (! all_ones_mask_p (lr_mask, rnbitsize))
4134 rhs = build (BIT_AND_EXPR, rntype, rhs, lr_mask);
4136 return build (wanted_code, truth_type, lhs, rhs);
4139 /* There is still another way we can do something: If both pairs of
4140 fields being compared are adjacent, we may be able to make a wider
4141 field containing them both.
4143 Note that we still must mask the lhs/rhs expressions. Furthermore,
4144 the mask must be shifted to account for the shift done by
4145 make_bit_field_ref. */
4146 if ((ll_bitsize + ll_bitpos == rl_bitpos
4147 && lr_bitsize + lr_bitpos == rr_bitpos)
4148 || (ll_bitpos == rl_bitpos + rl_bitsize
4149 && lr_bitpos == rr_bitpos + rr_bitsize))
4153 lhs = make_bit_field_ref (ll_inner, lntype, ll_bitsize + rl_bitsize,
4154 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
4155 rhs = make_bit_field_ref (lr_inner, rntype, lr_bitsize + rr_bitsize,
4156 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
4158 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
4159 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
4160 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
4161 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);
4163 /* Convert to the smaller type before masking out unwanted bits. */
4165 if (lntype != rntype)
4167 if (lnbitsize > rnbitsize)
4169 lhs = convert (rntype, lhs);
4170 ll_mask = convert (rntype, ll_mask);
4173 else if (lnbitsize < rnbitsize)
4175 rhs = convert (lntype, rhs);
4176 lr_mask = convert (lntype, lr_mask);
4181 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
4182 lhs = build (BIT_AND_EXPR, type, lhs, ll_mask);
4184 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
4185 rhs = build (BIT_AND_EXPR, type, rhs, lr_mask);
4187 return build (wanted_code, truth_type, lhs, rhs);
4193 /* Handle the case of comparisons with constants. If there is something in
4194 common between the masks, those bits of the constants must be the same.
4195 If not, the condition is always false. Test for this to avoid generating
4196 incorrect code below. */
4197 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
4198 if (! integer_zerop (result)
4199 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
4200 const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
4202 if (wanted_code == NE_EXPR)
4204 warning ("`or' of unmatched not-equal tests is always 1");
4205 return convert (truth_type, integer_one_node);
4209 warning ("`and' of mutually exclusive equal-tests is always 0");
4210 return convert (truth_type, integer_zero_node);
4214 /* Construct the expression we will return. First get the component
4215 reference we will make. Unless the mask is all ones the width of
4216 that field, perform the mask operation. Then compare with the
4218 result = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
4219 ll_unsignedp || rl_unsignedp);
4221 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
4222 if (! all_ones_mask_p (ll_mask, lnbitsize))
4223 result = build (BIT_AND_EXPR, lntype, result, ll_mask);
4225 return build (wanted_code, truth_type, result,
4226 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
4229 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
4233 optimize_minmax_comparison (t)
4236 tree type = TREE_TYPE (t);
4237 tree arg0 = TREE_OPERAND (t, 0);
4238 enum tree_code op_code;
4239 tree comp_const = TREE_OPERAND (t, 1);
4241 int consts_equal, consts_lt;
4244 STRIP_SIGN_NOPS (arg0);
4246 op_code = TREE_CODE (arg0);
4247 minmax_const = TREE_OPERAND (arg0, 1);
4248 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
4249 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
4250 inner = TREE_OPERAND (arg0, 0);
4252 /* If something does not permit us to optimize, return the original tree. */
4253 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
4254 || TREE_CODE (comp_const) != INTEGER_CST
4255 || TREE_CONSTANT_OVERFLOW (comp_const)
4256 || TREE_CODE (minmax_const) != INTEGER_CST
4257 || TREE_CONSTANT_OVERFLOW (minmax_const))
4260 /* Now handle all the various comparison codes. We only handle EQ_EXPR
4261 and GT_EXPR, doing the rest with recursive calls using logical
4263 switch (TREE_CODE (t))
4265 case NE_EXPR: case LT_EXPR: case LE_EXPR:
4267 invert_truthvalue (optimize_minmax_comparison (invert_truthvalue (t)));
4271 fold (build (TRUTH_ORIF_EXPR, type,
4272 optimize_minmax_comparison
4273 (build (EQ_EXPR, type, arg0, comp_const)),
4274 optimize_minmax_comparison
4275 (build (GT_EXPR, type, arg0, comp_const))));
4278 if (op_code == MAX_EXPR && consts_equal)
4279 /* MAX (X, 0) == 0 -> X <= 0 */
4280 return fold (build (LE_EXPR, type, inner, comp_const));
4282 else if (op_code == MAX_EXPR && consts_lt)
4283 /* MAX (X, 0) == 5 -> X == 5 */
4284 return fold (build (EQ_EXPR, type, inner, comp_const));
4286 else if (op_code == MAX_EXPR)
4287 /* MAX (X, 0) == -1 -> false */
4288 return omit_one_operand (type, integer_zero_node, inner);
4290 else if (consts_equal)
4291 /* MIN (X, 0) == 0 -> X >= 0 */
4292 return fold (build (GE_EXPR, type, inner, comp_const));
4295 /* MIN (X, 0) == 5 -> false */
4296 return omit_one_operand (type, integer_zero_node, inner);
4299 /* MIN (X, 0) == -1 -> X == -1 */
4300 return fold (build (EQ_EXPR, type, inner, comp_const));
4303 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
4304 /* MAX (X, 0) > 0 -> X > 0
4305 MAX (X, 0) > 5 -> X > 5 */
4306 return fold (build (GT_EXPR, type, inner, comp_const));
4308 else if (op_code == MAX_EXPR)
4309 /* MAX (X, 0) > -1 -> true */
4310 return omit_one_operand (type, integer_one_node, inner);
4312 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
4313 /* MIN (X, 0) > 0 -> false
4314 MIN (X, 0) > 5 -> false */
4315 return omit_one_operand (type, integer_zero_node, inner);
4318 /* MIN (X, 0) > -1 -> X > -1 */
4319 return fold (build (GT_EXPR, type, inner, comp_const));
4326 /* T is an integer expression that is being multiplied, divided, or taken a
4327 modulus (CODE says which and what kind of divide or modulus) by a
4328 constant C. See if we can eliminate that operation by folding it with
4329 other operations already in T. WIDE_TYPE, if non-null, is a type that
4330 should be used for the computation if wider than our type.
4332 For example, if we are dividing (X * 8) + (Y + 16) by 4, we can return
4333 (X * 2) + (Y + 4). We must, however, be assured that either the original
4334 expression would not overflow or that overflow is undefined for the type
4335 in the language in question.
4337 We also canonicalize (X + 7) * 4 into X * 4 + 28 in the hope that either
4338 the machine has a multiply-accumulate insn or that this is part of an
4339 addressing calculation.
4341 If we return a non-null expression, it is an equivalent form of the
4342 original computation, but need not be in the original type. */
4345 extract_muldiv (t, c, code, wide_type)
4348 enum tree_code code;
4351 tree type = TREE_TYPE (t);
4352 enum tree_code tcode = TREE_CODE (t);
4353 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
4354 > GET_MODE_SIZE (TYPE_MODE (type)))
4355 ? wide_type : type);
4357 int same_p = tcode == code;
4358 tree op0 = NULL_TREE, op1 = NULL_TREE;
4360 /* Don't deal with constants of zero here; they confuse the code below. */
4361 if (integer_zerop (c))
4364 if (TREE_CODE_CLASS (tcode) == '1')
4365 op0 = TREE_OPERAND (t, 0);
4367 if (TREE_CODE_CLASS (tcode) == '2')
4368 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
4370 /* Note that we need not handle conditional operations here since fold
4371 already handles those cases. So just do arithmetic here. */
4375 /* For a constant, we can always simplify if we are a multiply
4376 or (for divide and modulus) if it is a multiple of our constant. */
4377 if (code == MULT_EXPR
4378 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
4379 return const_binop (code, convert (ctype, t), convert (ctype, c), 0);
4382 case CONVERT_EXPR: case NON_LVALUE_EXPR: case NOP_EXPR:
4383 /* Pass the constant down and see if we can make a simplification. If
4384 we can, replace this expression with the inner simplification for
4385 possible later conversion to our or some other type. */
4386 if (0 != (t1 = extract_muldiv (op0, convert (TREE_TYPE (op0), c), code,
4387 code == MULT_EXPR ? ctype : NULL_TREE)))
4391 case NEGATE_EXPR: case ABS_EXPR:
4392 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
4393 return fold (build1 (tcode, ctype, convert (ctype, t1)));
4396 case MIN_EXPR: case MAX_EXPR:
4397 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
4398 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0
4399 && (t2 = extract_muldiv (op1, c, code, wide_type)) != 0)
4401 if (tree_int_cst_sgn (c) < 0)
4402 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
4404 return fold (build (tcode, ctype, convert (ctype, t1),
4405 convert (ctype, t2)));
4409 case WITH_RECORD_EXPR:
4410 if ((t1 = extract_muldiv (TREE_OPERAND (t, 0), c, code, wide_type)) != 0)
4411 return build (WITH_RECORD_EXPR, TREE_TYPE (t1), t1,
4412 TREE_OPERAND (t, 1));
4416 /* If this has not been evaluated and the operand has no side effects,
4417 we can see if we can do something inside it and make a new one.
4418 Note that this test is overly conservative since we can do this
4419 if the only reason it had side effects is that it was another
4420 similar SAVE_EXPR, but that isn't worth bothering with. */
4421 if (SAVE_EXPR_RTL (t) == 0 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0))
4422 && 0 != (t1 = extract_muldiv (TREE_OPERAND (t, 0), c, code,
4424 return save_expr (t1);
4427 case LSHIFT_EXPR: case RSHIFT_EXPR:
4428 /* If the second operand is constant, this is a multiplication
4429 or floor division, by a power of two, so we can treat it that
4430 way unless the multiplier or divisor overflows. */
4431 if (TREE_CODE (op1) == INTEGER_CST
4432 && 0 != (t1 = convert (ctype,
4433 const_binop (LSHIFT_EXPR, size_one_node,
4435 && ! TREE_OVERFLOW (t1))
4436 return extract_muldiv (build (tcode == LSHIFT_EXPR
4437 ? MULT_EXPR : FLOOR_DIV_EXPR,
4438 ctype, convert (ctype, op0), t1),
4439 c, code, wide_type);
4442 case PLUS_EXPR: case MINUS_EXPR:
4443 /* See if we can eliminate the operation on both sides. If we can, we
4444 can return a new PLUS or MINUS. If we can't, the only remaining
4445 cases where we can do anything are if the second operand is a
4447 t1 = extract_muldiv (op0, c, code, wide_type);
4448 t2 = extract_muldiv (op1, c, code, wide_type);
4449 if (t1 != 0 && t2 != 0)
4450 return fold (build (tcode, ctype, convert (ctype, t1),
4451 convert (ctype, t2)));
4453 /* If this was a subtraction, negate OP1 and set it to be an addition.
4454 This simplifies the logic below. */
4455 if (tcode == MINUS_EXPR)
4456 tcode = PLUS_EXPR, op1 = negate_expr (op1);
4458 if (TREE_CODE (op1) != INTEGER_CST)
4461 /* If either OP1 or C are negative, this optimization is not safe for
4462 some of the division and remainder types while for others we need
4463 to change the code. */
4464 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
4466 if (code == CEIL_DIV_EXPR)
4467 code = FLOOR_DIV_EXPR;
4468 else if (code == CEIL_MOD_EXPR)
4469 code = FLOOR_MOD_EXPR;
4470 else if (code == FLOOR_DIV_EXPR)
4471 code = CEIL_DIV_EXPR;
4472 else if (code == FLOOR_MOD_EXPR)
4473 code = CEIL_MOD_EXPR;
4474 else if (code != MULT_EXPR)
4478 /* Now do the operation and verify it doesn't overflow. */
4479 op1 = const_binop (code, convert (ctype, op1), convert (ctype, c), 0);
4480 if (op1 == 0 || TREE_OVERFLOW (op1))
4483 /* If we were able to eliminate our operation from the first side,
4484 apply our operation to the second side and reform the PLUS. */
4485 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
4486 return fold (build (tcode, ctype, convert (ctype, t1), op1));
4488 /* The last case is if we are a multiply. In that case, we can
4489 apply the distributive law to commute the multiply and addition
4490 if the multiplication of the constants doesn't overflow. */
4491 if (code == MULT_EXPR)
4492 return fold (build (tcode, ctype, fold (build (code, ctype,
4493 convert (ctype, op0),
4494 convert (ctype, c))),
4500 /* We have a special case here if we are doing something like
4501 (C * 8) % 4 since we know that's zero. */
4502 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
4503 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
4504 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
4505 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
4506 return omit_one_operand (type, integer_zero_node, op0);
4508 /* ... fall through ... */
4510 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
4511 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
4512 /* If we can extract our operation from the LHS, do so and return a
4513 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
4514 do something only if the second operand is a constant. */
4516 && (t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
4517 return fold (build (tcode, ctype, convert (ctype, t1),
4518 convert (ctype, op1)));
4519 else if (tcode == MULT_EXPR && code == MULT_EXPR
4520 && (t1 = extract_muldiv (op1, c, code, wide_type)) != 0)
4521 return fold (build (tcode, ctype, convert (ctype, op0),
4522 convert (ctype, t1)));
4523 else if (TREE_CODE (op1) != INTEGER_CST)
4526 /* If these are the same operation types, we can associate them
4527 assuming no overflow. */
4529 && 0 != (t1 = const_binop (MULT_EXPR, convert (ctype, op1),
4530 convert (ctype, c), 0))
4531 && ! TREE_OVERFLOW (t1))
4532 return fold (build (tcode, ctype, convert (ctype, op0), t1));
4534 /* If these operations "cancel" each other, we have the main
4535 optimizations of this pass, which occur when either constant is a
4536 multiple of the other, in which case we replace this with either an
4537 operation or CODE or TCODE.
4539 If we have an unsigned type that is not a sizetype, we canot do
4540 this since it will change the result if the original computation
4542 if ((! TREE_UNSIGNED (ctype)
4543 || (TREE_CODE (ctype) == INTEGER_TYPE
4544 && TYPE_IS_SIZETYPE (ctype)))
4545 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
4546 || (tcode == MULT_EXPR
4547 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
4548 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR)))
4550 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
4551 return fold (build (tcode, ctype, convert (ctype, op0),
4553 const_binop (TRUNC_DIV_EXPR,
4555 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
4556 return fold (build (code, ctype, convert (ctype, op0),
4558 const_binop (TRUNC_DIV_EXPR,
4570 /* If T contains a COMPOUND_EXPR which was inserted merely to evaluate
4571 S, a SAVE_EXPR, return the expression actually being evaluated. Note
4572 that we may sometimes modify the tree. */
4575 strip_compound_expr (t, s)
4579 enum tree_code code = TREE_CODE (t);
4581 /* See if this is the COMPOUND_EXPR we want to eliminate. */
4582 if (code == COMPOUND_EXPR && TREE_CODE (TREE_OPERAND (t, 0)) == CONVERT_EXPR
4583 && TREE_OPERAND (TREE_OPERAND (t, 0), 0) == s)
4584 return TREE_OPERAND (t, 1);
4586 /* See if this is a COND_EXPR or a simple arithmetic operator. We
4587 don't bother handling any other types. */
4588 else if (code == COND_EXPR)
4590 TREE_OPERAND (t, 0) = strip_compound_expr (TREE_OPERAND (t, 0), s);
4591 TREE_OPERAND (t, 1) = strip_compound_expr (TREE_OPERAND (t, 1), s);
4592 TREE_OPERAND (t, 2) = strip_compound_expr (TREE_OPERAND (t, 2), s);
4594 else if (TREE_CODE_CLASS (code) == '1')
4595 TREE_OPERAND (t, 0) = strip_compound_expr (TREE_OPERAND (t, 0), s);
4596 else if (TREE_CODE_CLASS (code) == '<'
4597 || TREE_CODE_CLASS (code) == '2')
4599 TREE_OPERAND (t, 0) = strip_compound_expr (TREE_OPERAND (t, 0), s);
4600 TREE_OPERAND (t, 1) = strip_compound_expr (TREE_OPERAND (t, 1), s);
4606 /* Return a node which has the indicated constant VALUE (either 0 or
4607 1), and is of the indicated TYPE. */
4610 constant_boolean_node (value, type)
4614 if (type == integer_type_node)
4615 return value ? integer_one_node : integer_zero_node;
4616 else if (TREE_CODE (type) == BOOLEAN_TYPE)
4617 return truthvalue_conversion (value ? integer_one_node :
4621 tree t = build_int_2 (value, 0);
4623 TREE_TYPE (t) = type;
4628 /* Utility function for the following routine, to see how complex a nesting of
4629 COND_EXPRs can be. EXPR is the expression and LIMIT is a count beyond which
4630 we don't care (to avoid spending too much time on complex expressions.). */
4633 count_cond (expr, lim)
4639 if (TREE_CODE (expr) != COND_EXPR)
4644 true = count_cond (TREE_OPERAND (expr, 1), lim - 1);
4645 false = count_cond (TREE_OPERAND (expr, 2), lim - 1 - true);
4646 return MIN (lim, 1 + true + false);
4649 /* Perform constant folding and related simplification of EXPR.
4650 The related simplifications include x*1 => x, x*0 => 0, etc.,
4651 and application of the associative law.
4652 NOP_EXPR conversions may be removed freely (as long as we
4653 are careful not to change the C type of the overall expression)
4654 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
4655 but we can constant-fold them if they have constant operands. */
4661 register tree t = expr;
4662 tree t1 = NULL_TREE;
4664 tree type = TREE_TYPE (expr);
4665 register tree arg0 = NULL_TREE, arg1 = NULL_TREE;
4666 register enum tree_code code = TREE_CODE (t);
4669 /* WINS will be nonzero when the switch is done
4670 if all operands are constant. */
4673 /* Don't try to process an RTL_EXPR since its operands aren't trees.
4674 Likewise for a SAVE_EXPR that's already been evaluated. */
4675 if (code == RTL_EXPR || (code == SAVE_EXPR && SAVE_EXPR_RTL (t)) != 0)
4678 /* Return right away if already constant. */
4679 if (TREE_CONSTANT (t))
4681 if (code == CONST_DECL)
4682 return DECL_INITIAL (t);
4686 #ifdef MAX_INTEGER_COMPUTATION_MODE
4687 check_max_integer_computation_mode (expr);
4690 kind = TREE_CODE_CLASS (code);
4691 if (code == NOP_EXPR || code == FLOAT_EXPR || code == CONVERT_EXPR)
4695 /* Special case for conversion ops that can have fixed point args. */
4696 arg0 = TREE_OPERAND (t, 0);
4698 /* Don't use STRIP_NOPS, because signedness of argument type matters. */
4700 STRIP_SIGN_NOPS (arg0);
4702 if (arg0 != 0 && TREE_CODE (arg0) == COMPLEX_CST)
4703 subop = TREE_REALPART (arg0);
4707 if (subop != 0 && TREE_CODE (subop) != INTEGER_CST
4708 #if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
4709 && TREE_CODE (subop) != REAL_CST
4710 #endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
4712 /* Note that TREE_CONSTANT isn't enough:
4713 static var addresses are constant but we can't
4714 do arithmetic on them. */
4717 else if (kind == 'e' || kind == '<'
4718 || kind == '1' || kind == '2' || kind == 'r')
4720 register int len = tree_code_length[(int) code];
4722 for (i = 0; i < len; i++)
4724 tree op = TREE_OPERAND (t, i);
4728 continue; /* Valid for CALL_EXPR, at least. */
4730 if (kind == '<' || code == RSHIFT_EXPR)
4732 /* Signedness matters here. Perhaps we can refine this
4734 STRIP_SIGN_NOPS (op);
4737 /* Strip any conversions that don't change the mode. */
4740 if (TREE_CODE (op) == COMPLEX_CST)
4741 subop = TREE_REALPART (op);
4745 if (TREE_CODE (subop) != INTEGER_CST
4746 #if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
4747 && TREE_CODE (subop) != REAL_CST
4748 #endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
4750 /* Note that TREE_CONSTANT isn't enough:
4751 static var addresses are constant but we can't
4752 do arithmetic on them. */
4762 /* If this is a commutative operation, and ARG0 is a constant, move it
4763 to ARG1 to reduce the number of tests below. */
4764 if ((code == PLUS_EXPR || code == MULT_EXPR || code == MIN_EXPR
4765 || code == MAX_EXPR || code == BIT_IOR_EXPR || code == BIT_XOR_EXPR
4766 || code == BIT_AND_EXPR)
4767 && (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST))
4769 tem = arg0; arg0 = arg1; arg1 = tem;
4771 tem = TREE_OPERAND (t, 0); TREE_OPERAND (t, 0) = TREE_OPERAND (t, 1);
4772 TREE_OPERAND (t, 1) = tem;
4775 /* Now WINS is set as described above,
4776 ARG0 is the first operand of EXPR,
4777 and ARG1 is the second operand (if it has more than one operand).
4779 First check for cases where an arithmetic operation is applied to a
4780 compound, conditional, or comparison operation. Push the arithmetic
4781 operation inside the compound or conditional to see if any folding
4782 can then be done. Convert comparison to conditional for this purpose.
4783 The also optimizes non-constant cases that used to be done in
4786 Before we do that, see if this is a BIT_AND_EXPR or a BIT_OR_EXPR,
4787 one of the operands is a comparison and the other is a comparison, a
4788 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
4789 code below would make the expression more complex. Change it to a
4790 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
4791 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
4793 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
4794 || code == EQ_EXPR || code == NE_EXPR)
4795 && ((truth_value_p (TREE_CODE (arg0))
4796 && (truth_value_p (TREE_CODE (arg1))
4797 || (TREE_CODE (arg1) == BIT_AND_EXPR
4798 && integer_onep (TREE_OPERAND (arg1, 1)))))
4799 || (truth_value_p (TREE_CODE (arg1))
4800 && (truth_value_p (TREE_CODE (arg0))
4801 || (TREE_CODE (arg0) == BIT_AND_EXPR
4802 && integer_onep (TREE_OPERAND (arg0, 1)))))))
4804 t = fold (build (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
4805 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
4809 if (code == EQ_EXPR)
4810 t = invert_truthvalue (t);
4815 if (TREE_CODE_CLASS (code) == '1')
4817 if (TREE_CODE (arg0) == COMPOUND_EXPR)
4818 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
4819 fold (build1 (code, type, TREE_OPERAND (arg0, 1))));
4820 else if (TREE_CODE (arg0) == COND_EXPR)
4822 t = fold (build (COND_EXPR, type, TREE_OPERAND (arg0, 0),
4823 fold (build1 (code, type, TREE_OPERAND (arg0, 1))),
4824 fold (build1 (code, type, TREE_OPERAND (arg0, 2)))));
4826 /* If this was a conversion, and all we did was to move into
4827 inside the COND_EXPR, bring it back out. But leave it if
4828 it is a conversion from integer to integer and the
4829 result precision is no wider than a word since such a
4830 conversion is cheap and may be optimized away by combine,
4831 while it couldn't if it were outside the COND_EXPR. Then return
4832 so we don't get into an infinite recursion loop taking the
4833 conversion out and then back in. */
4835 if ((code == NOP_EXPR || code == CONVERT_EXPR
4836 || code == NON_LVALUE_EXPR)
4837 && TREE_CODE (t) == COND_EXPR
4838 && TREE_CODE (TREE_OPERAND (t, 1)) == code
4839 && TREE_CODE (TREE_OPERAND (t, 2)) == code
4840 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0))
4841 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 2), 0)))
4842 && ! (INTEGRAL_TYPE_P (TREE_TYPE (t))
4844 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0))))
4845 && TYPE_PRECISION (TREE_TYPE (t)) <= BITS_PER_WORD))
4846 t = build1 (code, type,
4848 TREE_TYPE (TREE_OPERAND
4849 (TREE_OPERAND (t, 1), 0)),
4850 TREE_OPERAND (t, 0),
4851 TREE_OPERAND (TREE_OPERAND (t, 1), 0),
4852 TREE_OPERAND (TREE_OPERAND (t, 2), 0)));
4855 else if (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<')
4856 return fold (build (COND_EXPR, type, arg0,
4857 fold (build1 (code, type, integer_one_node)),
4858 fold (build1 (code, type, integer_zero_node))));
4860 else if (TREE_CODE_CLASS (code) == '2'
4861 || TREE_CODE_CLASS (code) == '<')
4863 if (TREE_CODE (arg1) == COMPOUND_EXPR)
4864 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
4865 fold (build (code, type,
4866 arg0, TREE_OPERAND (arg1, 1))));
4867 else if ((TREE_CODE (arg1) == COND_EXPR
4868 || (TREE_CODE_CLASS (TREE_CODE (arg1)) == '<'
4869 && TREE_CODE_CLASS (code) != '<'))
4870 && (TREE_CODE (arg0) != COND_EXPR
4871 || count_cond (arg0, 25) + count_cond (arg1, 25) <= 25)
4872 && (! TREE_SIDE_EFFECTS (arg0)
4873 || (global_bindings_p () == 0
4874 && ! contains_placeholder_p (arg0))))
4876 tree test, true_value, false_value;
4877 tree lhs = 0, rhs = 0;
4879 if (TREE_CODE (arg1) == COND_EXPR)
4881 test = TREE_OPERAND (arg1, 0);
4882 true_value = TREE_OPERAND (arg1, 1);
4883 false_value = TREE_OPERAND (arg1, 2);
4887 tree testtype = TREE_TYPE (arg1);
4889 true_value = convert (testtype, integer_one_node);
4890 false_value = convert (testtype, integer_zero_node);
4893 /* If ARG0 is complex we want to make sure we only evaluate
4894 it once. Though this is only required if it is volatile, it
4895 might be more efficient even if it is not. However, if we
4896 succeed in folding one part to a constant, we do not need
4897 to make this SAVE_EXPR. Since we do this optimization
4898 primarily to see if we do end up with constant and this
4899 SAVE_EXPR interferes with later optimizations, suppressing
4900 it when we can is important.
4902 If we are not in a function, we can't make a SAVE_EXPR, so don't
4903 try to do so. Don't try to see if the result is a constant
4904 if an arm is a COND_EXPR since we get exponential behavior
4907 if (TREE_CODE (arg0) != SAVE_EXPR && ! TREE_CONSTANT (arg0)
4908 && global_bindings_p () == 0
4909 && ((TREE_CODE (arg0) != VAR_DECL
4910 && TREE_CODE (arg0) != PARM_DECL)
4911 || TREE_SIDE_EFFECTS (arg0)))
4913 if (TREE_CODE (true_value) != COND_EXPR)
4914 lhs = fold (build (code, type, arg0, true_value));
4916 if (TREE_CODE (false_value) != COND_EXPR)
4917 rhs = fold (build (code, type, arg0, false_value));
4919 if ((lhs == 0 || ! TREE_CONSTANT (lhs))
4920 && (rhs == 0 || !TREE_CONSTANT (rhs)))
4921 arg0 = save_expr (arg0), lhs = rhs = 0;
4925 lhs = fold (build (code, type, arg0, true_value));
4927 rhs = fold (build (code, type, arg0, false_value));
4929 test = fold (build (COND_EXPR, type, test, lhs, rhs));
4931 if (TREE_CODE (arg0) == SAVE_EXPR)
4932 return build (COMPOUND_EXPR, type,
4933 convert (void_type_node, arg0),
4934 strip_compound_expr (test, arg0));
4936 return convert (type, test);
4939 else if (TREE_CODE (arg0) == COMPOUND_EXPR)
4940 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
4941 fold (build (code, type, TREE_OPERAND (arg0, 1), arg1)));
4942 else if ((TREE_CODE (arg0) == COND_EXPR
4943 || (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
4944 && TREE_CODE_CLASS (code) != '<'))
4945 && (TREE_CODE (arg1) != COND_EXPR
4946 || count_cond (arg0, 25) + count_cond (arg1, 25) <= 25)
4947 && (! TREE_SIDE_EFFECTS (arg1)
4948 || (global_bindings_p () == 0
4949 && ! contains_placeholder_p (arg1))))
4951 tree test, true_value, false_value;
4952 tree lhs = 0, rhs = 0;
4954 if (TREE_CODE (arg0) == COND_EXPR)
4956 test = TREE_OPERAND (arg0, 0);
4957 true_value = TREE_OPERAND (arg0, 1);
4958 false_value = TREE_OPERAND (arg0, 2);
4962 tree testtype = TREE_TYPE (arg0);
4964 true_value = convert (testtype, integer_one_node);
4965 false_value = convert (testtype, integer_zero_node);
4968 if (TREE_CODE (arg1) != SAVE_EXPR && ! TREE_CONSTANT (arg0)
4969 && global_bindings_p () == 0
4970 && ((TREE_CODE (arg1) != VAR_DECL
4971 && TREE_CODE (arg1) != PARM_DECL)
4972 || TREE_SIDE_EFFECTS (arg1)))
4974 if (TREE_CODE (true_value) != COND_EXPR)
4975 lhs = fold (build (code, type, true_value, arg1));
4977 if (TREE_CODE (false_value) != COND_EXPR)
4978 rhs = fold (build (code, type, false_value, arg1));
4980 if ((lhs == 0 || ! TREE_CONSTANT (lhs))
4981 && (rhs == 0 || !TREE_CONSTANT (rhs)))
4982 arg1 = save_expr (arg1), lhs = rhs = 0;
4986 lhs = fold (build (code, type, true_value, arg1));
4989 rhs = fold (build (code, type, false_value, arg1));
4991 test = fold (build (COND_EXPR, type, test, lhs, rhs));
4992 if (TREE_CODE (arg1) == SAVE_EXPR)
4993 return build (COMPOUND_EXPR, type,
4994 convert (void_type_node, arg1),
4995 strip_compound_expr (test, arg1));
4997 return convert (type, test);
5000 else if (TREE_CODE_CLASS (code) == '<'
5001 && TREE_CODE (arg0) == COMPOUND_EXPR)
5002 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
5003 fold (build (code, type, TREE_OPERAND (arg0, 1), arg1)));
5004 else if (TREE_CODE_CLASS (code) == '<'
5005 && TREE_CODE (arg1) == COMPOUND_EXPR)
5006 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
5007 fold (build (code, type, arg0, TREE_OPERAND (arg1, 1))));
5019 return fold (DECL_INITIAL (t));
5024 case FIX_TRUNC_EXPR:
5025 /* Other kinds of FIX are not handled properly by fold_convert. */
5027 if (TREE_TYPE (TREE_OPERAND (t, 0)) == TREE_TYPE (t))
5028 return TREE_OPERAND (t, 0);
5030 /* Handle cases of two conversions in a row. */
5031 if (TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
5032 || TREE_CODE (TREE_OPERAND (t, 0)) == CONVERT_EXPR)
5034 tree inside_type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
5035 tree inter_type = TREE_TYPE (TREE_OPERAND (t, 0));
5036 tree final_type = TREE_TYPE (t);
5037 int inside_int = INTEGRAL_TYPE_P (inside_type);
5038 int inside_ptr = POINTER_TYPE_P (inside_type);
5039 int inside_float = FLOAT_TYPE_P (inside_type);
5040 unsigned int inside_prec = TYPE_PRECISION (inside_type);
5041 int inside_unsignedp = TREE_UNSIGNED (inside_type);
5042 int inter_int = INTEGRAL_TYPE_P (inter_type);
5043 int inter_ptr = POINTER_TYPE_P (inter_type);
5044 int inter_float = FLOAT_TYPE_P (inter_type);
5045 unsigned int inter_prec = TYPE_PRECISION (inter_type);
5046 int inter_unsignedp = TREE_UNSIGNED (inter_type);
5047 int final_int = INTEGRAL_TYPE_P (final_type);
5048 int final_ptr = POINTER_TYPE_P (final_type);
5049 int final_float = FLOAT_TYPE_P (final_type);
5050 unsigned int final_prec = TYPE_PRECISION (final_type);
5051 int final_unsignedp = TREE_UNSIGNED (final_type);
5053 /* In addition to the cases of two conversions in a row
5054 handled below, if we are converting something to its own
5055 type via an object of identical or wider precision, neither
5056 conversion is needed. */
5057 if (inside_type == final_type
5058 && ((inter_int && final_int) || (inter_float && final_float))
5059 && inter_prec >= final_prec)
5060 return TREE_OPERAND (TREE_OPERAND (t, 0), 0);
5062 /* Likewise, if the intermediate and final types are either both
5063 float or both integer, we don't need the middle conversion if
5064 it is wider than the final type and doesn't change the signedness
5065 (for integers). Avoid this if the final type is a pointer
5066 since then we sometimes need the inner conversion. Likewise if
5067 the outer has a precision not equal to the size of its mode. */
5068 if ((((inter_int || inter_ptr) && (inside_int || inside_ptr))
5069 || (inter_float && inside_float))
5070 && inter_prec >= inside_prec
5071 && (inter_float || inter_unsignedp == inside_unsignedp)
5072 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (final_type))
5073 && TYPE_MODE (final_type) == TYPE_MODE (inter_type))
5075 return convert (final_type, TREE_OPERAND (TREE_OPERAND (t, 0), 0));
5077 /* If we have a sign-extension of a zero-extended value, we can
5078 replace that by a single zero-extension. */
5079 if (inside_int && inter_int && final_int
5080 && inside_prec < inter_prec && inter_prec < final_prec
5081 && inside_unsignedp && !inter_unsignedp)
5082 return convert (final_type, TREE_OPERAND (TREE_OPERAND (t, 0), 0));
5084 /* Two conversions in a row are not needed unless:
5085 - some conversion is floating-point (overstrict for now), or
5086 - the intermediate type is narrower than both initial and
5088 - the intermediate type and innermost type differ in signedness,
5089 and the outermost type is wider than the intermediate, or
5090 - the initial type is a pointer type and the precisions of the
5091 intermediate and final types differ, or
5092 - the final type is a pointer type and the precisions of the
5093 initial and intermediate types differ. */
5094 if (! inside_float && ! inter_float && ! final_float
5095 && (inter_prec > inside_prec || inter_prec > final_prec)
5096 && ! (inside_int && inter_int
5097 && inter_unsignedp != inside_unsignedp
5098 && inter_prec < final_prec)
5099 && ((inter_unsignedp && inter_prec > inside_prec)
5100 == (final_unsignedp && final_prec > inter_prec))
5101 && ! (inside_ptr && inter_prec != final_prec)
5102 && ! (final_ptr && inside_prec != inter_prec)
5103 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (final_type))
5104 && TYPE_MODE (final_type) == TYPE_MODE (inter_type))
5106 return convert (final_type, TREE_OPERAND (TREE_OPERAND (t, 0), 0));
5109 if (TREE_CODE (TREE_OPERAND (t, 0)) == MODIFY_EXPR
5110 && TREE_CONSTANT (TREE_OPERAND (TREE_OPERAND (t, 0), 1))
5111 /* Detect assigning a bitfield. */
5112 && !(TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 0)) == COMPONENT_REF
5113 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t, 0), 0), 1))))
5115 /* Don't leave an assignment inside a conversion
5116 unless assigning a bitfield. */
5117 tree prev = TREE_OPERAND (t, 0);
5118 TREE_OPERAND (t, 0) = TREE_OPERAND (prev, 1);
5119 /* First do the assignment, then return converted constant. */
5120 t = build (COMPOUND_EXPR, TREE_TYPE (t), prev, fold (t));
5126 TREE_CONSTANT (t) = TREE_CONSTANT (arg0);
5129 return fold_convert (t, arg0);
5131 #if 0 /* This loses on &"foo"[0]. */
5136 /* Fold an expression like: "foo"[2] */
5137 if (TREE_CODE (arg0) == STRING_CST
5138 && TREE_CODE (arg1) == INTEGER_CST
5139 && compare_tree_int (arg1, TREE_STRING_LENGTH (arg0)) < 0)
5141 t = build_int_2 (TREE_STRING_POINTER (arg0)[TREE_INT_CST_LOW (arg))], 0);
5142 TREE_TYPE (t) = TREE_TYPE (TREE_TYPE (arg0));
5143 force_fit_type (t, 0);
5150 if (TREE_CODE (arg0) == CONSTRUCTOR)
5152 tree m = purpose_member (arg1, CONSTRUCTOR_ELTS (arg0));
5159 TREE_CONSTANT (t) = wins;
5165 if (TREE_CODE (arg0) == INTEGER_CST)
5167 HOST_WIDE_INT low, high;
5168 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
5169 TREE_INT_CST_HIGH (arg0),
5171 t = build_int_2 (low, high);
5172 TREE_TYPE (t) = type;
5174 = (TREE_OVERFLOW (arg0)
5175 | force_fit_type (t, overflow && !TREE_UNSIGNED (type)));
5176 TREE_CONSTANT_OVERFLOW (t)
5177 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg0);
5179 else if (TREE_CODE (arg0) == REAL_CST)
5180 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
5182 else if (TREE_CODE (arg0) == NEGATE_EXPR)
5183 return TREE_OPERAND (arg0, 0);
5185 /* Convert - (a - b) to (b - a) for non-floating-point. */
5186 else if (TREE_CODE (arg0) == MINUS_EXPR
5187 && (! FLOAT_TYPE_P (type) || flag_fast_math))
5188 return build (MINUS_EXPR, type, TREE_OPERAND (arg0, 1),
5189 TREE_OPERAND (arg0, 0));
5196 if (TREE_CODE (arg0) == INTEGER_CST)
5198 if (! TREE_UNSIGNED (type)
5199 && TREE_INT_CST_HIGH (arg0) < 0)
5201 HOST_WIDE_INT low, high;
5202 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
5203 TREE_INT_CST_HIGH (arg0),
5205 t = build_int_2 (low, high);
5206 TREE_TYPE (t) = type;
5208 = (TREE_OVERFLOW (arg0)
5209 | force_fit_type (t, overflow));
5210 TREE_CONSTANT_OVERFLOW (t)
5211 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg0);
5214 else if (TREE_CODE (arg0) == REAL_CST)
5216 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
5217 t = build_real (type,
5218 REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
5221 else if (TREE_CODE (arg0) == ABS_EXPR || TREE_CODE (arg0) == NEGATE_EXPR)
5222 return build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0));
5226 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
5227 return convert (type, arg0);
5228 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
5229 return build (COMPLEX_EXPR, type,
5230 TREE_OPERAND (arg0, 0),
5231 negate_expr (TREE_OPERAND (arg0, 1)));
5232 else if (TREE_CODE (arg0) == COMPLEX_CST)
5233 return build_complex (type, TREE_OPERAND (arg0, 0),
5234 negate_expr (TREE_OPERAND (arg0, 1)));
5235 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
5236 return fold (build (TREE_CODE (arg0), type,
5237 fold (build1 (CONJ_EXPR, type,
5238 TREE_OPERAND (arg0, 0))),
5239 fold (build1 (CONJ_EXPR,
5240 type, TREE_OPERAND (arg0, 1)))));
5241 else if (TREE_CODE (arg0) == CONJ_EXPR)
5242 return TREE_OPERAND (arg0, 0);
5248 t = build_int_2 (~ TREE_INT_CST_LOW (arg0),
5249 ~ TREE_INT_CST_HIGH (arg0));
5250 TREE_TYPE (t) = type;
5251 force_fit_type (t, 0);
5252 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg0);
5253 TREE_CONSTANT_OVERFLOW (t) = TREE_CONSTANT_OVERFLOW (arg0);
5255 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
5256 return TREE_OPERAND (arg0, 0);
5260 /* A + (-B) -> A - B */
5261 if (TREE_CODE (arg1) == NEGATE_EXPR)
5262 return fold (build (MINUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
5263 /* (-A) + B -> B - A */
5264 if (TREE_CODE (arg0) == NEGATE_EXPR)
5265 return fold (build (MINUS_EXPR, type, arg1, TREE_OPERAND (arg0, 0)));
5266 else if (! FLOAT_TYPE_P (type))
5268 if (integer_zerop (arg1))
5269 return non_lvalue (convert (type, arg0));
5271 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
5272 with a constant, and the two constants have no bits in common,
5273 we should treat this as a BIT_IOR_EXPR since this may produce more
5275 if (TREE_CODE (arg0) == BIT_AND_EXPR
5276 && TREE_CODE (arg1) == BIT_AND_EXPR
5277 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
5278 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
5279 && integer_zerop (const_binop (BIT_AND_EXPR,
5280 TREE_OPERAND (arg0, 1),
5281 TREE_OPERAND (arg1, 1), 0)))
5283 code = BIT_IOR_EXPR;
5287 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
5288 (plus (plus (mult) (mult)) (foo)) so that we can
5289 take advantage of the factoring cases below. */
5290 if ((TREE_CODE (arg0) == PLUS_EXPR
5291 && TREE_CODE (arg1) == MULT_EXPR)
5292 || (TREE_CODE (arg1) == PLUS_EXPR
5293 && TREE_CODE (arg0) == MULT_EXPR))
5295 tree parg0, parg1, parg, marg;
5297 if (TREE_CODE (arg0) == PLUS_EXPR)
5298 parg = arg0, marg = arg1;
5300 parg = arg1, marg = arg0;
5301 parg0 = TREE_OPERAND (parg, 0);
5302 parg1 = TREE_OPERAND (parg, 1);
5306 if (TREE_CODE (parg0) == MULT_EXPR
5307 && TREE_CODE (parg1) != MULT_EXPR)
5308 return fold (build (PLUS_EXPR, type,
5309 fold (build (PLUS_EXPR, type, parg0, marg)),
5311 if (TREE_CODE (parg0) != MULT_EXPR
5312 && TREE_CODE (parg1) == MULT_EXPR)
5313 return fold (build (PLUS_EXPR, type,
5314 fold (build (PLUS_EXPR, type, parg1, marg)),
5318 if (TREE_CODE (arg0) == MULT_EXPR && TREE_CODE (arg1) == MULT_EXPR)
5320 tree arg00, arg01, arg10, arg11;
5321 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
5323 /* (A * C) + (B * C) -> (A+B) * C.
5324 We are most concerned about the case where C is a constant,
5325 but other combinations show up during loop reduction. Since
5326 it is not difficult, try all four possibilities. */
5328 arg00 = TREE_OPERAND (arg0, 0);
5329 arg01 = TREE_OPERAND (arg0, 1);
5330 arg10 = TREE_OPERAND (arg1, 0);
5331 arg11 = TREE_OPERAND (arg1, 1);
5334 if (operand_equal_p (arg01, arg11, 0))
5335 same = arg01, alt0 = arg00, alt1 = arg10;
5336 else if (operand_equal_p (arg00, arg10, 0))
5337 same = arg00, alt0 = arg01, alt1 = arg11;
5338 else if (operand_equal_p (arg00, arg11, 0))
5339 same = arg00, alt0 = arg01, alt1 = arg10;
5340 else if (operand_equal_p (arg01, arg10, 0))
5341 same = arg01, alt0 = arg00, alt1 = arg11;
5343 /* No identical multiplicands; see if we can find a common
5344 power-of-two factor in non-power-of-two multiplies. This
5345 can help in multi-dimensional array access. */
5346 else if (TREE_CODE (arg01) == INTEGER_CST
5347 && TREE_CODE (arg11) == INTEGER_CST
5348 && TREE_INT_CST_HIGH (arg01) == 0
5349 && TREE_INT_CST_HIGH (arg11) == 0)
5351 HOST_WIDE_INT int01, int11, tmp;
5352 int01 = TREE_INT_CST_LOW (arg01);
5353 int11 = TREE_INT_CST_LOW (arg11);
5355 /* Move min of absolute values to int11. */
5356 if ((int01 >= 0 ? int01 : -int01)
5357 < (int11 >= 0 ? int11 : -int11))
5359 tmp = int01, int01 = int11, int11 = tmp;
5360 alt0 = arg00, arg00 = arg10, arg10 = alt0;
5361 alt0 = arg01, arg01 = arg11, arg11 = alt0;
5364 if (exact_log2 (int11) > 0 && int01 % int11 == 0)
5366 alt0 = fold (build (MULT_EXPR, type, arg00,
5367 build_int_2 (int01 / int11, 0)));
5374 return fold (build (MULT_EXPR, type,
5375 fold (build (PLUS_EXPR, type, alt0, alt1)),
5379 /* In IEEE floating point, x+0 may not equal x. */
5380 else if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
5382 && real_zerop (arg1))
5383 return non_lvalue (convert (type, arg0));
5384 /* x+(-0) equals x, even for IEEE. */
5385 else if (TREE_CODE (arg1) == REAL_CST
5386 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (arg1)))
5387 return non_lvalue (convert (type, arg0));
5390 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
5391 is a rotate of A by C1 bits. */
5392 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
5393 is a rotate of A by B bits. */
5395 register enum tree_code code0, code1;
5396 code0 = TREE_CODE (arg0);
5397 code1 = TREE_CODE (arg1);
5398 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
5399 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
5400 && operand_equal_p (TREE_OPERAND (arg0, 0),
5401 TREE_OPERAND (arg1,0), 0)
5402 && TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
5404 register tree tree01, tree11;
5405 register enum tree_code code01, code11;
5407 tree01 = TREE_OPERAND (arg0, 1);
5408 tree11 = TREE_OPERAND (arg1, 1);
5409 STRIP_NOPS (tree01);
5410 STRIP_NOPS (tree11);
5411 code01 = TREE_CODE (tree01);
5412 code11 = TREE_CODE (tree11);
5413 if (code01 == INTEGER_CST
5414 && code11 == INTEGER_CST
5415 && TREE_INT_CST_HIGH (tree01) == 0
5416 && TREE_INT_CST_HIGH (tree11) == 0
5417 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
5418 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
5419 return build (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
5420 code0 == LSHIFT_EXPR ? tree01 : tree11);
5421 else if (code11 == MINUS_EXPR)
5423 tree tree110, tree111;
5424 tree110 = TREE_OPERAND (tree11, 0);
5425 tree111 = TREE_OPERAND (tree11, 1);
5426 STRIP_NOPS (tree110);
5427 STRIP_NOPS (tree111);
5428 if (TREE_CODE (tree110) == INTEGER_CST
5429 && 0 == compare_tree_int (tree110,
5431 (TREE_TYPE (TREE_OPERAND
5433 && operand_equal_p (tree01, tree111, 0))
5434 return build ((code0 == LSHIFT_EXPR
5437 type, TREE_OPERAND (arg0, 0), tree01);
5439 else if (code01 == MINUS_EXPR)
5441 tree tree010, tree011;
5442 tree010 = TREE_OPERAND (tree01, 0);
5443 tree011 = TREE_OPERAND (tree01, 1);
5444 STRIP_NOPS (tree010);
5445 STRIP_NOPS (tree011);
5446 if (TREE_CODE (tree010) == INTEGER_CST
5447 && 0 == compare_tree_int (tree010,
5449 (TREE_TYPE (TREE_OPERAND
5451 && operand_equal_p (tree11, tree011, 0))
5452 return build ((code0 != LSHIFT_EXPR
5455 type, TREE_OPERAND (arg0, 0), tree11);
5462 /* In most languages, can't associate operations on floats through
5463 parentheses. Rather than remember where the parentheses were, we
5464 don't associate floats at all. It shouldn't matter much. However,
5465 associating multiplications is only very slightly inaccurate, so do
5466 that if -ffast-math is specified. */
5469 && (! FLOAT_TYPE_P (type)
5470 || (flag_fast_math && code != MULT_EXPR)))
5472 tree var0, con0, lit0, var1, con1, lit1;
5474 /* Split both trees into variables, constants, and literals. Then
5475 associate each group together, the constants with literals,
5476 then the result with variables. This increases the chances of
5477 literals being recombined later and of generating relocatable
5478 expressions for the sum of a constant and literal. */
5479 var0 = split_tree (arg0, code, &con0, &lit0, 0);
5480 var1 = split_tree (arg1, code, &con1, &lit1, code == MINUS_EXPR);
5482 /* Only do something if we found more than two objects. Otherwise,
5483 nothing has changed and we risk infinite recursion. */
5484 if (2 < ((var0 != 0) + (var1 != 0) + (con0 != 0) + (con1 != 0)
5485 + (lit0 != 0) + (lit1 != 0)))
5487 var0 = associate_trees (var0, var1, code, type);
5488 con0 = associate_trees (con0, con1, code, type);
5489 lit0 = associate_trees (lit0, lit1, code, type);
5490 con0 = associate_trees (con0, lit0, code, type);
5491 return convert (type, associate_trees (var0, con0, code, type));
5496 #if defined (REAL_IS_NOT_DOUBLE) && ! defined (REAL_ARITHMETIC)
5497 if (TREE_CODE (arg1) == REAL_CST)
5499 #endif /* REAL_IS_NOT_DOUBLE, and no REAL_ARITHMETIC */
5501 t1 = const_binop (code, arg0, arg1, 0);
5502 if (t1 != NULL_TREE)
5504 /* The return value should always have
5505 the same type as the original expression. */
5506 if (TREE_TYPE (t1) != TREE_TYPE (t))
5507 t1 = convert (TREE_TYPE (t), t1);
5514 /* A - (-B) -> A + B */
5515 if (TREE_CODE (arg1) == NEGATE_EXPR)
5516 return fold (build (PLUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
5517 /* (-A) - CST -> (-CST) - A for floating point (what about ints ?) */
5518 if (TREE_CODE (arg0) == NEGATE_EXPR && TREE_CODE (arg1) == REAL_CST)
5520 fold (build (MINUS_EXPR, type,
5521 build_real (TREE_TYPE (arg1),
5522 REAL_VALUE_NEGATE (TREE_REAL_CST (arg1))),
5523 TREE_OPERAND (arg0, 0)));
5525 if (! FLOAT_TYPE_P (type))
5527 if (! wins && integer_zerop (arg0))
5528 return convert (type, negate_expr (arg1));
5529 if (integer_zerop (arg1))
5530 return non_lvalue (convert (type, arg0));
5532 /* (A * C) - (B * C) -> (A-B) * C. Since we are most concerned
5533 about the case where C is a constant, just try one of the
5534 four possibilities. */
5536 if (TREE_CODE (arg0) == MULT_EXPR && TREE_CODE (arg1) == MULT_EXPR
5537 && operand_equal_p (TREE_OPERAND (arg0, 1),
5538 TREE_OPERAND (arg1, 1), 0))
5539 return fold (build (MULT_EXPR, type,
5540 fold (build (MINUS_EXPR, type,
5541 TREE_OPERAND (arg0, 0),
5542 TREE_OPERAND (arg1, 0))),
5543 TREE_OPERAND (arg0, 1)));
5546 else if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
5549 /* Except with IEEE floating point, 0-x equals -x. */
5550 if (! wins && real_zerop (arg0))
5551 return convert (type, negate_expr (arg1));
5552 /* Except with IEEE floating point, x-0 equals x. */
5553 if (real_zerop (arg1))
5554 return non_lvalue (convert (type, arg0));
5557 /* Fold &x - &x. This can happen from &x.foo - &x.
5558 This is unsafe for certain floats even in non-IEEE formats.
5559 In IEEE, it is unsafe because it does wrong for NaNs.
5560 Also note that operand_equal_p is always false if an operand
5563 if ((! FLOAT_TYPE_P (type) || flag_fast_math)
5564 && operand_equal_p (arg0, arg1, 0))
5565 return convert (type, integer_zero_node);
5570 /* (-A) * (-B) -> A * B */
5571 if (TREE_CODE (arg0) == NEGATE_EXPR && TREE_CODE (arg1) == NEGATE_EXPR)
5572 return fold (build (MULT_EXPR, type, TREE_OPERAND (arg0, 0),
5573 TREE_OPERAND (arg1, 0)));
5575 if (! FLOAT_TYPE_P (type))
5577 if (integer_zerop (arg1))
5578 return omit_one_operand (type, arg1, arg0);
5579 if (integer_onep (arg1))
5580 return non_lvalue (convert (type, arg0));
5582 /* (a * (1 << b)) is (a << b) */
5583 if (TREE_CODE (arg1) == LSHIFT_EXPR
5584 && integer_onep (TREE_OPERAND (arg1, 0)))
5585 return fold (build (LSHIFT_EXPR, type, arg0,
5586 TREE_OPERAND (arg1, 1)));
5587 if (TREE_CODE (arg0) == LSHIFT_EXPR
5588 && integer_onep (TREE_OPERAND (arg0, 0)))
5589 return fold (build (LSHIFT_EXPR, type, arg1,
5590 TREE_OPERAND (arg0, 1)));
5592 if (TREE_CODE (arg1) == INTEGER_CST
5593 && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0), arg1,
5595 return convert (type, tem);
5600 /* x*0 is 0, except for IEEE floating point. */
5601 if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
5603 && real_zerop (arg1))
5604 return omit_one_operand (type, arg1, arg0);
5605 /* In IEEE floating point, x*1 is not equivalent to x for snans.
5606 However, ANSI says we can drop signals,
5607 so we can do this anyway. */
5608 if (real_onep (arg1))
5609 return non_lvalue (convert (type, arg0));
5611 if (! wins && real_twop (arg1) && global_bindings_p () == 0
5612 && ! contains_placeholder_p (arg0))
5614 tree arg = save_expr (arg0);
5615 return build (PLUS_EXPR, type, arg, arg);
5622 if (integer_all_onesp (arg1))
5623 return omit_one_operand (type, arg1, arg0);
5624 if (integer_zerop (arg1))
5625 return non_lvalue (convert (type, arg0));
5626 t1 = distribute_bit_expr (code, type, arg0, arg1);
5627 if (t1 != NULL_TREE)
5630 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
5632 This results in more efficient code for machines without a NAND
5633 instruction. Combine will canonicalize to the first form
5634 which will allow use of NAND instructions provided by the
5635 backend if they exist. */
5636 if (TREE_CODE (arg0) == BIT_NOT_EXPR
5637 && TREE_CODE (arg1) == BIT_NOT_EXPR)
5639 return fold (build1 (BIT_NOT_EXPR, type,
5640 build (BIT_AND_EXPR, type,
5641 TREE_OPERAND (arg0, 0),
5642 TREE_OPERAND (arg1, 0))));
5645 /* See if this can be simplified into a rotate first. If that
5646 is unsuccessful continue in the association code. */
5650 if (integer_zerop (arg1))
5651 return non_lvalue (convert (type, arg0));
5652 if (integer_all_onesp (arg1))
5653 return fold (build1 (BIT_NOT_EXPR, type, arg0));
5655 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
5656 with a constant, and the two constants have no bits in common,
5657 we should treat this as a BIT_IOR_EXPR since this may produce more
5659 if (TREE_CODE (arg0) == BIT_AND_EXPR
5660 && TREE_CODE (arg1) == BIT_AND_EXPR
5661 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
5662 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
5663 && integer_zerop (const_binop (BIT_AND_EXPR,
5664 TREE_OPERAND (arg0, 1),
5665 TREE_OPERAND (arg1, 1), 0)))
5667 code = BIT_IOR_EXPR;
5671 /* See if this can be simplified into a rotate first. If that
5672 is unsuccessful continue in the association code. */
5677 if (integer_all_onesp (arg1))
5678 return non_lvalue (convert (type, arg0));
5679 if (integer_zerop (arg1))
5680 return omit_one_operand (type, arg1, arg0);
5681 t1 = distribute_bit_expr (code, type, arg0, arg1);
5682 if (t1 != NULL_TREE)
5684 /* Simplify ((int)c & 0x377) into (int)c, if c is unsigned char. */
5685 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == NOP_EXPR
5686 && TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0))))
5689 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0)));
5691 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
5692 && (~TREE_INT_CST_LOW (arg0)
5693 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
5694 return build1 (NOP_EXPR, type, TREE_OPERAND (arg1, 0));
5696 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
5697 && TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
5700 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
5702 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
5703 && (~TREE_INT_CST_LOW (arg1)
5704 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
5705 return build1 (NOP_EXPR, type, TREE_OPERAND (arg0, 0));
5708 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
5710 This results in more efficient code for machines without a NOR
5711 instruction. Combine will canonicalize to the first form
5712 which will allow use of NOR instructions provided by the
5713 backend if they exist. */
5714 if (TREE_CODE (arg0) == BIT_NOT_EXPR
5715 && TREE_CODE (arg1) == BIT_NOT_EXPR)
5717 return fold (build1 (BIT_NOT_EXPR, type,
5718 build (BIT_IOR_EXPR, type,
5719 TREE_OPERAND (arg0, 0),
5720 TREE_OPERAND (arg1, 0))));
5725 case BIT_ANDTC_EXPR:
5726 if (integer_all_onesp (arg0))
5727 return non_lvalue (convert (type, arg1));
5728 if (integer_zerop (arg0))
5729 return omit_one_operand (type, arg0, arg1);
5730 if (TREE_CODE (arg1) == INTEGER_CST)
5732 arg1 = fold (build1 (BIT_NOT_EXPR, type, arg1));
5733 code = BIT_AND_EXPR;
5739 /* In most cases, do nothing with a divide by zero. */
5740 #if !defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
5741 #ifndef REAL_INFINITY
5742 if (TREE_CODE (arg1) == REAL_CST && real_zerop (arg1))
5745 #endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
5747 /* (-A) / (-B) -> A / B */
5748 if (TREE_CODE (arg0) == NEGATE_EXPR && TREE_CODE (arg1) == NEGATE_EXPR)
5749 return fold (build (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
5750 TREE_OPERAND (arg1, 0)));
5752 /* In IEEE floating point, x/1 is not equivalent to x for snans.
5753 However, ANSI says we can drop signals, so we can do this anyway. */
5754 if (real_onep (arg1))
5755 return non_lvalue (convert (type, arg0));
5757 /* If ARG1 is a constant, we can convert this to a multiply by the
5758 reciprocal. This does not have the same rounding properties,
5759 so only do this if -ffast-math. We can actually always safely
5760 do it if ARG1 is a power of two, but it's hard to tell if it is
5761 or not in a portable manner. */
5762 if (TREE_CODE (arg1) == REAL_CST)
5765 && 0 != (tem = const_binop (code, build_real (type, dconst1),
5767 return fold (build (MULT_EXPR, type, arg0, tem));
5768 /* Find the reciprocal if optimizing and the result is exact. */
5772 r = TREE_REAL_CST (arg1);
5773 if (exact_real_inverse (TYPE_MODE(TREE_TYPE(arg0)), &r))
5775 tem = build_real (type, r);
5776 return fold (build (MULT_EXPR, type, arg0, tem));
5782 case TRUNC_DIV_EXPR:
5783 case ROUND_DIV_EXPR:
5784 case FLOOR_DIV_EXPR:
5786 case EXACT_DIV_EXPR:
5787 if (integer_onep (arg1))
5788 return non_lvalue (convert (type, arg0));
5789 if (integer_zerop (arg1))
5792 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
5793 operation, EXACT_DIV_EXPR.
5795 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
5796 At one time others generated faster code, it's not clear if they do
5797 after the last round to changes to the DIV code in expmed.c. */
5798 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
5799 && multiple_of_p (type, arg0, arg1))
5800 return fold (build (EXACT_DIV_EXPR, type, arg0, arg1));
5802 if (TREE_CODE (arg1) == INTEGER_CST
5803 && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0), arg1,
5805 return convert (type, tem);
5810 case FLOOR_MOD_EXPR:
5811 case ROUND_MOD_EXPR:
5812 case TRUNC_MOD_EXPR:
5813 if (integer_onep (arg1))
5814 return omit_one_operand (type, integer_zero_node, arg0);
5815 if (integer_zerop (arg1))
5818 if (TREE_CODE (arg1) == INTEGER_CST
5819 && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0), arg1,
5821 return convert (type, tem);
5829 if (integer_zerop (arg1))
5830 return non_lvalue (convert (type, arg0));
5831 /* Since negative shift count is not well-defined,
5832 don't try to compute it in the compiler. */
5833 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
5835 /* Rewrite an LROTATE_EXPR by a constant into an
5836 RROTATE_EXPR by a new constant. */
5837 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
5839 TREE_SET_CODE (t, RROTATE_EXPR);
5840 code = RROTATE_EXPR;
5841 TREE_OPERAND (t, 1) = arg1
5844 convert (TREE_TYPE (arg1),
5845 build_int_2 (GET_MODE_BITSIZE (TYPE_MODE (type)), 0)),
5847 if (tree_int_cst_sgn (arg1) < 0)
5851 /* If we have a rotate of a bit operation with the rotate count and
5852 the second operand of the bit operation both constant,
5853 permute the two operations. */
5854 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
5855 && (TREE_CODE (arg0) == BIT_AND_EXPR
5856 || TREE_CODE (arg0) == BIT_ANDTC_EXPR
5857 || TREE_CODE (arg0) == BIT_IOR_EXPR
5858 || TREE_CODE (arg0) == BIT_XOR_EXPR)
5859 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
5860 return fold (build (TREE_CODE (arg0), type,
5861 fold (build (code, type,
5862 TREE_OPERAND (arg0, 0), arg1)),
5863 fold (build (code, type,
5864 TREE_OPERAND (arg0, 1), arg1))));
5866 /* Two consecutive rotates adding up to the width of the mode can
5868 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
5869 && TREE_CODE (arg0) == RROTATE_EXPR
5870 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
5871 && TREE_INT_CST_HIGH (arg1) == 0
5872 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
5873 && ((TREE_INT_CST_LOW (arg1)
5874 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
5875 == (unsigned int) GET_MODE_BITSIZE (TYPE_MODE (type))))
5876 return TREE_OPERAND (arg0, 0);
5881 if (operand_equal_p (arg0, arg1, 0))
5882 return omit_one_operand (type, arg0, arg1);
5883 if (INTEGRAL_TYPE_P (type)
5884 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), 1))
5885 return omit_one_operand (type, arg1, arg0);
5889 if (operand_equal_p (arg0, arg1, 0))
5890 return omit_one_operand (type, arg0, arg1);
5891 if (INTEGRAL_TYPE_P (type)
5892 && TYPE_MAX_VALUE (type)
5893 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), 1))
5894 return omit_one_operand (type, arg1, arg0);
5897 case TRUTH_NOT_EXPR:
5898 /* Note that the operand of this must be an int
5899 and its values must be 0 or 1.
5900 ("true" is a fixed value perhaps depending on the language,
5901 but we don't handle values other than 1 correctly yet.) */
5902 tem = invert_truthvalue (arg0);
5903 /* Avoid infinite recursion. */
5904 if (TREE_CODE (tem) == TRUTH_NOT_EXPR)
5906 return convert (type, tem);
5908 case TRUTH_ANDIF_EXPR:
5909 /* Note that the operands of this must be ints
5910 and their values must be 0 or 1.
5911 ("true" is a fixed value perhaps depending on the language.) */
5912 /* If first arg is constant zero, return it. */
5913 if (integer_zerop (arg0))
5914 return convert (type, arg0);
5915 case TRUTH_AND_EXPR:
5916 /* If either arg is constant true, drop it. */
5917 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
5918 return non_lvalue (convert (type, arg1));
5919 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
5920 return non_lvalue (convert (type, arg0));
5921 /* If second arg is constant zero, result is zero, but first arg
5922 must be evaluated. */
5923 if (integer_zerop (arg1))
5924 return omit_one_operand (type, arg1, arg0);
5925 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
5926 case will be handled here. */
5927 if (integer_zerop (arg0))
5928 return omit_one_operand (type, arg0, arg1);
5931 /* We only do these simplifications if we are optimizing. */
5935 /* Check for things like (A || B) && (A || C). We can convert this
5936 to A || (B && C). Note that either operator can be any of the four
5937 truth and/or operations and the transformation will still be
5938 valid. Also note that we only care about order for the
5939 ANDIF and ORIF operators. If B contains side effects, this
5940 might change the truth-value of A. */
5941 if (TREE_CODE (arg0) == TREE_CODE (arg1)
5942 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
5943 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
5944 || TREE_CODE (arg0) == TRUTH_AND_EXPR
5945 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
5946 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
5948 tree a00 = TREE_OPERAND (arg0, 0);
5949 tree a01 = TREE_OPERAND (arg0, 1);
5950 tree a10 = TREE_OPERAND (arg1, 0);
5951 tree a11 = TREE_OPERAND (arg1, 1);
5952 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
5953 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
5954 && (code == TRUTH_AND_EXPR
5955 || code == TRUTH_OR_EXPR));
5957 if (operand_equal_p (a00, a10, 0))
5958 return fold (build (TREE_CODE (arg0), type, a00,
5959 fold (build (code, type, a01, a11))));
5960 else if (commutative && operand_equal_p (a00, a11, 0))
5961 return fold (build (TREE_CODE (arg0), type, a00,
5962 fold (build (code, type, a01, a10))));
5963 else if (commutative && operand_equal_p (a01, a10, 0))
5964 return fold (build (TREE_CODE (arg0), type, a01,
5965 fold (build (code, type, a00, a11))));
5967 /* This case if tricky because we must either have commutative
5968 operators or else A10 must not have side-effects. */
5970 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
5971 && operand_equal_p (a01, a11, 0))
5972 return fold (build (TREE_CODE (arg0), type,
5973 fold (build (code, type, a00, a10)),
5977 /* See if we can build a range comparison. */
5978 if (0 != (tem = fold_range_test (t)))
5981 /* Check for the possibility of merging component references. If our
5982 lhs is another similar operation, try to merge its rhs with our
5983 rhs. Then try to merge our lhs and rhs. */
5984 if (TREE_CODE (arg0) == code
5985 && 0 != (tem = fold_truthop (code, type,
5986 TREE_OPERAND (arg0, 1), arg1)))
5987 return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));
5989 if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
5994 case TRUTH_ORIF_EXPR:
5995 /* Note that the operands of this must be ints
5996 and their values must be 0 or true.
5997 ("true" is a fixed value perhaps depending on the language.) */
5998 /* If first arg is constant true, return it. */
5999 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
6000 return convert (type, arg0);
6002 /* If either arg is constant zero, drop it. */
6003 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
6004 return non_lvalue (convert (type, arg1));
6005 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1))
6006 return non_lvalue (convert (type, arg0));
6007 /* If second arg is constant true, result is true, but we must
6008 evaluate first arg. */
6009 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
6010 return omit_one_operand (type, arg1, arg0);
6011 /* Likewise for first arg, but note this only occurs here for
6013 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
6014 return omit_one_operand (type, arg0, arg1);
6017 case TRUTH_XOR_EXPR:
6018 /* If either arg is constant zero, drop it. */
6019 if (integer_zerop (arg0))
6020 return non_lvalue (convert (type, arg1));
6021 if (integer_zerop (arg1))
6022 return non_lvalue (convert (type, arg0));
6023 /* If either arg is constant true, this is a logical inversion. */
6024 if (integer_onep (arg0))
6025 return non_lvalue (convert (type, invert_truthvalue (arg1)));
6026 if (integer_onep (arg1))
6027 return non_lvalue (convert (type, invert_truthvalue (arg0)));
6036 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
6038 /* (-a) CMP (-b) -> b CMP a */
6039 if (TREE_CODE (arg0) == NEGATE_EXPR
6040 && TREE_CODE (arg1) == NEGATE_EXPR)
6041 return fold (build (code, type, TREE_OPERAND (arg1, 0),
6042 TREE_OPERAND (arg0, 0)));
6043 /* (-a) CMP CST -> a swap(CMP) (-CST) */
6044 if (TREE_CODE (arg0) == NEGATE_EXPR && TREE_CODE (arg1) == REAL_CST)
6047 (swap_tree_comparison (code), type,
6048 TREE_OPERAND (arg0, 0),
6049 build_real (TREE_TYPE (arg1),
6050 REAL_VALUE_NEGATE (TREE_REAL_CST (arg1)))));
6051 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
6052 /* a CMP (-0) -> a CMP 0 */
6053 if (TREE_CODE (arg1) == REAL_CST
6054 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (arg1)))
6055 return fold (build (code, type, arg0,
6056 build_real (TREE_TYPE (arg1), dconst0)));
6060 /* If one arg is a constant integer, put it last. */
6061 if (TREE_CODE (arg0) == INTEGER_CST
6062 && TREE_CODE (arg1) != INTEGER_CST)
6064 TREE_OPERAND (t, 0) = arg1;
6065 TREE_OPERAND (t, 1) = arg0;
6066 arg0 = TREE_OPERAND (t, 0);
6067 arg1 = TREE_OPERAND (t, 1);
6068 code = swap_tree_comparison (code);
6069 TREE_SET_CODE (t, code);
6072 /* Convert foo++ == CONST into ++foo == CONST + INCR.
6073 First, see if one arg is constant; find the constant arg
6074 and the other one. */
6076 tree constop = 0, varop = NULL_TREE;
6077 int constopnum = -1;
6079 if (TREE_CONSTANT (arg1))
6080 constopnum = 1, constop = arg1, varop = arg0;
6081 if (TREE_CONSTANT (arg0))
6082 constopnum = 0, constop = arg0, varop = arg1;
6084 if (constop && TREE_CODE (varop) == POSTINCREMENT_EXPR)
6086 /* This optimization is invalid for ordered comparisons
6087 if CONST+INCR overflows or if foo+incr might overflow.
6088 This optimization is invalid for floating point due to rounding.
6089 For pointer types we assume overflow doesn't happen. */
6090 if (POINTER_TYPE_P (TREE_TYPE (varop))
6091 || (! FLOAT_TYPE_P (TREE_TYPE (varop))
6092 && (code == EQ_EXPR || code == NE_EXPR)))
6095 = fold (build (PLUS_EXPR, TREE_TYPE (varop),
6096 constop, TREE_OPERAND (varop, 1)));
6097 TREE_SET_CODE (varop, PREINCREMENT_EXPR);
6099 /* If VAROP is a reference to a bitfield, we must mask
6100 the constant by the width of the field. */
6101 if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
6102 && DECL_BIT_FIELD(TREE_OPERAND
6103 (TREE_OPERAND (varop, 0), 1)))
6106 = TREE_INT_CST_LOW (DECL_SIZE
6108 (TREE_OPERAND (varop, 0), 1)));
6109 tree mask, unsigned_type;
6110 unsigned int precision;
6111 tree folded_compare;
6113 /* First check whether the comparison would come out
6114 always the same. If we don't do that we would
6115 change the meaning with the masking. */
6116 if (constopnum == 0)
6117 folded_compare = fold (build (code, type, constop,
6118 TREE_OPERAND (varop, 0)));
6120 folded_compare = fold (build (code, type,
6121 TREE_OPERAND (varop, 0),
6123 if (integer_zerop (folded_compare)
6124 || integer_onep (folded_compare))
6125 return omit_one_operand (type, folded_compare, varop);
6127 unsigned_type = type_for_size (size, 1);
6128 precision = TYPE_PRECISION (unsigned_type);
6129 mask = build_int_2 (~0, ~0);
6130 TREE_TYPE (mask) = unsigned_type;
6131 force_fit_type (mask, 0);
6132 mask = const_binop (RSHIFT_EXPR, mask,
6133 size_int (precision - size), 0);
6134 newconst = fold (build (BIT_AND_EXPR,
6135 TREE_TYPE (varop), newconst,
6136 convert (TREE_TYPE (varop),
6141 t = build (code, type, TREE_OPERAND (t, 0),
6142 TREE_OPERAND (t, 1));
6143 TREE_OPERAND (t, constopnum) = newconst;
6147 else if (constop && TREE_CODE (varop) == POSTDECREMENT_EXPR)
6149 if (POINTER_TYPE_P (TREE_TYPE (varop))
6150 || (! FLOAT_TYPE_P (TREE_TYPE (varop))
6151 && (code == EQ_EXPR || code == NE_EXPR)))
6154 = fold (build (MINUS_EXPR, TREE_TYPE (varop),
6155 constop, TREE_OPERAND (varop, 1)));
6156 TREE_SET_CODE (varop, PREDECREMENT_EXPR);
6158 if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
6159 && DECL_BIT_FIELD(TREE_OPERAND
6160 (TREE_OPERAND (varop, 0), 1)))
6163 = TREE_INT_CST_LOW (DECL_SIZE
6165 (TREE_OPERAND (varop, 0), 1)));
6166 tree mask, unsigned_type;
6167 unsigned int precision;
6168 tree folded_compare;
6170 if (constopnum == 0)
6171 folded_compare = fold (build (code, type, constop,
6172 TREE_OPERAND (varop, 0)));
6174 folded_compare = fold (build (code, type,
6175 TREE_OPERAND (varop, 0),
6177 if (integer_zerop (folded_compare)
6178 || integer_onep (folded_compare))
6179 return omit_one_operand (type, folded_compare, varop);
6181 unsigned_type = type_for_size (size, 1);
6182 precision = TYPE_PRECISION (unsigned_type);
6183 mask = build_int_2 (~0, ~0);
6184 TREE_TYPE (mask) = TREE_TYPE (varop);
6185 force_fit_type (mask, 0);
6186 mask = const_binop (RSHIFT_EXPR, mask,
6187 size_int (precision - size), 0);
6188 newconst = fold (build (BIT_AND_EXPR,
6189 TREE_TYPE (varop), newconst,
6190 convert (TREE_TYPE (varop),
6195 t = build (code, type, TREE_OPERAND (t, 0),
6196 TREE_OPERAND (t, 1));
6197 TREE_OPERAND (t, constopnum) = newconst;
6203 /* Change X >= CST to X > (CST - 1) if CST is positive. */
6204 if (TREE_CODE (arg1) == INTEGER_CST
6205 && TREE_CODE (arg0) != INTEGER_CST
6206 && tree_int_cst_sgn (arg1) > 0)
6208 switch (TREE_CODE (t))
6212 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
6213 t = build (code, type, TREE_OPERAND (t, 0), arg1);
6218 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
6219 t = build (code, type, TREE_OPERAND (t, 0), arg1);
6227 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
6228 a MINUS_EXPR of a constant, we can convert it into a comparison with
6229 a revised constant as long as no overflow occurs. */
6230 if ((code == EQ_EXPR || code == NE_EXPR)
6231 && TREE_CODE (arg1) == INTEGER_CST
6232 && (TREE_CODE (arg0) == PLUS_EXPR
6233 || TREE_CODE (arg0) == MINUS_EXPR)
6234 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
6235 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
6236 ? MINUS_EXPR : PLUS_EXPR,
6237 arg1, TREE_OPERAND (arg0, 1), 0))
6238 && ! TREE_CONSTANT_OVERFLOW (tem))
6239 return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));
6241 /* Similarly for a NEGATE_EXPR. */
6242 else if ((code == EQ_EXPR || code == NE_EXPR)
6243 && TREE_CODE (arg0) == NEGATE_EXPR
6244 && TREE_CODE (arg1) == INTEGER_CST
6245 && 0 != (tem = negate_expr (arg1))
6246 && TREE_CODE (tem) == INTEGER_CST
6247 && ! TREE_CONSTANT_OVERFLOW (tem))
6248 return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));
6250 /* If we have X - Y == 0, we can convert that to X == Y and similarly
6251 for !=. Don't do this for ordered comparisons due to overflow. */
6252 else if ((code == NE_EXPR || code == EQ_EXPR)
6253 && integer_zerop (arg1) && TREE_CODE (arg0) == MINUS_EXPR)
6254 return fold (build (code, type,
6255 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1)));
6257 /* If we are widening one operand of an integer comparison,
6258 see if the other operand is similarly being widened. Perhaps we
6259 can do the comparison in the narrower type. */
6260 else if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
6261 && TREE_CODE (arg0) == NOP_EXPR
6262 && (tem = get_unwidened (arg0, NULL_TREE)) != arg0
6263 && (t1 = get_unwidened (arg1, TREE_TYPE (tem))) != 0
6264 && (TREE_TYPE (t1) == TREE_TYPE (tem)
6265 || (TREE_CODE (t1) == INTEGER_CST
6266 && int_fits_type_p (t1, TREE_TYPE (tem)))))
6267 return fold (build (code, type, tem, convert (TREE_TYPE (tem), t1)));
6269 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
6270 constant, we can simplify it. */
6271 else if (TREE_CODE (arg1) == INTEGER_CST
6272 && (TREE_CODE (arg0) == MIN_EXPR
6273 || TREE_CODE (arg0) == MAX_EXPR)
6274 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
6275 return optimize_minmax_comparison (t);
6277 /* If we are comparing an ABS_EXPR with a constant, we can
6278 convert all the cases into explicit comparisons, but they may
6279 well not be faster than doing the ABS and one comparison.
6280 But ABS (X) <= C is a range comparison, which becomes a subtraction
6281 and a comparison, and is probably faster. */
6282 else if (code == LE_EXPR && TREE_CODE (arg1) == INTEGER_CST
6283 && TREE_CODE (arg0) == ABS_EXPR
6284 && ! TREE_SIDE_EFFECTS (arg0)
6285 && (0 != (tem = negate_expr (arg1)))
6286 && TREE_CODE (tem) == INTEGER_CST
6287 && ! TREE_CONSTANT_OVERFLOW (tem))
6288 return fold (build (TRUTH_ANDIF_EXPR, type,
6289 build (GE_EXPR, type, TREE_OPERAND (arg0, 0), tem),
6290 build (LE_EXPR, type,
6291 TREE_OPERAND (arg0, 0), arg1)));
6293 /* If this is an EQ or NE comparison with zero and ARG0 is
6294 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
6295 two operations, but the latter can be done in one less insn
6296 on machines that have only two-operand insns or on which a
6297 constant cannot be the first operand. */
6298 if (integer_zerop (arg1) && (code == EQ_EXPR || code == NE_EXPR)
6299 && TREE_CODE (arg0) == BIT_AND_EXPR)
6301 if (TREE_CODE (TREE_OPERAND (arg0, 0)) == LSHIFT_EXPR
6302 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 0), 0)))
6304 fold (build (code, type,
6305 build (BIT_AND_EXPR, TREE_TYPE (arg0),
6307 TREE_TYPE (TREE_OPERAND (arg0, 0)),
6308 TREE_OPERAND (arg0, 1),
6309 TREE_OPERAND (TREE_OPERAND (arg0, 0), 1)),
6310 convert (TREE_TYPE (arg0),
6313 else if (TREE_CODE (TREE_OPERAND (arg0, 1)) == LSHIFT_EXPR
6314 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 1), 0)))
6316 fold (build (code, type,
6317 build (BIT_AND_EXPR, TREE_TYPE (arg0),
6319 TREE_TYPE (TREE_OPERAND (arg0, 1)),
6320 TREE_OPERAND (arg0, 0),
6321 TREE_OPERAND (TREE_OPERAND (arg0, 1), 1)),
6322 convert (TREE_TYPE (arg0),
6327 /* If this is an NE or EQ comparison of zero against the result of a
6328 signed MOD operation whose second operand is a power of 2, make
6329 the MOD operation unsigned since it is simpler and equivalent. */
6330 if ((code == NE_EXPR || code == EQ_EXPR)
6331 && integer_zerop (arg1)
6332 && ! TREE_UNSIGNED (TREE_TYPE (arg0))
6333 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
6334 || TREE_CODE (arg0) == CEIL_MOD_EXPR
6335 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
6336 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
6337 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6339 tree newtype = unsigned_type (TREE_TYPE (arg0));
6340 tree newmod = build (TREE_CODE (arg0), newtype,
6341 convert (newtype, TREE_OPERAND (arg0, 0)),
6342 convert (newtype, TREE_OPERAND (arg0, 1)));
6344 return build (code, type, newmod, convert (newtype, arg1));
6347 /* If this is an NE comparison of zero with an AND of one, remove the
6348 comparison since the AND will give the correct value. */
6349 if (code == NE_EXPR && integer_zerop (arg1)
6350 && TREE_CODE (arg0) == BIT_AND_EXPR
6351 && integer_onep (TREE_OPERAND (arg0, 1)))
6352 return convert (type, arg0);
6354 /* If we have (A & C) == C where C is a power of 2, convert this into
6355 (A & C) != 0. Similarly for NE_EXPR. */
6356 if ((code == EQ_EXPR || code == NE_EXPR)
6357 && TREE_CODE (arg0) == BIT_AND_EXPR
6358 && integer_pow2p (TREE_OPERAND (arg0, 1))
6359 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
6360 return build (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
6361 arg0, integer_zero_node);
6363 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
6364 and similarly for >= into !=. */
6365 if ((code == LT_EXPR || code == GE_EXPR)
6366 && TREE_UNSIGNED (TREE_TYPE (arg0))
6367 && TREE_CODE (arg1) == LSHIFT_EXPR
6368 && integer_onep (TREE_OPERAND (arg1, 0)))
6369 return build (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
6370 build (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
6371 TREE_OPERAND (arg1, 1)),
6372 convert (TREE_TYPE (arg0), integer_zero_node));
6374 else if ((code == LT_EXPR || code == GE_EXPR)
6375 && TREE_UNSIGNED (TREE_TYPE (arg0))
6376 && (TREE_CODE (arg1) == NOP_EXPR
6377 || TREE_CODE (arg1) == CONVERT_EXPR)
6378 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
6379 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
6381 build (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
6382 convert (TREE_TYPE (arg0),
6383 build (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
6384 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1))),
6385 convert (TREE_TYPE (arg0), integer_zero_node));
6387 /* Simplify comparison of something with itself. (For IEEE
6388 floating-point, we can only do some of these simplifications.) */
6389 if (operand_equal_p (arg0, arg1, 0))
6396 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
6397 return constant_boolean_node (1, type);
6399 TREE_SET_CODE (t, code);
6403 /* For NE, we can only do this simplification if integer. */
6404 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
6406 /* ... fall through ... */
6409 return constant_boolean_node (0, type);
6415 /* An unsigned comparison against 0 can be simplified. */
6416 if (integer_zerop (arg1)
6417 && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
6418 || POINTER_TYPE_P (TREE_TYPE (arg1)))
6419 && TREE_UNSIGNED (TREE_TYPE (arg1)))
6421 switch (TREE_CODE (t))
6425 TREE_SET_CODE (t, NE_EXPR);
6429 TREE_SET_CODE (t, EQ_EXPR);
6432 return omit_one_operand (type,
6433 convert (type, integer_one_node),
6436 return omit_one_operand (type,
6437 convert (type, integer_zero_node),
6444 /* Comparisons with the highest or lowest possible integer of
6445 the specified size will have known values and an unsigned
6446 <= 0x7fffffff can be simplified. */
6448 int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg1)));
6450 if (TREE_CODE (arg1) == INTEGER_CST
6451 && ! TREE_CONSTANT_OVERFLOW (arg1)
6452 && width <= HOST_BITS_PER_WIDE_INT
6453 && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
6454 || POINTER_TYPE_P (TREE_TYPE (arg1))))
6456 if (TREE_INT_CST_HIGH (arg1) == 0
6457 && (TREE_INT_CST_LOW (arg1)
6458 == ((unsigned HOST_WIDE_INT) 1 << (width - 1)) - 1)
6459 && ! TREE_UNSIGNED (TREE_TYPE (arg1)))
6460 switch (TREE_CODE (t))
6463 return omit_one_operand (type,
6464 convert (type, integer_zero_node),
6467 TREE_SET_CODE (t, EQ_EXPR);
6471 return omit_one_operand (type,
6472 convert (type, integer_one_node),
6475 TREE_SET_CODE (t, NE_EXPR);
6482 else if (TREE_INT_CST_HIGH (arg1) == -1
6483 && (- TREE_INT_CST_LOW (arg1)
6484 == ((unsigned HOST_WIDE_INT) 1 << (width - 1)))
6485 && ! TREE_UNSIGNED (TREE_TYPE (arg1)))
6486 switch (TREE_CODE (t))
6489 return omit_one_operand (type,
6490 convert (type, integer_zero_node),
6493 TREE_SET_CODE (t, EQ_EXPR);
6497 return omit_one_operand (type,
6498 convert (type, integer_one_node),
6501 TREE_SET_CODE (t, NE_EXPR);
6508 else if (TREE_INT_CST_HIGH (arg1) == 0
6509 && (TREE_INT_CST_LOW (arg1)
6510 == ((unsigned HOST_WIDE_INT) 1 << (width - 1)) - 1)
6511 && TREE_UNSIGNED (TREE_TYPE (arg1)))
6513 switch (TREE_CODE (t))
6516 return fold (build (GE_EXPR, type,
6517 convert (signed_type (TREE_TYPE (arg0)),
6519 convert (signed_type (TREE_TYPE (arg1)),
6520 integer_zero_node)));
6522 return fold (build (LT_EXPR, type,
6523 convert (signed_type (TREE_TYPE (arg0)),
6525 convert (signed_type (TREE_TYPE (arg1)),
6526 integer_zero_node)));
6534 /* If we are comparing an expression that just has comparisons
6535 of two integer values, arithmetic expressions of those comparisons,
6536 and constants, we can simplify it. There are only three cases
6537 to check: the two values can either be equal, the first can be
6538 greater, or the second can be greater. Fold the expression for
6539 those three values. Since each value must be 0 or 1, we have
6540 eight possibilities, each of which corresponds to the constant 0
6541 or 1 or one of the six possible comparisons.
6543 This handles common cases like (a > b) == 0 but also handles
6544 expressions like ((x > y) - (y > x)) > 0, which supposedly
6545 occur in macroized code. */
6547 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
6549 tree cval1 = 0, cval2 = 0;
6552 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
6553 /* Don't handle degenerate cases here; they should already
6554 have been handled anyway. */
6555 && cval1 != 0 && cval2 != 0
6556 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
6557 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
6558 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
6559 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
6560 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
6561 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
6562 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
6564 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
6565 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
6567 /* We can't just pass T to eval_subst in case cval1 or cval2
6568 was the same as ARG1. */
6571 = fold (build (code, type,
6572 eval_subst (arg0, cval1, maxval, cval2, minval),
6575 = fold (build (code, type,
6576 eval_subst (arg0, cval1, maxval, cval2, maxval),
6579 = fold (build (code, type,
6580 eval_subst (arg0, cval1, minval, cval2, maxval),
6583 /* All three of these results should be 0 or 1. Confirm they
6584 are. Then use those values to select the proper code
6587 if ((integer_zerop (high_result)
6588 || integer_onep (high_result))
6589 && (integer_zerop (equal_result)
6590 || integer_onep (equal_result))
6591 && (integer_zerop (low_result)
6592 || integer_onep (low_result)))
6594 /* Make a 3-bit mask with the high-order bit being the
6595 value for `>', the next for '=', and the low for '<'. */
6596 switch ((integer_onep (high_result) * 4)
6597 + (integer_onep (equal_result) * 2)
6598 + integer_onep (low_result))
6602 return omit_one_operand (type, integer_zero_node, arg0);
6623 return omit_one_operand (type, integer_one_node, arg0);
6626 t = build (code, type, cval1, cval2);
6628 return save_expr (t);
6635 /* If this is a comparison of a field, we may be able to simplify it. */
6636 if ((TREE_CODE (arg0) == COMPONENT_REF
6637 || TREE_CODE (arg0) == BIT_FIELD_REF)
6638 && (code == EQ_EXPR || code == NE_EXPR)
6639 /* Handle the constant case even without -O
6640 to make sure the warnings are given. */
6641 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
6643 t1 = optimize_bit_field_compare (code, type, arg0, arg1);
6647 /* If this is a comparison of complex values and either or both sides
6648 are a COMPLEX_EXPR or COMPLEX_CST, it is best to split up the
6649 comparisons and join them with a TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR.
6650 This may prevent needless evaluations. */
6651 if ((code == EQ_EXPR || code == NE_EXPR)
6652 && TREE_CODE (TREE_TYPE (arg0)) == COMPLEX_TYPE
6653 && (TREE_CODE (arg0) == COMPLEX_EXPR
6654 || TREE_CODE (arg1) == COMPLEX_EXPR
6655 || TREE_CODE (arg0) == COMPLEX_CST
6656 || TREE_CODE (arg1) == COMPLEX_CST))
6658 tree subtype = TREE_TYPE (TREE_TYPE (arg0));
6659 tree real0, imag0, real1, imag1;
6661 arg0 = save_expr (arg0);
6662 arg1 = save_expr (arg1);
6663 real0 = fold (build1 (REALPART_EXPR, subtype, arg0));
6664 imag0 = fold (build1 (IMAGPART_EXPR, subtype, arg0));
6665 real1 = fold (build1 (REALPART_EXPR, subtype, arg1));
6666 imag1 = fold (build1 (IMAGPART_EXPR, subtype, arg1));
6668 return fold (build ((code == EQ_EXPR ? TRUTH_ANDIF_EXPR
6671 fold (build (code, type, real0, real1)),
6672 fold (build (code, type, imag0, imag1))));
6675 /* From here on, the only cases we handle are when the result is
6676 known to be a constant.
6678 To compute GT, swap the arguments and do LT.
6679 To compute GE, do LT and invert the result.
6680 To compute LE, swap the arguments, do LT and invert the result.
6681 To compute NE, do EQ and invert the result.
6683 Therefore, the code below must handle only EQ and LT. */
6685 if (code == LE_EXPR || code == GT_EXPR)
6687 tem = arg0, arg0 = arg1, arg1 = tem;
6688 code = swap_tree_comparison (code);
6691 /* Note that it is safe to invert for real values here because we
6692 will check below in the one case that it matters. */
6696 if (code == NE_EXPR || code == GE_EXPR)
6699 code = invert_tree_comparison (code);
6702 /* Compute a result for LT or EQ if args permit;
6703 otherwise return T. */
6704 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
6706 if (code == EQ_EXPR)
6707 t1 = build_int_2 (tree_int_cst_equal (arg0, arg1), 0);
6709 t1 = build_int_2 ((TREE_UNSIGNED (TREE_TYPE (arg0))
6710 ? INT_CST_LT_UNSIGNED (arg0, arg1)
6711 : INT_CST_LT (arg0, arg1)),
6715 #if 0 /* This is no longer useful, but breaks some real code. */
6716 /* Assume a nonexplicit constant cannot equal an explicit one,
6717 since such code would be undefined anyway.
6718 Exception: on sysvr4, using #pragma weak,
6719 a label can come out as 0. */
6720 else if (TREE_CODE (arg1) == INTEGER_CST
6721 && !integer_zerop (arg1)
6722 && TREE_CONSTANT (arg0)
6723 && TREE_CODE (arg0) == ADDR_EXPR
6725 t1 = build_int_2 (0, 0);
6727 /* Two real constants can be compared explicitly. */
6728 else if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
6730 /* If either operand is a NaN, the result is false with two
6731 exceptions: First, an NE_EXPR is true on NaNs, but that case
6732 is already handled correctly since we will be inverting the
6733 result for NE_EXPR. Second, if we had inverted a LE_EXPR
6734 or a GE_EXPR into a LT_EXPR, we must return true so that it
6735 will be inverted into false. */
6737 if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
6738 || REAL_VALUE_ISNAN (TREE_REAL_CST (arg1)))
6739 t1 = build_int_2 (invert && code == LT_EXPR, 0);
6741 else if (code == EQ_EXPR)
6742 t1 = build_int_2 (REAL_VALUES_EQUAL (TREE_REAL_CST (arg0),
6743 TREE_REAL_CST (arg1)),
6746 t1 = build_int_2 (REAL_VALUES_LESS (TREE_REAL_CST (arg0),
6747 TREE_REAL_CST (arg1)),
6751 if (t1 == NULL_TREE)
6755 TREE_INT_CST_LOW (t1) ^= 1;
6757 TREE_TYPE (t1) = type;
6758 if (TREE_CODE (type) == BOOLEAN_TYPE)
6759 return truthvalue_conversion (t1);
6763 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
6764 so all simple results must be passed through pedantic_non_lvalue. */
6765 if (TREE_CODE (arg0) == INTEGER_CST)
6766 return pedantic_non_lvalue
6767 (TREE_OPERAND (t, (integer_zerop (arg0) ? 2 : 1)));
6768 else if (operand_equal_p (arg1, TREE_OPERAND (expr, 2), 0))
6769 return pedantic_omit_one_operand (type, arg1, arg0);
6771 /* If the second operand is zero, invert the comparison and swap
6772 the second and third operands. Likewise if the second operand
6773 is constant and the third is not or if the third operand is
6774 equivalent to the first operand of the comparison. */
6776 if (integer_zerop (arg1)
6777 || (TREE_CONSTANT (arg1) && ! TREE_CONSTANT (TREE_OPERAND (t, 2)))
6778 || (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
6779 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
6780 TREE_OPERAND (t, 2),
6781 TREE_OPERAND (arg0, 1))))
6783 /* See if this can be inverted. If it can't, possibly because
6784 it was a floating-point inequality comparison, don't do
6786 tem = invert_truthvalue (arg0);
6788 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
6790 t = build (code, type, tem,
6791 TREE_OPERAND (t, 2), TREE_OPERAND (t, 1));
6793 /* arg1 should be the first argument of the new T. */
6794 arg1 = TREE_OPERAND (t, 1);
6799 /* If we have A op B ? A : C, we may be able to convert this to a
6800 simpler expression, depending on the operation and the values
6801 of B and C. IEEE floating point prevents this though,
6802 because A or B might be -0.0 or a NaN. */
6804 if (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
6805 && (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
6806 || ! FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 0)))
6808 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
6809 arg1, TREE_OPERAND (arg0, 1)))
6811 tree arg2 = TREE_OPERAND (t, 2);
6812 enum tree_code comp_code = TREE_CODE (arg0);
6816 /* If we have A op 0 ? A : -A, this is A, -A, abs (A), or abs (-A),
6817 depending on the comparison operation. */
6818 if ((FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 1)))
6819 ? real_zerop (TREE_OPERAND (arg0, 1))
6820 : integer_zerop (TREE_OPERAND (arg0, 1)))
6821 && TREE_CODE (arg2) == NEGATE_EXPR
6822 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
6827 pedantic_non_lvalue (convert (type, negate_expr (arg1)));
6829 return pedantic_non_lvalue (convert (type, arg1));
6832 if (TREE_UNSIGNED (TREE_TYPE (arg1)))
6833 arg1 = convert (signed_type (TREE_TYPE (arg1)), arg1);
6834 return pedantic_non_lvalue
6835 (convert (type, fold (build1 (ABS_EXPR,
6836 TREE_TYPE (arg1), arg1))));
6839 if (TREE_UNSIGNED (TREE_TYPE (arg1)))
6840 arg1 = convert (signed_type (TREE_TYPE (arg1)), arg1);
6841 return pedantic_non_lvalue
6842 (negate_expr (convert (type,
6843 fold (build1 (ABS_EXPR,
6850 /* If this is A != 0 ? A : 0, this is simply A. For ==, it is
6853 if (integer_zerop (TREE_OPERAND (arg0, 1)) && integer_zerop (arg2))
6855 if (comp_code == NE_EXPR)
6856 return pedantic_non_lvalue (convert (type, arg1));
6857 else if (comp_code == EQ_EXPR)
6858 return pedantic_non_lvalue (convert (type, integer_zero_node));
6861 /* If this is A op B ? A : B, this is either A, B, min (A, B),
6862 or max (A, B), depending on the operation. */
6864 if (operand_equal_for_comparison_p (TREE_OPERAND (arg0, 1),
6865 arg2, TREE_OPERAND (arg0, 0)))
6867 tree comp_op0 = TREE_OPERAND (arg0, 0);
6868 tree comp_op1 = TREE_OPERAND (arg0, 1);
6869 tree comp_type = TREE_TYPE (comp_op0);
6874 return pedantic_non_lvalue (convert (type, arg2));
6876 return pedantic_non_lvalue (convert (type, arg1));
6879 /* In C++ a ?: expression can be an lvalue, so put the
6880 operand which will be used if they are equal first
6881 so that we can convert this back to the
6882 corresponding COND_EXPR. */
6883 return pedantic_non_lvalue
6884 (convert (type, (fold (build (MIN_EXPR, comp_type,
6885 (comp_code == LE_EXPR
6886 ? comp_op0 : comp_op1),
6887 (comp_code == LE_EXPR
6888 ? comp_op1 : comp_op0))))));
6892 return pedantic_non_lvalue
6893 (convert (type, fold (build (MAX_EXPR, comp_type,
6894 (comp_code == GE_EXPR
6895 ? comp_op0 : comp_op1),
6896 (comp_code == GE_EXPR
6897 ? comp_op1 : comp_op0)))));
6904 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
6905 we might still be able to simplify this. For example,
6906 if C1 is one less or one more than C2, this might have started
6907 out as a MIN or MAX and been transformed by this function.
6908 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
6910 if (INTEGRAL_TYPE_P (type)
6911 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
6912 && TREE_CODE (arg2) == INTEGER_CST)
6916 /* We can replace A with C1 in this case. */
6917 arg1 = convert (type, TREE_OPERAND (arg0, 1));
6918 t = build (code, type, TREE_OPERAND (t, 0), arg1,
6919 TREE_OPERAND (t, 2));
6923 /* If C1 is C2 + 1, this is min(A, C2). */
6924 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type), 1)
6925 && operand_equal_p (TREE_OPERAND (arg0, 1),
6926 const_binop (PLUS_EXPR, arg2,
6927 integer_one_node, 0), 1))
6928 return pedantic_non_lvalue
6929 (fold (build (MIN_EXPR, type, arg1, arg2)));
6933 /* If C1 is C2 - 1, this is min(A, C2). */
6934 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type), 1)
6935 && operand_equal_p (TREE_OPERAND (arg0, 1),
6936 const_binop (MINUS_EXPR, arg2,
6937 integer_one_node, 0), 1))
6938 return pedantic_non_lvalue
6939 (fold (build (MIN_EXPR, type, arg1, arg2)));
6943 /* If C1 is C2 - 1, this is max(A, C2). */
6944 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type), 1)
6945 && operand_equal_p (TREE_OPERAND (arg0, 1),
6946 const_binop (MINUS_EXPR, arg2,
6947 integer_one_node, 0), 1))
6948 return pedantic_non_lvalue
6949 (fold (build (MAX_EXPR, type, arg1, arg2)));
6953 /* If C1 is C2 + 1, this is max(A, C2). */
6954 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type), 1)
6955 && operand_equal_p (TREE_OPERAND (arg0, 1),
6956 const_binop (PLUS_EXPR, arg2,
6957 integer_one_node, 0), 1))
6958 return pedantic_non_lvalue
6959 (fold (build (MAX_EXPR, type, arg1, arg2)));
6968 /* If the second operand is simpler than the third, swap them
6969 since that produces better jump optimization results. */
6970 if ((TREE_CONSTANT (arg1) || DECL_P (arg1)
6971 || TREE_CODE (arg1) == SAVE_EXPR)
6972 && ! (TREE_CONSTANT (TREE_OPERAND (t, 2))
6973 || DECL_P (TREE_OPERAND (t, 2))
6974 || TREE_CODE (TREE_OPERAND (t, 2)) == SAVE_EXPR))
6976 /* See if this can be inverted. If it can't, possibly because
6977 it was a floating-point inequality comparison, don't do
6979 tem = invert_truthvalue (arg0);
6981 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
6983 t = build (code, type, tem,
6984 TREE_OPERAND (t, 2), TREE_OPERAND (t, 1));
6986 /* arg1 should be the first argument of the new T. */
6987 arg1 = TREE_OPERAND (t, 1);
6992 /* Convert A ? 1 : 0 to simply A. */
6993 if (integer_onep (TREE_OPERAND (t, 1))
6994 && integer_zerop (TREE_OPERAND (t, 2))
6995 /* If we try to convert TREE_OPERAND (t, 0) to our type, the
6996 call to fold will try to move the conversion inside
6997 a COND, which will recurse. In that case, the COND_EXPR
6998 is probably the best choice, so leave it alone. */
6999 && type == TREE_TYPE (arg0))
7000 return pedantic_non_lvalue (arg0);
7002 /* Look for expressions of the form A & 2 ? 2 : 0. The result of this
7003 operation is simply A & 2. */
7005 if (integer_zerop (TREE_OPERAND (t, 2))
7006 && TREE_CODE (arg0) == NE_EXPR
7007 && integer_zerop (TREE_OPERAND (arg0, 1))
7008 && integer_pow2p (arg1)
7009 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
7010 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
7012 return pedantic_non_lvalue (convert (type, TREE_OPERAND (arg0, 0)));
7017 /* When pedantic, a compound expression can be neither an lvalue
7018 nor an integer constant expression. */
7019 if (TREE_SIDE_EFFECTS (arg0) || pedantic)
7021 /* Don't let (0, 0) be null pointer constant. */
7022 if (integer_zerop (arg1))
7023 return build1 (NOP_EXPR, type, arg1);
7024 return convert (type, arg1);
7028 return build_complex (type, arg0, arg1);
7032 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7034 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
7035 return omit_one_operand (type, TREE_OPERAND (arg0, 0),
7036 TREE_OPERAND (arg0, 1));
7037 else if (TREE_CODE (arg0) == COMPLEX_CST)
7038 return TREE_REALPART (arg0);
7039 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7040 return fold (build (TREE_CODE (arg0), type,
7041 fold (build1 (REALPART_EXPR, type,
7042 TREE_OPERAND (arg0, 0))),
7043 fold (build1 (REALPART_EXPR,
7044 type, TREE_OPERAND (arg0, 1)))));
7048 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7049 return convert (type, integer_zero_node);
7050 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
7051 return omit_one_operand (type, TREE_OPERAND (arg0, 1),
7052 TREE_OPERAND (arg0, 0));
7053 else if (TREE_CODE (arg0) == COMPLEX_CST)
7054 return TREE_IMAGPART (arg0);
7055 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7056 return fold (build (TREE_CODE (arg0), type,
7057 fold (build1 (IMAGPART_EXPR, type,
7058 TREE_OPERAND (arg0, 0))),
7059 fold (build1 (IMAGPART_EXPR, type,
7060 TREE_OPERAND (arg0, 1)))));
7063 /* Pull arithmetic ops out of the CLEANUP_POINT_EXPR where
7065 case CLEANUP_POINT_EXPR:
7066 if (! has_cleanups (arg0))
7067 return TREE_OPERAND (t, 0);
7070 enum tree_code code0 = TREE_CODE (arg0);
7071 int kind0 = TREE_CODE_CLASS (code0);
7072 tree arg00 = TREE_OPERAND (arg0, 0);
7075 if (kind0 == '1' || code0 == TRUTH_NOT_EXPR)
7076 return fold (build1 (code0, type,
7077 fold (build1 (CLEANUP_POINT_EXPR,
7078 TREE_TYPE (arg00), arg00))));
7080 if (kind0 == '<' || kind0 == '2'
7081 || code0 == TRUTH_ANDIF_EXPR || code0 == TRUTH_ORIF_EXPR
7082 || code0 == TRUTH_AND_EXPR || code0 == TRUTH_OR_EXPR
7083 || code0 == TRUTH_XOR_EXPR)
7085 arg01 = TREE_OPERAND (arg0, 1);
7087 if (TREE_CONSTANT (arg00)
7088 || ((code0 == TRUTH_ANDIF_EXPR || code0 == TRUTH_ORIF_EXPR)
7089 && ! has_cleanups (arg00)))
7090 return fold (build (code0, type, arg00,
7091 fold (build1 (CLEANUP_POINT_EXPR,
7092 TREE_TYPE (arg01), arg01))));
7094 if (TREE_CONSTANT (arg01))
7095 return fold (build (code0, type,
7096 fold (build1 (CLEANUP_POINT_EXPR,
7097 TREE_TYPE (arg00), arg00)),
7106 } /* switch (code) */
7109 /* Determine if first argument is a multiple of second argument. Return 0 if
7110 it is not, or we cannot easily determined it to be.
7112 An example of the sort of thing we care about (at this point; this routine
7113 could surely be made more general, and expanded to do what the *_DIV_EXPR's
7114 fold cases do now) is discovering that
7116 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
7122 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
7124 This code also handles discovering that
7126 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
7128 is a multiple of 8 so we don't have to worry about dealing with a
7131 Note that we *look* inside a SAVE_EXPR only to determine how it was
7132 calculated; it is not safe for fold to do much of anything else with the
7133 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
7134 at run time. For example, the latter example above *cannot* be implemented
7135 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
7136 evaluation time of the original SAVE_EXPR is not necessarily the same at
7137 the time the new expression is evaluated. The only optimization of this
7138 sort that would be valid is changing
7140 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
7144 SAVE_EXPR (I) * SAVE_EXPR (J)
7146 (where the same SAVE_EXPR (J) is used in the original and the
7147 transformed version). */
7150 multiple_of_p (type, top, bottom)
7155 if (operand_equal_p (top, bottom, 0))
7158 if (TREE_CODE (type) != INTEGER_TYPE)
7161 switch (TREE_CODE (top))
7164 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
7165 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
7169 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
7170 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
7173 /* Can't handle conversions from non-integral or wider integral type. */
7174 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
7175 || (TYPE_PRECISION (type)
7176 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
7179 /* .. fall through ... */
7182 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
7185 if ((TREE_CODE (bottom) != INTEGER_CST)
7186 || (tree_int_cst_sgn (top) < 0)
7187 || (tree_int_cst_sgn (bottom) < 0))
7189 return integer_zerop (const_binop (TRUNC_MOD_EXPR,