1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987, 88, 92-97, 1998 Free Software Foundation, Inc.
4 This file is part of GNU CC.
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
21 /*@@ This file should be rewritten to use an arbitrary precision
22 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
23 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
24 @@ The routines that translate from the ap rep should
   @@ warn if precision et al. is lost.
26 @@ This would also make life easier when this technology is used
27 @@ for cross-compilers. */
30 /* The entry points in this file are fold, size_int_wide, size_binop
33 fold takes a tree as argument and returns a simplified tree.
35 size_binop takes a tree code for an arithmetic operation
36 and two operands that are trees, and produces a tree for the
37 result, assuming the type comes from `sizetype'.
39 size_int takes an integer value, and creates a tree constant
40 with type from `sizetype'.
42 force_fit_type takes a constant and prior overflow indicator, and
43 forces the value to fit the type. It returns an overflow indicator. */
/* Handle floating overflow for `const_binop'.  */
static jmp_buf float_error;

/* Forward declarations of the static helpers defined later in this file.
   NOTE(review): this excerpt is an incomplete listing -- several PROTO
   argument lists below are missing their trailing lines (closing
   parentheses); confirm against the full source before compiling.  */
static void encode	PROTO((HOST_WIDE_INT *,
			       HOST_WIDE_INT, HOST_WIDE_INT));
static void decode	PROTO((HOST_WIDE_INT *,
			       HOST_WIDE_INT *, HOST_WIDE_INT *));
int div_and_round_double PROTO((enum tree_code, int, HOST_WIDE_INT,
				HOST_WIDE_INT, HOST_WIDE_INT,
				HOST_WIDE_INT, HOST_WIDE_INT *,
				HOST_WIDE_INT *, HOST_WIDE_INT *,
static int split_tree	PROTO((tree, enum tree_code, tree *,
static tree int_const_binop PROTO((enum tree_code, tree, tree, int, int));
static tree const_binop PROTO((enum tree_code, tree, tree, int));
static tree fold_convert PROTO((tree, tree));
static enum tree_code invert_tree_comparison PROTO((enum tree_code));
static enum tree_code swap_tree_comparison PROTO((enum tree_code));
static int truth_value_p PROTO((enum tree_code));
static int operand_equal_for_comparison_p PROTO((tree, tree, tree));
static int twoval_comparison_p PROTO((tree, tree *, tree *, int *));
static tree eval_subst	PROTO((tree, tree, tree, tree, tree));
static tree omit_one_operand PROTO((tree, tree, tree));
static tree pedantic_omit_one_operand PROTO((tree, tree, tree));
static tree distribute_bit_expr PROTO((enum tree_code, tree, tree, tree));
static tree make_bit_field_ref PROTO((tree, tree, int, int, int));
static tree optimize_bit_field_compare PROTO((enum tree_code, tree,
static tree decode_field_reference PROTO((tree, int *, int *,
					  enum machine_mode *, int *,
					  int *, tree *, tree *));
static int all_ones_mask_p PROTO((tree, int));
static int simple_operand_p PROTO((tree));
static tree range_binop PROTO((enum tree_code, tree, tree, int,
static tree make_range	PROTO((tree, int *, tree *, tree *));
static tree build_range_check PROTO((tree, tree, int, tree, tree));
static int merge_ranges PROTO((int *, tree *, tree *, int, tree, tree,
static tree fold_range_test PROTO((tree));
static tree unextend	PROTO((tree, int, int, tree));
static tree fold_truthop PROTO((enum tree_code, tree, tree, tree));
static tree strip_compound_expr PROTO((tree, tree));
static int multiple_of_p PROTO((tree, tree, tree));
static tree constant_boolean_node PROTO((int, tree));
/* Default branch cost used by the fold heuristics when the target does
   not override it.  */
#define BRANCH_COST 1

/* Suppose A1 + B1 = SUM1, using 2's complement arithmetic ignoring overflow.
   Suppose A, B and SUM have the same respective signs as A1, B1, and SUM1.
   Then this yields nonzero if overflow occurred during the addition.
   Overflow occurs if A and B have the same sign, but A and SUM differ in sign.
   Use `^' to test whether signs differ, and `< 0' to isolate the sign.  */
#define overflow_sum_sign(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)

/* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
   We do that by representing the two-word integer in 4 words, with only
   HOST_BITS_PER_WIDE_INT/2 bits stored in each word, as a positive number.
   NOTE(review): the `#define LOWPART(x) \' line preceding the expansion
   below is missing from this excerpt.  */
  ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT/2)) - 1))
/* Upper half-word of X, shifted down into the low bits.  */
#define HIGHPART(x) \
  ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT/2)
/* The radix of the 4-word representation: one more than the largest
   half-word value.  */
#define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT/2)
120 /* Unpack a two-word integer into 4 words.
121 LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
122 WORDS points to the array of HOST_WIDE_INTs. */
125 encode (words, low, hi)
126 HOST_WIDE_INT *words;
127 HOST_WIDE_INT low, hi;
129 words[0] = LOWPART (low);
130 words[1] = HIGHPART (low);
131 words[2] = LOWPART (hi);
132 words[3] = HIGHPART (hi);
135 /* Pack an array of 4 words into a two-word integer.
136 WORDS points to the array of words.
137 The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces. */
140 decode (words, low, hi)
141 HOST_WIDE_INT *words;
142 HOST_WIDE_INT *low, *hi;
144 *low = words[0] | words[1] * BASE;
145 *hi = words[2] | words[3] * BASE;
148 /* Make the integer constant T valid for its type
149 by setting to 0 or 1 all the bits in the constant
150 that don't belong in the type.
151 Yield 1 if a signed overflow occurs, 0 otherwise.
152 If OVERFLOW is nonzero, a signed overflow has already occurred
153 in calculating T, so propagate it.
155 Make the real constant T valid for its type by calling CHECK_FLOAT_VALUE,
/* Make the integer constant T valid for its type by masking/sign-extending
   the stored two-word value to the type's precision.  Returns nonzero if a
   signed overflow occurred (or if OVERFLOW was already nonzero on entry).
   NOTE(review): this excerpt is incomplete -- the function header, opening
   brace, some declarations (e.g. `prec', the parameter declarations) and
   several statement lines are missing from this view.  */
force_fit_type (t, overflow)
  HOST_WIDE_INT low, high;

  /* Real constants are range-checked instead (target hook), then done.  */
  if (TREE_CODE (t) == REAL_CST)
#ifdef CHECK_FLOAT_VALUE
      CHECK_FLOAT_VALUE (TYPE_MODE (TREE_TYPE (t)), TREE_REAL_CST (t),
  else if (TREE_CODE (t) != INTEGER_CST)

  /* Remember the original two-word value so we can detect truncation.  */
  low = TREE_INT_CST_LOW (t);
  high = TREE_INT_CST_HIGH (t);

  /* Pointers get the precision of their mode, not TYPE_PRECISION.  */
  if (POINTER_TYPE_P (TREE_TYPE (t)))
    prec = TYPE_PRECISION (TREE_TYPE (t));

  /* First clear all bits that are beyond the type's precision.  */
  if (prec == 2 * HOST_BITS_PER_WIDE_INT)
  else if (prec > HOST_BITS_PER_WIDE_INT)
      TREE_INT_CST_HIGH (t)
	&= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
      TREE_INT_CST_HIGH (t) = 0;
      if (prec < HOST_BITS_PER_WIDE_INT)
	TREE_INT_CST_LOW (t) &= ~((HOST_WIDE_INT) (-1) << prec);

  /* Unsigned types do not suffer sign extension or overflow.  */
  if (TREE_UNSIGNED (TREE_TYPE (t)))

  /* If the value's sign bit is set, extend the sign.  */
  if (prec != 2 * HOST_BITS_PER_WIDE_INT
      && (prec > HOST_BITS_PER_WIDE_INT
	  ? (TREE_INT_CST_HIGH (t)
	     & ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)))
	  : TREE_INT_CST_LOW (t) & ((HOST_WIDE_INT) 1 << (prec - 1))))
      /* Value is negative:
	 set to 1 all the bits that are outside this type's precision.  */
      if (prec > HOST_BITS_PER_WIDE_INT)
	  TREE_INT_CST_HIGH (t)
	    |= ((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
	  TREE_INT_CST_HIGH (t) = -1;
	  if (prec < HOST_BITS_PER_WIDE_INT)
	    TREE_INT_CST_LOW (t) |= ((HOST_WIDE_INT) (-1) << prec);

  /* Yield nonzero if signed overflow occurred: either propagated from the
     caller, or the masking above changed the stored value.  */
  ((overflow | (low ^ TREE_INT_CST_LOW (t)) | (high ^ TREE_INT_CST_HIGH (t)))
234 /* Add two doubleword integers with doubleword result.
235 Each argument is given as two `HOST_WIDE_INT' pieces.
236 One argument is L1 and H1; the other, L2 and H2.
237 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
240 add_double (l1, h1, l2, h2, lv, hv)
241 HOST_WIDE_INT l1, h1, l2, h2;
242 HOST_WIDE_INT *lv, *hv;
247 h = h1 + h2 + ((unsigned HOST_WIDE_INT) l < l1);
251 return overflow_sum_sign (h1, h2, h);
254 /* Negate a doubleword integer with doubleword result.
255 Return nonzero if the operation overflows, assuming it's signed.
256 The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
257 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
/* Negate the doubleword integer L1/H1, storing the result in *LV/*HV.
   Returns nonzero if the operation overflows, assuming it's signed
   (only negating the most negative value can overflow, which is what the
   visible return expression tests).
   NOTE(review): this excerpt is incomplete -- the return type, opening
   brace and the negation statements are missing from this view.  */
neg_double (l1, h1, lv, hv)
     HOST_WIDE_INT l1, h1;
     HOST_WIDE_INT *lv, *hv;
  return (*hv & h1) < 0;
278 /* Multiply two doubleword integers with doubleword result.
279 Return nonzero if the operation overflows, assuming it's signed.
280 Each argument is given as two `HOST_WIDE_INT' pieces.
281 One argument is L1 and H1; the other, L2 and H2.
282 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
/* Multiply two doubleword integers with doubleword result in *LV/*HV.
   Works on the 4-half-word representation (see encode/decode): a classic
   schoolbook multiply into the 8-element PROD, keeping the full 4-word
   product so the top half can be checked for signed overflow.
   Returns nonzero on signed overflow.
   NOTE(review): this excerpt is incomplete -- the function header, braces,
   the initialization of `k' and `carry', and the negative-operand guards
   around the two neg_double/add_double correction pairs are missing.  */
mul_double (l1, h1, l2, h2, lv, hv)
     HOST_WIDE_INT l1, h1, l2, h2;
     HOST_WIDE_INT *lv, *hv;
  HOST_WIDE_INT arg1[4];
  HOST_WIDE_INT arg2[4];
  HOST_WIDE_INT prod[4 * 2];
  register unsigned HOST_WIDE_INT carry;
  register int i, j, k;
  HOST_WIDE_INT toplow, tophigh, neglow, neghigh;

  encode (arg1, l1, h1);
  encode (arg2, l2, h2);

  bzero ((char *) prod, sizeof prod);

  for (i = 0; i < 4; i++)
    for (j = 0; j < 4; j++)
	/* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000.  */
	carry += arg1[i] * arg2[j];
	/* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF.  */
	prod[k] = LOWPART (carry);
	carry = HIGHPART (carry);

  decode (prod, lv, hv);	/* This ignores prod[4] through prod[4*2-1] */

  /* Check for overflow by calculating the top half of the answer in full;
     it should agree with the low half's sign bit.  */
  decode (prod+4, &toplow, &tophigh);
      /* Correct the top half for a negative multiplicand: subtract the
	 other operand (two's-complement multiply fix-up).  */
      neg_double (l2, h2, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
      neg_double (l1, h1, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
  /* Overflow iff the top half is not simply the sign extension of the
     low half.  */
  return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
335 /* Shift the doubleword integer in L1, H1 left by COUNT places
336 keeping only PREC bits of result.
337 Shift right if COUNT is negative.
338 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
339 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
/* Shift the doubleword integer L1/H1 left by COUNT places, keeping only
   PREC bits of result; shifts right (delegating to rshift_double) when
   COUNT is negative.  ARITH nonzero requests arithmetic shifting.
   Result goes to *LV/*HV.
   NOTE(review): excerpt incomplete -- the `int prec; int arith;'
   declarations, braces, and the *lv assignment for the large-count case
   are missing from this view.  */
lshift_double (l1, h1, count, prec, lv, hv, arith)
     HOST_WIDE_INT l1, h1, count;
     HOST_WIDE_INT *lv, *hv;
      /* Negative COUNT means a right shift.  */
      rshift_double (l1, h1, - count, prec, lv, hv, arith);

#ifdef SHIFT_COUNT_TRUNCATED
  if (SHIFT_COUNT_TRUNCATED)

  if (count >= HOST_BITS_PER_WIDE_INT)
      /* Whole low word moves into the high word.  */
      *hv = (unsigned HOST_WIDE_INT) l1 << (count - HOST_BITS_PER_WIDE_INT);
      /* The double shift (`>> (w - count - 1) >> 1') avoids the undefined
	 shift-by-word-size when count == 0.  */
      *hv = (((unsigned HOST_WIDE_INT) h1 << count)
	     | ((unsigned HOST_WIDE_INT) l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
      *lv = (unsigned HOST_WIDE_INT) l1 << count;
372 /* Shift the doubleword integer in L1, H1 right by COUNT places
373 keeping only PREC bits of result. COUNT must be positive.
374 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
375 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
/* Shift the doubleword integer L1/H1 right by COUNT places (COUNT must be
   positive), keeping only PREC bits of result.  ARITH nonzero requests an
   arithmetic shift, in which case SIGNMASK replicates the sign bit into
   the vacated positions.  Result goes to *LV/*HV.
   NOTE(review): excerpt incomplete -- the `int prec; int arith;'
   declarations, braces and the *hv assignment for the large-count case
   are missing from this view.  */
rshift_double (l1, h1, count, prec, lv, hv, arith)
     HOST_WIDE_INT l1, h1, count;
     HOST_WIDE_INT *lv, *hv;
  unsigned HOST_WIDE_INT signmask;
    /* All-ones when arithmetic shifting a negative value, else zero.  */
    ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))

#ifdef SHIFT_COUNT_TRUNCATED
  if (SHIFT_COUNT_TRUNCATED)

  if (count >= HOST_BITS_PER_WIDE_INT)
      /* High word supplies all remaining bits; split shift avoids UB when
	 the effective shift count would equal the word width.  */
      *lv = ((signmask << (2 * HOST_BITS_PER_WIDE_INT - count - 1) << 1)
	     | ((unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT)));
      *lv = (((unsigned HOST_WIDE_INT) l1 >> count)
	     | ((unsigned HOST_WIDE_INT) h1 << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
      *hv = ((signmask << (HOST_BITS_PER_WIDE_INT - count))
	     | ((unsigned HOST_WIDE_INT) h1 >> count));
409 /* Rotate the doubleword integer in L1, H1 left by COUNT places
410 keeping only PREC bits of result.
411 Rotate right if COUNT is negative.
412 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
/* Rotate the doubleword integer L1/H1 left by COUNT places, keeping only
   PREC bits of result; a rotate is composed as (x << count) | (x >> (prec
   - count)) using the two shift helpers.  Result goes to *LV/*HV.
   NOTE(review): excerpt incomplete -- the `int prec;' declaration, braces,
   the COUNT normalization and the final OR of s1/s2 into *LV/*HV are
   missing from this view.  */
lrotate_double (l1, h1, count, prec, lv, hv)
     HOST_WIDE_INT l1, h1, count;
     HOST_WIDE_INT *lv, *hv;
  HOST_WIDE_INT s1l, s1h, s2l, s2h;
  lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
432 /* Rotate the doubleword integer in L1, H1 left by COUNT places
433 keeping only PREC bits of result. COUNT must be positive.
434 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
/* Rotate the doubleword integer L1/H1 right by COUNT places (COUNT must be
   positive), keeping only PREC bits of result; mirror image of
   lrotate_double.  Result goes to *LV/*HV.
   NOTE(review): excerpt incomplete -- the `int prec;' declaration, braces
   and the final OR of s1/s2 into *LV/*HV are missing from this view.  */
rrotate_double (l1, h1, count, prec, lv, hv)
     HOST_WIDE_INT l1, h1, count;
     HOST_WIDE_INT *lv, *hv;
  HOST_WIDE_INT s1l, s1h, s2l, s2h;
  rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
454 /* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
455 for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
456 CODE is a tree code for a kind of division, one of
457 TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
   It controls how the quotient is rounded to an integer.
460 Return nonzero if the operation overflows.
461 UNS nonzero says do unsigned division. */
/* Divide doubleword LNUM/HNUM by doubleword LDEN/HDEN, producing quotient
   *LQUO/*HQUO and remainder *LREM/*HREM.  CODE selects the rounding mode
   (TRUNC/FLOOR/CEIL/ROUND _DIV_ or _MOD_ EXPR); UNS nonzero means unsigned
   division.  Returns nonzero if the operation overflows (division by zero,
   or most-negative / -1 in the signed case).  Uses Knuth's Algorithm D on
   the 4-half-word representation for the general case.
   NOTE(review): excerpt incomplete -- braces, several declarations
   (`overflow', `quo_neg', `i', `j'), the switch header and a number of
   statement lines are missing from this view.  */
div_and_round_double (code, uns,
		      lnum_orig, hnum_orig, lden_orig, hden_orig,
		      lquo, hquo, lrem, hrem)
     HOST_WIDE_INT lnum_orig, hnum_orig;	/* num == numerator == dividend */
     HOST_WIDE_INT lden_orig, hden_orig;	/* den == denominator == divisor */
     HOST_WIDE_INT *lquo, *hquo, *lrem, *hrem;
  HOST_WIDE_INT num[4 + 1];	/* extra element for scaling. */
  HOST_WIDE_INT den[4], quo[4];
  unsigned HOST_WIDE_INT work;
  register unsigned HOST_WIDE_INT carry = 0;
  HOST_WIDE_INT lnum = lnum_orig;
  HOST_WIDE_INT hnum = hnum_orig;
  HOST_WIDE_INT lden = lden_orig;
  HOST_WIDE_INT hden = hden_orig;

  /* Division by zero: flag overflow and divide by 1 instead.  */
  if ((hden == 0) && (lden == 0))
    overflow = 1, lden = 1;

  /* calculate quotient sign and convert operands to unsigned.  */
      /* (minimum integer) / (-1) is the only overflow case.  */
      if (neg_double (lnum, hnum, &lnum, &hnum) && (lden & hden) == -1)
      neg_double (lden, hden, &lden, &hden);

  if (hnum == 0 && hden == 0)
    {				/* single precision */
      /* This unsigned division rounds toward zero.  */
      *lquo = lnum / (unsigned HOST_WIDE_INT) lden;

    {	/* trivial case: dividend < divisor */
      /* hden != 0 already checked.  */

  bzero ((char *) quo, sizeof quo);

  bzero ((char *) num, sizeof num);	/* to zero 9th element */
  bzero ((char *) den, sizeof den);

  encode (num, lnum, hnum);
  encode (den, lden, hden);

  /* Special code for when the divisor < BASE.  */
  if (hden == 0 && lden < BASE)
      /* hnum != 0 already checked.  */
      for (i = 4 - 1; i >= 0; i--)
	  work = num[i] + carry * BASE;
	  quo[i] = work / (unsigned HOST_WIDE_INT) lden;
	  carry = work % (unsigned HOST_WIDE_INT) lden;

      /* Full double precision division,
	 with thanks to Don Knuth's "Seminumerical Algorithms".  */
      int num_hi_sig, den_hi_sig;
      unsigned HOST_WIDE_INT quo_est, scale;

      /* Find the highest non-zero divisor digit.  */
      for (i = 4 - 1; ; i--)

      /* Ensure that the first digit of the divisor is at least BASE/2.
	 This is required by the quotient digit estimation algorithm.  */

      scale = BASE / (den[den_hi_sig] + 1);
      if (scale > 1) {		/* scale divisor and dividend */
	for (i = 0; i <= 4 - 1; i++) {
	  work = (num[i] * scale) + carry;
	  num[i] = LOWPART (work);
	  carry = HIGHPART (work);
	for (i = 0; i <= 4 - 1; i++) {
	  work = (den[i] * scale) + carry;
	  den[i] = LOWPART (work);
	  carry = HIGHPART (work);
	  if (den[i] != 0) den_hi_sig = i;

      /* Main long-division loop, one quotient half-word per iteration.  */
      for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--) {
	/* guess the next quotient digit, quo_est, by dividing the first
	   two remaining dividend digits by the high order quotient digit.
	   quo_est is never low and is at most 2 high.  */
	unsigned HOST_WIDE_INT tmp;

	num_hi_sig = i + den_hi_sig + 1;
	work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
	if (num[num_hi_sig] != den[den_hi_sig])
	  quo_est = work / den[den_hi_sig];

	/* refine quo_est so it's usually correct, and at most one high.  */
	tmp = work - quo_est * den[den_hi_sig];
	    && den[den_hi_sig - 1] * quo_est > (tmp * BASE + num[num_hi_sig - 2]))

	/* Try QUO_EST as the quotient digit, by multiplying the
	   divisor by QUO_EST and subtracting from the remaining dividend.
	   Keep in mind that QUO_EST is the I - 1st digit.  */

	for (j = 0; j <= den_hi_sig; j++)
	    work = quo_est * den[j] + carry;
	    carry = HIGHPART (work);
	    work = num[i + j] - LOWPART (work);
	    num[i + j] = LOWPART (work);
	    carry += HIGHPART (work) != 0;

	/* if quo_est was high by one, then num[i] went negative and
	   we need to correct things.  */

	if (num[num_hi_sig] < carry)
	    carry = 0;		/* add divisor back in */
	    for (j = 0; j <= den_hi_sig; j++)
		work = num[i + j] + den[j] + carry;
		carry = HIGHPART (work);
		num[i + j] = LOWPART (work);
	    num [num_hi_sig] += carry;

	/* store the quotient digit.  */

  decode (quo, lquo, hquo);

  /* if result is negative, make it so.  */
    neg_double (*lquo, *hquo, lquo, hquo);

  /* compute trial remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);

    case TRUNC_MOD_EXPR:	/* round toward zero */
    case EXACT_DIV_EXPR:	/* for this one, it shouldn't matter */

    case FLOOR_MOD_EXPR:	/* round toward negative infinity */
      if (quo_neg && (*lrem != 0 || *hrem != 0))   /* ratio < 0 && rem != 0 */
	  /* Floor: quotient is one less than the truncated quotient.  */
	  add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
      else return overflow;

    case CEIL_MOD_EXPR:		/* round toward positive infinity */
      if (!quo_neg && (*lrem != 0 || *hrem != 0))  /* ratio > 0 && rem != 0 */
	  add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
      else return overflow;

    case ROUND_MOD_EXPR:	/* round to closest integer */
	HOST_WIDE_INT labs_rem = *lrem, habs_rem = *hrem;
	HOST_WIDE_INT labs_den = lden, habs_den = hden, ltwice, htwice;

	/* get absolute values */
	if (*hrem < 0) neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
	if (hden < 0) neg_double (lden, hden, &labs_den, &habs_den);

	/* if (2 * abs (lrem) >= abs (lden)) */
	mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
		    labs_rem, habs_rem, &ltwice, &htwice);
	if (((unsigned HOST_WIDE_INT) habs_den
	     < (unsigned HOST_WIDE_INT) htwice)
	    || (((unsigned HOST_WIDE_INT) habs_den
		 == (unsigned HOST_WIDE_INT) htwice)
		/* NOTE(review): `(HOST_WIDE_INT unsigned)' is an unusual
		   qualifier order -- it is only valid because HOST_WIDE_INT
		   is a macro expanding to a basic type; every other cast in
		   this file writes `(unsigned HOST_WIDE_INT)'.  Confirm and
		   normalize upstream.  */
		&& ((HOST_WIDE_INT unsigned) labs_den
		    < (unsigned HOST_WIDE_INT) ltwice)))

	    add_double (*lquo, *hquo,
			(HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);

	    add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
	else return overflow;

  /* compute true remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
716 #ifndef REAL_ARITHMETIC
717 /* Effectively truncate a real value to represent the nearest possible value
718 in a narrower mode. The result is actually represented in the same data
719 type as the argument, but its value is usually different.
721 A trap may occur during the FP operations and it is the responsibility
722 of the calling function to have a handler established. */
725 real_value_truncate (mode, arg)
726 enum machine_mode mode;
729 return REAL_VALUE_TRUNCATE (mode, arg);
#if TARGET_FLOAT_FORMAT == IEEE_FLOAT_FORMAT

/* Check for infinity in an IEEE double precision number.
   NOTE(review): this excerpt is incomplete -- the three predicate
   functions in this #if region (presumably target_isinf, target_isnan and
   target_negative; confirm against the full source) are each missing
   their header line, union declaration and braces.  Each one overlays the
   double with bit-fields for both byte orders and inspects sign,
   exponent and mantissa.  */

/* The IEEE 64-bit double format.  */
      unsigned exponent : 11;
      unsigned mantissa1 : 20;
      unsigned mantissa1 : 20;
      unsigned exponent : 11;

  if (u.big_endian.sign == 1)
      /* Infinity: maximal exponent, zero mantissa.  */
      return (u.big_endian.exponent == 2047
	      && u.big_endian.mantissa1 == 0
	      && u.big_endian.mantissa2 == 0);
      return (u.little_endian.exponent == 2047
	      && u.little_endian.mantissa1 == 0
	      && u.little_endian.mantissa2 == 0);

/* Check whether an IEEE double precision number is a NaN.  */

/* The IEEE 64-bit double format.  */
      unsigned exponent : 11;
      unsigned mantissa1 : 20;
      unsigned mantissa1 : 20;
      unsigned exponent : 11;

  if (u.big_endian.sign == 1)
      /* NaN: maximal exponent, nonzero mantissa.  */
      return (u.big_endian.exponent == 2047
	      && (u.big_endian.mantissa1 != 0
		  || u.big_endian.mantissa2 != 0));
      return (u.little_endian.exponent == 2047
	      && (u.little_endian.mantissa1 != 0
		  || u.little_endian.mantissa2 != 0));

/* Check for a negative IEEE double precision number.  */

/* The IEEE 64-bit double format.  */
      unsigned exponent : 11;
      unsigned mantissa1 : 20;
      unsigned mantissa1 : 20;
      unsigned exponent : 11;

  if (u.big_endian.sign == 1)
      return u.big_endian.sign;
      return u.little_endian.sign;

#else /* Target not IEEE */

/* Let's assume other float formats don't have infinity.
   (This can be overridden by redefining REAL_VALUE_ISINF.)  */

/* Let's assume other float formats don't have NaNs.
   (This can be overridden by redefining REAL_VALUE_ISNAN.)  */

/* Let's assume other float formats don't have minus zero.
   (This can be overridden by redefining REAL_VALUE_NEGATIVE.)  */

#endif /* Target not IEEE */
879 /* Try to change R into its exact multiplicative inverse in machine mode
880 MODE. Return nonzero function value if successful. */
/* Try to change R into its exact multiplicative inverse in machine mode
   MODE; returns nonzero on success.  Only powers of two have exact
   reciprocals, which is what the low-order significand-bit test below
   detects.  Uses setjmp/float_error to bail out on FP traps.
   NOTE(review): this excerpt is incomplete -- the parameter declaration
   for R, the union declarations for x/t/y, braces, the actual reciprocal
   computation and the final store/return are missing from this view.  */
exact_real_inverse (mode, r)
     enum machine_mode mode;

  /* Usually disable if bounds checks are not reliable.  */
  if ((HOST_FLOAT_FORMAT != TARGET_FLOAT_FORMAT) && !flag_pretend_float)

  /* Set array index to the less significant bits in the unions, depending
     on the endian-ness of the host doubles.
     Disable if insufficient information on the data structure.  */
#if HOST_FLOAT_FORMAT == UNKNOWN_FLOAT_FORMAT
#if HOST_FLOAT_FORMAT == VAX_FLOAT_FORMAT
#if HOST_FLOAT_FORMAT == IBM_FLOAT_FORMAT
#define K (2 * HOST_FLOAT_WORDS_BIG_ENDIAN)

  if (setjmp (float_error))
      /* Don't do the optimization if there was an arithmetic error.  */
      set_float_handler (NULL_PTR);
  set_float_handler (float_error);

  /* Domain check the argument.  */
  if (REAL_VALUE_ISINF (x.d) || REAL_VALUE_ISNAN (x.d))

  /* Compute the reciprocal and check for numerical exactness.
     It is unnecessary to check all the significand bits to determine
     whether X is a power of 2.  If X is not, then it is impossible for
     the bottom half significand of both X and 1/X to be all zero bits.
     Hence we ignore the data structure of the top half and examine only
     the low order bits of the two significands.  */
  if (x.i[K] != 0 || x.i[K + 1] != 0 || t.i[K] != 0 || t.i[K + 1] != 0)

  /* Truncate to the required mode and range-check the result.  */
  y.d = REAL_VALUE_TRUNCATE (mode, t.d);
#ifdef CHECK_FLOAT_VALUE
  if (CHECK_FLOAT_VALUE (mode, y.d, i))

  /* Fail if truncation changed the value.  */
  if (y.d != t.d || y.d == 0.0)

  if (REAL_VALUE_ISINF (y.d) || REAL_VALUE_ISNAN (y.d))

  /* Output the reciprocal and return success flag.  */
  set_float_handler (NULL_PTR);
#endif /* no REAL_ARITHMETIC */
968 /* Split a tree IN into a constant and a variable part
969 that could be combined with CODE to make IN.
970 CODE must be a commutative arithmetic operation.
   Store the constant part into *CONP and the variable part into *VARP.
972 Return 1 if this was done; zero means the tree IN did not decompose
975 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.
976 Therefore, we must tell the caller whether the variable part
977 was subtracted. We do this by storing 1 or -1 into *VARSIGNP.
978 The value stored is the coefficient for the variable term.
979 The constant term we return should always be added;
980 we negate it if necessary. */
/* Split the tree IN into a constant part (*CONP) and a variable part
   (*VARP) that could be combined with CODE to make IN.  CODE must be a
   commutative arithmetic operation.  *VARSIGNP receives 1 or -1: the
   coefficient of the variable term (MINUS_EXPR with the variable second
   yields -1).  The constant returned is always to be added; it is negated
   here when necessary.  Returns 1 on success, 0 if IN did not decompose.
   NOTE(review): this excerpt is incomplete -- the static return type,
   parameter declarations, braces and the `return 1/0' statements are
   missing from this view.  */
split_tree (in, code, varp, conp, varsignp)
  register tree outtype = TREE_TYPE (in);

  /* Strip any conversions that don't change the machine mode.  */
  while ((TREE_CODE (in) == NOP_EXPR
	  || TREE_CODE (in) == CONVERT_EXPR)
	 && (TYPE_MODE (TREE_TYPE (in))
	     == TYPE_MODE (TREE_TYPE (TREE_OPERAND (in, 0)))))
    in = TREE_OPERAND (in, 0);

  if (TREE_CODE (in) == code
      || (! FLOAT_TYPE_P (TREE_TYPE (in))
	  /* We can associate addition and subtraction together
	     (even though the C standard doesn't say so)
	     for integers because the value is not affected.
	     For reals, the value might be affected, so we can't.  */
	  && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
	      || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
      /* NOTE(review): this local `code' shadows the parameter of the same
	 name for the rest of this block.  */
      enum tree_code code = TREE_CODE (TREE_OPERAND (in, 0));
      if (code == INTEGER_CST)
	  /* Constant is the first operand.  */
	  *conp = TREE_OPERAND (in, 0);
	  *varp = TREE_OPERAND (in, 1);
	  if (TYPE_MODE (TREE_TYPE (*varp)) != TYPE_MODE (outtype)
	      && TREE_TYPE (*varp) != outtype)
	    *varp = convert (outtype, *varp);
	  *varsignp = (TREE_CODE (in) == MINUS_EXPR) ? -1 : 1;

      if (TREE_CONSTANT (TREE_OPERAND (in, 1)))
	  /* Constant is the second operand.  */
	  *conp = TREE_OPERAND (in, 1);
	  *varp = TREE_OPERAND (in, 0);

	  if (TYPE_MODE (TREE_TYPE (*varp)) != TYPE_MODE (outtype)
	      && TREE_TYPE (*varp) != outtype)
	    *varp = convert (outtype, *varp);
	  if (TREE_CODE (in) == MINUS_EXPR)
	      /* If operation is subtraction and constant is second,
		 must negate it to get an additive constant.
		 And this cannot be done unless it is a manifest constant.
		 It could also be the address of a static variable.
		 We cannot negate that, so give up.  */
	      if (TREE_CODE (*conp) == INTEGER_CST)
		/* Subtracting from integer_zero_node loses for long long.  */
		*conp = fold (build1 (NEGATE_EXPR, TREE_TYPE (*conp), *conp));

      if (TREE_CONSTANT (TREE_OPERAND (in, 0)))
	  *conp = TREE_OPERAND (in, 0);
	  *varp = TREE_OPERAND (in, 1);
	  if (TYPE_MODE (TREE_TYPE (*varp)) != TYPE_MODE (outtype)
	      && TREE_TYPE (*varp) != outtype)
	    *varp = convert (outtype, *varp);
	  *varsignp = (TREE_CODE (in) == MINUS_EXPR) ? -1 : 1;
1057 /* Combine two integer constants ARG1 and ARG2 under operation CODE
1058 to produce a new constant.
1060 If NOTRUNC is nonzero, do not truncate the result to fit the data type.
1061 If FORSIZE is nonzero, compute overflow for unsigned types. */
/* Combine the two INTEGER_CST operands ARG1 and ARG2 under operation CODE
   into a new INTEGER_CST.  NOTRUNC nonzero skips force_fit_type; FORSIZE
   nonzero makes unsigned arithmetic report overflow (size calculations).
   Overflow/constant-overflow flags are propagated onto the result node.
   NOTE(review): this excerpt is incomplete -- the static return type,
   braces, the switch header, several `break'/`return' lines and the `t'
   declaration are missing from this view.  */
int_const_binop (code, arg1, arg2, notrunc, forsize)
     enum tree_code code;
     register tree arg1, arg2;
     int notrunc, forsize;
  HOST_WIDE_INT int1l, int1h, int2l, int2h;
  HOST_WIDE_INT low, hi;
  HOST_WIDE_INT garbagel, garbageh;
  int uns = TREE_UNSIGNED (TREE_TYPE (arg1));
  int no_overflow = 0;

  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

      /* Bitwise ops operate on the two words independently.  */
      low = int1l | int2l, hi = int1h | int2h;

      low = int1l ^ int2l, hi = int1h ^ int2h;

      low = int1l & int2l, hi = int1h & int2h;

    case BIT_ANDTC_EXPR:
      low = int1l & ~int2l, hi = int1h & ~int2h;

      /* It's unclear from the C standard whether shifts can overflow.
	 The following code ignores overflow; perhaps a C standard
	 interpretation ruling is needed.  */
      lshift_double (int1l, int1h, int2l,
		     TYPE_PRECISION (TREE_TYPE (arg1)),

      lrotate_double (int1l, int1h, int2l,
		      TYPE_PRECISION (TREE_TYPE (arg1)),

      overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);

      /* Subtraction as addition of the negation; overflow is re-derived
	 from the sign pattern of the operands and result.  */
      neg_double (int2l, int2h, &low, &hi);
      add_double (int1l, int1h, low, hi, &low, &hi);
      overflow = overflow_sum_sign (hi, int2h, int1h);

      overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && int2l > 0
	  && ! TREE_CONSTANT_OVERFLOW (arg1)
	  && ! TREE_CONSTANT_OVERFLOW (arg2)
	  && int1h == 0 && int1l >= 0)
	  if (code == CEIL_DIV_EXPR)
	  low = int1l / int2l, hi = 0;

      /* ... fall through ...  */

    case ROUND_DIV_EXPR:
      if (int2h == 0 && int2l == 1)
	  /* Division by one: the value is unchanged.  */
	  low = int1l, hi = int1h;
      if (int1l == int2l && int1h == int2h
	  && ! (int1l == 0 && int1h == 0))
      overflow = div_and_round_double (code, uns,
				       int1l, int1h, int2l, int2h,
				       &low, &hi, &garbagel, &garbageh);

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && int2l > 0
	  && ! TREE_CONSTANT_OVERFLOW (arg1)
	  && ! TREE_CONSTANT_OVERFLOW (arg2)
	  && int1h == 0 && int1l >= 0)
	  if (code == CEIL_MOD_EXPR)
	  low = int1l % int2l, hi = 0;

      /* ... fall through ...  */

    case ROUND_MOD_EXPR:
      overflow = div_and_round_double (code, uns,
				       int1l, int1h, int2l, int2h,
				       &garbagel, &garbageh, &low, &hi);

	  /* Unsigned doubleword compare: high words first.  */
	  low = (((unsigned HOST_WIDE_INT) int1h
		  < (unsigned HOST_WIDE_INT) int2h)
		 || (((unsigned HOST_WIDE_INT) int1h
		      == (unsigned HOST_WIDE_INT) int2h)
		     && ((unsigned HOST_WIDE_INT) int1l
			 < (unsigned HOST_WIDE_INT) int2l)));

	  /* Signed doubleword compare; low words still compare unsigned.  */
	  low = ((int1h < int2h)
		 || ((int1h == int2h)
		     && ((unsigned HOST_WIDE_INT) int1l
			 < (unsigned HOST_WIDE_INT) int2l)));

      /* `low' now holds (arg1 < arg2); pick the operand MIN/MAX wants.  */
      if (low == (code == MIN_EXPR))
	low = int1l, hi = int1h;
	low = int2l, hi = int2h;

  /* Share pre-built size nodes when the result is an in-range sizetype
     constant with no overflow.  */
  if (TREE_TYPE (arg1) == sizetype && hi == 0
      && (TYPE_MAX_VALUE (sizetype) == NULL
	  || low <= TREE_INT_CST_LOW (TYPE_MAX_VALUE (sizetype)))
      && ! TREE_OVERFLOW (arg1) && ! TREE_OVERFLOW (arg2))

  t = build_int_2 (low, hi);
  TREE_TYPE (t) = TREE_TYPE (arg1);

    = ((notrunc ? (!uns || forsize) && overflow
	: force_fit_type (t, (!uns || forsize) && overflow) && ! no_overflow)
       | TREE_OVERFLOW (arg1)
       | TREE_OVERFLOW (arg2));
  /* If we're doing a size calculation, unsigned arithmetic does overflow.
     So check if force_fit_type truncated the value.  */
      && ! TREE_OVERFLOW (t)
      && (TREE_INT_CST_HIGH (t) != hi
	  || TREE_INT_CST_LOW (t) != low))
    TREE_OVERFLOW (t) = 1;
  TREE_CONSTANT_OVERFLOW (t) = (TREE_OVERFLOW (t)
				| TREE_CONSTANT_OVERFLOW (arg1)
				| TREE_CONSTANT_OVERFLOW (arg2));
1250 /* Combine two constants ARG1 and ARG2 under operation CODE
1251 to produce a new constant.
1252 We assume ARG1 and ARG2 have the same data type,
1253 or at least are the same kind of constant and the same machine mode.
1255 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
/* Combine two constants ARG1 and ARG2 under operation CODE into a new
   constant.  ARG1 and ARG2 are assumed to have the same type (or at least
   the same kind of constant and machine mode); integers dispatch to
   int_const_binop, reals go through the host FP path (with setjmp-based
   trap handling), complex constants recurse componentwise.  NOTRUNC
   nonzero skips truncation to the data type.
   NOTE(review): this excerpt is incomplete -- the static return type,
   braces, the `t'/`d1'/`d2' declarations, the non-REAL_ARITHMETIC operator
   switch and the complex-case switch header are missing from this view.  */
const_binop (code, arg1, arg2, notrunc)
     enum tree_code code;
     register tree arg1, arg2;
  STRIP_NOPS (arg1); STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return int_const_binop (code, arg1, arg2, notrunc, 0);

#if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
  if (TREE_CODE (arg1) == REAL_CST)
      REAL_VALUE_TYPE value;

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      /* If either operand is a NaN, just return it.  Otherwise, set up
	 for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
      else if (REAL_VALUE_ISNAN (d2))
      else if (setjmp (float_error))
	  /* Trap taken: result is the (copied) first operand with
	     overflow noted below.  */
	  t = copy_node (arg1);

      set_float_handler (float_error);

#ifdef REAL_ARITHMETIC
      REAL_ARITHMETIC (value, code, d1, d2);
#ifndef REAL_INFINITY
	  value = MIN (d1, d2);
	  value = MAX (d1, d2);
#endif /* no REAL_ARITHMETIC */
      t = build_real (TREE_TYPE (arg1),
		      real_value_truncate (TYPE_MODE (TREE_TYPE (arg1)), value));

      set_float_handler (NULL_PTR);
	= (force_fit_type (t, overflow)
	   | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
      TREE_CONSTANT_OVERFLOW (t)
	  | TREE_CONSTANT_OVERFLOW (arg1)
	  | TREE_CONSTANT_OVERFLOW (arg2);
#endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
  if (TREE_CODE (arg1) == COMPLEX_CST)
      register tree type = TREE_TYPE (arg1);
      register tree r1 = TREE_REALPART (arg1);
      register tree i1 = TREE_IMAGPART (arg1);
      register tree r2 = TREE_REALPART (arg2);
      register tree i2 = TREE_IMAGPART (arg2);

	  /* (r1+i1) + (r2+i2) = (r1+r2) + (i1+i2)i  */
	  t = build_complex (type,
			     const_binop (PLUS_EXPR, r1, r2, notrunc),
			     const_binop (PLUS_EXPR, i1, i2, notrunc));

	  t = build_complex (type,
			     const_binop (MINUS_EXPR, r1, r2, notrunc),
			     const_binop (MINUS_EXPR, i1, i2, notrunc));

	  /* (r1+i1)(r2+i2) = (r1r2-i1i2) + (r1i2+i1r2)i  */
	  t = build_complex (type,
			     const_binop (MINUS_EXPR,
					  const_binop (MULT_EXPR,
					  const_binop (MULT_EXPR,
			     const_binop (PLUS_EXPR,
					  const_binop (MULT_EXPR,
					  const_binop (MULT_EXPR,

	    /* Complex division: multiply by the conjugate and divide by
	       |r2+i2|^2 = r2*r2 + i2*i2.  */
	    register tree magsquared
	      = const_binop (PLUS_EXPR,
			     const_binop (MULT_EXPR, r2, r2, notrunc),
			     const_binop (MULT_EXPR, i2, i2, notrunc),

	    t = build_complex (type,
			       (INTEGRAL_TYPE_P (TREE_TYPE (r1))
				? TRUNC_DIV_EXPR : RDIV_EXPR,
				const_binop (PLUS_EXPR,
					     const_binop (MULT_EXPR, r1, r2,
					     const_binop (MULT_EXPR, i1, i2,
				magsquared, notrunc),
			       (INTEGRAL_TYPE_P (TREE_TYPE (r1))
				? TRUNC_DIV_EXPR : RDIV_EXPR,
				const_binop (MINUS_EXPR,
					     const_binop (MULT_EXPR, i1, r2,
					     const_binop (MULT_EXPR, r1, i2,
				magsquared, notrunc));
1427 /* Return an INTEGER_CST with value V. The type is determined by bit_p:
1428 if it is zero, the type is taken from sizetype; if it is one, the type
1429 is taken from bitsizetype. */
/* Small values (0 .. 2*HOST_BITS_PER_WIDE_INT with no high word) are
   cached in a static table, one entry per (value, bit_p) pair, and the
   cached nodes are allocated permanently. */
1432 size_int_wide (number, high, bit_p)
1433 unsigned HOST_WIDE_INT number, high;
1437 /* Type-size nodes already made for small sizes. */
1438 static tree size_table[2*HOST_BITS_PER_WIDE_INT + 1][2];
/* Fast path: return the cached node if one exists. */
1440 if (number < 2*HOST_BITS_PER_WIDE_INT + 1 && ! high
1441 && size_table[number][bit_p] != 0)
1442 return size_table[number][bit_p];
1443 if (number < 2*HOST_BITS_PER_WIDE_INT + 1 && ! high)
1445 push_obstacks_nochange ();
1446 /* Make this a permanent node. */
1447 end_temporary_allocation ();
1448 t = build_int_2 (number, 0);
1449 TREE_TYPE (t) = bit_p ? bitsizetype : sizetype;
1450 size_table[number][bit_p] = t;
/* Large or high-word values are built fresh each call and checked for
   overflow against the chosen type. */
1455 t = build_int_2 (number, high);
1456 TREE_TYPE (t) = bit_p ? bitsizetype : sizetype;
1457 TREE_OVERFLOW (t) = TREE_CONSTANT_OVERFLOW (t) = force_fit_type (t, 0);
1462 /* Combine operands OP1 and OP2 with arithmetic operation CODE.
1463 CODE is a tree code. Data type is taken from `sizetype',
1464 If the operands are constant, so is the result. */
1467 size_binop (code, arg0, arg1)
1468 enum tree_code code;
1471 /* Handle the special case of two integer constants faster. */
1472 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
1474 /* And some specific cases even faster than that. */
/* x + 0, 0 + x, x - 0, and 1 * x need no arithmetic at all.
   NOTE(review): the return statements for these shortcuts are elided
   in this excerpt. */
1475 if (code == PLUS_EXPR && integer_zerop (arg0))
1477 else if ((code == MINUS_EXPR || code == PLUS_EXPR)
1478 && integer_zerop (arg1))
1480 else if (code == MULT_EXPR && integer_onep (arg0))
1483 /* Handle general case of two integer constants. */
1484 return int_const_binop (code, arg0, arg1, 0, 1);
/* Propagate errors rather than folding them. */
1487 if (arg0 == error_mark_node || arg1 == error_mark_node)
1488 return error_mark_node;
/* Non-constant operands: build the expression and let fold simplify. */
1490 return fold (build (code, sizetype, arg0, arg1));
1493 /* Combine operands OP1 and OP2 with arithmetic operation CODE.
1494 CODE is a tree code. Data type is taken from `ssizetype',
1495 If the operands are constant, so is the result. */
/* Signed counterpart of size_binop: identical structure, but the result
   type is ssizetype and int_const_binop is told the type is signed. */
1498 ssize_binop (code, arg0, arg1)
1499 enum tree_code code;
1502 /* Handle the special case of two integer constants faster. */
1503 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
1505 /* And some specific cases even faster than that. */
1506 if (code == PLUS_EXPR && integer_zerop (arg0))
1508 else if ((code == MINUS_EXPR || code == PLUS_EXPR)
1509 && integer_zerop (arg1))
1511 else if (code == MULT_EXPR && integer_onep (arg0))
1514 /* Handle general case of two integer constants. We convert
1515 arg0 to ssizetype because int_const_binop uses its type for the
1517 arg0 = convert (ssizetype, arg0);
1518 return int_const_binop (code, arg0, arg1, 0, 0);
1521 if (arg0 == error_mark_node || arg1 == error_mark_node)
1522 return error_mark_node;
1524 return fold (build (code, ssizetype, arg0, arg1));
1527 /* Given T, a tree representing type conversion of ARG1, a constant,
1528 return a constant tree representing the result of conversion. */
/* Handles: integer/pointer <- INTEGER_CST (sign-extend or truncate),
   integer <- REAL_CST (truncate toward zero with range checking), and
   real <- INTEGER_CST or REAL_CST (with float-trap protection).
   Overflow flags are propagated from ARG1 and from the conversion. */
1531 fold_convert (t, arg1)
1535 register tree type = TREE_TYPE (t);
1538 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
1540 if (TREE_CODE (arg1) == INTEGER_CST)
1542 /* If we would build a constant wider than GCC supports,
1543 leave the conversion unfolded. */
1544 if (TYPE_PRECISION (type) > 2 * HOST_BITS_PER_WIDE_INT)
1547 /* Given an integer constant, make new constant with new type,
1548 appropriately sign-extended or truncated. */
1549 t = build_int_2 (TREE_INT_CST_LOW (arg1),
1550 TREE_INT_CST_HIGH (arg1));
1551 TREE_TYPE (t) = type;
1552 /* Indicate an overflow if (1) ARG1 already overflowed,
1553 or (2) force_fit_type indicates an overflow.
1554 Tell force_fit_type that an overflow has already occurred
1555 if ARG1 is a too-large unsigned value and T is signed.
1556 But don't indicate an overflow if converting a pointer. */
1558 = ((force_fit_type (t,
1559 (TREE_INT_CST_HIGH (arg1) < 0
1560 && (TREE_UNSIGNED (type)
1561 < TREE_UNSIGNED (TREE_TYPE (arg1)))))
1562 && ! POINTER_TYPE_P (TREE_TYPE (arg1)))
1563 || TREE_OVERFLOW (arg1));
1564 TREE_CONSTANT_OVERFLOW (t)
1565 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
1567 #if !defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
1568 else if (TREE_CODE (arg1) == REAL_CST)
1570 /* Don't initialize these, use assignments.
1571 Initialized local aggregates don't work on old compilers. */
1575 tree type1 = TREE_TYPE (arg1);
1578 x = TREE_REAL_CST (arg1);
/* l and u are the real-valued images of the target type's min and max;
   a missing TYPE_MAX_VALUE means there is no upper bound to check. */
1579 l = real_value_from_int_cst (type1, TYPE_MIN_VALUE (type));
1581 no_upper_bound = (TYPE_MAX_VALUE (type) == NULL);
1582 if (!no_upper_bound)
1583 u = real_value_from_int_cst (type1, TYPE_MAX_VALUE (type));
1585 /* See if X will be in range after truncation towards 0.
1586 To compensate for truncation, move the bounds away from 0,
1587 but reject if X exactly equals the adjusted bounds. */
1588 #ifdef REAL_ARITHMETIC
1589 REAL_ARITHMETIC (l, MINUS_EXPR, l, dconst1);
1590 if (!no_upper_bound)
1591 REAL_ARITHMETIC (u, PLUS_EXPR, u, dconst1);
1594 if (!no_upper_bound)
1597 /* If X is a NaN, use zero instead and show we have an overflow.
1598 Otherwise, range check. */
1599 if (REAL_VALUE_ISNAN (x))
1600 overflow = 1, x = dconst0;
1601 else if (! (REAL_VALUES_LESS (l, x)
1603 && REAL_VALUES_LESS (x, u)))
/* Without REAL_ARITHMETIC, split X into two host half-words by
   repeated division, handling the sign-bit half-word specially. */
1606 #ifndef REAL_ARITHMETIC
1608 HOST_WIDE_INT low, high;
1609 HOST_WIDE_INT half_word
1610 = (HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2);
1615 high = (HOST_WIDE_INT) (x / half_word / half_word);
1616 x -= (REAL_VALUE_TYPE) high * half_word * half_word;
1617 if (x >= (REAL_VALUE_TYPE) half_word * half_word / 2)
1619 low = x - (REAL_VALUE_TYPE) half_word * half_word / 2;
1620 low |= (HOST_WIDE_INT) -1 << (HOST_BITS_PER_WIDE_INT - 1);
1623 low = (HOST_WIDE_INT) x;
1624 if (TREE_REAL_CST (arg1) < 0)
1625 neg_double (low, high, &low, &high);
1626 t = build_int_2 (low, high);
1630 HOST_WIDE_INT low, high;
1631 REAL_VALUE_TO_INT (&low, &high, x);
1632 t = build_int_2 (low, high);
1635 TREE_TYPE (t) = type;
1637 = TREE_OVERFLOW (arg1) | force_fit_type (t, overflow);
1638 TREE_CONSTANT_OVERFLOW (t)
1639 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
1641 #endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
1642 TREE_TYPE (t) = type;
1644 else if (TREE_CODE (type) == REAL_TYPE)
1646 #if !defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
1647 if (TREE_CODE (arg1) == INTEGER_CST)
1648 return build_real_from_int_cst (type, arg1);
1649 #endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
1650 if (TREE_CODE (arg1) == REAL_CST)
/* NaN survives a real-to-real conversion; just retype it. */
1652 if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1)))
1655 TREE_TYPE (arg1) = type;
/* Truncation to the narrower mode may trap; the setjmp branch marks
   the copied node as overflowed. */
1658 else if (setjmp (float_error))
1661 t = copy_node (arg1);
1664 set_float_handler (float_error);
1666 t = build_real (type, real_value_truncate (TYPE_MODE (type),
1667 TREE_REAL_CST (arg1)));
1668 set_float_handler (NULL_PTR);
1672 = TREE_OVERFLOW (arg1) | force_fit_type (t, overflow);
1673 TREE_CONSTANT_OVERFLOW (t)
1674 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
1678 TREE_CONSTANT (t) = 1;
1682 /* Return an expr equal to X but certainly not valid as an lvalue.
1683 Also make sure it is not valid as an null pointer constant. */
/* Constants and ADDR_EXPRs are never lvalues, so most inputs can be
   returned unchanged; an integer zero needs a NOP_EXPR wrapper so it
   stops being a null pointer constant; anything else gets a
   NON_LVALUE_EXPR wrapper. */
1691 /* These things are certainly not lvalues. */
1692 if (TREE_CODE (x) == NON_LVALUE_EXPR
1693 || TREE_CODE (x) == INTEGER_CST
1694 || TREE_CODE (x) == REAL_CST
1695 || TREE_CODE (x) == STRING_CST
1696 || TREE_CODE (x) == ADDR_EXPR)
1698 if (TREE_CODE (x) == INTEGER_CST && integer_zerop (x))
1700 /* Use NOP_EXPR instead of NON_LVALUE_EXPR
1701 so convert_for_assignment won't strip it.
1702 This is so this 0 won't be treated as a null pointer constant. */
1703 result = build1 (NOP_EXPR, TREE_TYPE (x), x);
1704 TREE_CONSTANT (result) = TREE_CONSTANT (x);
1710 result = build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
1711 TREE_CONSTANT (result) = TREE_CONSTANT (x);
1715 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
1716 Zero means allow extended lvalues. */
1718 int pedantic_lvalues;
1720 /* When pedantic, return an expr equal to X but certainly not valid as a
1721 pedantic lvalue. Otherwise, return X. */
/* Thin conditional wrapper around non_lvalue, gated on the
   pedantic_lvalues flag above. */
1724 pedantic_non_lvalue (x)
1727 if (pedantic_lvalues)
1728 return non_lvalue (x);
1733 /* Given a tree comparison code, return the code that is the logical inverse
1734 of the given code. It is not safe to do this for floating-point
1735 comparisons, except for NE_EXPR and EQ_EXPR. */
/* NOTE(review): the switch body mapping each comparison code to its
   inverse (EQ<->NE, GT<->LE, LT<->GE, ...) is elided in this excerpt. */
1737 static enum tree_code
1738 invert_tree_comparison (code)
1739 enum tree_code code;
1760 /* Similar, but return the comparison that results if the operands are
1761 swapped. This is safe for floating-point. */
/* NOTE(review): the switch body (GT<->LT, GE<->LE, EQ/NE unchanged)
   is elided in this excerpt. */
1763 static enum tree_code
1764 swap_tree_comparison (code)
1765 enum tree_code code;
1785 /* Return nonzero if CODE is a tree code that represents a truth value. */
/* True for any comparison ('<' class) and for the logical TRUTH_*
   operators. */
1788 truth_value_p (code)
1789 enum tree_code code;
1791 return (TREE_CODE_CLASS (code) == '<'
1792 || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
1793 || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
1794 || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
1797 /* Return nonzero if two operands are necessarily equal.
1798 If ONLY_CONST is non-zero, only return non-zero for constants.
1799 This function tests whether the operands are indistinguishable;
1800 it does not test whether they are equal using C's == operation.
1801 The distinction is important for IEEE floating point, because
1802 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
1803 (2) two NaNs may be indistinguishable, but NaN!=NaN. */
1806 operand_equal_p (arg0, arg1, only_const)
1810 /* If both types don't have the same signedness, then we can't consider
1811 them equal. We must check this before the STRIP_NOPS calls
1812 because they may change the signedness of the arguments. */
1813 if (TREE_UNSIGNED (TREE_TYPE (arg0)) != TREE_UNSIGNED (TREE_TYPE (arg1)))
1819 if (TREE_CODE (arg0) != TREE_CODE (arg1)
1820 /* This is needed for conversions and for COMPONENT_REF.
1821 Might as well play it safe and always test this. */
1822 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
1825 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
1826 We don't care about side effects in that case because the SAVE_EXPR
1827 takes care of that for us. In all other cases, two expressions are
1828 equal if they have no side effects. If we have two identical
1829 expressions with side effects that should be treated the same due
1830 to the only side effects being identical SAVE_EXPR's, that will
1831 be detected in the recursive calls below. */
1832 if (arg0 == arg1 && ! only_const
1833 && (TREE_CODE (arg0) == SAVE_EXPR
1834 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
1837 /* Next handle constant cases, those for which we can return 1 even
1838 if ONLY_CONST is set. */
1839 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
1840 switch (TREE_CODE (arg0))
/* Integer constants: equal if neither overflowed and both words match. */
1843 return (! TREE_CONSTANT_OVERFLOW (arg0)
1844 && ! TREE_CONSTANT_OVERFLOW (arg1)
1845 && TREE_INT_CST_LOW (arg0) == TREE_INT_CST_LOW (arg1)
1846 && TREE_INT_CST_HIGH (arg0) == TREE_INT_CST_HIGH (arg1));
/* Real constants: bitwise-identical comparison, which distinguishes
   -0.0 from 0.0 as the head comment requires. */
1849 return (! TREE_CONSTANT_OVERFLOW (arg0)
1850 && ! TREE_CONSTANT_OVERFLOW (arg1)
1851 && REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
1852 TREE_REAL_CST (arg1)));
1855 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
1857 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
1861 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
1862 && ! strncmp (TREE_STRING_POINTER (arg0),
1863 TREE_STRING_POINTER (arg1),
1864 TREE_STRING_LENGTH (arg0)));
1867 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
/* Non-constant expressions: compare structurally by tree-code class. */
1876 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
1879 /* Two conversions are equal only if signedness and modes match. */
1880 if ((TREE_CODE (arg0) == NOP_EXPR || TREE_CODE (arg0) == CONVERT_EXPR)
1881 && (TREE_UNSIGNED (TREE_TYPE (arg0))
1882 != TREE_UNSIGNED (TREE_TYPE (arg1))))
1885 return operand_equal_p (TREE_OPERAND (arg0, 0),
1886 TREE_OPERAND (arg1, 0), 0);
1890 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0)
1891 && operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1),
1895 /* For commutative ops, allow the other order. */
1896 return ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MULT_EXPR
1897 || TREE_CODE (arg0) == MIN_EXPR || TREE_CODE (arg0) == MAX_EXPR
1898 || TREE_CODE (arg0) == BIT_IOR_EXPR
1899 || TREE_CODE (arg0) == BIT_XOR_EXPR
1900 || TREE_CODE (arg0) == BIT_AND_EXPR
1901 || TREE_CODE (arg0) == NE_EXPR || TREE_CODE (arg0) == EQ_EXPR)
1902 && operand_equal_p (TREE_OPERAND (arg0, 0),
1903 TREE_OPERAND (arg1, 1), 0)
1904 && operand_equal_p (TREE_OPERAND (arg0, 1),
1905 TREE_OPERAND (arg1, 0), 0));
/* References: compare each operand pairwise (1, 2, or 3 operands
   depending on the reference kind). */
1908 switch (TREE_CODE (arg0))
1911 return operand_equal_p (TREE_OPERAND (arg0, 0),
1912 TREE_OPERAND (arg1, 0), 0);
1916 return (operand_equal_p (TREE_OPERAND (arg0, 0),
1917 TREE_OPERAND (arg1, 0), 0)
1918 && operand_equal_p (TREE_OPERAND (arg0, 1),
1919 TREE_OPERAND (arg1, 1), 0));
1922 return (operand_equal_p (TREE_OPERAND (arg0, 0),
1923 TREE_OPERAND (arg1, 0), 0)
1924 && operand_equal_p (TREE_OPERAND (arg0, 1),
1925 TREE_OPERAND (arg1, 1), 0)
1926 && operand_equal_p (TREE_OPERAND (arg0, 2),
1927 TREE_OPERAND (arg1, 2), 0));
1937 /* Similar to operand_equal_p, but see if ARG0 might have been made by
1938 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
1940 When in doubt, return 0. */
1943 operand_equal_for_comparison_p (arg0, arg1, other)
1947 int unsignedp1, unsignedpo;
1948 tree primarg0, primarg1, primother;
1949 unsigned correct_width;
/* Exact structural equality is sufficient. */
1951 if (operand_equal_p (arg0, arg1, 0))
/* The shorten_compare transformation only applies to integral types. */
1954 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
1955 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
1958 /* Discard any conversions that don't change the modes of ARG0 and ARG1
1959 and see if the inner values are the same. This removes any
1960 signedness comparison, which doesn't matter here. */
1961 primarg0 = arg0, primarg1 = arg1;
1962 STRIP_NOPS (primarg0); STRIP_NOPS (primarg1);
1963 if (operand_equal_p (primarg0, primarg1, 0))
1966 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
1967 actual comparison operand, ARG0.
1969 First throw away any conversions to wider types
1970 already present in the operands. */
1972 primarg1 = get_narrower (arg1, &unsignedp1);
1973 primother = get_narrower (other, &unsignedpo);
1975 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
1976 if (unsignedp1 == unsignedpo
1977 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
1978 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
1980 tree type = TREE_TYPE (arg0);
1982 /* Make sure shorter operand is extended the right way
1983 to match the longer operand. */
1984 primarg1 = convert (signed_or_unsigned_type (unsignedp1,
1985 TREE_TYPE (primarg1)),
1988 if (operand_equal_p (arg0, convert (type, primarg1), 0))
1995 /* See if ARG is an expression that is either a comparison or is performing
1996 arithmetic on comparisons. The comparisons must only be comparing
1997 two different values, which will be stored in *CVAL1 and *CVAL2; if
1998 they are non-zero it means that some operands have already been found.
1999 No variables may be used anywhere else in the expression except in the
2000 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2001 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2003 If this is true, return 1. Otherwise, return zero. */
2006 twoval_comparison_p (arg, cval1, cval2, save_p)
2008 tree *cval1, *cval2;
2011 enum tree_code code = TREE_CODE (arg);
2012 char class = TREE_CODE_CLASS (code);
2014 /* We can handle some of the 'e' cases here. */
2015 if (class == 'e' && code == TRUTH_NOT_EXPR)
2017 else if (class == 'e'
2018 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2019 || code == COMPOUND_EXPR))
2022 /* ??? Disable this since the SAVE_EXPR might already be in use outside
2023 the expression. There may be no way to make this work, but it needs
2024 to be looked at again for 2.6. */
2026 else if (class == 'e' && code == SAVE_EXPR && SAVE_EXPR_RTL (arg) == 0)
2028 /* If we've already found a CVAL1 or CVAL2, this expression is
2029 too complex to handle. */
2030 if (*cval1 || *cval2)
/* Unary case: recurse into the single operand. */
2041 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
/* Binary case: both operands must individually qualify. */
2044 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2045 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2046 cval1, cval2, save_p);
2052 if (code == COND_EXPR)
2053 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2054 cval1, cval2, save_p)
2055 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2056 cval1, cval2, save_p)
2057 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2058 cval1, cval2, save_p));
2062 /* First see if we can handle the first operand, then the second. For
2063 the second operand, we know *CVAL1 can't be zero. It must be that
2064 one side of the comparison is each of the values; test for the
2065 case where this isn't true by failing if the two operands
2068 if (operand_equal_p (TREE_OPERAND (arg, 0),
2069 TREE_OPERAND (arg, 1), 0))
/* Record or match the first operand against the saved values. */
2073 *cval1 = TREE_OPERAND (arg, 0);
2074 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2076 else if (*cval2 == 0)
2077 *cval2 = TREE_OPERAND (arg, 0);
2078 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
/* Likewise for the second operand. */
2083 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2085 else if (*cval2 == 0)
2086 *cval2 = TREE_OPERAND (arg, 1);
2087 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2099 /* ARG is a tree that is known to contain just arithmetic operations and
2100 comparisons. Evaluate the operations in the tree substituting NEW0 for
2101 any occurrence of OLD0 as an operand of a comparison and likewise for
2105 eval_subst (arg, old0, new0, old1, new1)
2107 tree old0, new0, old1, new1;
2109 tree type = TREE_TYPE (arg);
2110 enum tree_code code = TREE_CODE (arg);
2111 char class = TREE_CODE_CLASS (code);
2113 /* We can handle some of the 'e' cases here. */
2114 if (class == 'e' && code == TRUTH_NOT_EXPR)
2116 else if (class == 'e'
2117 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
/* Unary: rebuild with the substituted operand and fold. */
2123 return fold (build1 (code, type,
2124 eval_subst (TREE_OPERAND (arg, 0),
2125 old0, new0, old1, new1)));
/* Binary: substitute in both operands and fold the rebuilt node. */
2128 return fold (build (code, type,
2129 eval_subst (TREE_OPERAND (arg, 0),
2130 old0, new0, old1, new1),
2131 eval_subst (TREE_OPERAND (arg, 1),
2132 old0, new0, old1, new1)));
2138 return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);
2141 return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);
/* Ternary (e.g. COND_EXPR): substitute in all three operands. */
2144 return fold (build (code, type,
2145 eval_subst (TREE_OPERAND (arg, 0),
2146 old0, new0, old1, new1),
2147 eval_subst (TREE_OPERAND (arg, 1),
2148 old0, new0, old1, new1),
2149 eval_subst (TREE_OPERAND (arg, 2),
2150 old0, new0, old1, new1)));
2154 /* fall through (???) */
/* Comparison case: replace an operand that matches OLD0 or OLD1,
   either by pointer identity or structural equality. */
2158 tree arg0 = TREE_OPERAND (arg, 0);
2159 tree arg1 = TREE_OPERAND (arg, 1);
2161 /* We need to check both for exact equality and tree equality. The
2162 former will be true if the operand has a side-effect. In that
2163 case, we know the operand occurred exactly once. */
2165 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
2167 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
2170 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
2172 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
2175 return fold (build (code, type, arg0, arg1));
2183 /* Return a tree for the case when the result of an expression is RESULT
2184 converted to TYPE and OMITTED was previously an operand of the expression
2185 but is now not needed (e.g., we folded OMITTED * 0).
2187 If OMITTED has side effects, we must evaluate it. Otherwise, just do
2188 the conversion of RESULT to TYPE. */
2191 omit_one_operand (type, result, omitted)
2192 tree type, result, omitted;
2194 tree t = convert (type, result);
/* Keep OMITTED alive in a COMPOUND_EXPR so its side effects still run. */
2196 if (TREE_SIDE_EFFECTS (omitted))
2197 return build (COMPOUND_EXPR, type, omitted, t);
2199 return non_lvalue (t);
2202 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
2205 pedantic_omit_one_operand (type, result, omitted)
2206 tree type, result, omitted;
2208 tree t = convert (type, result);
2210 if (TREE_SIDE_EFFECTS (omitted))
2211 return build (COMPOUND_EXPR, type, omitted, t);
2213 return pedantic_non_lvalue (t);
2218 /* Return a simplified tree node for the truth-negation of ARG. This
2219 never alters ARG itself. We assume that ARG is an operation that
2220 returns a truth value (0 or 1). */
2223 invert_truthvalue (arg)
2226 tree type = TREE_TYPE (arg);
2227 enum tree_code code = TREE_CODE (arg);
2229 if (code == ERROR_MARK)
2232 /* If this is a comparison, we can simply invert it, except for
2233 floating-point non-equality comparisons, in which case we just
2234 enclose a TRUTH_NOT_EXPR around what we have. */
2236 if (TREE_CODE_CLASS (code) == '<')
2238 if (FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (arg, 0)))
2239 && code != NE_EXPR && code != EQ_EXPR)
2240 return build1 (TRUTH_NOT_EXPR, type, arg);
2242 return build (invert_tree_comparison (code), type,
2243 TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
/* An integer constant inverts to 1 exactly when it is zero. */
2249 return convert (type, build_int_2 (TREE_INT_CST_LOW (arg) == 0
2250 && TREE_INT_CST_HIGH (arg) == 0, 0));
/* De Morgan: !(a && b) == !a || !b, and dually for OR. */
2252 case TRUTH_AND_EXPR:
2253 return build (TRUTH_OR_EXPR, type,
2254 invert_truthvalue (TREE_OPERAND (arg, 0)),
2255 invert_truthvalue (TREE_OPERAND (arg, 1)));
2258 return build (TRUTH_AND_EXPR, type,
2259 invert_truthvalue (TREE_OPERAND (arg, 0)),
2260 invert_truthvalue (TREE_OPERAND (arg, 1)));
2262 case TRUTH_XOR_EXPR:
2263 /* Here we can invert either operand. We invert the first operand
2264 unless the second operand is a TRUTH_NOT_EXPR in which case our
2265 result is the XOR of the first operand with the inside of the
2266 negation of the second operand. */
2268 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
2269 return build (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
2270 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
2272 return build (TRUTH_XOR_EXPR, type,
2273 invert_truthvalue (TREE_OPERAND (arg, 0)),
2274 TREE_OPERAND (arg, 1));
/* De Morgan again, preserving short-circuit evaluation order. */
2276 case TRUTH_ANDIF_EXPR:
2277 return build (TRUTH_ORIF_EXPR, type,
2278 invert_truthvalue (TREE_OPERAND (arg, 0)),
2279 invert_truthvalue (TREE_OPERAND (arg, 1)));
2281 case TRUTH_ORIF_EXPR:
2282 return build (TRUTH_ANDIF_EXPR, type,
2283 invert_truthvalue (TREE_OPERAND (arg, 0)),
2284 invert_truthvalue (TREE_OPERAND (arg, 1)));
/* Double negation cancels. */
2286 case TRUTH_NOT_EXPR:
2287 return TREE_OPERAND (arg, 0);
/* Push the negation into both arms of a conditional. */
2290 return build (COND_EXPR, type, TREE_OPERAND (arg, 0),
2291 invert_truthvalue (TREE_OPERAND (arg, 1)),
2292 invert_truthvalue (TREE_OPERAND (arg, 2)));
2295 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
2296 invert_truthvalue (TREE_OPERAND (arg, 1)));
2298 case NON_LVALUE_EXPR:
2299 return invert_truthvalue (TREE_OPERAND (arg, 0));
/* Conversions: invert the operand, then re-wrap in the same code. */
2304 return build1 (TREE_CODE (arg), type,
2305 invert_truthvalue (TREE_OPERAND (arg, 0)));
/* (x & 1) inverts to (x & 1) == 0; other masks are not truth values. */
2308 if (!integer_onep (TREE_OPERAND (arg, 1)))
2310 return build (EQ_EXPR, type, arg, convert (type, integer_zero_node));
2313 return build1 (TRUTH_NOT_EXPR, type, arg);
2315 case CLEANUP_POINT_EXPR:
2316 return build1 (CLEANUP_POINT_EXPR, type,
2317 invert_truthvalue (TREE_OPERAND (arg, 0)));
/* Fallback: anything not boolean-typed is unexpected here; otherwise
   wrap in an explicit TRUTH_NOT_EXPR. */
2322 if (TREE_CODE (TREE_TYPE (arg)) != BOOLEAN_TYPE)
2324 return build1 (TRUTH_NOT_EXPR, type, arg);
2327 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
2328 operands are another bit-wise operation with a common input. If so,
2329 distribute the bit operations to save an operation and possibly two if
2330 constants are involved. For example, convert
2331 (A | B) & (A | C) into A | (B & C)
2332 Further simplification will occur if B and C are constants.
2334 If this optimization cannot be done, 0 will be returned. */
2337 distribute_bit_expr (code, type, arg0, arg1)
2338 enum tree_code code;
/* Both operands must be the same kind of AND/IOR node, and that kind
   must differ from CODE (otherwise there is nothing to distribute). */
2345 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2346 || TREE_CODE (arg0) == code
2347 || (TREE_CODE (arg0) != BIT_AND_EXPR
2348 && TREE_CODE (arg0) != BIT_IOR_EXPR))
/* Find the shared operand; try all four operand pairings. */
2351 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
2353 common = TREE_OPERAND (arg0, 0);
2354 left = TREE_OPERAND (arg0, 1);
2355 right = TREE_OPERAND (arg1, 1);
2357 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
2359 common = TREE_OPERAND (arg0, 0);
2360 left = TREE_OPERAND (arg0, 1);
2361 right = TREE_OPERAND (arg1, 0);
2363 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
2365 common = TREE_OPERAND (arg0, 1);
2366 left = TREE_OPERAND (arg0, 0);
2367 right = TREE_OPERAND (arg1, 1);
2369 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
2371 common = TREE_OPERAND (arg0, 1);
2372 left = TREE_OPERAND (arg0, 0);
2373 right = TREE_OPERAND (arg1, 0);
/* Rebuild as COMMON op' (LEFT code RIGHT), folding the inner node. */
2378 return fold (build (TREE_CODE (arg0), type, common,
2379 fold (build (code, type, left, right))));
2382 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
2383 starting at BITPOS. The field is unsigned if UNSIGNEDP is non-zero. */
2386 make_bit_field_ref (inner, type, bitsize, bitpos, unsignedp)
2389 int bitsize, bitpos;
2392 tree result = build (BIT_FIELD_REF, type, inner,
2393 size_int (bitsize), bitsize_int (bitpos, 0L));
2395 TREE_UNSIGNED (result) = unsignedp;
2400 /* Optimize a bit-field compare.
2402 There are two cases: First is a compare against a constant and the
2403 second is a comparison of two items where the fields are at the same
2404 bit position relative to the start of a chunk (byte, halfword, word)
2405 large enough to contain it. In these cases we can avoid the shift
2406 implicit in bitfield extractions.
2408 For constants, we emit a compare of the shifted constant with the
2409 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
2410 compared. For two fields at the same position, we do the ANDs with the
2411 similar mask and compare the result of the ANDs.
2413 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
2414 COMPARE_TYPE is the type of the comparison, and LHS and RHS
2415 are the left and right operands of the comparison, respectively.
2417 If the optimization described above can be done, we return the resulting
2418 tree. Otherwise we return zero. */
2421 optimize_bit_field_compare (code, compare_type, lhs, rhs)
2422 enum tree_code code;
2426 int lbitpos, lbitsize, rbitpos, rbitsize;
2427 int lnbitpos, lnbitsize, rnbitpos = 0, rnbitsize = 0;
2428 tree type = TREE_TYPE (lhs);
2429 tree signed_type, unsigned_type;
2430 int const_p = TREE_CODE (rhs) == INTEGER_CST;
2431 enum machine_mode lmode, rmode, lnmode, rnmode = VOIDmode;
2432 int lunsignedp, runsignedp;
2433 int lvolatilep = 0, rvolatilep = 0;
2435 tree linner, rinner = NULL_TREE;
2439 /* Get all the information about the extractions being done. If the bit size
2440 is the same as the size of the underlying object, we aren't doing an
2441 extraction at all and so can do nothing. */
2442 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
2443 &lunsignedp, &lvolatilep, &alignment);
2444 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
2450 /* If this is not a constant, we can only do something if bit positions,
2451 sizes, and signedness are the same. */
2452 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
2453 &runsignedp, &rvolatilep, &alignment);
2455 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
2456 || lunsignedp != runsignedp || offset != 0)
2460 /* See if we can find a mode to refer to this field. We should be able to,
2461 but fail if we can't. */
2462 lnmode = get_best_mode (lbitsize, lbitpos,
2463 TYPE_ALIGN (TREE_TYPE (linner)), word_mode,
2465 if (lnmode == VOIDmode)
2468 /* Set signed and unsigned types of the precision of this mode for the
2470 signed_type = type_for_mode (lnmode, 0);
2471 unsigned_type = type_for_mode (lnmode, 1);
2475 rnmode = get_best_mode (rbitsize, rbitpos,
2476 TYPE_ALIGN (TREE_TYPE (rinner)), word_mode,
2478 if (rnmode == VOIDmode)
2482 /* Compute the bit position and size for the new reference and our offset
2483 within it. If the new reference is the same size as the original, we
2484 won't optimize anything, so return zero. */
2485 lnbitsize = GET_MODE_BITSIZE (lnmode);
2486 lnbitpos = lbitpos & ~ (lnbitsize - 1);
2487 lbitpos -= lnbitpos;
2488 if (lnbitsize == lbitsize)
2493 rnbitsize = GET_MODE_BITSIZE (rnmode);
2494 rnbitpos = rbitpos & ~ (rnbitsize - 1);
2495 rbitpos -= rnbitpos;
2496 if (rnbitsize == rbitsize)
/* On big-endian targets bit 0 of the mode is at the other end, so
   mirror the field position within the containing unit. */
2500 if (BYTES_BIG_ENDIAN)
2501 lbitpos = lnbitsize - lbitsize - lbitpos;
2503 /* Make the mask to be used against the extracted field. */
/* Start from all-ones, fit it to the unsigned type, then shift left and
   (logical) right so exactly the field's bits remain set. */
2504 mask = build_int_2 (~0, ~0);
2505 TREE_TYPE (mask) = unsigned_type;
2506 force_fit_type (mask, 0);
2507 mask = convert (unsigned_type, mask);
2508 mask = const_binop (LSHIFT_EXPR, mask, size_int (lnbitsize - lbitsize), 0);
2509 mask = const_binop (RSHIFT_EXPR, mask,
2510 size_int (lnbitsize - lbitsize - lbitpos), 0);
2513 /* If not comparing with constant, just rework the comparison
2515 return build (code, compare_type,
2516 build (BIT_AND_EXPR, unsigned_type,
2517 make_bit_field_ref (linner, unsigned_type,
2518 lnbitsize, lnbitpos, 1),
2520 build (BIT_AND_EXPR, unsigned_type,
2521 make_bit_field_ref (rinner, unsigned_type,
2522 rnbitsize, rnbitpos, 1),
2525 /* Otherwise, we are handling the constant case. See if the constant is too
2526 big for the field. Warn and return a tree for 0 (false) if so. We do
2527 this not only for its own sake, but to avoid having to test for this
2528 error case below. If we didn't, we might generate wrong code.
2530 For unsigned fields, the constant shifted right by the field length should
2531 be all zero. For signed fields, the high-order bits should agree with
2536 if (! integer_zerop (const_binop (RSHIFT_EXPR,
2537 convert (unsigned_type, rhs),
2538 size_int (lbitsize), 0)))
2540 warning ("comparison is always %s due to width of bitfield",
2541 code == NE_EXPR ? "one" : "zero");
2542 return convert (compare_type,
2544 ? integer_one_node : integer_zero_node));
/* Signed field: the bits above the field must all equal the sign bit. */
2549 tree tem = const_binop (RSHIFT_EXPR, convert (signed_type, rhs),
2550 size_int (lbitsize - 1), 0);
2551 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
2553 warning ("comparison is always %s due to width of bitfield",
2554 code == NE_EXPR ? "one" : "zero");
2555 return convert (compare_type,
2557 ? integer_one_node : integer_zero_node));
2561 /* Single-bit compares should always be against zero. */
2562 if (lbitsize == 1 && ! integer_zerop (rhs))
2564 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
2565 rhs = convert (type, integer_zero_node);
2568 /* Make a new bitfield reference, shift the constant over the
2569 appropriate number of bits and mask it with the computed mask
2570 (in case this was a signed field). If we changed it, make a new one. */
2571 lhs = make_bit_field_ref (linner, unsigned_type, lnbitsize, lnbitpos, 1);
2574 TREE_SIDE_EFFECTS (lhs) = 1;
2575 TREE_THIS_VOLATILE (lhs) = 1;
2578 rhs = fold (const_binop (BIT_AND_EXPR,
2579 const_binop (LSHIFT_EXPR,
2580 convert (unsigned_type, rhs),
2581 size_int (lbitpos), 0),
2584 return build (code, compare_type,
2585 build (BIT_AND_EXPR, unsigned_type, lhs, mask),
2589 /* Subroutine for fold_truthop: decode a field reference.
2591 If EXP is a comparison reference, we return the innermost reference.
2593 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
2594 set to the starting bit number.
2596 If the innermost field can be completely contained in a mode-sized
2597 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
2599 *PVOLATILEP is set to 1 if any expression encountered is volatile;
2600 otherwise it is not changed.
2602 *PUNSIGNEDP is set to the signedness of the field.
2604 *PMASK is set to the mask used. This is either contained in a
2605 BIT_AND_EXPR or derived from the width of the field.
2607 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
2609 Return 0 if this is not a component reference or is one that we can't
2610 do anything with. */
2613 decode_field_reference (exp, pbitsize, pbitpos, pmode, punsignedp,
2614 pvolatilep, pmask, pand_mask)
2616 int *pbitsize, *pbitpos;
2617 enum machine_mode *pmode;
2618 int *punsignedp, *pvolatilep;
2623 tree mask, inner, offset;
2628 /* All the optimizations using this function assume integer fields.
2629 There are problems with FP fields since the type_for_size call
2630 below can fail for, e.g., XFmode. */
2631 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
2636 if (TREE_CODE (exp) == BIT_AND_EXPR)
2638 and_mask = TREE_OPERAND (exp, 1);
2639 exp = TREE_OPERAND (exp, 0);
2640 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
2641 if (TREE_CODE (and_mask) != INTEGER_CST)
2646 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
2647 punsignedp, pvolatilep, &alignment);
2648 if ((inner == exp && and_mask == 0)
2649 || *pbitsize < 0 || offset != 0)
2652 /* Compute the mask to access the bitfield. */
2653 unsigned_type = type_for_size (*pbitsize, 1);
2654 precision = TYPE_PRECISION (unsigned_type);
2656 mask = build_int_2 (~0, ~0);
2657 TREE_TYPE (mask) = unsigned_type;
2658 force_fit_type (mask, 0);
2659 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
2660 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
2662 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
2664 mask = fold (build (BIT_AND_EXPR, unsigned_type,
2665 convert (unsigned_type, and_mask), mask));
2668 *pand_mask = and_mask;
2672 /* Return non-zero if MASK represents a mask of SIZE ones in the low-order
2676 all_ones_mask_p (mask, size)
2680 tree type = TREE_TYPE (mask);
2681 int precision = TYPE_PRECISION (type);
2684 tmask = build_int_2 (~0, ~0);
2685 TREE_TYPE (tmask) = signed_type (type);
2686 force_fit_type (tmask, 0);
2688 tree_int_cst_equal (mask,
2689 const_binop (RSHIFT_EXPR,
2690 const_binop (LSHIFT_EXPR, tmask,
2691 size_int (precision - size),
2693 size_int (precision - size), 0));
2696 /* Subroutine for fold_truthop: determine if an operand is simple enough
2697 to be evaluated unconditionally. */
2700 simple_operand_p (exp)
2703 /* Strip any conversions that don't change the machine mode. */
2704 while ((TREE_CODE (exp) == NOP_EXPR
2705 || TREE_CODE (exp) == CONVERT_EXPR)
2706 && (TYPE_MODE (TREE_TYPE (exp))
2707 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
2708 exp = TREE_OPERAND (exp, 0);
2710 return (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c'
2711 || (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd'
2712 && ! TREE_ADDRESSABLE (exp)
2713 && ! TREE_THIS_VOLATILE (exp)
2714 && ! DECL_NONLOCAL (exp)
2715 /* Don't regard global variables as simple. They may be
2716 allocated in ways unknown to the compiler (shared memory,
2717 #pragma weak, etc). */
2718 && ! TREE_PUBLIC (exp)
2719 && ! DECL_EXTERNAL (exp)
2720 /* Loading a static variable is unduly expensive, but global
2721 registers aren't expensive. */
2722 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
2725 /* The following functions are subroutines to fold_range_test and allow it to
2726 try to change a logical combination of comparisons into a range test.
2729 X == 2 || X == 3 || X == 4 || X == 5
2733 (unsigned) (X - 2) <= 3
2735 We describe each set of comparisons as being either inside or outside
2736 a range, using a variable named like IN_P, and then describe the
2737 range with a lower and upper bound. If one of the bounds is omitted,
2738 it represents either the highest or lowest value of the type.
2740 In the comments below, we represent a range by two numbers in brackets
2741 preceded by a "+" to designate being inside that range, or a "-" to
2742 designate being outside that range, so the condition can be inverted by
2743 flipping the prefix. An omitted bound is represented by a "-". For
2744 example, "- [-, 10]" means being outside the range starting at the lowest
2745 possible value and ending at 10, in other words, being greater than 10.
2746 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
2749 We set up things so that the missing bounds are handled in a consistent
2750 manner so neither a missing bound nor "true" and "false" need to be
2751 handled using a special case. */
2753 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
2754 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
2755 and UPPER1_P are nonzero if the respective argument is an upper bound
2756 and zero for a lower. TYPE, if nonzero, is the type of the result; it
2757 must be specified for a comparison. ARG1 will be converted to ARG0's
2758 type if both are specified. */
2761 range_binop (code, type, arg0, upper0_p, arg1, upper1_p)
2762 enum tree_code code;
2765 int upper0_p, upper1_p;
2771 /* If neither arg represents infinity, do the normal operation.
2772 Else, if not a comparison, return infinity. Else handle the special
2773 comparison rules. Note that most of the cases below won't occur, but
2774 are handled for consistency. */
2776 if (arg0 != 0 && arg1 != 0)
2778 tem = fold (build (code, type != 0 ? type : TREE_TYPE (arg0),
2779 arg0, convert (TREE_TYPE (arg0), arg1)));
2781 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
2784 if (TREE_CODE_CLASS (code) != '<')
2787 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
2788 for neither. Then compute our result treating them as never equal
2789 and comparing bounds to non-bounds as above. */
2790 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
2791 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
2794 case EQ_EXPR: case NE_EXPR:
2795 result = (code == NE_EXPR);
2797 case LT_EXPR: case LE_EXPR:
2798 result = sgn0 < sgn1;
2800 case GT_EXPR: case GE_EXPR:
2801 result = sgn0 > sgn1;
2807 return convert (type, result ? integer_one_node : integer_zero_node);
2810 /* Given EXP, a logical expression, set the range it is testing into
2811 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
2812 actually being tested. *PLOW and *PHIGH will have be made the same type
2813 as the returned expression. If EXP is not a comparison, we will most
2814 likely not be returning a useful value and range. */
2817 make_range (exp, pin_p, plow, phigh)
2822 enum tree_code code;
2823 tree arg0, arg1, type = NULL_TREE;
2825 tree low, high, n_low, n_high;
2827 /* Start with simply saying "EXP != 0" and then look at the code of EXP
2828 and see if we can refine the range. Some of the cases below may not
2829 happen, but it doesn't seem worth worrying about this. We "continue"
2830 the outer loop when we've changed something; otherwise we "break"
2831 the switch, which will "break" the while. */
2833 in_p = 0, low = high = convert (TREE_TYPE (exp), integer_zero_node);
2837 code = TREE_CODE (exp);
2838 arg0 = TREE_OPERAND (exp, 0), arg1 = TREE_OPERAND (exp, 1);
2839 if (TREE_CODE_CLASS (code) == '<' || TREE_CODE_CLASS (code) == '1'
2840 || TREE_CODE_CLASS (code) == '2')
2841 type = TREE_TYPE (arg0);
2845 case TRUTH_NOT_EXPR:
2846 in_p = ! in_p, exp = arg0;
2849 case EQ_EXPR: case NE_EXPR:
2850 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
2851 /* We can only do something if the range is testing for zero
2852 and if the second operand is an integer constant. Note that
2853 saying something is "in" the range we make is done by
2854 complementing IN_P since it will set in the initial case of
2855 being not equal to zero; "out" is leaving it alone. */
2856 if (low == 0 || high == 0
2857 || ! integer_zerop (low) || ! integer_zerop (high)
2858 || TREE_CODE (arg1) != INTEGER_CST)
2863 case NE_EXPR: /* - [c, c] */
2866 case EQ_EXPR: /* + [c, c] */
2867 in_p = ! in_p, low = high = arg1;
2869 case GT_EXPR: /* - [-, c] */
2870 low = 0, high = arg1;
2872 case GE_EXPR: /* + [c, -] */
2873 in_p = ! in_p, low = arg1, high = 0;
2875 case LT_EXPR: /* - [c, -] */
2876 low = arg1, high = 0;
2878 case LE_EXPR: /* + [-, c] */
2879 in_p = ! in_p, low = 0, high = arg1;
2887 /* If this is an unsigned comparison, we also know that EXP is
2888 greater than or equal to zero. We base the range tests we make
2889 on that fact, so we record it here so we can parse existing
2891 if (TREE_UNSIGNED (type) && (low == 0 || high == 0))
2893 if (! merge_ranges (&n_in_p, &n_low, &n_high, in_p, low, high,
2894 1, convert (type, integer_zero_node),
2898 in_p = n_in_p, low = n_low, high = n_high;
2900 /* If the high bound is missing, reverse the range so it
2901 goes from zero to the low bound minus 1. */
2905 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
2906 integer_one_node, 0);
2907 low = convert (type, integer_zero_node);
2913 /* (-x) IN [a,b] -> x in [-b, -a] */
2914 n_low = range_binop (MINUS_EXPR, type,
2915 convert (type, integer_zero_node), 0, high, 1);
2916 n_high = range_binop (MINUS_EXPR, type,
2917 convert (type, integer_zero_node), 0, low, 0);
2918 low = n_low, high = n_high;
2924 exp = build (MINUS_EXPR, type, build1 (NEGATE_EXPR, type, arg0),
2925 convert (type, integer_one_node));
2928 case PLUS_EXPR: case MINUS_EXPR:
2929 if (TREE_CODE (arg1) != INTEGER_CST)
2932 /* If EXP is signed, any overflow in the computation is undefined,
2933 so we don't worry about it so long as our computations on
2934 the bounds don't overflow. For unsigned, overflow is defined
2935 and this is exactly the right thing. */
2936 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
2937 type, low, 0, arg1, 0);
2938 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
2939 type, high, 1, arg1, 0);
2940 if ((n_low != 0 && TREE_OVERFLOW (n_low))
2941 || (n_high != 0 && TREE_OVERFLOW (n_high)))
2944 /* Check for an unsigned range which has wrapped around the maximum
2945 value thus making n_high < n_low, and normalize it. */
2946 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
2948 low = range_binop (PLUS_EXPR, type, n_high, 0,
2949 integer_one_node, 0);
2950 high = range_binop (MINUS_EXPR, type, n_low, 0,
2951 integer_one_node, 0);
2955 low = n_low, high = n_high;
2960 case NOP_EXPR: case NON_LVALUE_EXPR: case CONVERT_EXPR:
2961 if (! INTEGRAL_TYPE_P (type)
2962 || (low != 0 && ! int_fits_type_p (low, type))
2963 || (high != 0 && ! int_fits_type_p (high, type)))
2966 n_low = low, n_high = high;
2969 n_low = convert (type, n_low);
2972 n_high = convert (type, n_high);
2974 /* If we're converting from an unsigned to a signed type,
2975 we will be doing the comparison as unsigned. The tests above
2976 have already verified that LOW and HIGH are both positive.
2978 So we have to make sure that the original unsigned value will
2979 be interpreted as positive. */
2980 if (TREE_UNSIGNED (type) && ! TREE_UNSIGNED (TREE_TYPE (exp)))
2982 tree equiv_type = type_for_mode (TYPE_MODE (type), 1);
2985 /* A range without an upper bound is, naturally, unbounded.
2986 Since convert would have cropped a very large value, use
2987 the max value for the destination type. */
2989 high_positive = TYPE_MAX_VALUE (equiv_type);
2992 high_positive = TYPE_MAX_VALUE (type);
2996 high_positive = fold (build (RSHIFT_EXPR, type,
2997 convert (type, high_positive),
2998 convert (type, integer_one_node)));
3000 /* If the low bound is specified, "and" the range with the
3001 range for which the original unsigned value will be
3005 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3007 1, convert (type, integer_zero_node),
3011 in_p = (n_in_p == in_p);
3015 /* Otherwise, "or" the range with the range of the input
3016 that will be interpreted as negative. */
3017 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3019 1, convert (type, integer_zero_node),
3023 in_p = (in_p != n_in_p);
3028 low = n_low, high = n_high;
3038 /* If EXP is a constant, we can evaluate whether this is true or false. */
3039 if (TREE_CODE (exp) == INTEGER_CST)
3041 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
3043 && integer_onep (range_binop (LE_EXPR, integer_type_node,
3049 *pin_p = in_p, *plow = low, *phigh = high;
3053 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
3054 type, TYPE, return an expression to test if EXP is in (or out of, depending
3055 on IN_P) the range. */
3058 build_range_check (type, exp, in_p, low, high)
3064 tree etype = TREE_TYPE (exp);
3068 && (0 != (value = build_range_check (type, exp, 1, low, high))))
3069 return invert_truthvalue (value);
3071 else if (low == 0 && high == 0)
3072 return convert (type, integer_one_node);
3075 return fold (build (LE_EXPR, type, exp, high));
3078 return fold (build (GE_EXPR, type, exp, low));
3080 else if (operand_equal_p (low, high, 0))
3081 return fold (build (EQ_EXPR, type, exp, low));
3083 else if (TREE_UNSIGNED (etype) && integer_zerop (low))
3084 return build_range_check (type, exp, 1, 0, high);
3086 else if (integer_zerop (low))
3088 utype = unsigned_type (etype);
3089 return build_range_check (type, convert (utype, exp), 1, 0,
3090 convert (utype, high));
3093 else if (0 != (value = const_binop (MINUS_EXPR, high, low, 0))
3094 && ! TREE_OVERFLOW (value))
3095 return build_range_check (type,
3096 fold (build (MINUS_EXPR, etype, exp, low)),
3097 1, convert (etype, integer_zero_node), value);
3102 /* Given two ranges, see if we can merge them into one. Return 1 if we
3103 can, 0 if we can't. Set the output range into the specified parameters. */
3106 merge_ranges (pin_p, plow, phigh, in0_p, low0, high0, in1_p, low1, high1)
3110 tree low0, high0, low1, high1;
3118 int lowequal = ((low0 == 0 && low1 == 0)
3119 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
3120 low0, 0, low1, 0)));
3121 int highequal = ((high0 == 0 && high1 == 0)
3122 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
3123 high0, 1, high1, 1)));
3125 /* Make range 0 be the range that starts first, or ends last if they
3126 start at the same value. Swap them if it isn't. */
3127 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
3130 && integer_onep (range_binop (GT_EXPR, integer_type_node,
3131 high1, 1, high0, 1))))
3133 temp = in0_p, in0_p = in1_p, in1_p = temp;
3134 tem = low0, low0 = low1, low1 = tem;
3135 tem = high0, high0 = high1, high1 = tem;
3138 /* Now flag two cases, whether the ranges are disjoint or whether the
3139 second range is totally subsumed in the first. Note that the tests
3140 below are simplified by the ones above. */
3141 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
3142 high0, 1, low1, 0));
3143 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
3144 high1, 1, high0, 1));
3146 /* We now have four cases, depending on whether we are including or
3147 excluding the two ranges. */
3150 /* If they don't overlap, the result is false. If the second range
3151 is a subset it is the result. Otherwise, the range is from the start
3152 of the second to the end of the first. */
3154 in_p = 0, low = high = 0;
3156 in_p = 1, low = low1, high = high1;
3158 in_p = 1, low = low1, high = high0;
3161 else if (in0_p && ! in1_p)
3163 /* If they don't overlap, the result is the first range. If they are
3164 equal, the result is false. If the second range is a subset of the
3165 first, and the ranges begin at the same place, we go from just after
3166 the end of the first range to the end of the second. If the second
3167 range is not a subset of the first, or if it is a subset and both
3168 ranges end at the same place, the range starts at the start of the
3169 first range and ends just before the second range.
3170 Otherwise, we can't describe this as a single range. */
3172 in_p = 1, low = low0, high = high0;
3173 else if (lowequal && highequal)
3174 in_p = 0, low = high = 0;
3175 else if (subset && lowequal)
3177 in_p = 1, high = high0;
3178 low = range_binop (PLUS_EXPR, NULL_TREE, high1, 0,
3179 integer_one_node, 0);
3181 else if (! subset || highequal)
3183 in_p = 1, low = low0;
3184 high = range_binop (MINUS_EXPR, NULL_TREE, low1, 0,
3185 integer_one_node, 0);
3191 else if (! in0_p && in1_p)
3193 /* If they don't overlap, the result is the second range. If the second
3194 is a subset of the first, the result is false. Otherwise,
3195 the range starts just after the first range and ends at the
3196 end of the second. */
3198 in_p = 1, low = low1, high = high1;
3200 in_p = 0, low = high = 0;
3203 in_p = 1, high = high1;
3204 low = range_binop (PLUS_EXPR, NULL_TREE, high0, 1,
3205 integer_one_node, 0);
3211 /* The case where we are excluding both ranges. Here the complex case
3212 is if they don't overlap. In that case, the only time we have a
3213 range is if they are adjacent. If the second is a subset of the
3214 first, the result is the first. Otherwise, the range to exclude
3215 starts at the beginning of the first range and ends at the end of the
3219 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
3220 range_binop (PLUS_EXPR, NULL_TREE,
3222 integer_one_node, 1),
3224 in_p = 0, low = low0, high = high1;
3229 in_p = 0, low = low0, high = high0;
3231 in_p = 0, low = low0, high = high1;
3234 *pin_p = in_p, *plow = low, *phigh = high;
3238 /* EXP is some logical combination of boolean tests. See if we can
3239 merge it into some range test. Return the new tree if so. */
3242 fold_range_test (exp)
3245 int or_op = (TREE_CODE (exp) == TRUTH_ORIF_EXPR
3246 || TREE_CODE (exp) == TRUTH_OR_EXPR);
3247 int in0_p, in1_p, in_p;
3248 tree low0, low1, low, high0, high1, high;
3249 tree lhs = make_range (TREE_OPERAND (exp, 0), &in0_p, &low0, &high0);
3250 tree rhs = make_range (TREE_OPERAND (exp, 1), &in1_p, &low1, &high1);
3253 /* If this is an OR operation, invert both sides; we will invert
3254 again at the end. */
3256 in0_p = ! in0_p, in1_p = ! in1_p;
3258 /* If both expressions are the same, if we can merge the ranges, and we
3259 can build the range test, return it or it inverted. If one of the
3260 ranges is always true or always false, consider it to be the same
3261 expression as the other. */
3262 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
3263 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
3265 && 0 != (tem = (build_range_check (TREE_TYPE (exp),
3267 : rhs != 0 ? rhs : integer_zero_node,
3269 return or_op ? invert_truthvalue (tem) : tem;
3271 /* On machines where the branch cost is expensive, if this is a
3272 short-circuited branch and the underlying object on both sides
3273 is the same, make a non-short-circuit operation. */
3274 else if (BRANCH_COST >= 2
3275 && (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
3276 || TREE_CODE (exp) == TRUTH_ORIF_EXPR)
3277 && operand_equal_p (lhs, rhs, 0))
3279 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
3280 unless we are at top level, in which case we can't do this. */
3281 if (simple_operand_p (lhs))
3282 return build (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
3283 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
3284 TREE_TYPE (exp), TREE_OPERAND (exp, 0),
3285 TREE_OPERAND (exp, 1));
3287 else if (current_function_decl != 0)
3289 tree common = save_expr (lhs);
3291 if (0 != (lhs = build_range_check (TREE_TYPE (exp), common,
3292 or_op ? ! in0_p : in0_p,
3294 && (0 != (rhs = build_range_check (TREE_TYPE (exp), common,
3295 or_op ? ! in1_p : in1_p,
3297 return build (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
3298 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
3299 TREE_TYPE (exp), lhs, rhs);
3307 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
3308 bit value. Arrange things so the extra bits will be set to zero if and
3309 only if C is signed-extended to its full width. If MASK is nonzero,
3310 it is an INTEGER_CST that should be AND'ed with the extra bits. */
3313 unextend (c, p, unsignedp, mask)
3319 tree type = TREE_TYPE (c);
3320 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
3323 if (p == modesize || unsignedp)
3326 /* We work by getting just the sign bit into the low-order bit, then
3327 into the high-order bit, then sign-extend. We then XOR that value
3329 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
3330 temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
3332 /* We must use a signed type in order to get an arithmetic right shift.
3333 However, we must also avoid introducing accidental overflows, so that
3334 a subsequent call to integer_zerop will work. Hence we must
3335 do the type conversion here. At this point, the constant is either
3336 zero or one, and the conversion to a signed type can never overflow.
3337 We could get an overflow if this conversion is done anywhere else. */
3338 if (TREE_UNSIGNED (type))
3339 temp = convert (signed_type (type), temp);
3341 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
3342 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
3344 temp = const_binop (BIT_AND_EXPR, temp, convert (TREE_TYPE (c), mask), 0);
3345 /* If necessary, convert the type back to match the type of C. */
3346 if (TREE_UNSIGNED (type))
3347 temp = convert (type, temp);
3349 return convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
3352 /* Find ways of folding logical expressions of LHS and RHS:
3353 Try to merge two comparisons to the same innermost item.
3354 Look for range tests like "ch >= '0' && ch <= '9'".
3355 Look for combinations of simple terms on machines with expensive branches
3356 and evaluate the RHS unconditionally.
3358 For example, if we have p->a == 2 && p->b == 4 and we can make an
3359 object large enough to span both A and B, we can do this with a comparison
3360 against the object ANDed with the a mask.
3362 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
3363 operations to do this with one comparison.
3365 We check for both normal comparisons and the BIT_AND_EXPRs made by this
3366 function and the one above.
3368 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
3369 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
3371 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
3374 We return the simplified tree or 0 if no optimization is possible. */
3377 fold_truthop (code, truth_type, lhs, rhs)
3378 enum tree_code code;
3379 tree truth_type, lhs, rhs;
3381 /* If this is the "or" of two comparisons, we can do something if
3382 the comparisons are NE_EXPR. If this is the "and", we can do something
3383 if the comparisons are EQ_EXPR. I.e.,
3384 (a->b == 2 && a->c == 4) can become (a->new == NEW).
3386 WANTED_CODE is this operation code. For single bit fields, we can
3387 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
3388 comparison for one-bit fields. */
3390 enum tree_code wanted_code;
3391 enum tree_code lcode, rcode;
3392 tree ll_arg, lr_arg, rl_arg, rr_arg;
3393 tree ll_inner, lr_inner, rl_inner, rr_inner;
3394 int ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
3395 int rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
3396 int xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
3397 int lnbitsize, lnbitpos, rnbitsize, rnbitpos;
3398 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
3399 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
3400 enum machine_mode lnmode, rnmode;
3401 tree ll_mask, lr_mask, rl_mask, rr_mask;
3402 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
3403 tree l_const, r_const;
3405 int first_bit, end_bit;
3408 /* Start by getting the comparison codes. Fail if anything is volatile.
3409 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
3410 it were surrounded with a NE_EXPR. */
3412 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
3415 lcode = TREE_CODE (lhs);
3416 rcode = TREE_CODE (rhs);
3418 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
3419 lcode = NE_EXPR, lhs = build (NE_EXPR, truth_type, lhs, integer_zero_node);
3421 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
3422 rcode = NE_EXPR, rhs = build (NE_EXPR, truth_type, rhs, integer_zero_node);
3424 if (TREE_CODE_CLASS (lcode) != '<' || TREE_CODE_CLASS (rcode) != '<')
3427 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
3428 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
3430 ll_arg = TREE_OPERAND (lhs, 0);
3431 lr_arg = TREE_OPERAND (lhs, 1);
3432 rl_arg = TREE_OPERAND (rhs, 0);
3433 rr_arg = TREE_OPERAND (rhs, 1);
3435 /* If the RHS can be evaluated unconditionally and its operands are
3436 simple, it wins to evaluate the RHS unconditionally on machines
3437 with expensive branches. In this case, this isn't a comparison
3438 that can be merged. */
3440 /* @@ I'm not sure it wins on the m88110 to do this if the comparisons
3441 are with zero (tmw). */
3443 if (BRANCH_COST >= 2
3444 && INTEGRAL_TYPE_P (TREE_TYPE (rhs))
3445 && simple_operand_p (rl_arg)
3446 && simple_operand_p (rr_arg))
3447 return build (code, truth_type, lhs, rhs);
3449 /* See if the comparisons can be merged. Then get all the parameters for
3452 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
3453 || (rcode != EQ_EXPR && rcode != NE_EXPR))
3457 ll_inner = decode_field_reference (ll_arg,
3458 &ll_bitsize, &ll_bitpos, &ll_mode,
3459 &ll_unsignedp, &volatilep, &ll_mask,
3461 lr_inner = decode_field_reference (lr_arg,
3462 &lr_bitsize, &lr_bitpos, &lr_mode,
3463 &lr_unsignedp, &volatilep, &lr_mask,
3465 rl_inner = decode_field_reference (rl_arg,
3466 &rl_bitsize, &rl_bitpos, &rl_mode,
3467 &rl_unsignedp, &volatilep, &rl_mask,
3469 rr_inner = decode_field_reference (rr_arg,
3470 &rr_bitsize, &rr_bitpos, &rr_mode,
3471 &rr_unsignedp, &volatilep, &rr_mask,
3474 /* It must be true that the inner operation on the lhs of each
3475 comparison must be the same if we are to be able to do anything.
3476 Then see if we have constants. If not, the same must be true for
3478 if (volatilep || ll_inner == 0 || rl_inner == 0
3479 || ! operand_equal_p (ll_inner, rl_inner, 0))
3482 if (TREE_CODE (lr_arg) == INTEGER_CST
3483 && TREE_CODE (rr_arg) == INTEGER_CST)
3484 l_const = lr_arg, r_const = rr_arg;
3485 else if (lr_inner == 0 || rr_inner == 0
3486 || ! operand_equal_p (lr_inner, rr_inner, 0))
3489 l_const = r_const = 0;
3491 /* If either comparison code is not correct for our logical operation,
3492 fail. However, we can convert a one-bit comparison against zero into
3493 the opposite comparison against that bit being set in the field. */
3495 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
3496 if (lcode != wanted_code)
3498 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
3500 if (ll_unsignedp || tree_log2 (ll_mask) + 1 < ll_bitsize)
3503 /* Since ll_arg is a single bit bit mask, we can sign extend
3504 it appropriately with a NEGATE_EXPR.
3505 l_const is made a signed value here, but since for l_const != NULL
3506 lr_unsignedp is not used, we don't need to clear the latter. */
3507 l_const = fold (build1 (NEGATE_EXPR, TREE_TYPE (ll_arg),
3508 convert (TREE_TYPE (ll_arg), ll_mask)));
3514 if (rcode != wanted_code)
3516 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
3518 if (rl_unsignedp || tree_log2 (rl_mask) + 1 < rl_bitsize)
3521 /* This is analogous to the code for l_const above. */
3522 r_const = fold (build1 (NEGATE_EXPR, TREE_TYPE (rl_arg),
3523 convert (TREE_TYPE (rl_arg), rl_mask)));
3529 /* See if we can find a mode that contains both fields being compared on
3530 the left. If we can't, fail. Otherwise, update all constants and masks
3531 to be relative to a field of that size. */
3532 first_bit = MIN (ll_bitpos, rl_bitpos);
3533 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
3534 lnmode = get_best_mode (end_bit - first_bit, first_bit,
3535 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
3537 if (lnmode == VOIDmode)
3540 lnbitsize = GET_MODE_BITSIZE (lnmode);
3541 lnbitpos = first_bit & ~ (lnbitsize - 1);
3542 type = type_for_size (lnbitsize, 1);
3543 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
3545 if (BYTES_BIG_ENDIAN)
3547 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
3548 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
3551 ll_mask = const_binop (LSHIFT_EXPR, convert (type, ll_mask),
3552 size_int (xll_bitpos), 0);
3553 rl_mask = const_binop (LSHIFT_EXPR, convert (type, rl_mask),
3554 size_int (xrl_bitpos), 0);
3558 l_const = convert (type, l_const);
3559 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
3560 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
3561 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
3562 fold (build1 (BIT_NOT_EXPR,
3566 warning ("comparison is always %s",
3567 wanted_code == NE_EXPR ? "one" : "zero");
3569 return convert (truth_type,
3570 wanted_code == NE_EXPR
3571 ? integer_one_node : integer_zero_node);
3576 r_const = convert (type, r_const);
3577 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
3578 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
3579 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
3580 fold (build1 (BIT_NOT_EXPR,
3584 warning ("comparison is always %s",
3585 wanted_code == NE_EXPR ? "one" : "zero");
3587 return convert (truth_type,
3588 wanted_code == NE_EXPR
3589 ? integer_one_node : integer_zero_node);
3593 /* If the right sides are not constant, do the same for it. Also,
3594 disallow this optimization if a size or signedness mismatch occurs
3595 between the left and right sides. */
3598 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
3599 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
3600 /* Make sure the two fields on the right
3601 correspond to the left without being swapped. */
3602 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
3605 first_bit = MIN (lr_bitpos, rr_bitpos);
3606 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
3607 rnmode = get_best_mode (end_bit - first_bit, first_bit,
3608 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
3610 if (rnmode == VOIDmode)
3613 rnbitsize = GET_MODE_BITSIZE (rnmode);
3614 rnbitpos = first_bit & ~ (rnbitsize - 1);
3615 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
3617 if (BYTES_BIG_ENDIAN)
3619 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
3620 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
3623 lr_mask = const_binop (LSHIFT_EXPR, convert (type, lr_mask),
3624 size_int (xlr_bitpos), 0);
3625 rr_mask = const_binop (LSHIFT_EXPR, convert (type, rr_mask),
3626 size_int (xrr_bitpos), 0);
3628 /* Make a mask that corresponds to both fields being compared.
3629 Do this for both items being compared. If the masks agree,
3630 we can do this by masking both and comparing the masked
3632 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
3633 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
3634 if (operand_equal_p (ll_mask, lr_mask, 0) && lnbitsize == rnbitsize)
3636 lhs = make_bit_field_ref (ll_inner, type, lnbitsize, lnbitpos,
3637 ll_unsignedp || rl_unsignedp);
3638 rhs = make_bit_field_ref (lr_inner, type, rnbitsize, rnbitpos,
3639 lr_unsignedp || rr_unsignedp);
3640 if (! all_ones_mask_p (ll_mask, lnbitsize))
3642 lhs = build (BIT_AND_EXPR, type, lhs, ll_mask);
3643 rhs = build (BIT_AND_EXPR, type, rhs, ll_mask);
3645 return build (wanted_code, truth_type, lhs, rhs);
3648 /* There is still another way we can do something: If both pairs of
3649 fields being compared are adjacent, we may be able to make a wider
3650 field containing them both. */
3651 if ((ll_bitsize + ll_bitpos == rl_bitpos
3652 && lr_bitsize + lr_bitpos == rr_bitpos)
3653 || (ll_bitpos == rl_bitpos + rl_bitsize
3654 && lr_bitpos == rr_bitpos + rr_bitsize))
3655 return build (wanted_code, truth_type,
3656 make_bit_field_ref (ll_inner, type,
3657 ll_bitsize + rl_bitsize,
3658 MIN (ll_bitpos, rl_bitpos),
3660 make_bit_field_ref (lr_inner, type,
3661 lr_bitsize + rr_bitsize,
3662 MIN (lr_bitpos, rr_bitpos),
3668 /* Handle the case of comparisons with constants. If there is something in
3669 common between the masks, those bits of the constants must be the same.
3670 If not, the condition is always false. Test for this to avoid generating
3671 incorrect code below. */
3672 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
3673 if (! integer_zerop (result)
3674 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
3675 const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
3677 if (wanted_code == NE_EXPR)
3679 warning ("`or' of unmatched not-equal tests is always 1");
3680 return convert (truth_type, integer_one_node);
3684 warning ("`and' of mutually exclusive equal-tests is always zero");
3685 return convert (truth_type, integer_zero_node);
3689 /* Construct the expression we will return. First get the component
3690 reference we will make. Unless the mask is all ones the width of
3691 that field, perform the mask operation. Then compare with the
3693 result = make_bit_field_ref (ll_inner, type, lnbitsize, lnbitpos,
3694 ll_unsignedp || rl_unsignedp);
3696 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
3697 if (! all_ones_mask_p (ll_mask, lnbitsize))
3698 result = build (BIT_AND_EXPR, type, result, ll_mask);
3700 return build (wanted_code, truth_type, result,
3701 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
3704 /* If T contains a COMPOUND_EXPR which was inserted merely to evaluate
3705 S, a SAVE_EXPR, return the expression actually being evaluated. Note
3706 that we may sometimes modify the tree. */
/* NOTE(review): this chunk is elided -- the K&R parameter declarations
   (presumably "tree t; tree s;") and the final "return t;" lie on lines
   not visible here.  Confirm against the full file before changing.  */
3709 strip_compound_expr (t, s)
3713 enum tree_code code = TREE_CODE (t);
3715 /* See if this is the COMPOUND_EXPR we want to eliminate.  The form
   matched is (COMPOUND_EXPR (CONVERT_EXPR S) rest): operand 0 is a
   conversion whose sole operand is exactly the SAVE_EXPR S, so only
   the second operand carries the real value.  */
3716 if (code == COMPOUND_EXPR && TREE_CODE (TREE_OPERAND (t, 0)) == CONVERT_EXPR
3717 && TREE_OPERAND (TREE_OPERAND (t, 0), 0) == s)
3718 return TREE_OPERAND (t, 1);
3720 /* See if this is a COND_EXPR or a simple arithmetic operator. We
3721 don't bother handling any other types. */
/* Recurse into all three arms of a conditional, rewriting each operand
   in place (this mutates the shared tree, as the header comment warns). */
3722 else if (code == COND_EXPR)
3724 TREE_OPERAND (t, 0) = strip_compound_expr (TREE_OPERAND (t, 0), s);
3725 TREE_OPERAND (t, 1) = strip_compound_expr (TREE_OPERAND (t, 1), s);
3726 TREE_OPERAND (t, 2) = strip_compound_expr (TREE_OPERAND (t, 2), s);
/* Class '1' is a unary operator: one operand to rewrite.  */
3728 else if (TREE_CODE_CLASS (code) == '1')
3729 TREE_OPERAND (t, 0) = strip_compound_expr (TREE_OPERAND (t, 0), s);
/* Class '<' (comparison) and '2' (binary operator) both have exactly
   two operands; rewrite each.  */
3730 else if (TREE_CODE_CLASS (code) == '<'
3731 || TREE_CODE_CLASS (code) == '2')
3733 TREE_OPERAND (t, 0) = strip_compound_expr (TREE_OPERAND (t, 0), s);
3734 TREE_OPERAND (t, 1) = strip_compound_expr (TREE_OPERAND (t, 1), s);
3740 /* Return a node which has the indicated constant VALUE (either 0 or
3741 1), and is of the indicated TYPE. */
/* NOTE(review): parameter declarations and the closing "return t;" are on
   elided lines of this chunk -- verify against the full file.  */
3744 constant_boolean_node (value, type)
/* Fast path: for plain `int' the shared constant nodes already exist.  */
3748 if (type == integer_type_node)
3749 return value ? integer_one_node : integer_zero_node;
/* For a genuine boolean type, route through truthvalue_conversion so the
   result carries the language's boolean representation.  (The false arm
   of this conditional is on an elided line.)  */
3750 else if (TREE_CODE (type) == BOOLEAN_TYPE)
3751 return truthvalue_conversion (value ? integer_one_node :
/* Otherwise build a fresh INTEGER_CST with low word VALUE, high word 0,
   and stamp it with the requested type.  */
3755 tree t = build_int_2 (value, 0);
3756 TREE_TYPE (t) = type;
3761 /* Perform constant folding and related simplification of EXPR.
3762 The related simplifications include x*1 => x, x*0 => 0, etc.,
3763 and application of the associative law.
3764 NOP_EXPR conversions may be removed freely (as long as we
3765 are careful not to change the C type of the overall expression)
3766 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
3767 but we can constant-fold them if they have constant operands. */
3773 register tree t = expr;
3774 tree t1 = NULL_TREE;
3776 tree type = TREE_TYPE (expr);
3777 register tree arg0 = NULL_TREE, arg1 = NULL_TREE;
3778 register enum tree_code code = TREE_CODE (t);
3782 /* WINS will be nonzero when the switch is done
3783 if all operands are constant. */
3787 /* Don't try to process an RTL_EXPR since its operands aren't trees.
3788 Likewise for a SAVE_EXPR that's already been evaluated. */
3789 if (code == RTL_EXPR || (code == SAVE_EXPR && SAVE_EXPR_RTL (t)) != 0)
3792 /* Return right away if already constant. */
3793 if (TREE_CONSTANT (t))
3795 if (code == CONST_DECL)
3796 return DECL_INITIAL (t);
3800 kind = TREE_CODE_CLASS (code);
3801 if (code == NOP_EXPR || code == FLOAT_EXPR || code == CONVERT_EXPR)
3805 /* Special case for conversion ops that can have fixed point args. */
3806 arg0 = TREE_OPERAND (t, 0);
3808 /* Don't use STRIP_NOPS, because signedness of argument type matters. */
3810 STRIP_TYPE_NOPS (arg0);
3812 if (arg0 != 0 && TREE_CODE (arg0) == COMPLEX_CST)
3813 subop = TREE_REALPART (arg0);
3817 if (subop != 0 && TREE_CODE (subop) != INTEGER_CST
3818 #if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
3819 && TREE_CODE (subop) != REAL_CST
3820 #endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
3822 /* Note that TREE_CONSTANT isn't enough:
3823 static var addresses are constant but we can't
3824 do arithmetic on them. */
3827 else if (kind == 'e' || kind == '<'
3828 || kind == '1' || kind == '2' || kind == 'r')
3830 register int len = tree_code_length[(int) code];
3832 for (i = 0; i < len; i++)
3834 tree op = TREE_OPERAND (t, i);
3838 continue; /* Valid for CALL_EXPR, at least. */
3840 if (kind == '<' || code == RSHIFT_EXPR)
3842 /* Signedness matters here. Perhaps we can refine this
3844 STRIP_TYPE_NOPS (op);
3848 /* Strip any conversions that don't change the mode. */
3852 if (TREE_CODE (op) == COMPLEX_CST)
3853 subop = TREE_REALPART (op);
3857 if (TREE_CODE (subop) != INTEGER_CST
3858 #if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
3859 && TREE_CODE (subop) != REAL_CST
3860 #endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
3862 /* Note that TREE_CONSTANT isn't enough:
3863 static var addresses are constant but we can't
3864 do arithmetic on them. */
3874 /* If this is a commutative operation, and ARG0 is a constant, move it
3875 to ARG1 to reduce the number of tests below. */
3876 if ((code == PLUS_EXPR || code == MULT_EXPR || code == MIN_EXPR
3877 || code == MAX_EXPR || code == BIT_IOR_EXPR || code == BIT_XOR_EXPR
3878 || code == BIT_AND_EXPR)
3879 && (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST))
3881 tem = arg0; arg0 = arg1; arg1 = tem;
3883 tem = TREE_OPERAND (t, 0); TREE_OPERAND (t, 0) = TREE_OPERAND (t, 1);
3884 TREE_OPERAND (t, 1) = tem;
3887 /* Now WINS is set as described above,
3888 ARG0 is the first operand of EXPR,
3889 and ARG1 is the second operand (if it has more than one operand).
3891 First check for cases where an arithmetic operation is applied to a
3892 compound, conditional, or comparison operation. Push the arithmetic
3893 operation inside the compound or conditional to see if any folding
3894 can then be done. Convert comparison to conditional for this purpose.
3895 The also optimizes non-constant cases that used to be done in
3898 Before we do that, see if this is a BIT_AND_EXPR or a BIT_OR_EXPR,
3899 one of the operands is a comparison and the other is a comparison, a
3900 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
3901 code below would make the expression more complex. Change it to a
3902 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
3903 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
3905 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
3906 || code == EQ_EXPR || code == NE_EXPR)
3907 && ((truth_value_p (TREE_CODE (arg0))
3908 && (truth_value_p (TREE_CODE (arg1))
3909 || (TREE_CODE (arg1) == BIT_AND_EXPR
3910 && integer_onep (TREE_OPERAND (arg1, 1)))))
3911 || (truth_value_p (TREE_CODE (arg1))
3912 && (truth_value_p (TREE_CODE (arg0))
3913 || (TREE_CODE (arg0) == BIT_AND_EXPR
3914 && integer_onep (TREE_OPERAND (arg0, 1)))))))
3916 t = fold (build (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
3917 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
3921 if (code == EQ_EXPR)
3922 t = invert_truthvalue (t);
3927 if (TREE_CODE_CLASS (code) == '1')
3929 if (TREE_CODE (arg0) == COMPOUND_EXPR)
3930 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
3931 fold (build1 (code, type, TREE_OPERAND (arg0, 1))));
3932 else if (TREE_CODE (arg0) == COND_EXPR)
3934 t = fold (build (COND_EXPR, type, TREE_OPERAND (arg0, 0),
3935 fold (build1 (code, type, TREE_OPERAND (arg0, 1))),
3936 fold (build1 (code, type, TREE_OPERAND (arg0, 2)))));
3938 /* If this was a conversion, and all we did was to move into
3939 inside the COND_EXPR, bring it back out. But leave it if
3940 it is a conversion from integer to integer and the
3941 result precision is no wider than a word since such a
3942 conversion is cheap and may be optimized away by combine,
3943 while it couldn't if it were outside the COND_EXPR. Then return
3944 so we don't get into an infinite recursion loop taking the
3945 conversion out and then back in. */
3947 if ((code == NOP_EXPR || code == CONVERT_EXPR
3948 || code == NON_LVALUE_EXPR)
3949 && TREE_CODE (t) == COND_EXPR
3950 && TREE_CODE (TREE_OPERAND (t, 1)) == code
3951 && TREE_CODE (TREE_OPERAND (t, 2)) == code
3952 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0))
3953 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 2), 0)))
3954 && ! (INTEGRAL_TYPE_P (TREE_TYPE (t))
3955 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0)))
3956 && TYPE_PRECISION (TREE_TYPE (t)) <= BITS_PER_WORD))
3957 t = build1 (code, type,
3959 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0)),
3960 TREE_OPERAND (t, 0),
3961 TREE_OPERAND (TREE_OPERAND (t, 1), 0),
3962 TREE_OPERAND (TREE_OPERAND (t, 2), 0)));
3965 else if (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<')
3966 return fold (build (COND_EXPR, type, arg0,
3967 fold (build1 (code, type, integer_one_node)),
3968 fold (build1 (code, type, integer_zero_node))));
3970 else if (TREE_CODE_CLASS (code) == '2'
3971 || TREE_CODE_CLASS (code) == '<')
3973 if (TREE_CODE (arg1) == COMPOUND_EXPR)
3974 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
3975 fold (build (code, type,
3976 arg0, TREE_OPERAND (arg1, 1))));
3977 else if ((TREE_CODE (arg1) == COND_EXPR
3978 || (TREE_CODE_CLASS (TREE_CODE (arg1)) == '<'
3979 && TREE_CODE_CLASS (code) != '<'))
3980 && (! TREE_SIDE_EFFECTS (arg0) || current_function_decl != 0))
3982 tree test, true_value, false_value;
3983 tree lhs = 0, rhs = 0;
3985 if (TREE_CODE (arg1) == COND_EXPR)
3987 test = TREE_OPERAND (arg1, 0);
3988 true_value = TREE_OPERAND (arg1, 1);
3989 false_value = TREE_OPERAND (arg1, 2);
3993 tree testtype = TREE_TYPE (arg1);
3995 true_value = convert (testtype, integer_one_node);
3996 false_value = convert (testtype, integer_zero_node);
3999 /* If ARG0 is complex we want to make sure we only evaluate
4000 it once. Though this is only required if it is volatile, it
4001 might be more efficient even if it is not. However, if we
4002 succeed in folding one part to a constant, we do not need
4003 to make this SAVE_EXPR. Since we do this optimization
4004 primarily to see if we do end up with constant and this
4005 SAVE_EXPR interferes with later optimizations, suppressing
4006 it when we can is important.
4008 If we are not in a function, we can't make a SAVE_EXPR, so don't
4009 try to do so. Don't try to see if the result is a constant
4010 if an arm is a COND_EXPR since we get exponential behavior
4013 if (TREE_CODE (arg0) != SAVE_EXPR && ! TREE_CONSTANT (arg0)
4014 && current_function_decl != 0
4015 && ((TREE_CODE (arg0) != VAR_DECL
4016 && TREE_CODE (arg0) != PARM_DECL)
4017 || TREE_SIDE_EFFECTS (arg0)))
4019 if (TREE_CODE (true_value) != COND_EXPR)
4020 lhs = fold (build (code, type, arg0, true_value));
4022 if (TREE_CODE (false_value) != COND_EXPR)
4023 rhs = fold (build (code, type, arg0, false_value));
4025 if ((lhs == 0 || ! TREE_CONSTANT (lhs))
4026 && (rhs == 0 || !TREE_CONSTANT (rhs)))
4027 arg0 = save_expr (arg0), lhs = rhs = 0;
4031 lhs = fold (build (code, type, arg0, true_value));
4033 rhs = fold (build (code, type, arg0, false_value));
4035 test = fold (build (COND_EXPR, type, test, lhs, rhs));
4037 if (TREE_CODE (arg0) == SAVE_EXPR)
4038 return build (COMPOUND_EXPR, type,
4039 convert (void_type_node, arg0),
4040 strip_compound_expr (test, arg0));
4042 return convert (type, test);
4045 else if (TREE_CODE (arg0) == COMPOUND_EXPR)
4046 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
4047 fold (build (code, type, TREE_OPERAND (arg0, 1), arg1)));
4048 else if ((TREE_CODE (arg0) == COND_EXPR
4049 || (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
4050 && TREE_CODE_CLASS (code) != '<'))
4051 && (! TREE_SIDE_EFFECTS (arg1) || current_function_decl != 0))
4053 tree test, true_value, false_value;
4054 tree lhs = 0, rhs = 0;
4056 if (TREE_CODE (arg0) == COND_EXPR)
4058 test = TREE_OPERAND (arg0, 0);
4059 true_value = TREE_OPERAND (arg0, 1);
4060 false_value = TREE_OPERAND (arg0, 2);
4064 tree testtype = TREE_TYPE (arg0);
4066 true_value = convert (testtype, integer_one_node);
4067 false_value = convert (testtype, integer_zero_node);
4070 if (TREE_CODE (arg1) != SAVE_EXPR && ! TREE_CONSTANT (arg0)
4071 && current_function_decl != 0
4072 && ((TREE_CODE (arg1) != VAR_DECL
4073 && TREE_CODE (arg1) != PARM_DECL)
4074 || TREE_SIDE_EFFECTS (arg1)))
4076 if (TREE_CODE (true_value) != COND_EXPR)
4077 lhs = fold (build (code, type, true_value, arg1));
4079 if (TREE_CODE (false_value) != COND_EXPR)
4080 rhs = fold (build (code, type, false_value, arg1));
4082 if ((lhs == 0 || ! TREE_CONSTANT (lhs))
4083 && (rhs == 0 || !TREE_CONSTANT (rhs)))
4084 arg1 = save_expr (arg1), lhs = rhs = 0;
4088 lhs = fold (build (code, type, true_value, arg1));
4091 rhs = fold (build (code, type, false_value, arg1));
4093 test = fold (build (COND_EXPR, type, test, lhs, rhs));
4094 if (TREE_CODE (arg1) == SAVE_EXPR)
4095 return build (COMPOUND_EXPR, type,
4096 convert (void_type_node, arg1),
4097 strip_compound_expr (test, arg1));
4099 return convert (type, test);
4102 else if (TREE_CODE_CLASS (code) == '<'
4103 && TREE_CODE (arg0) == COMPOUND_EXPR)
4104 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
4105 fold (build (code, type, TREE_OPERAND (arg0, 1), arg1)));
4106 else if (TREE_CODE_CLASS (code) == '<'
4107 && TREE_CODE (arg1) == COMPOUND_EXPR)
4108 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
4109 fold (build (code, type, arg0, TREE_OPERAND (arg1, 1))));
4121 return fold (DECL_INITIAL (t));
4126 case FIX_TRUNC_EXPR:
4127 /* Other kinds of FIX are not handled properly by fold_convert. */
4129 if (TREE_TYPE (TREE_OPERAND (t, 0)) == TREE_TYPE (t))
4130 return TREE_OPERAND (t, 0);
4132 /* Handle cases of two conversions in a row. */
4133 if (TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
4134 || TREE_CODE (TREE_OPERAND (t, 0)) == CONVERT_EXPR)
4136 tree inside_type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
4137 tree inter_type = TREE_TYPE (TREE_OPERAND (t, 0));
4138 tree final_type = TREE_TYPE (t);
4139 int inside_int = INTEGRAL_TYPE_P (inside_type);
4140 int inside_ptr = POINTER_TYPE_P (inside_type);
4141 int inside_float = FLOAT_TYPE_P (inside_type);
4142 int inside_prec = TYPE_PRECISION (inside_type);
4143 int inside_unsignedp = TREE_UNSIGNED (inside_type);
4144 int inter_int = INTEGRAL_TYPE_P (inter_type);
4145 int inter_ptr = POINTER_TYPE_P (inter_type);
4146 int inter_float = FLOAT_TYPE_P (inter_type);
4147 int inter_prec = TYPE_PRECISION (inter_type);
4148 int inter_unsignedp = TREE_UNSIGNED (inter_type);
4149 int final_int = INTEGRAL_TYPE_P (final_type);
4150 int final_ptr = POINTER_TYPE_P (final_type);
4151 int final_float = FLOAT_TYPE_P (final_type);
4152 int final_prec = TYPE_PRECISION (final_type);
4153 int final_unsignedp = TREE_UNSIGNED (final_type);
4155 /* In addition to the cases of two conversions in a row
4156 handled below, if we are converting something to its own
4157 type via an object of identical or wider precision, neither
4158 conversion is needed. */
4159 if (inside_type == final_type
4160 && ((inter_int && final_int) || (inter_float && final_float))
4161 && inter_prec >= final_prec)
4162 return TREE_OPERAND (TREE_OPERAND (t, 0), 0);
4164 /* Likewise, if the intermediate and final types are either both
4165 float or both integer, we don't need the middle conversion if
4166 it is wider than the final type and doesn't change the signedness
4167 (for integers). Avoid this if the final type is a pointer
4168 since then we sometimes need the inner conversion. Likewise if
4169 the outer has a precision not equal to the size of its mode. */
4170 if ((((inter_int || inter_ptr) && (inside_int || inside_ptr))
4171 || (inter_float && inside_float))
4172 && inter_prec >= inside_prec
4173 && (inter_float || inter_unsignedp == inside_unsignedp)
4174 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (final_type))
4175 && TYPE_MODE (final_type) == TYPE_MODE (inter_type))
4177 return convert (final_type, TREE_OPERAND (TREE_OPERAND (t, 0), 0));
4179 /* If we have a sign-extension of a zero-extended value, we can
4180 replace that by a single zero-extension. */
4181 if (inside_int && inter_int && final_int
4182 && inside_prec < inter_prec && inter_prec < final_prec
4183 && inside_unsignedp && !inter_unsignedp)
4184 return convert (final_type, TREE_OPERAND (TREE_OPERAND (t, 0), 0));
4186 /* Two conversions in a row are not needed unless:
4187 - some conversion is floating-point (overstrict for now), or
4188 - the intermediate type is narrower than both initial and
4190 - the intermediate type and innermost type differ in signedness,
4191 and the outermost type is wider than the intermediate, or
4192 - the initial type is a pointer type and the precisions of the
4193 intermediate and final types differ, or
4194 - the final type is a pointer type and the precisions of the
4195 initial and intermediate types differ. */
4196 if (! inside_float && ! inter_float && ! final_float
4197 && (inter_prec > inside_prec || inter_prec > final_prec)
4198 && ! (inside_int && inter_int
4199 && inter_unsignedp != inside_unsignedp
4200 && inter_prec < final_prec)
4201 && ((inter_unsignedp && inter_prec > inside_prec)
4202 == (final_unsignedp && final_prec > inter_prec))
4203 && ! (inside_ptr && inter_prec != final_prec)
4204 && ! (final_ptr && inside_prec != inter_prec)
4205 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (final_type))
4206 && TYPE_MODE (final_type) == TYPE_MODE (inter_type))
4208 return convert (final_type, TREE_OPERAND (TREE_OPERAND (t, 0), 0));
4211 if (TREE_CODE (TREE_OPERAND (t, 0)) == MODIFY_EXPR
4212 && TREE_CONSTANT (TREE_OPERAND (TREE_OPERAND (t, 0), 1))
4213 /* Detect assigning a bitfield. */
4214 && !(TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 0)) == COMPONENT_REF
4215 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t, 0), 0), 1))))
4217 /* Don't leave an assignment inside a conversion
4218 unless assigning a bitfield. */
4219 tree prev = TREE_OPERAND (t, 0);
4220 TREE_OPERAND (t, 0) = TREE_OPERAND (prev, 1);
4221 /* First do the assignment, then return converted constant. */
4222 t = build (COMPOUND_EXPR, TREE_TYPE (t), prev, fold (t));
4228 TREE_CONSTANT (t) = TREE_CONSTANT (arg0);
4231 return fold_convert (t, arg0);
4233 #if 0 /* This loses on &"foo"[0]. */
4238 /* Fold an expression like: "foo"[2] */
4239 if (TREE_CODE (arg0) == STRING_CST
4240 && TREE_CODE (arg1) == INTEGER_CST
4241 && !TREE_INT_CST_HIGH (arg1)
4242 && (i = TREE_INT_CST_LOW (arg1)) < TREE_STRING_LENGTH (arg0))
4244 t = build_int_2 (TREE_STRING_POINTER (arg0)[i], 0);
4245 TREE_TYPE (t) = TREE_TYPE (TREE_TYPE (arg0));
4246 force_fit_type (t, 0);
4253 if (TREE_CODE (arg0) == CONSTRUCTOR)
4255 tree m = purpose_member (arg1, CONSTRUCTOR_ELTS (arg0));
4262 TREE_CONSTANT (t) = wins;
4268 if (TREE_CODE (arg0) == INTEGER_CST)
4270 HOST_WIDE_INT low, high;
4271 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
4272 TREE_INT_CST_HIGH (arg0),
4274 t = build_int_2 (low, high);
4275 TREE_TYPE (t) = type;
4277 = (TREE_OVERFLOW (arg0)
4278 | force_fit_type (t, overflow && !TREE_UNSIGNED (type)));
4279 TREE_CONSTANT_OVERFLOW (t)
4280 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg0);
4282 else if (TREE_CODE (arg0) == REAL_CST)
4283 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
4285 else if (TREE_CODE (arg0) == NEGATE_EXPR)
4286 return TREE_OPERAND (arg0, 0);
4288 /* Convert - (a - b) to (b - a) for non-floating-point. */
4289 else if (TREE_CODE (arg0) == MINUS_EXPR && ! FLOAT_TYPE_P (type))
4290 return build (MINUS_EXPR, type, TREE_OPERAND (arg0, 1),
4291 TREE_OPERAND (arg0, 0));
4298 if (TREE_CODE (arg0) == INTEGER_CST)
4300 if (! TREE_UNSIGNED (type)
4301 && TREE_INT_CST_HIGH (arg0) < 0)
4303 HOST_WIDE_INT low, high;
4304 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
4305 TREE_INT_CST_HIGH (arg0),
4307 t = build_int_2 (low, high);
4308 TREE_TYPE (t) = type;
4310 = (TREE_OVERFLOW (arg0)
4311 | force_fit_type (t, overflow));
4312 TREE_CONSTANT_OVERFLOW (t)
4313 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg0);
4316 else if (TREE_CODE (arg0) == REAL_CST)
4318 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
4319 t = build_real (type,
4320 REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
4323 else if (TREE_CODE (arg0) == ABS_EXPR || TREE_CODE (arg0) == NEGATE_EXPR)
4324 return build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0));
4328 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
4330 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
4331 return build (COMPLEX_EXPR, TREE_TYPE (arg0),
4332 TREE_OPERAND (arg0, 0),
4333 fold (build1 (NEGATE_EXPR,
4334 TREE_TYPE (TREE_TYPE (arg0)),
4335 TREE_OPERAND (arg0, 1))));
4336 else if (TREE_CODE (arg0) == COMPLEX_CST)
4337 return build_complex (type, TREE_OPERAND (arg0, 0),
4338 fold (build1 (NEGATE_EXPR,
4339 TREE_TYPE (TREE_TYPE (arg0)),
4340 TREE_OPERAND (arg0, 1))));
4341 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
4342 return fold (build (TREE_CODE (arg0), type,
4343 fold (build1 (CONJ_EXPR, type,
4344 TREE_OPERAND (arg0, 0))),
4345 fold (build1 (CONJ_EXPR,
4346 type, TREE_OPERAND (arg0, 1)))));
4347 else if (TREE_CODE (arg0) == CONJ_EXPR)
4348 return TREE_OPERAND (arg0, 0);
4354 t = build_int_2 (~ TREE_INT_CST_LOW (arg0),
4355 ~ TREE_INT_CST_HIGH (arg0));
4356 TREE_TYPE (t) = type;
4357 force_fit_type (t, 0);
4358 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg0);
4359 TREE_CONSTANT_OVERFLOW (t) = TREE_CONSTANT_OVERFLOW (arg0);
4361 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
4362 return TREE_OPERAND (arg0, 0);
4366 /* A + (-B) -> A - B */
4367 if (TREE_CODE (arg1) == NEGATE_EXPR)
4368 return fold (build (MINUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
4369 else if (! FLOAT_TYPE_P (type))
4371 if (integer_zerop (arg1))
4372 return non_lvalue (convert (type, arg0));
4374 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
4375 with a constant, and the two constants have no bits in common,
4376 we should treat this as a BIT_IOR_EXPR since this may produce more
4378 if (TREE_CODE (arg0) == BIT_AND_EXPR
4379 && TREE_CODE (arg1) == BIT_AND_EXPR
4380 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
4381 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
4382 && integer_zerop (const_binop (BIT_AND_EXPR,
4383 TREE_OPERAND (arg0, 1),
4384 TREE_OPERAND (arg1, 1), 0)))
4386 code = BIT_IOR_EXPR;
4390 /* (A * C) + (B * C) -> (A+B) * C. Since we are most concerned
4391 about the case where C is a constant, just try one of the
4392 four possibilities. */
4394 if (TREE_CODE (arg0) == MULT_EXPR && TREE_CODE (arg1) == MULT_EXPR
4395 && operand_equal_p (TREE_OPERAND (arg0, 1),
4396 TREE_OPERAND (arg1, 1), 0))
4397 return fold (build (MULT_EXPR, type,
4398 fold (build (PLUS_EXPR, type,
4399 TREE_OPERAND (arg0, 0),
4400 TREE_OPERAND (arg1, 0))),
4401 TREE_OPERAND (arg0, 1)));
4403 /* In IEEE floating point, x+0 may not equal x. */
4404 else if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
4406 && real_zerop (arg1))
4407 return non_lvalue (convert (type, arg0));
4409 /* In most languages, can't associate operations on floats
4410 through parentheses. Rather than remember where the parentheses
4411 were, we don't associate floats at all. It shouldn't matter much.
4412 However, associating multiplications is only very slightly
4413 inaccurate, so do that if -ffast-math is specified. */
4414 if (FLOAT_TYPE_P (type)
4415 && ! (flag_fast_math && code == MULT_EXPR))
4418 /* The varsign == -1 cases happen only for addition and subtraction.
4419 It says that the arg that was split was really CON minus VAR.
4420 The rest of the code applies to all associative operations. */
4426 if (split_tree (arg0, code, &var, &con, &varsign))
4430 /* EXPR is (CON-VAR) +- ARG1. */
4431 /* If it is + and VAR==ARG1, return just CONST. */
4432 if (code == PLUS_EXPR && operand_equal_p (var, arg1, 0))
4433 return convert (TREE_TYPE (t), con);
4435 /* If ARG0 is a constant, don't change things around;
4436 instead keep all the constant computations together. */
4438 if (TREE_CONSTANT (arg0))
4441 /* Otherwise return (CON +- ARG1) - VAR. */
4442 t = build (MINUS_EXPR, type,
4443 fold (build (code, type, con, arg1)), var);
4447 /* EXPR is (VAR+CON) +- ARG1. */
4448 /* If it is - and VAR==ARG1, return just CONST. */
4449 if (code == MINUS_EXPR && operand_equal_p (var, arg1, 0))
4450 return convert (TREE_TYPE (t), con);
4452 /* If ARG0 is a constant, don't change things around;
4453 instead keep all the constant computations together. */
4455 if (TREE_CONSTANT (arg0))
4458 /* Otherwise return VAR +- (ARG1 +- CON). */
4459 tem = fold (build (code, type, arg1, con));
4460 t = build (code, type, var, tem);
4462 if (integer_zerop (tem)
4463 && (code == PLUS_EXPR || code == MINUS_EXPR))
4464 return convert (type, var);
4465 /* If we have x +/- (c - d) [c an explicit integer]
4466 change it to x -/+ (d - c) since if d is relocatable
4467 then the latter can be a single immediate insn
4468 and the former cannot. */
4469 if (TREE_CODE (tem) == MINUS_EXPR
4470 && TREE_CODE (TREE_OPERAND (tem, 0)) == INTEGER_CST)
4472 tree tem1 = TREE_OPERAND (tem, 1);
4473 TREE_OPERAND (tem, 1) = TREE_OPERAND (tem, 0);
4474 TREE_OPERAND (tem, 0) = tem1;
4476 (code == PLUS_EXPR ? MINUS_EXPR : PLUS_EXPR));
4482 if (split_tree (arg1, code, &var, &con, &varsign))
4484 if (TREE_CONSTANT (arg1))
4489 (code == PLUS_EXPR ? MINUS_EXPR : PLUS_EXPR));
4491 /* EXPR is ARG0 +- (CON +- VAR). */
4492 if (TREE_CODE (t) == MINUS_EXPR
4493 && operand_equal_p (var, arg0, 0))
4495 /* If VAR and ARG0 cancel, return just CON or -CON. */
4496 if (code == PLUS_EXPR)
4497 return convert (TREE_TYPE (t), con);
4498 return fold (build1 (NEGATE_EXPR, TREE_TYPE (t),
4499 convert (TREE_TYPE (t), con)));
4502 t = build (TREE_CODE (t), type,
4503 fold (build (code, TREE_TYPE (t), arg0, con)), var);
4505 if (integer_zerop (TREE_OPERAND (t, 0))
4506 && TREE_CODE (t) == PLUS_EXPR)
4507 return convert (TREE_TYPE (t), var);
4512 #if defined (REAL_IS_NOT_DOUBLE) && ! defined (REAL_ARITHMETIC)
4513 if (TREE_CODE (arg1) == REAL_CST)
4515 #endif /* REAL_IS_NOT_DOUBLE, and no REAL_ARITHMETIC */
4517 t1 = const_binop (code, arg0, arg1, 0);
4518 if (t1 != NULL_TREE)
4520 /* The return value should always have
4521 the same type as the original expression. */
4522 if (TREE_TYPE (t1) != TREE_TYPE (t))
4523 t1 = convert (TREE_TYPE (t), t1);
4530 if (! FLOAT_TYPE_P (type))
4532 if (! wins && integer_zerop (arg0))
4533 return build1 (NEGATE_EXPR, type, arg1);
4534 if (integer_zerop (arg1))
4535 return non_lvalue (convert (type, arg0));
4537 /* (A * C) - (B * C) -> (A-B) * C. Since we are most concerned
4538 about the case where C is a constant, just try one of the
4539 four possibilities. */
4541 if (TREE_CODE (arg0) == MULT_EXPR && TREE_CODE (arg1) == MULT_EXPR
4542 && operand_equal_p (TREE_OPERAND (arg0, 1),
4543 TREE_OPERAND (arg1, 1), 0))
4544 return fold (build (MULT_EXPR, type,
4545 fold (build (MINUS_EXPR, type,
4546 TREE_OPERAND (arg0, 0),
4547 TREE_OPERAND (arg1, 0))),
4548 TREE_OPERAND (arg0, 1)));
4550 /* Convert A - (-B) to A + B. */
4551 else if (TREE_CODE (arg1) == NEGATE_EXPR)
4552 return fold (build (PLUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
4554 else if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
4557 /* Except with IEEE floating point, 0-x equals -x. */
4558 if (! wins && real_zerop (arg0))
4559 return build1 (NEGATE_EXPR, type, arg1);
4560 /* Except with IEEE floating point, x-0 equals x. */
4561 if (real_zerop (arg1))
4562 return non_lvalue (convert (type, arg0));
4565 /* Fold &x - &x. This can happen from &x.foo - &x.
4566 This is unsafe for certain floats even in non-IEEE formats.
4567 In IEEE, it is unsafe because it does wrong for NaNs.
4568 Also note that operand_equal_p is always false if an operand
4571 if ((! FLOAT_TYPE_P (type) || flag_fast_math)
4572 && operand_equal_p (arg0, arg1, 0))
4573 return convert (type, integer_zero_node);
4578 if (! FLOAT_TYPE_P (type))
4580 if (integer_zerop (arg1))
4581 return omit_one_operand (type, arg1, arg0);
4582 if (integer_onep (arg1))
4583 return non_lvalue (convert (type, arg0));
4585 /* ((A / C) * C) is A if the division is an
4586 EXACT_DIV_EXPR. Since C is normally a constant,
4587 just check for one of the four possibilities. */
4589 if (TREE_CODE (arg0) == EXACT_DIV_EXPR
4590 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
4591 return TREE_OPERAND (arg0, 0);
4593 /* (a * (1 << b)) is (a << b) */
4594 if (TREE_CODE (arg1) == LSHIFT_EXPR
4595 && integer_onep (TREE_OPERAND (arg1, 0)))
4596 return fold (build (LSHIFT_EXPR, type, arg0,
4597 TREE_OPERAND (arg1, 1)));
4598 if (TREE_CODE (arg0) == LSHIFT_EXPR
4599 && integer_onep (TREE_OPERAND (arg0, 0)))
4600 return fold (build (LSHIFT_EXPR, type, arg1,
4601 TREE_OPERAND (arg0, 1)));
4605 /* x*0 is 0, except for IEEE floating point. */
4606 if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
4608 && real_zerop (arg1))
4609 return omit_one_operand (type, arg1, arg0);
4610 /* In IEEE floating point, x*1 is not equivalent to x for snans.
4611 However, ANSI says we can drop signals,
4612 so we can do this anyway. */
4613 if (real_onep (arg1))
4614 return non_lvalue (convert (type, arg0));
4616 if (! wins && real_twop (arg1) && current_function_decl != 0)
4618 tree arg = save_expr (arg0);
4619 return build (PLUS_EXPR, type, arg, arg);
4627 register enum tree_code code0, code1;
4629 if (integer_all_onesp (arg1))
4630 return omit_one_operand (type, arg1, arg0);
4631 if (integer_zerop (arg1))
4632 return non_lvalue (convert (type, arg0));
4633 t1 = distribute_bit_expr (code, type, arg0, arg1);
4634 if (t1 != NULL_TREE)
4637 /* (A << C1) | (A >> C2) if A is unsigned and C1+C2 is the size of A
4638 is a rotate of A by C1 bits. */
4639 /* (A << B) | (A >> (Z - B)) if A is unsigned and Z is the size of A
4640 is a rotate of A by B bits. */
4642 code0 = TREE_CODE (arg0);
4643 code1 = TREE_CODE (arg1);
4644 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
4645 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
4646 && operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1,0), 0)
4647 && TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
4649 register tree tree01, tree11;
4650 register enum tree_code code01, code11;
4652 tree01 = TREE_OPERAND (arg0, 1);
4653 tree11 = TREE_OPERAND (arg1, 1);
4654 code01 = TREE_CODE (tree01);
4655 code11 = TREE_CODE (tree11);
4656 if (code01 == INTEGER_CST
4657 && code11 == INTEGER_CST
4658 && TREE_INT_CST_HIGH (tree01) == 0
4659 && TREE_INT_CST_HIGH (tree11) == 0
4660 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
4661 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
4662 return build (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
4663 code0 == LSHIFT_EXPR ? tree01 : tree11);
4664 else if (code11 == MINUS_EXPR
4665 && TREE_CODE (TREE_OPERAND (tree11, 0)) == INTEGER_CST
4666 && TREE_INT_CST_HIGH (TREE_OPERAND (tree11, 0)) == 0
4667 && TREE_INT_CST_LOW (TREE_OPERAND (tree11, 0))
4668 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))
4669 && operand_equal_p (tree01, TREE_OPERAND (tree11, 1), 0))
4670 return build (code0 == LSHIFT_EXPR ? LROTATE_EXPR : RROTATE_EXPR,
4671 type, TREE_OPERAND (arg0, 0), tree01);
4672 else if (code01 == MINUS_EXPR
4673 && TREE_CODE (TREE_OPERAND (tree01, 0)) == INTEGER_CST
4674 && TREE_INT_CST_HIGH (TREE_OPERAND (tree01, 0)) == 0
4675 && TREE_INT_CST_LOW (TREE_OPERAND (tree01, 0))
4676 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))
4677 && operand_equal_p (tree11, TREE_OPERAND (tree01, 1), 0))
4678 return build (code0 != LSHIFT_EXPR ? LROTATE_EXPR : RROTATE_EXPR,
4679 type, TREE_OPERAND (arg0, 0), tree11);
4686 if (integer_zerop (arg1))
4687 return non_lvalue (convert (type, arg0));
4688 if (integer_all_onesp (arg1))
4689 return fold (build1 (BIT_NOT_EXPR, type, arg0));
4694 if (integer_all_onesp (arg1))
4695 return non_lvalue (convert (type, arg0));
4696 if (integer_zerop (arg1))
4697 return omit_one_operand (type, arg1, arg0);
4698 t1 = distribute_bit_expr (code, type, arg0, arg1);
4699 if (t1 != NULL_TREE)
4701 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
4702 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == NOP_EXPR
4703 && TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0))))
4705 int prec = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0)));
4706 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
4707 && (~TREE_INT_CST_LOW (arg0)
4708 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
4709 return build1 (NOP_EXPR, type, TREE_OPERAND (arg1, 0));
4711 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
4712 && TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
4714 int prec = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
4715 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
4716 && (~TREE_INT_CST_LOW (arg1)
4717 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
4718 return build1 (NOP_EXPR, type, TREE_OPERAND (arg0, 0));
4722 case BIT_ANDTC_EXPR:
4723 if (integer_all_onesp (arg0))
4724 return non_lvalue (convert (type, arg1));
4725 if (integer_zerop (arg0))
4726 return omit_one_operand (type, arg0, arg1);
4727 if (TREE_CODE (arg1) == INTEGER_CST)
4729 arg1 = fold (build1 (BIT_NOT_EXPR, type, arg1));
4730 code = BIT_AND_EXPR;
4736 /* In most cases, do nothing with a divide by zero. */
4737 #if !defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
4738 #ifndef REAL_INFINITY
4739 if (TREE_CODE (arg1) == REAL_CST && real_zerop (arg1))
4742 #endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
4744 /* In IEEE floating point, x/1 is not equivalent to x for snans.
4745 However, ANSI says we can drop signals, so we can do this anyway. */
4746 if (real_onep (arg1))
4747 return non_lvalue (convert (type, arg0));
4749 /* If ARG1 is a constant, we can convert this to a multiply by the
4750 reciprocal. This does not have the same rounding properties,
4751 so only do this if -ffast-math. We can actually always safely
4752 do it if ARG1 is a power of two, but it's hard to tell if it is
4753 or not in a portable manner. */
4754 if (TREE_CODE (arg1) == REAL_CST)
4757 && 0 != (tem = const_binop (code, build_real (type, dconst1),
4759 return fold (build (MULT_EXPR, type, arg0, tem));
4760 /* Find the reciprocal if optimizing and the result is exact. */
4764 r = TREE_REAL_CST (arg1);
4765 if (exact_real_inverse (TYPE_MODE(TREE_TYPE(arg0)), &r))
4767 tem = build_real (type, r);
4768 return fold (build (MULT_EXPR, type, arg0, tem));
4774 case TRUNC_DIV_EXPR:
4775 case ROUND_DIV_EXPR:
4776 case FLOOR_DIV_EXPR:
4778 case EXACT_DIV_EXPR:
4779 if (integer_onep (arg1))
4780 return non_lvalue (convert (type, arg0));
4781 if (integer_zerop (arg1))
4784 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
4785 operation, EXACT_DIV_EXPR.
4787 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
4788 At one time others generated faster code, it's not clear if they do
4789 after the last round of changes to the DIV code in expmed.c. */
4790 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
4791 && multiple_of_p (type, arg0, arg1))
4792 return fold (build (EXACT_DIV_EXPR, type, arg0, arg1));
4794 /* If we have ((a / C1) / C2) where both division are the same type, try
4795 to simplify. First see if C1 * C2 overflows or not. */
4796 if (TREE_CODE (arg0) == code && TREE_CODE (arg1) == INTEGER_CST
4797 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
4801 new_divisor = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 1), arg1, 0);
4802 tem = const_binop (FLOOR_DIV_EXPR, new_divisor, arg1, 0);
4804 if (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) == TREE_INT_CST_LOW (tem)
4805 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == TREE_INT_CST_HIGH (tem))
4807 /* If no overflow, divide by C1*C2. */
4808 return fold (build (code, type, TREE_OPERAND (arg0, 0), new_divisor));
4812 /* Look for ((a * C1) / C3) or (((a * C1) + C2) / C3),
4813 where C1 % C3 == 0 or C3 % C1 == 0. We can simplify these
4814 expressions, which often appear in the offsets or sizes of
4815 objects with a varying size. Only deal with positive divisors
4816 and multiplicands. If C2 is negative, we must have C2 % C3 == 0.
4818 Look for NOPs and SAVE_EXPRs inside. */
4820 if (TREE_CODE (arg1) == INTEGER_CST
4821 && tree_int_cst_sgn (arg1) >= 0)
4823 int have_save_expr = 0;
4824 tree c2 = integer_zero_node;
4827 if (TREE_CODE (xarg0) == SAVE_EXPR && SAVE_EXPR_RTL (xarg0) == 0)
4828 have_save_expr = 1, xarg0 = TREE_OPERAND (xarg0, 0);
4832 /* Look inside the dividend and simplify using EXACT_DIV_EXPR
4834 if (TREE_CODE (xarg0) == MULT_EXPR
4835 && multiple_of_p (type, TREE_OPERAND (xarg0, 0), arg1))
4839 t = fold (build (MULT_EXPR, type,
4840 fold (build (EXACT_DIV_EXPR, type,
4841 TREE_OPERAND (xarg0, 0), arg1)),
4842 TREE_OPERAND (xarg0, 1)));
4849 if (TREE_CODE (xarg0) == MULT_EXPR
4850 && multiple_of_p (type, TREE_OPERAND (xarg0, 1), arg1))
4854 t = fold (build (MULT_EXPR, type,
4855 fold (build (EXACT_DIV_EXPR, type,
4856 TREE_OPERAND (xarg0, 1), arg1)),
4857 TREE_OPERAND (xarg0, 0)));
4863 if (TREE_CODE (xarg0) == PLUS_EXPR
4864 && TREE_CODE (TREE_OPERAND (xarg0, 1)) == INTEGER_CST)
4865 c2 = TREE_OPERAND (xarg0, 1), xarg0 = TREE_OPERAND (xarg0, 0);
4866 else if (TREE_CODE (xarg0) == MINUS_EXPR
4867 && TREE_CODE (TREE_OPERAND (xarg0, 1)) == INTEGER_CST
4868 /* If we are doing this computation unsigned, the negate
4870 && ! TREE_UNSIGNED (type))
4872 c2 = fold (build1 (NEGATE_EXPR, type, TREE_OPERAND (xarg0, 1)));
4873 xarg0 = TREE_OPERAND (xarg0, 0);
4876 if (TREE_CODE (xarg0) == SAVE_EXPR && SAVE_EXPR_RTL (xarg0) == 0)
4877 have_save_expr = 1, xarg0 = TREE_OPERAND (xarg0, 0);
4881 if (TREE_CODE (xarg0) == MULT_EXPR
4882 && TREE_CODE (TREE_OPERAND (xarg0, 1)) == INTEGER_CST
4883 && tree_int_cst_sgn (TREE_OPERAND (xarg0, 1)) >= 0
4884 && (integer_zerop (const_binop (TRUNC_MOD_EXPR,
4885 TREE_OPERAND (xarg0, 1), arg1, 1))
4886 || integer_zerop (const_binop (TRUNC_MOD_EXPR, arg1,
4887 TREE_OPERAND (xarg0, 1), 1)))
4888 && (tree_int_cst_sgn (c2) >= 0
4889 || integer_zerop (const_binop (TRUNC_MOD_EXPR, c2,
4892 tree outer_div = integer_one_node;
4893 tree c1 = TREE_OPERAND (xarg0, 1);
4896 /* If C3 > C1, set them equal and do a divide by
4897 C3/C1 at the end of the operation. */
4898 if (tree_int_cst_lt (c1, c3))
4899 outer_div = const_binop (code, c3, c1, 0), c3 = c1;
4901 /* The result is A * (C1/C3) + (C2/C3). */
4902 t = fold (build (PLUS_EXPR, type,
4903 fold (build (MULT_EXPR, type,
4904 TREE_OPERAND (xarg0, 0),
4905 const_binop (code, c1, c3, 1))),
4906 const_binop (code, c2, c3, 1)));
4908 if (! integer_onep (outer_div))
4909 t = fold (build (code, type, t, convert (type, outer_div)));
4921 case FLOOR_MOD_EXPR:
4922 case ROUND_MOD_EXPR:
4923 case TRUNC_MOD_EXPR:
4924 if (integer_onep (arg1))
4925 return omit_one_operand (type, integer_zero_node, arg0);
4926 if (integer_zerop (arg1))
4929 /* Look for ((a * C1) % C3) or (((a * C1) + C2) % C3),
4930 where C1 % C3 == 0. Handle similarly to the division case,
4931 but don't bother with SAVE_EXPRs. */
4933 if (TREE_CODE (arg1) == INTEGER_CST
4934 && ! integer_zerop (arg1))
4936 tree c2 = integer_zero_node;
4939 if (TREE_CODE (xarg0) == PLUS_EXPR
4940 && TREE_CODE (TREE_OPERAND (xarg0, 1)) == INTEGER_CST)
4941 c2 = TREE_OPERAND (xarg0, 1), xarg0 = TREE_OPERAND (xarg0, 0);
4942 else if (TREE_CODE (xarg0) == MINUS_EXPR
4943 && TREE_CODE (TREE_OPERAND (xarg0, 1)) == INTEGER_CST
4944 && ! TREE_UNSIGNED (type))
4946 c2 = fold (build1 (NEGATE_EXPR, type, TREE_OPERAND (xarg0, 1)));
4947 xarg0 = TREE_OPERAND (xarg0, 0);
4952 if (TREE_CODE (xarg0) == MULT_EXPR
4953 && TREE_CODE (TREE_OPERAND (xarg0, 1)) == INTEGER_CST
4954 && integer_zerop (const_binop (TRUNC_MOD_EXPR,
4955 TREE_OPERAND (xarg0, 1),
4957 && tree_int_cst_sgn (c2) >= 0)
4958 /* The result is (C2%C3). */
4959 return omit_one_operand (type, const_binop (code, c2, arg1, 1),
4960 TREE_OPERAND (xarg0, 0));
4969 if (integer_zerop (arg1))
4970 return non_lvalue (convert (type, arg0));
4971 /* Since negative shift count is not well-defined,
4972 don't try to compute it in the compiler. */
4973 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
4975 /* Rewrite an LROTATE_EXPR by a constant into an
4976 RROTATE_EXPR by a new constant. */
4977 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
4979 TREE_SET_CODE (t, RROTATE_EXPR);
4980 code = RROTATE_EXPR;
4981 TREE_OPERAND (t, 1) = arg1
4984 convert (TREE_TYPE (arg1),
4985 build_int_2 (GET_MODE_BITSIZE (TYPE_MODE (type)), 0)),
4987 if (tree_int_cst_sgn (arg1) < 0)
4991 /* If we have a rotate of a bit operation with the rotate count and
4992 the second operand of the bit operation both constant,
4993 permute the two operations. */
4994 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
4995 && (TREE_CODE (arg0) == BIT_AND_EXPR
4996 || TREE_CODE (arg0) == BIT_ANDTC_EXPR
4997 || TREE_CODE (arg0) == BIT_IOR_EXPR
4998 || TREE_CODE (arg0) == BIT_XOR_EXPR)
4999 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
5000 return fold (build (TREE_CODE (arg0), type,
5001 fold (build (code, type,
5002 TREE_OPERAND (arg0, 0), arg1)),
5003 fold (build (code, type,
5004 TREE_OPERAND (arg0, 1), arg1))));
5006 /* Two consecutive rotates adding up to the width of the mode can
5008 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
5009 && TREE_CODE (arg0) == RROTATE_EXPR
5010 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
5011 && TREE_INT_CST_HIGH (arg1) == 0
5012 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
5013 && ((TREE_INT_CST_LOW (arg1)
5014 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
5015 == GET_MODE_BITSIZE (TYPE_MODE (type))))
5016 return TREE_OPERAND (arg0, 0);
5021 if (operand_equal_p (arg0, arg1, 0))
5023 if (INTEGRAL_TYPE_P (type)
5024 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), 1))
5025 return omit_one_operand (type, arg1, arg0);
5029 if (operand_equal_p (arg0, arg1, 0))
5031 if (INTEGRAL_TYPE_P (type)
5032 && TYPE_MAX_VALUE (type)
5033 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), 1))
5034 return omit_one_operand (type, arg1, arg0);
5037 case TRUTH_NOT_EXPR:
5038 /* Note that the operand of this must be an int
5039 and its values must be 0 or 1.
5040 ("true" is a fixed value perhaps depending on the language,
5041 but we don't handle values other than 1 correctly yet.) */
5042 tem = invert_truthvalue (arg0);
5043 /* Avoid infinite recursion. */
5044 if (TREE_CODE (tem) == TRUTH_NOT_EXPR)
5046 return convert (type, tem);
5048 case TRUTH_ANDIF_EXPR:
5049 /* Note that the operands of this must be ints
5050 and their values must be 0 or 1.
5051 ("true" is a fixed value perhaps depending on the language.) */
5052 /* If first arg is constant zero, return it. */
5053 if (integer_zerop (arg0))
5055 case TRUTH_AND_EXPR:
5056 /* If either arg is constant true, drop it. */
5057 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
5058 return non_lvalue (arg1);
5059 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
5060 return non_lvalue (arg0);
5061 /* If second arg is constant zero, result is zero, but first arg
5062 must be evaluated. */
5063 if (integer_zerop (arg1))
5064 return omit_one_operand (type, arg1, arg0);
5065 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
5066 case will be handled here. */
5067 if (integer_zerop (arg0))
5068 return omit_one_operand (type, arg0, arg1);
5071 /* We only do these simplifications if we are optimizing. */
5075 /* Check for things like (A || B) && (A || C). We can convert this
5076 to A || (B && C). Note that either operator can be any of the four
5077 truth and/or operations and the transformation will still be
5078 valid. Also note that we only care about order for the
5079 ANDIF and ORIF operators. If B contains side effects, this
5080 might change the truth-value of A. */
5081 if (TREE_CODE (arg0) == TREE_CODE (arg1)
5082 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
5083 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
5084 || TREE_CODE (arg0) == TRUTH_AND_EXPR
5085 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
5086 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
5088 tree a00 = TREE_OPERAND (arg0, 0);
5089 tree a01 = TREE_OPERAND (arg0, 1);
5090 tree a10 = TREE_OPERAND (arg1, 0);
5091 tree a11 = TREE_OPERAND (arg1, 1);
5092 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
5093 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
5094 && (code == TRUTH_AND_EXPR
5095 || code == TRUTH_OR_EXPR));
5097 if (operand_equal_p (a00, a10, 0))
5098 return fold (build (TREE_CODE (arg0), type, a00,
5099 fold (build (code, type, a01, a11))));
5100 else if (commutative && operand_equal_p (a00, a11, 0))
5101 return fold (build (TREE_CODE (arg0), type, a00,
5102 fold (build (code, type, a01, a10))));
5103 else if (commutative && operand_equal_p (a01, a10, 0))
5104 return fold (build (TREE_CODE (arg0), type, a01,
5105 fold (build (code, type, a00, a11))));
5107 /* This case is tricky because we must either have commutative
5108 operators or else A10 must not have side-effects. */
5110 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
5111 && operand_equal_p (a01, a11, 0))
5112 return fold (build (TREE_CODE (arg0), type,
5113 fold (build (code, type, a00, a10)),
5117 /* See if we can build a range comparison. */
5118 if (0 != (tem = fold_range_test (t)))
5121 /* Check for the possibility of merging component references. If our
5122 lhs is another similar operation, try to merge its rhs with our
5123 rhs. Then try to merge our lhs and rhs. */
5124 if (TREE_CODE (arg0) == code
5125 && 0 != (tem = fold_truthop (code, type,
5126 TREE_OPERAND (arg0, 1), arg1)))
5127 return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));
5129 if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
5134 case TRUTH_ORIF_EXPR:
5135 /* Note that the operands of this must be ints
5136 and their values must be 0 or true.
5137 ("true" is a fixed value perhaps depending on the language.) */
5138 /* If first arg is constant true, return it. */
5139 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
5142 /* If either arg is constant zero, drop it. */
5143 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
5144 return non_lvalue (arg1);
5145 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1))
5146 return non_lvalue (arg0);
5147 /* If second arg is constant true, result is true, but we must
5148 evaluate first arg. */
5149 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
5150 return omit_one_operand (type, arg1, arg0);
5151 /* Likewise for first arg, but note this only occurs here for
5153 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
5154 return omit_one_operand (type, arg0, arg1);
5157 case TRUTH_XOR_EXPR:
5158 /* If either arg is constant zero, drop it. */
5159 if (integer_zerop (arg0))
5160 return non_lvalue (arg1);
5161 if (integer_zerop (arg1))
5162 return non_lvalue (arg0);
5163 /* If either arg is constant true, this is a logical inversion. */
5164 if (integer_onep (arg0))
5165 return non_lvalue (invert_truthvalue (arg1));
5166 if (integer_onep (arg1))
5167 return non_lvalue (invert_truthvalue (arg0));
5176 /* If one arg is a constant integer, put it last. */
5177 if (TREE_CODE (arg0) == INTEGER_CST
5178 && TREE_CODE (arg1) != INTEGER_CST)
5180 TREE_OPERAND (t, 0) = arg1;
5181 TREE_OPERAND (t, 1) = arg0;
5182 arg0 = TREE_OPERAND (t, 0);
5183 arg1 = TREE_OPERAND (t, 1);
5184 code = swap_tree_comparison (code);
5185 TREE_SET_CODE (t, code);
5188 /* Convert foo++ == CONST into ++foo == CONST + INCR.
5189 First, see if one arg is constant; find the constant arg
5190 and the other one. */
5192 tree constop = 0, varop = NULL_TREE;
5193 int constopnum = -1;
5195 if (TREE_CONSTANT (arg1))
5196 constopnum = 1, constop = arg1, varop = arg0;
5197 if (TREE_CONSTANT (arg0))
5198 constopnum = 0, constop = arg0, varop = arg1;
5200 if (constop && TREE_CODE (varop) == POSTINCREMENT_EXPR)
5202 /* This optimization is invalid for ordered comparisons
5203 if CONST+INCR overflows or if foo+incr might overflow.
5204 This optimization is invalid for floating point due to rounding.
5205 For pointer types we assume overflow doesn't happen. */
5206 if (POINTER_TYPE_P (TREE_TYPE (varop))
5207 || (! FLOAT_TYPE_P (TREE_TYPE (varop))
5208 && (code == EQ_EXPR || code == NE_EXPR)))
5211 = fold (build (PLUS_EXPR, TREE_TYPE (varop),
5212 constop, TREE_OPERAND (varop, 1)));
5213 TREE_SET_CODE (varop, PREINCREMENT_EXPR);
5215 /* If VAROP is a reference to a bitfield, we must mask
5216 the constant by the width of the field. */
5217 if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
5218 && DECL_BIT_FIELD(TREE_OPERAND
5219 (TREE_OPERAND (varop, 0), 1)))
5222 = TREE_INT_CST_LOW (DECL_SIZE
5224 (TREE_OPERAND (varop, 0), 1)));
5225 tree mask, unsigned_type;
5227 tree folded_compare;
5229 /* First check whether the comparison would come out
5230 always the same. If we don't do that we would
5231 change the meaning with the masking. */
5232 if (constopnum == 0)
5233 folded_compare = fold (build (code, type, constop,
5234 TREE_OPERAND (varop, 0)));
5236 folded_compare = fold (build (code, type,
5237 TREE_OPERAND (varop, 0),
5239 if (integer_zerop (folded_compare)
5240 || integer_onep (folded_compare))
5241 return omit_one_operand (type, folded_compare, varop);
5243 unsigned_type = type_for_size (size, 1);
5244 precision = TYPE_PRECISION (unsigned_type);
5245 mask = build_int_2 (~0, ~0);
5246 TREE_TYPE (mask) = unsigned_type;
5247 force_fit_type (mask, 0);
5248 mask = const_binop (RSHIFT_EXPR, mask,
5249 size_int (precision - size), 0);
5250 newconst = fold (build (BIT_AND_EXPR,
5251 TREE_TYPE (varop), newconst,
5252 convert (TREE_TYPE (varop),
5257 t = build (code, type, TREE_OPERAND (t, 0),
5258 TREE_OPERAND (t, 1));
5259 TREE_OPERAND (t, constopnum) = newconst;
5263 else if (constop && TREE_CODE (varop) == POSTDECREMENT_EXPR)
5265 if (POINTER_TYPE_P (TREE_TYPE (varop))
5266 || (! FLOAT_TYPE_P (TREE_TYPE (varop))
5267 && (code == EQ_EXPR || code == NE_EXPR)))
5270 = fold (build (MINUS_EXPR, TREE_TYPE (varop),
5271 constop, TREE_OPERAND (varop, 1)));
5272 TREE_SET_CODE (varop, PREDECREMENT_EXPR);
5274 if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
5275 && DECL_BIT_FIELD(TREE_OPERAND
5276 (TREE_OPERAND (varop, 0), 1)))
5279 = TREE_INT_CST_LOW (DECL_SIZE
5281 (TREE_OPERAND (varop, 0), 1)));
5282 tree mask, unsigned_type;
5284 tree folded_compare;
5286 if (constopnum == 0)
5287 folded_compare = fold (build (code, type, constop,
5288 TREE_OPERAND (varop, 0)));
5290 folded_compare = fold (build (code, type,
5291 TREE_OPERAND (varop, 0),
5293 if (integer_zerop (folded_compare)
5294 || integer_onep (folded_compare))
5295 return omit_one_operand (type, folded_compare, varop);
5297 unsigned_type = type_for_size (size, 1);
5298 precision = TYPE_PRECISION (unsigned_type);
5299 mask = build_int_2 (~0, ~0);
5300 TREE_TYPE (mask) = TREE_TYPE (varop);
5301 force_fit_type (mask, 0);
5302 mask = const_binop (RSHIFT_EXPR, mask,
5303 size_int (precision - size), 0);
5304 newconst = fold (build (BIT_AND_EXPR,
5305 TREE_TYPE (varop), newconst,
5306 convert (TREE_TYPE (varop),
5311 t = build (code, type, TREE_OPERAND (t, 0),
5312 TREE_OPERAND (t, 1));
5313 TREE_OPERAND (t, constopnum) = newconst;
5319 /* Change X >= CST to X > (CST - 1) if CST is positive. */
5320 if (TREE_CODE (arg1) == INTEGER_CST
5321 && TREE_CODE (arg0) != INTEGER_CST
5322 && tree_int_cst_sgn (arg1) > 0)
5324 switch (TREE_CODE (t))
5328 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
5329 t = build (code, type, TREE_OPERAND (t, 0), arg1);
5334 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
5335 t = build (code, type, TREE_OPERAND (t, 0), arg1);
5343 /* If this is an EQ or NE comparison with zero and ARG0 is
5344 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
5345 two operations, but the latter can be done in one less insn
5346 on machines that have only two-operand insns or on which a
5347 constant cannot be the first operand. */
5348 if (integer_zerop (arg1) && (code == EQ_EXPR || code == NE_EXPR)
5349 && TREE_CODE (arg0) == BIT_AND_EXPR)
5351 if (TREE_CODE (TREE_OPERAND (arg0, 0)) == LSHIFT_EXPR
5352 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 0), 0)))
5354 fold (build (code, type,
5355 build (BIT_AND_EXPR, TREE_TYPE (arg0),
5357 TREE_TYPE (TREE_OPERAND (arg0, 0)),
5358 TREE_OPERAND (arg0, 1),
5359 TREE_OPERAND (TREE_OPERAND (arg0, 0), 1)),
5360 convert (TREE_TYPE (arg0),
5363 else if (TREE_CODE (TREE_OPERAND (arg0, 1)) == LSHIFT_EXPR
5364 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 1), 0)))
5366 fold (build (code, type,
5367 build (BIT_AND_EXPR, TREE_TYPE (arg0),
5369 TREE_TYPE (TREE_OPERAND (arg0, 1)),
5370 TREE_OPERAND (arg0, 0),
5371 TREE_OPERAND (TREE_OPERAND (arg0, 1), 1)),
5372 convert (TREE_TYPE (arg0),
5377 /* If this is an NE or EQ comparison of zero against the result of a
5378 signed MOD operation whose second operand is a power of 2, make
5379 the MOD operation unsigned since it is simpler and equivalent. */
5380 if ((code == NE_EXPR || code == EQ_EXPR)
5381 && integer_zerop (arg1)
5382 && ! TREE_UNSIGNED (TREE_TYPE (arg0))
5383 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
5384 || TREE_CODE (arg0) == CEIL_MOD_EXPR
5385 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
5386 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
5387 && integer_pow2p (TREE_OPERAND (arg0, 1)))
5389 tree newtype = unsigned_type (TREE_TYPE (arg0));
5390 tree newmod = build (TREE_CODE (arg0), newtype,
5391 convert (newtype, TREE_OPERAND (arg0, 0)),
5392 convert (newtype, TREE_OPERAND (arg0, 1)));
5394 return build (code, type, newmod, convert (newtype, arg1));
5397 /* If this is an NE comparison of zero with an AND of one, remove the
5398 comparison since the AND will give the correct value. */
5399 if (code == NE_EXPR && integer_zerop (arg1)
5400 && TREE_CODE (arg0) == BIT_AND_EXPR
5401 && integer_onep (TREE_OPERAND (arg0, 1)))
5402 return convert (type, arg0);
5404 /* If we have (A & C) == C where C is a power of 2, convert this into
5405 (A & C) != 0. Similarly for NE_EXPR. */
5406 if ((code == EQ_EXPR || code == NE_EXPR)
5407 && TREE_CODE (arg0) == BIT_AND_EXPR
5408 && integer_pow2p (TREE_OPERAND (arg0, 1))
5409 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
5410 return build (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
5411 arg0, integer_zero_node);
5413 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
5414 and similarly for >= into !=. */
5415 if ((code == LT_EXPR || code == GE_EXPR)
5416 && TREE_UNSIGNED (TREE_TYPE (arg0))
5417 && TREE_CODE (arg1) == LSHIFT_EXPR
5418 && integer_onep (TREE_OPERAND (arg1, 0)))
5419 return build (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
5420 build (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
5421 TREE_OPERAND (arg1, 1)),
5422 convert (TREE_TYPE (arg0), integer_zero_node));
5424 else if ((code == LT_EXPR || code == GE_EXPR)
5425 && TREE_UNSIGNED (TREE_TYPE (arg0))
5426 && (TREE_CODE (arg1) == NOP_EXPR
5427 || TREE_CODE (arg1) == CONVERT_EXPR)
5428 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
5429 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
5431 build (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
5432 convert (TREE_TYPE (arg0),
5433 build (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
5434 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1))),
5435 convert (TREE_TYPE (arg0), integer_zero_node));
5437 /* Simplify comparison of something with itself. (For IEEE
5438 floating-point, we can only do some of these simplifications.) */
5439 if (operand_equal_p (arg0, arg1, 0))
5446 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
5447 return constant_boolean_node (1, type);
5449 TREE_SET_CODE (t, code);
5453 /* For NE, we can only do this simplification if integer. */
5454 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
5456 /* ... fall through ... */
5459 return constant_boolean_node (0, type);
5465 /* An unsigned comparison against 0 can be simplified. */
5466 if (integer_zerop (arg1)
5467 && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
5468 || POINTER_TYPE_P (TREE_TYPE (arg1)))
5469 && TREE_UNSIGNED (TREE_TYPE (arg1)))
5471 switch (TREE_CODE (t))
5475 TREE_SET_CODE (t, NE_EXPR);
5479 TREE_SET_CODE (t, EQ_EXPR);
5482 return omit_one_operand (type,
5483 convert (type, integer_one_node),
5486 return omit_one_operand (type,
5487 convert (type, integer_zero_node),
5494 /* An unsigned <= 0x7fffffff can be simplified. */
5496 int width = TYPE_PRECISION (TREE_TYPE (arg1));
5497 if (TREE_CODE (arg1) == INTEGER_CST
5498 && ! TREE_CONSTANT_OVERFLOW (arg1)
5499 && width <= HOST_BITS_PER_WIDE_INT
5500 && TREE_INT_CST_LOW (arg1) == ((HOST_WIDE_INT) 1 << (width - 1)) - 1
5501 && TREE_INT_CST_HIGH (arg1) == 0
5502 && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
5503 || POINTER_TYPE_P (TREE_TYPE (arg1)))
5504 && TREE_UNSIGNED (TREE_TYPE (arg1)))
5506 switch (TREE_CODE (t))
5509 return fold (build (GE_EXPR, type,
5510 convert (signed_type (TREE_TYPE (arg0)),
5512 convert (signed_type (TREE_TYPE (arg1)),
5513 integer_zero_node)));
5515 return fold (build (LT_EXPR, type,
5516 convert (signed_type (TREE_TYPE (arg0)),
5518 convert (signed_type (TREE_TYPE (arg1)),
5519 integer_zero_node)));
5526 /* If we are comparing an expression that just has comparisons
5527 of two integer values, arithmetic expressions of those comparisons,
5528 and constants, we can simplify it. There are only three cases
5529 to check: the two values can either be equal, the first can be
5530 greater, or the second can be greater. Fold the expression for
5531 those three values. Since each value must be 0 or 1, we have
5532 eight possibilities, each of which corresponds to the constant 0
5533 or 1 or one of the six possible comparisons.
5535 This handles common cases like (a > b) == 0 but also handles
5536 expressions like ((x > y) - (y > x)) > 0, which supposedly
5537 occur in macroized code. */
5539 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
5541 tree cval1 = 0, cval2 = 0;
5544 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
5545 /* Don't handle degenerate cases here; they should already
5546 have been handled anyway. */
5547 && cval1 != 0 && cval2 != 0
5548 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
5549 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
5550 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
5551 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
5552 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
5553 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
5554 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
5556 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
5557 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
5559 /* We can't just pass T to eval_subst in case cval1 or cval2
5560 was the same as ARG1. */
5563 = fold (build (code, type,
5564 eval_subst (arg0, cval1, maxval, cval2, minval),
5567 = fold (build (code, type,
5568 eval_subst (arg0, cval1, maxval, cval2, maxval),
5571 = fold (build (code, type,
5572 eval_subst (arg0, cval1, minval, cval2, maxval),
5575 /* All three of these results should be 0 or 1. Confirm they
5576 are. Then use those values to select the proper code
5579 if ((integer_zerop (high_result)
5580 || integer_onep (high_result))
5581 && (integer_zerop (equal_result)
5582 || integer_onep (equal_result))
5583 && (integer_zerop (low_result)
5584 || integer_onep (low_result)))
5586 /* Make a 3-bit mask with the high-order bit being the
5587 value for `>', the next for '=', and the low for '<'. */
5588 switch ((integer_onep (high_result) * 4)
5589 + (integer_onep (equal_result) * 2)
5590 + integer_onep (low_result))
5594 return omit_one_operand (type, integer_zero_node, arg0);
5615 return omit_one_operand (type, integer_one_node, arg0);
5618 t = build (code, type, cval1, cval2);
5620 return save_expr (t);
5627 /* If this is a comparison of a field, we may be able to simplify it. */
5628 if ((TREE_CODE (arg0) == COMPONENT_REF
5629 || TREE_CODE (arg0) == BIT_FIELD_REF)
5630 && (code == EQ_EXPR || code == NE_EXPR)
5631 /* Handle the constant case even without -O
5632 to make sure the warnings are given. */
5633 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
5635 t1 = optimize_bit_field_compare (code, type, arg0, arg1);
5639 /* If this is a comparison of complex values and either or both
5640 sides are a COMPLEX_EXPR, it is best to split up the comparisons
5641 and join them with a TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR. This
5642 may prevent needless evaluations. */
5643 if ((code == EQ_EXPR || code == NE_EXPR)
5644 && TREE_CODE (TREE_TYPE (arg0)) == COMPLEX_TYPE
5645 && (TREE_CODE (arg0) == COMPLEX_EXPR
5646 || TREE_CODE (arg1) == COMPLEX_EXPR))
5648 tree subtype = TREE_TYPE (TREE_TYPE (arg0));
5649 tree real0 = fold (build1 (REALPART_EXPR, subtype, arg0));
5650 tree imag0 = fold (build1 (IMAGPART_EXPR, subtype, arg0));
5651 tree real1 = fold (build1 (REALPART_EXPR, subtype, arg1));
5652 tree imag1 = fold (build1 (IMAGPART_EXPR, subtype, arg1));
5654 return fold (build ((code == EQ_EXPR ? TRUTH_ANDIF_EXPR
5657 fold (build (code, type, real0, real1)),
5658 fold (build (code, type, imag0, imag1))));
5661 /* From here on, the only cases we handle are when the result is
5662 known to be a constant.
5664 To compute GT, swap the arguments and do LT.
5665 To compute GE, do LT and invert the result.
5666 To compute LE, swap the arguments, do LT and invert the result.
5667 To compute NE, do EQ and invert the result.
5669 Therefore, the code below must handle only EQ and LT. */
5671 if (code == LE_EXPR || code == GT_EXPR)
5673 tem = arg0, arg0 = arg1, arg1 = tem;
5674 code = swap_tree_comparison (code);
5677 /* Note that it is safe to invert for real values here because we
5678 will check below in the one case that it matters. */
5681 if (code == NE_EXPR || code == GE_EXPR)
5684 code = invert_tree_comparison (code);
5687 /* Compute a result for LT or EQ if args permit;
5688 otherwise return T. */
5689 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
5691 if (code == EQ_EXPR)
5692 t1 = build_int_2 ((TREE_INT_CST_LOW (arg0)
5693 == TREE_INT_CST_LOW (arg1))
5694 && (TREE_INT_CST_HIGH (arg0)
5695 == TREE_INT_CST_HIGH (arg1)),
5698 t1 = build_int_2 ((TREE_UNSIGNED (TREE_TYPE (arg0))
5699 ? INT_CST_LT_UNSIGNED (arg0, arg1)
5700 : INT_CST_LT (arg0, arg1)),
5704 #if 0 /* This is no longer useful, but breaks some real code. */
5705 /* Assume a nonexplicit constant cannot equal an explicit one,
5706 since such code would be undefined anyway.
5707 Exception: on sysvr4, using #pragma weak,
5708 a label can come out as 0. */
5709 else if (TREE_CODE (arg1) == INTEGER_CST
5710 && !integer_zerop (arg1)
5711 && TREE_CONSTANT (arg0)
5712 && TREE_CODE (arg0) == ADDR_EXPR
5714 t1 = build_int_2 (0, 0);
5716 /* Two real constants can be compared explicitly. */
5717 else if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
5719 /* If either operand is a NaN, the result is false with two
5720 exceptions: First, an NE_EXPR is true on NaNs, but that case
5721 is already handled correctly since we will be inverting the
5722 result for NE_EXPR. Second, if we had inverted a LE_EXPR
5723 or a GE_EXPR into a LT_EXPR, we must return true so that it
5724 will be inverted into false. */
5726 if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
5727 || REAL_VALUE_ISNAN (TREE_REAL_CST (arg1)))
5728 t1 = build_int_2 (invert && code == LT_EXPR, 0);
5730 else if (code == EQ_EXPR)
5731 t1 = build_int_2 (REAL_VALUES_EQUAL (TREE_REAL_CST (arg0),
5732 TREE_REAL_CST (arg1)),
5735 t1 = build_int_2 (REAL_VALUES_LESS (TREE_REAL_CST (arg0),
5736 TREE_REAL_CST (arg1)),
5740 if (t1 == NULL_TREE)
5744 TREE_INT_CST_LOW (t1) ^= 1;
5746 TREE_TYPE (t1) = type;
5747 if (TREE_CODE (type) == BOOLEAN_TYPE)
5748 return truthvalue_conversion (t1);
5752 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
5753 so all simple results must be passed through pedantic_non_lvalue. */
5754 if (TREE_CODE (arg0) == INTEGER_CST)
5755 return pedantic_non_lvalue
5756 (TREE_OPERAND (t, (integer_zerop (arg0) ? 2 : 1)));
5757 else if (operand_equal_p (arg1, TREE_OPERAND (expr, 2), 0))
5758 return pedantic_omit_one_operand (type, arg1, arg0);
5760 /* If the second operand is zero, invert the comparison and swap
5761 the second and third operands. Likewise if the second operand
5762 is constant and the third is not or if the third operand is
5763 equivalent to the first operand of the comparison. */
5765 if (integer_zerop (arg1)
5766 || (TREE_CONSTANT (arg1) && ! TREE_CONSTANT (TREE_OPERAND (t, 2)))
5767 || (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
5768 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
5769 TREE_OPERAND (t, 2),
5770 TREE_OPERAND (arg0, 1))))
5772 /* See if this can be inverted. If it can't, possibly because
5773 it was a floating-point inequality comparison, don't do
5775 tem = invert_truthvalue (arg0);
5777 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
5779 t = build (code, type, tem,
5780 TREE_OPERAND (t, 2), TREE_OPERAND (t, 1));
5782 arg1 = TREE_OPERAND (t, 2);
5787 /* If we have A op B ? A : C, we may be able to convert this to a
5788 simpler expression, depending on the operation and the values
5789 of B and C. IEEE floating point prevents this though,
5790 because A or B might be -0.0 or a NaN. */
5792 if (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
5793 && (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
5794 || ! FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 0)))
5796 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
5797 arg1, TREE_OPERAND (arg0, 1)))
5799 tree arg2 = TREE_OPERAND (t, 2);
5800 enum tree_code comp_code = TREE_CODE (arg0);
5804 /* If we have A op 0 ? A : -A, this is A, -A, abs (A), or abs (-A),
5805 depending on the comparison operation. */
5806 if ((FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 1)))
5807 ? real_zerop (TREE_OPERAND (arg0, 1))
5808 : integer_zerop (TREE_OPERAND (arg0, 1)))
5809 && TREE_CODE (arg2) == NEGATE_EXPR
5810 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
5814 return pedantic_non_lvalue
5815 (fold (build1 (NEGATE_EXPR, type, arg1)));
5817 return pedantic_non_lvalue (convert (type, arg1));
5820 return pedantic_non_lvalue
5821 (convert (type, fold (build1 (ABS_EXPR,
5822 TREE_TYPE (arg1), arg1))));
5825 return pedantic_non_lvalue
5826 (fold (build1 (NEGATE_EXPR, type,
5828 fold (build1 (ABS_EXPR,
5835 /* If this is A != 0 ? A : 0, this is simply A. For ==, it is
5838 if (integer_zerop (TREE_OPERAND (arg0, 1)) && integer_zerop (arg2))
5840 if (comp_code == NE_EXPR)
5841 return pedantic_non_lvalue (convert (type, arg1));
5842 else if (comp_code == EQ_EXPR)
5843 return pedantic_non_lvalue (convert (type, integer_zero_node));
5846 /* If this is A op B ? A : B, this is either A, B, min (A, B),
5847 or max (A, B), depending on the operation. */
5849 if (operand_equal_for_comparison_p (TREE_OPERAND (arg0, 1),
5850 arg2, TREE_OPERAND (arg0, 0)))
5852 tree comp_op0 = TREE_OPERAND (arg0, 0);
5853 tree comp_op1 = TREE_OPERAND (arg0, 1);
5854 tree comp_type = TREE_TYPE (comp_op0);
5859 return pedantic_non_lvalue (convert (type, arg2));
5861 return pedantic_non_lvalue (convert (type, arg1));
5864 /* In C++ a ?: expression can be an lvalue, so put the
5865 operand which will be used if they are equal first
5866 so that we can convert this back to the
5867 corresponding COND_EXPR. */
5868 return pedantic_non_lvalue
5869 (convert (type, (fold (build (MIN_EXPR, comp_type,
5870 (comp_code == LE_EXPR
5871 ? comp_op0 : comp_op1),
5872 (comp_code == LE_EXPR
5873 ? comp_op1 : comp_op0))))));
5877 return pedantic_non_lvalue
5878 (convert (type, fold (build (MAX_EXPR, comp_type,
5879 (comp_code == GE_EXPR
5880 ? comp_op0 : comp_op1),
5881 (comp_code == GE_EXPR
5882 ? comp_op1 : comp_op0)))));
5889 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
5890 we might still be able to simplify this. For example,
5891 if C1 is one less or one more than C2, this might have started
5892 out as a MIN or MAX and been transformed by this function.
5893 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
5895 if (INTEGRAL_TYPE_P (type)
5896 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
5897 && TREE_CODE (arg2) == INTEGER_CST)
5901 /* We can replace A with C1 in this case. */
5902 arg1 = convert (type, TREE_OPERAND (arg0, 1));
5903 t = build (code, type, TREE_OPERAND (t, 0), arg1,
5904 TREE_OPERAND (t, 2));
5908 /* If C1 is C2 + 1, this is min(A, C2). */
5909 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type), 1)
5910 && operand_equal_p (TREE_OPERAND (arg0, 1),
5911 const_binop (PLUS_EXPR, arg2,
5912 integer_one_node, 0), 1))
5913 return pedantic_non_lvalue
5914 (fold (build (MIN_EXPR, type, arg1, arg2)));
5918 /* If C1 is C2 - 1, this is min(A, C2). */
5919 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type), 1)
5920 && operand_equal_p (TREE_OPERAND (arg0, 1),
5921 const_binop (MINUS_EXPR, arg2,
5922 integer_one_node, 0), 1))
5923 return pedantic_non_lvalue
5924 (fold (build (MIN_EXPR, type, arg1, arg2)));
5928 /* If C1 is C2 - 1, this is max(A, C2). */
5929 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type), 1)
5930 && operand_equal_p (TREE_OPERAND (arg0, 1),
5931 const_binop (MINUS_EXPR, arg2,
5932 integer_one_node, 0), 1))
5933 return pedantic_non_lvalue
5934 (fold (build (MAX_EXPR, type, arg1, arg2)));
5938 /* If C1 is C2 + 1, this is max(A, C2). */
5939 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type), 1)
5940 && operand_equal_p (TREE_OPERAND (arg0, 1),
5941 const_binop (PLUS_EXPR, arg2,
5942 integer_one_node, 0), 1))
5943 return pedantic_non_lvalue
5944 (fold (build (MAX_EXPR, type, arg1, arg2)));
5953 /* If the second operand is simpler than the third, swap them
5954 since that produces better jump optimization results. */
5955 if ((TREE_CONSTANT (arg1) || TREE_CODE_CLASS (TREE_CODE (arg1)) == 'd'
5956 || TREE_CODE (arg1) == SAVE_EXPR)
5957 && ! (TREE_CONSTANT (TREE_OPERAND (t, 2))
5958 || TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (t, 2))) == 'd'
5959 || TREE_CODE (TREE_OPERAND (t, 2)) == SAVE_EXPR))
5961 /* See if this can be inverted. If it can't, possibly because
5962 it was a floating-point inequality comparison, don't do
5964 tem = invert_truthvalue (arg0);
5966 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
5968 t = build (code, type, tem,
5969 TREE_OPERAND (t, 2), TREE_OPERAND (t, 1));
5971 arg1 = TREE_OPERAND (t, 2);
5976 /* Convert A ? 1 : 0 to simply A. */
5977 if (integer_onep (TREE_OPERAND (t, 1))
5978 && integer_zerop (TREE_OPERAND (t, 2))
5979 /* If we try to convert TREE_OPERAND (t, 0) to our type, the
5980 call to fold will try to move the conversion inside
5981 a COND, which will recurse. In that case, the COND_EXPR
5982 is probably the best choice, so leave it alone. */
5983 && type == TREE_TYPE (arg0))
5984 return pedantic_non_lvalue (arg0);
5986 /* Look for expressions of the form A & 2 ? 2 : 0. The result of this
5987 operation is simply A & 2. */
5989 if (integer_zerop (TREE_OPERAND (t, 2))
5990 && TREE_CODE (arg0) == NE_EXPR
5991 && integer_zerop (TREE_OPERAND (arg0, 1))
5992 && integer_pow2p (arg1)
5993 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
5994 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
5996 return pedantic_non_lvalue (convert (type, TREE_OPERAND (arg0, 0)));
6001 /* When pedantic, a compound expression can be neither an lvalue
6002 nor an integer constant expression. */
6003 if (TREE_SIDE_EFFECTS (arg0) || pedantic)
6005 /* Don't let (0, 0) be null pointer constant. */
6006 if (integer_zerop (arg1))
6007 return non_lvalue (arg1);
6012 return build_complex (type, arg0, arg1);
6016 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
6018 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
6019 return omit_one_operand (type, TREE_OPERAND (arg0, 0),
6020 TREE_OPERAND (arg0, 1));
6021 else if (TREE_CODE (arg0) == COMPLEX_CST)
6022 return TREE_REALPART (arg0);
6023 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
6024 return fold (build (TREE_CODE (arg0), type,
6025 fold (build1 (REALPART_EXPR, type,
6026 TREE_OPERAND (arg0, 0))),
6027 fold (build1 (REALPART_EXPR,
6028 type, TREE_OPERAND (arg0, 1)))));
6032 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
6033 return convert (type, integer_zero_node);
6034 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
6035 return omit_one_operand (type, TREE_OPERAND (arg0, 1),
6036 TREE_OPERAND (arg0, 0));
6037 else if (TREE_CODE (arg0) == COMPLEX_CST)
6038 return TREE_IMAGPART (arg0);
6039 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
6040 return fold (build (TREE_CODE (arg0), type,
6041 fold (build1 (IMAGPART_EXPR, type,
6042 TREE_OPERAND (arg0, 0))),
6043 fold (build1 (IMAGPART_EXPR, type,
6044 TREE_OPERAND (arg0, 1)))));
6047 /* Pull arithmetic ops out of the CLEANUP_POINT_EXPR where
6049 case CLEANUP_POINT_EXPR:
6050 if (! has_cleanups (arg0))
6051 return TREE_OPERAND (t, 0);
6054 enum tree_code code0 = TREE_CODE (arg0);
6055 int kind0 = TREE_CODE_CLASS (code0);
6056 tree arg00 = TREE_OPERAND (arg0, 0);
6059 if (kind0 == '1' || code0 == TRUTH_NOT_EXPR)
6060 return fold (build1 (code0, type,
6061 fold (build1 (CLEANUP_POINT_EXPR,
6062 TREE_TYPE (arg00), arg00))));
6064 if (kind0 == '<' || kind0 == '2'
6065 || code0 == TRUTH_ANDIF_EXPR || code0 == TRUTH_ORIF_EXPR
6066 || code0 == TRUTH_AND_EXPR || code0 == TRUTH_OR_EXPR
6067 || code0 == TRUTH_XOR_EXPR)
6069 arg01 = TREE_OPERAND (arg0, 1);
6071 if (TREE_CONSTANT (arg00)
6072 || ((code0 == TRUTH_ANDIF_EXPR || code0 == TRUTH_ORIF_EXPR)
6073 && ! has_cleanups (arg00)))
6074 return fold (build (code0, type, arg00,
6075 fold (build1 (CLEANUP_POINT_EXPR,
6076 TREE_TYPE (arg01), arg01))));
6078 if (TREE_CONSTANT (arg01))
6079 return fold (build (code0, type,
6080 fold (build1 (CLEANUP_POINT_EXPR,
6081 TREE_TYPE (arg00), arg00)),
6090 } /* switch (code) */
6093 /* Determine if first argument is a multiple of second argument.
6094    Return 0 if it is not, or if that cannot easily be determined.
6096 An example of the sort of thing we care about (at this point --
6097 this routine could surely be made more general, and expanded
6098 to do what the *_DIV_EXPR's fold() cases do now) is discovering
6101 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
6107 when we know that the two `SAVE_EXPR (J * 8)' nodes are the
6108 same node (which means they will have the same value at run
6109 time, even though we don't know when they'll be assigned).
6111 This code also handles discovering that
6113 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
6119 (of course) so we don't have to worry about dealing with a
6122 Note that we _look_ inside a SAVE_EXPR only to determine
6123 how it was calculated; it is not safe for fold() to do much
6124 of anything else with the internals of a SAVE_EXPR, since
6125 fold() cannot know when it will be evaluated at run time.
6126 For example, the latter example above _cannot_ be implemented
6131 or any variant thereof, since the value of J at evaluation time
6132 of the original SAVE_EXPR is not necessarily the same at the time
6133 the new expression is evaluated. The only optimization of this
6134 sort that would be valid is changing
6136 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
6142 SAVE_EXPR (I) * SAVE_EXPR (J)
6144 (where the same SAVE_EXPR (J) is used in the original and the
6145 transformed version). */
6148 multiple_of_p (type, top, bottom)
6153 if (operand_equal_p (top, bottom, 0))
6156 if (TREE_CODE (type) != INTEGER_TYPE)
6159 switch (TREE_CODE (top))
6162 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
6163 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
6167 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
6168 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
6171 /* Punt if conversion from non-integral or wider integral type. */
6172 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
6173 || (TYPE_PRECISION (type)
6174 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
6178 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
6181 if ((TREE_CODE (bottom) != INTEGER_CST)
6182 || (tree_int_cst_sgn (top) < 0)
6183 || (tree_int_cst_sgn (bottom) < 0))
6185 return integer_zerop (const_binop (TRUNC_MOD_EXPR,