1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
22 /*@@ This file should be rewritten to use an arbitrary precision
23 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
24 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
25 @@ The routines that translate from the ap rep should
26 @@ warn if precision et al. is lost.
27 @@ This would also make life easier when this technology is used
28 @@ for cross-compilers. */
30 /* The entry points in this file are fold, size_int_wide, size_binop
33 fold takes a tree as argument and returns a simplified tree.
35 size_binop takes a tree code for an arithmetic operation
36 and two operands that are trees, and produces a tree for the
37 result, assuming the type comes from `sizetype'.
39 size_int takes an integer value, and creates a tree constant
40 with type from `sizetype'.
42 force_fit_type takes a constant and prior overflow indicator, and
43 forces the value to fit the type. It returns an overflow indicator. */
47 #include "coretypes.h"
58 #include "langhooks.h"
61 static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
62 static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *);
63 static bool negate_expr_p (tree);
64 static tree negate_expr (tree);
65 static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
66 static tree associate_trees (tree, tree, enum tree_code, tree);
67 static tree int_const_binop (enum tree_code, tree, tree, int);
68 static tree const_binop (enum tree_code, tree, tree, int);
69 static hashval_t size_htab_hash (const void *);
70 static int size_htab_eq (const void *, const void *);
71 static tree fold_convert (tree, tree);
72 static enum tree_code invert_tree_comparison (enum tree_code);
73 static enum tree_code swap_tree_comparison (enum tree_code);
74 static int comparison_to_compcode (enum tree_code);
75 static enum tree_code compcode_to_comparison (int);
76 static int truth_value_p (enum tree_code);
77 static int operand_equal_for_comparison_p (tree, tree, tree);
78 static int twoval_comparison_p (tree, tree *, tree *, int *);
79 static tree eval_subst (tree, tree, tree, tree, tree);
80 static tree pedantic_omit_one_operand (tree, tree, tree);
81 static tree distribute_bit_expr (enum tree_code, tree, tree, tree);
82 static tree make_bit_field_ref (tree, tree, int, int, int);
83 static tree optimize_bit_field_compare (enum tree_code, tree, tree, tree);
84 static tree decode_field_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
85 enum machine_mode *, int *, int *,
87 static int all_ones_mask_p (tree, int);
88 static tree sign_bit_p (tree, tree);
89 static int simple_operand_p (tree);
90 static tree range_binop (enum tree_code, tree, tree, int, tree, int);
91 static tree make_range (tree, int *, tree *, tree *);
92 static tree build_range_check (tree, tree, int, tree, tree);
93 static int merge_ranges (int *, tree *, tree *, int, tree, tree, int, tree,
95 static tree fold_range_test (tree);
96 static tree unextend (tree, int, int, tree);
97 static tree fold_truthop (enum tree_code, tree, tree, tree);
98 static tree optimize_minmax_comparison (tree);
99 static tree extract_muldiv (tree, tree, enum tree_code, tree);
100 static tree extract_muldiv_1 (tree, tree, enum tree_code, tree);
101 static tree strip_compound_expr (tree, tree);
102 static int multiple_of_p (tree, tree, tree);
103 static tree constant_boolean_node (int, tree);
104 static int count_cond (tree, int);
105 static tree fold_binary_op_with_conditional_arg (enum tree_code, tree, tree,
107 static bool fold_real_zero_addition_p (tree, tree, int);
108 static tree fold_mathfn_compare (enum built_in_function, enum tree_code,
110 static tree fold_inf_compare (enum tree_code, tree, tree, tree);
112 /* The following constants represent a bit based encoding of GCC's
113 comparison operators. This encoding simplifies transformations
114 on relational comparison operators, such as AND and OR. */
115 #define COMPCODE_FALSE 0
116 #define COMPCODE_LT 1
117 #define COMPCODE_EQ 2
118 #define COMPCODE_LE 3
119 #define COMPCODE_GT 4
120 #define COMPCODE_NE 5
121 #define COMPCODE_GE 6
122 #define COMPCODE_TRUE 7
124 /* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
125 overflow. Suppose A, B and SUM have the same respective signs as A1, B1,
126 and SUM1. Then this yields nonzero if overflow occurred during the
129 Overflow occurs if A and B have the same sign, but A and SUM differ in
130 sign. Use `^' to test whether signs differ, and `< 0' to isolate the
132 #define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
134 /* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
135 We do that by representing the two-word integer in 4 words, with only
136 HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
137 number. The value of the word is LOWPART + HIGHPART * BASE. */
140 ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
141 #define HIGHPART(x) \
142 ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
143 #define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)
145 /* Unpack a two-word integer into 4 words.
146 LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
147 WORDS points to the array of HOST_WIDE_INTs. */
150 encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
152 words[0] = LOWPART (low);
153 words[1] = HIGHPART (low);
154 words[2] = LOWPART (hi);
155 words[3] = HIGHPART (hi);
158 /* Pack an array of 4 words into a two-word integer.
159 WORDS points to the array of words.
160 The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces. */
163 decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
166 *low = words[0] + words[1] * BASE;
167 *hi = words[2] + words[3] * BASE;
170 /* Make the integer constant T valid for its type by setting to 0 or 1 all
171 the bits in the constant that don't belong in the type.
173 Return 1 if a signed overflow occurs, 0 otherwise. If OVERFLOW is
174 nonzero, a signed overflow has already occurred in calculating T, so
/* Narrow the constant T in place so its value fits TREE_TYPE (t):
   clear (or, for negative signed values, set) every bit above the
   type's precision.  OVERFLOW is a prior overflow indicator; the
   result folds it together with any truncation detected here.
   NOTE(review): this excerpt is missing intermediate lines (braces,
   early returns); do not edit without consulting the full upstream
   source.  */
178 force_fit_type (tree t, int overflow)
180 unsigned HOST_WIDE_INT low;
184 if (TREE_CODE (t) == REAL_CST)
186 /* ??? Used to check for overflow here via CHECK_FLOAT_TYPE.
187 Consider doing it via real_convert now. */
191 else if (TREE_CODE (t) != INTEGER_CST)
/* Save the original words so the overflow test below can compare
   against the possibly-truncated stored value.  */
194 low = TREE_INT_CST_LOW (t);
195 high = TREE_INT_CST_HIGH (t);
197 if (POINTER_TYPE_P (TREE_TYPE (t))
198 || TREE_CODE (TREE_TYPE (t)) == OFFSET_TYPE)
201 prec = TYPE_PRECISION (TREE_TYPE (t));
203 /* First clear all bits that are beyond the type's precision. */
205 if (prec == 2 * HOST_BITS_PER_WIDE_INT)
207 else if (prec > HOST_BITS_PER_WIDE_INT)
208 TREE_INT_CST_HIGH (t)
209 &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT))
212 TREE_INT_CST_HIGH (t) = 0;
213 if (prec < HOST_BITS_PER_WIDE_INT)
214 TREE_INT_CST_LOW (t) &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
217 /* Unsigned types do not suffer sign extension or overflow unless they
219 if (TREE_UNSIGNED (TREE_TYPE (t))
220 && ! (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
221 && TYPE_IS_SIZETYPE (TREE_TYPE (t))))
224 /* If the value's sign bit is set, extend the sign. */
225 if (prec != 2 * HOST_BITS_PER_WIDE_INT
226 && (prec > HOST_BITS_PER_WIDE_INT
227 ? 0 != (TREE_INT_CST_HIGH (t)
229 << (prec - HOST_BITS_PER_WIDE_INT - 1)))
230 : 0 != (TREE_INT_CST_LOW (t)
231 & ((unsigned HOST_WIDE_INT) 1 << (prec - 1)))))
233 /* Value is negative:
234 set to 1 all the bits that are outside this type's precision. */
235 if (prec > HOST_BITS_PER_WIDE_INT)
236 TREE_INT_CST_HIGH (t)
237 |= ((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
240 TREE_INT_CST_HIGH (t) = -1;
241 if (prec < HOST_BITS_PER_WIDE_INT)
242 TREE_INT_CST_LOW (t) |= ((unsigned HOST_WIDE_INT) (-1) << prec);
/* Signed overflow occurred iff the stored value differs from the
   saved original words (or OVERFLOW was already set).  */
246 /* Return nonzero if signed overflow occurred. */
248 ((overflow | (low ^ TREE_INT_CST_LOW (t)) | (high ^ TREE_INT_CST_HIGH (t)))
252 /* Add two doubleword integers with doubleword result.
253 Each argument is given as two `HOST_WIDE_INT' pieces.
254 One argument is L1 and H1; the other, L2 and H2.
255 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
258 add_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
259 unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
260 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
262 unsigned HOST_WIDE_INT l;
266 h = h1 + h2 + (l < l1);
270 return OVERFLOW_SUM_SIGN (h1, h2, h);
273 /* Negate a doubleword integer with doubleword result.
274 Return nonzero if the operation overflows, assuming it's signed.
275 The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
276 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
279 neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
280 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
286 return (*hv & h1) < 0;
296 /* Multiply two doubleword integers with doubleword result.
297 Return nonzero if the operation overflows, assuming it's signed.
298 Each argument is given as two `HOST_WIDE_INT' pieces.
299 One argument is L1 and H1; the other, L2 and H2.
300 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
/* Schoolbook multiply of two double words as 4-digit numbers in base
   2^(HOST_BITS_PER_WIDE_INT/2), producing an 8-digit product whose
   high half is then used to detect signed overflow.
   NOTE(review): several lines (braces, loop bodies' k index setup,
   the `if (h1 < 0)` / `if (h2 < 0)` guards before the two correction
   steps) are missing from this excerpt.  */
303 mul_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
304 unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
305 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
307 HOST_WIDE_INT arg1[4];
308 HOST_WIDE_INT arg2[4];
309 HOST_WIDE_INT prod[4 * 2];
310 unsigned HOST_WIDE_INT carry;
312 unsigned HOST_WIDE_INT toplow, neglow;
313 HOST_WIDE_INT tophigh, neghigh;
315 encode (arg1, l1, h1);
316 encode (arg2, l2, h2);
318 memset (prod, 0, sizeof prod);
320 for (i = 0; i < 4; i++)
323 for (j = 0; j < 4; j++)
326 /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000. */
327 carry += arg1[i] * arg2[j];
328 /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF. */
330 prod[k] = LOWPART (carry);
331 carry = HIGHPART (carry);
336 decode (prod, lv, hv); /* This ignores prod[4] through prod[4*2-1] */
338 /* Check for overflow by calculating the top half of the answer in full;
339 it should agree with the low half's sign bit. */
340 decode (prod + 4, &toplow, &tophigh);
/* Correct the unsigned top half for negative operands: each negative
   input subtracts the other operand from the top (two's-complement
   adjustment).  */
343 neg_double (l2, h2, &neglow, &neghigh);
344 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
348 neg_double (l1, h1, &neglow, &neghigh);
349 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
/* Overflow unless the top half is all copies of the result's sign.  */
351 return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
354 /* Shift the doubleword integer in L1, H1 left by COUNT places
355 keeping only PREC bits of result.
356 Shift right if COUNT is negative.
357 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
358 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
/* Left-shift the double word L1/H1 by COUNT bits (delegating to
   rshift_double for negative COUNT), keeping PREC bits of result in
   *LV/*HV; ARITH selects arithmetic vs. logical handling.
   NOTE(review): excerpt is missing interior lines (the negative-COUNT
   guard, the zero-result assignments, braces).  */
361 lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
362 HOST_WIDE_INT count, unsigned int prec,
363 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
365 unsigned HOST_WIDE_INT signmask;
369 rshift_double (l1, h1, -count, prec, lv, hv, arith);
373 #ifdef SHIFT_COUNT_TRUNCATED
374 if (SHIFT_COUNT_TRUNCATED)
378 if (count >= 2 * HOST_BITS_PER_WIDE_INT)
380 /* Shifting by the host word size is undefined according to the
381 ANSI standard, so we must handle this as a special case. */
385 else if (count >= HOST_BITS_PER_WIDE_INT)
387 *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
392 *hv = (((unsigned HOST_WIDE_INT) h1 << count)
/* The two-step right shift avoids shifting by a full word width,
   which is undefined behavior in C.  */
393 | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
397 /* Sign extend all bits that are beyond the precision. */
/* signmask is all-ones if the (PREC-1)th bit of the result is set,
   else all-zeros.  */
399 signmask = -((prec > HOST_BITS_PER_WIDE_INT
400 ? ((unsigned HOST_WIDE_INT) *hv
401 >> (prec - HOST_BITS_PER_WIDE_INT - 1))
402 : (*lv >> (prec - 1))) & 1);
404 if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
406 else if (prec >= HOST_BITS_PER_WIDE_INT)
408 *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
409 *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
414 *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
415 *lv |= signmask << prec;
419 /* Shift the doubleword integer in L1, H1 right by COUNT places
420 keeping only PREC bits of result. COUNT must be positive.
421 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
422 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
/* Right-shift the double word L1/H1 by COUNT (must be positive) bits,
   keeping PREC bits of result in *LV/*HV; ARITH nonzero requests an
   arithmetic (sign-propagating) shift.
   NOTE(review): excerpt is missing interior lines (the ARITH
   conditional that computes signmask, several assignments, braces).  */
425 rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
426 HOST_WIDE_INT count, unsigned int prec,
427 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
430 unsigned HOST_WIDE_INT signmask;
/* All-ones when arithmetic shifting a negative H1; otherwise zero.  */
433 ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
436 #ifdef SHIFT_COUNT_TRUNCATED
437 if (SHIFT_COUNT_TRUNCATED)
441 if (count >= 2 * HOST_BITS_PER_WIDE_INT)
443 /* Shifting by the host word size is undefined according to the
444 ANSI standard, so we must handle this as a special case. */
448 else if (count >= HOST_BITS_PER_WIDE_INT)
451 *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
455 *hv = (unsigned HOST_WIDE_INT) h1 >> count;
/* Two-step left shift avoids an undefined full-width shift.  */
457 | ((unsigned HOST_WIDE_INT) h1 << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
459 /* Zero / sign extend all bits that are beyond the precision. */
462 if (count >= (HOST_WIDE_INT)prec)
467 else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
469 else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
471 *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
472 *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
477 *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
478 *lv |= signmask << (prec - count);
482 /* Rotate the doubleword integer in L1, H1 left by COUNT places
483 keeping only PREC bits of result.
484 Rotate right if COUNT is negative.
485 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
488 lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
489 HOST_WIDE_INT count, unsigned int prec,
490 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
492 unsigned HOST_WIDE_INT s1l, s2l;
493 HOST_WIDE_INT s1h, s2h;
499 lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
500 rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
505 /* Rotate the doubleword integer in L1, H1 right by COUNT places
506 keeping only PREC bits of result. COUNT must be positive.
507 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
510 rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
511 HOST_WIDE_INT count, unsigned int prec,
512 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
514 unsigned HOST_WIDE_INT s1l, s2l;
515 HOST_WIDE_INT s1h, s2h;
521 rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
522 lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
527 /* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
528 for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
529 CODE is a tree code for a kind of division, one of
530 TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
532 It controls how the quotient is rounded to an integer.
533 Return nonzero if the operation overflows.
534 UNS nonzero says do unsigned division. */
/* Divide the double word LNUM_ORIG/HNUM_ORIG by LDEN_ORIG/HDEN_ORIG,
   rounding the quotient per CODE (trunc/floor/ceil/round variants),
   storing quotient in *LQUO/*HQUO and remainder in *LREM/*HREM.
   UNS nonzero requests unsigned division; the return value is the
   overflow flag.  Core algorithm is Knuth's Algorithm D on 4-digit
   numbers in base 2^(HOST_BITS_PER_WIDE_INT/2).
   NOTE(review): this excerpt is missing many interior lines (braces,
   sign bookkeeping via quo_neg, several case labels); treat it as
   read-only reference, not compilable code.  */
537 div_and_round_double (enum tree_code code, int uns,
538 unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
539 HOST_WIDE_INT hnum_orig,
540 unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
541 HOST_WIDE_INT hden_orig,
542 unsigned HOST_WIDE_INT *lquo,
543 HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
547 HOST_WIDE_INT num[4 + 1]; /* extra element for scaling. */
548 HOST_WIDE_INT den[4], quo[4];
550 unsigned HOST_WIDE_INT work;
551 unsigned HOST_WIDE_INT carry = 0;
552 unsigned HOST_WIDE_INT lnum = lnum_orig;
553 HOST_WIDE_INT hnum = hnum_orig;
554 unsigned HOST_WIDE_INT lden = lden_orig;
555 HOST_WIDE_INT hden = hden_orig;
/* Division by zero: flag overflow and substitute 1 to avoid UB.  */
558 if (hden == 0 && lden == 0)
559 overflow = 1, lden = 1;
561 /* Calculate quotient sign and convert operands to unsigned. */
567 /* (minimum integer) / (-1) is the only overflow case. */
568 if (neg_double (lnum, hnum, &lnum, &hnum)
569 && ((HOST_WIDE_INT) lden & hden) == -1)
575 neg_double (lden, hden, &lden, &hden);
579 if (hnum == 0 && hden == 0)
580 { /* single precision */
582 /* This unsigned division rounds toward zero. */
588 { /* trivial case: dividend < divisor */
589 /* hden != 0 already checked. */
596 memset (quo, 0, sizeof quo);
598 memset (num, 0, sizeof num); /* to zero 9th element */
599 memset (den, 0, sizeof den);
601 encode (num, lnum, hnum);
602 encode (den, lden, hden);
604 /* Special code for when the divisor < BASE. */
605 if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
607 /* hnum != 0 already checked. */
608 for (i = 4 - 1; i >= 0; i--)
610 work = num[i] + carry * BASE;
611 quo[i] = work / lden;
617 /* Full double precision division,
618 with thanks to Don Knuth's "Seminumerical Algorithms". */
619 int num_hi_sig, den_hi_sig;
620 unsigned HOST_WIDE_INT quo_est, scale;
622 /* Find the highest nonzero divisor digit. */
623 for (i = 4 - 1;; i--)
630 /* Insure that the first digit of the divisor is at least BASE/2.
631 This is required by the quotient digit estimation algorithm. */
633 scale = BASE / (den[den_hi_sig] + 1);
635 { /* scale divisor and dividend */
637 for (i = 0; i <= 4 - 1; i++)
639 work = (num[i] * scale) + carry;
640 num[i] = LOWPART (work);
641 carry = HIGHPART (work);
646 for (i = 0; i <= 4 - 1; i++)
648 work = (den[i] * scale) + carry;
649 den[i] = LOWPART (work);
650 carry = HIGHPART (work);
651 if (den[i] != 0) den_hi_sig = i;
/* Main quotient loop: one estimated digit per iteration.  */
658 for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
660 /* Guess the next quotient digit, quo_est, by dividing the first
661 two remaining dividend digits by the high order quotient digit.
662 quo_est is never low and is at most 2 high. */
663 unsigned HOST_WIDE_INT tmp;
665 num_hi_sig = i + den_hi_sig + 1;
666 work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
667 if (num[num_hi_sig] != den[den_hi_sig])
668 quo_est = work / den[den_hi_sig];
672 /* Refine quo_est so it's usually correct, and at most one high. */
673 tmp = work - quo_est * den[den_hi_sig];
675 && (den[den_hi_sig - 1] * quo_est
676 > (tmp * BASE + num[num_hi_sig - 2])))
679 /* Try QUO_EST as the quotient digit, by multiplying the
680 divisor by QUO_EST and subtracting from the remaining dividend.
681 Keep in mind that QUO_EST is the I - 1st digit. */
684 for (j = 0; j <= den_hi_sig; j++)
686 work = quo_est * den[j] + carry;
687 carry = HIGHPART (work);
688 work = num[i + j] - LOWPART (work);
689 num[i + j] = LOWPART (work);
690 carry += HIGHPART (work) != 0;
693 /* If quo_est was high by one, then num[i] went negative and
694 we need to correct things. */
695 if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
698 carry = 0; /* add divisor back in */
699 for (j = 0; j <= den_hi_sig; j++)
701 work = num[i + j] + den[j] + carry;
702 carry = HIGHPART (work);
703 num[i + j] = LOWPART (work);
706 num [num_hi_sig] += carry;
709 /* Store the quotient digit. */
714 decode (quo, lquo, hquo);
717 /* If result is negative, make it so. */
719 neg_double (*lquo, *hquo, lquo, hquo);
721 /* compute trial remainder: rem = num - (quo * den) */
722 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
723 neg_double (*lrem, *hrem, lrem, hrem);
724 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
/* Rounding adjustments per CODE, applied to quotient; the true
   remainder is recomputed at the end from the adjusted quotient.  */
729 case TRUNC_MOD_EXPR: /* round toward zero */
730 case EXACT_DIV_EXPR: /* for this one, it shouldn't matter */
734 case FLOOR_MOD_EXPR: /* round toward negative infinity */
735 if (quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio < 0 && rem != 0 */
738 add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
746 case CEIL_MOD_EXPR: /* round toward positive infinity */
747 if (!quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio > 0 && rem != 0 */
749 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
757 case ROUND_MOD_EXPR: /* round to closest integer */
759 unsigned HOST_WIDE_INT labs_rem = *lrem;
760 HOST_WIDE_INT habs_rem = *hrem;
761 unsigned HOST_WIDE_INT labs_den = lden, ltwice;
762 HOST_WIDE_INT habs_den = hden, htwice;
764 /* Get absolute values. */
766 neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
768 neg_double (lden, hden, &labs_den, &habs_den);
770 /* If (2 * abs (lrem) >= abs (lden)) */
771 mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
/* NOTE(review): "<wice" below is an HTML-unescape artifact in this
   excerpt; upstream reads "&ltwice," — restore from the original
   source before compiling.  */
772 labs_rem, habs_rem, <wice, &htwice);
774 if (((unsigned HOST_WIDE_INT) habs_den
775 < (unsigned HOST_WIDE_INT) htwice)
776 || (((unsigned HOST_WIDE_INT) habs_den
777 == (unsigned HOST_WIDE_INT) htwice)
778 && (labs_den < ltwice)))
782 add_double (*lquo, *hquo,
783 (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
786 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
798 /* compute true remainder: rem = num - (quo * den) */
799 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
800 neg_double (*lrem, *hrem, lrem, hrem);
801 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
805 /* Determine whether an expression T can be cheaply negated using
806 the function negate_expr. */
/* Return true if the expression T can be negated cheaply and safely
   by negate_expr (constants that don't overflow when negated,
   NEGATE/MINUS forms, negatable MULT/DIV operands).
   NOTE(review): the case labels of the switch are missing from this
   excerpt; each fragment below belongs to a distinct TREE_CODE case.  */
809 negate_expr_p (tree t)
811 unsigned HOST_WIDE_INT val;
818 type = TREE_TYPE (t);
821 switch (TREE_CODE (t))
824 if (TREE_UNSIGNED (type))
827 /* Check that -CST will not overflow type. */
828 prec = TYPE_PRECISION (type);
829 if (prec > HOST_BITS_PER_WIDE_INT)
831 if (TREE_INT_CST_LOW (t) != 0)
833 prec -= HOST_BITS_PER_WIDE_INT;
834 val = TREE_INT_CST_HIGH (t);
837 val = TREE_INT_CST_LOW (t);
838 if (prec < HOST_BITS_PER_WIDE_INT)
839 val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
/* Negation overflows only for the most negative value of the type.  */
840 return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
847 /* We can't turn -(A-B) into B-A when we honor signed zeros. */
848 return ! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations;
851 if (TREE_UNSIGNED (TREE_TYPE (t)))
857 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
858 return negate_expr_p (TREE_OPERAND (t, 1))
859 || negate_expr_p (TREE_OPERAND (t, 0));
868 /* Given T, an expression, return the negation of T. Allow for T to be
869 null, in which case return null. */
/* Body of negate_expr: build and return the negation of expression T
   (its `static tree negate_expr (tree t)' header line is missing from
   this excerpt, as are the switch's case labels and fall-through
   returns).  */
880 type = TREE_TYPE (t);
883 switch (TREE_CODE (t))
/* INTEGER_CST: fold the negation and accept it unless it overflowed.  */
886 if (! TREE_UNSIGNED (type)
887 && 0 != (tem = fold (build1 (NEGATE_EXPR, type, t)))
888 && ! TREE_OVERFLOW (tem))
/* REAL_CST: negate the REAL_VALUE directly.  */
893 tem = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (t)));
894 /* Two's complement FP formats, such as c4x, may overflow. */
895 if (! TREE_OVERFLOW (tem))
896 return convert (type, tem);
/* NEGATE_EXPR: - -A is just A.  */
900 return convert (type, TREE_OPERAND (t, 0));
903 /* - (A - B) -> B - A */
904 if (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
905 return convert (type,
906 fold (build (MINUS_EXPR, TREE_TYPE (t),
908 TREE_OPERAND (t, 0))));
912 if (TREE_UNSIGNED (TREE_TYPE (t)))
/* MULT/DIV: push the negation into whichever operand accepts it.  */
918 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
920 tem = TREE_OPERAND (t, 1);
921 if (negate_expr_p (tem))
922 return convert (type,
923 fold (build (TREE_CODE (t), TREE_TYPE (t),
925 negate_expr (tem))));
926 tem = TREE_OPERAND (t, 0);
927 if (negate_expr_p (tem))
928 return convert (type,
929 fold (build (TREE_CODE (t), TREE_TYPE (t),
931 TREE_OPERAND (t, 1))));
/* Fallback: wrap T in an explicit NEGATE_EXPR.  */
939 return convert (type, fold (build1 (NEGATE_EXPR, TREE_TYPE (t), t)));
942 /* Split a tree IN into a constant, literal and variable parts that could be
943 combined with CODE to make IN. "constant" means an expression with
944 TREE_CONSTANT but that isn't an actual constant. CODE must be a
945 commutative arithmetic operation. Store the constant part into *CONP,
946 the literal in *LITP and return the variable part. If a part isn't
947 present, set it to null. If the tree does not decompose in this way,
948 return the entire tree as the variable part and the other parts as null.
950 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR. In that
951 case, we negate an operand that was subtracted. Except if it is a
952 literal for which we use *MINUS_LITP instead.
954 If NEGATE_P is true, we are negating all of IN, again except a literal
955 for which we use *MINUS_LITP instead.
957 If IN is itself a literal or constant, return it as appropriate.
959 Note that we do not guarantee that any of the three values will be the
960 same type as IN, but they will have the same signedness and mode. */
/* Decompose IN into variable, constant-but-not-literal (*CONP) and
   literal (*LITP / *MINUS_LITP) parts combinable by CODE, negating
   parts as requested by NEGATE_P; returns the variable part.
   NOTE(review): excerpt is missing initializations of var/*conp/*litp
   and several control-flow lines.  */
963 split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
964 tree *minus_litp, int negate_p)
972 /* Strip any conversions that don't change the machine mode or signedness. */
973 STRIP_SIGN_NOPS (in);
975 if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST)
977 else if (TREE_CODE (in) == code
978 || (! FLOAT_TYPE_P (TREE_TYPE (in))
979 /* We can associate addition and subtraction together (even
980 though the C standard doesn't say so) for integers because
981 the value is not affected. For reals, the value might be
982 affected, so we can't. */
983 && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
984 || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
986 tree op0 = TREE_OPERAND (in, 0);
987 tree op1 = TREE_OPERAND (in, 1);
/* neg1_p: the second operand is effectively negated when IN is a
   MINUS_EXPR.  */
988 int neg1_p = TREE_CODE (in) == MINUS_EXPR;
989 int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;
991 /* First see if either of the operands is a literal, then a constant. */
992 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
993 *litp = op0, op0 = 0;
994 else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST)
995 *litp = op1, neg_litp_p = neg1_p, op1 = 0;
997 if (op0 != 0 && TREE_CONSTANT (op0))
998 *conp = op0, op0 = 0;
999 else if (op1 != 0 && TREE_CONSTANT (op1))
1000 *conp = op1, neg_conp_p = neg1_p, op1 = 0;
1002 /* If we haven't dealt with either operand, this is not a case we can
1003 decompose. Otherwise, VAR is either of the ones remaining, if any. */
1004 if (op0 != 0 && op1 != 0)
1009 var = op1, neg_var_p = neg1_p;
1011 /* Now do any needed negations. */
/* A negated literal moves to *MINUS_LITP rather than being rebuilt.  */
1013 *minus_litp = *litp, *litp = 0;
1015 *conp = negate_expr (*conp);
1017 var = negate_expr (var);
1019 else if (TREE_CONSTANT (in))
/* NEGATE_P: flip literal slot and negate the other parts.  */
1027 *minus_litp = *litp, *litp = 0;
1028 else if (*minus_litp)
1029 *litp = *minus_litp, *minus_litp = 0;
1030 *conp = negate_expr (*conp);
1031 var = negate_expr (var);
1037 /* Re-associate trees split by the above function. T1 and T2 are either
1038 expressions to associate or null. Return the new expression, if any. If
1039 we build an operation, do it in TYPE and with CODE. */
/* Recombine parts produced by split_tree: build CODE applied to T1
   and T2 in TYPE, folding unless that would recurse, and turning
   x + -y into x - y.
   NOTE(review): the null-argument early returns are missing from this
   excerpt.  */
1042 associate_trees (tree t1, tree t2, enum tree_code code, tree type)
1049 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
1050 try to fold this since we will have infinite recursion. But do
1051 deal with any NEGATE_EXPRs. */
1052 if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
1053 || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
1055 if (code == PLUS_EXPR)
/* x + -y and -x + y become subtractions instead.  */
1057 if (TREE_CODE (t1) == NEGATE_EXPR)
1058 return build (MINUS_EXPR, type, convert (type, t2),
1059 convert (type, TREE_OPERAND (t1, 0)));
1060 else if (TREE_CODE (t2) == NEGATE_EXPR)
1061 return build (MINUS_EXPR, type, convert (type, t1),
1062 convert (type, TREE_OPERAND (t2, 0)));
1064 return build (code, type, convert (type, t1), convert (type, t2));
1067 return fold (build (code, type, convert (type, t1), convert (type, t2)));
1070 /* Combine two integer constants ARG1 and ARG2 under operation CODE
1071 to produce a new constant.
1073 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
/* Fold CODE applied to the INTEGER_CSTs ARG1 and ARG2 into a new
   constant, tracking overflow; NOTRUNC nonzero skips truncation to
   the type via force_fit_type.
   NOTE(review): the switch's case labels for the bitwise/shift/
   plus/minus/mult arms and several braces are missing from this
   excerpt.  */
1076 int_const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
1078 unsigned HOST_WIDE_INT int1l, int2l;
1079 HOST_WIDE_INT int1h, int2h;
1080 unsigned HOST_WIDE_INT low;
1082 unsigned HOST_WIDE_INT garbagel;
1083 HOST_WIDE_INT garbageh;
1085 tree type = TREE_TYPE (arg1);
1086 int uns = TREE_UNSIGNED (type);
/* Sizetype arithmetic is treated as overflow-checked even though
   unsigned.  */
1088 = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
1090 int no_overflow = 0;
1092 int1l = TREE_INT_CST_LOW (arg1);
1093 int1h = TREE_INT_CST_HIGH (arg1);
1094 int2l = TREE_INT_CST_LOW (arg2);
1095 int2h = TREE_INT_CST_HIGH (arg2);
/* Bitwise operations apply word-wise with no overflow.  */
1100 low = int1l | int2l, hi = int1h | int2h;
1104 low = int1l ^ int2l, hi = int1h ^ int2h;
1108 low = int1l & int2l, hi = int1h & int2h;
1114 /* It's unclear from the C standard whether shifts can overflow.
1115 The following code ignores overflow; perhaps a C standard
1116 interpretation ruling is needed. */
1117 lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
1125 lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
1130 overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
/* MINUS is implemented as addition of the negation.  */
1134 neg_double (int2l, int2h, &low, &hi);
1135 add_double (int1l, int1h, low, hi, &low, &hi);
1136 overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
1140 overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
1143 case TRUNC_DIV_EXPR:
1144 case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
1145 case EXACT_DIV_EXPR:
1146 /* This is a shortcut for a common special case. */
1147 if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1148 && ! TREE_CONSTANT_OVERFLOW (arg1)
1149 && ! TREE_CONSTANT_OVERFLOW (arg2)
1150 && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1152 if (code == CEIL_DIV_EXPR)
1155 low = int1l / int2l, hi = 0;
1159 /* ... fall through ... */
1161 case ROUND_DIV_EXPR:
1162 if (int2h == 0 && int2l == 1)
1164 low = int1l, hi = int1h;
/* x / x is 1 (for nonzero x).  */
1167 if (int1l == int2l && int1h == int2h
1168 && ! (int1l == 0 && int1h == 0))
1173 overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
1174 &low, &hi, &garbagel, &garbageh);
1177 case TRUNC_MOD_EXPR:
1178 case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
1179 /* This is a shortcut for a common special case. */
1180 if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1181 && ! TREE_CONSTANT_OVERFLOW (arg1)
1182 && ! TREE_CONSTANT_OVERFLOW (arg2)
1183 && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1185 if (code == CEIL_MOD_EXPR)
1187 low = int1l % int2l, hi = 0;
1191 /* ... fall through ... */
1193 case ROUND_MOD_EXPR:
1194 overflow = div_and_round_double (code, uns,
1195 int1l, int1h, int2l, int2h,
1196 &garbagel, &garbageh, &low, &hi);
/* MIN/MAX: compare as unsigned or signed double words, then pick.  */
1202 low = (((unsigned HOST_WIDE_INT) int1h
1203 < (unsigned HOST_WIDE_INT) int2h)
1204 || (((unsigned HOST_WIDE_INT) int1h
1205 == (unsigned HOST_WIDE_INT) int2h)
1208 low = (int1h < int2h
1209 || (int1h == int2h && int1l < int2l));
1211 if (low == (code == MIN_EXPR))
1212 low = int1l, hi = int1h;
1214 low = int2l, hi = int2h;
1221 /* If this is for a sizetype, can be represented as one (signed)
1222 HOST_WIDE_INT word, and doesn't overflow, use size_int since it caches
1225 && ((hi == 0 && (HOST_WIDE_INT) low >= 0)
1226 || (hi == -1 && (HOST_WIDE_INT) low < 0))
1227 && overflow == 0 && ! TREE_OVERFLOW (arg1) && ! TREE_OVERFLOW (arg2))
1228 return size_int_type_wide (low, type);
1231 t = build_int_2 (low, hi);
1232 TREE_TYPE (t) = TREE_TYPE (arg1);
/* Propagate overflow flags, letting force_fit_type truncate to the
   type unless NOTRUNC was requested.  */
1237 ? (!uns || is_sizetype) && overflow
1238 : (force_fit_type (t, (!uns || is_sizetype) && overflow)
1240 | TREE_OVERFLOW (arg1)
1241 | TREE_OVERFLOW (arg2));
1243 /* If we're doing a size calculation, unsigned arithmetic does overflow.
1244 So check if force_fit_type truncated the value. */
1246 && ! TREE_OVERFLOW (t)
1247 && (TREE_INT_CST_HIGH (t) != hi
1248 || TREE_INT_CST_LOW (t) != low))
1249 TREE_OVERFLOW (t) = 1;
1251 TREE_CONSTANT_OVERFLOW (t) = (TREE_OVERFLOW (t)
1252 | TREE_CONSTANT_OVERFLOW (arg1)
1253 | TREE_CONSTANT_OVERFLOW (arg2));
1257 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1258 constant. We assume ARG1 and ARG2 have the same data type, or at least
1259 are the same kind of constant and the same machine mode.
1261 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
1264 const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
/* Integer constants: delegate to the specialized two-word helper. */
1269 if (TREE_CODE (arg1) == INTEGER_CST)
1270 return int_const_binop (code, arg1, arg2, notrunc);
/* Floating-point constants: fold with the host-independent REAL_VALUE
   machinery, refusing cases that could raise spurious exceptions. */
1272 if (TREE_CODE (arg1) == REAL_CST)
1274 enum machine_mode mode;
1277 REAL_VALUE_TYPE value;
1280 d1 = TREE_REAL_CST (arg1);
1281 d2 = TREE_REAL_CST (arg2);
1283 type = TREE_TYPE (arg1);
1284 mode = TYPE_MODE (type);
1286 /* Don't perform operation if we honor signaling NaNs and
1287 either operand is a NaN. */
1288 if (HONOR_SNANS (mode)
1289 && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
1292 /* Don't perform operation if it would raise a division
1293 by zero exception. */
1294 if (code == RDIV_EXPR
1295 && REAL_VALUES_EQUAL (d2, dconst0)
1296 && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
1299 /* If either operand is a NaN, just return it. Otherwise, set up
1300 for floating-point trap; we return an overflow. */
1301 if (REAL_VALUE_ISNAN (d1))
1303 else if (REAL_VALUE_ISNAN (d2))
1306 REAL_ARITHMETIC (value, code, d1, d2);
1308 t = build_real (type, real_value_truncate (mode, value));
/* Propagate the operands' overflow flags into the result node. */
1311 = (force_fit_type (t, 0)
1312 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
1313 TREE_CONSTANT_OVERFLOW (t)
1315 | TREE_CONSTANT_OVERFLOW (arg1)
1316 | TREE_CONSTANT_OVERFLOW (arg2);
/* Complex constants: operate componentwise via recursive const_binop
   calls on the real and imaginary parts. */
1319 if (TREE_CODE (arg1) == COMPLEX_CST)
1321 tree type = TREE_TYPE (arg1);
1322 tree r1 = TREE_REALPART (arg1);
1323 tree i1 = TREE_IMAGPART (arg1);
1324 tree r2 = TREE_REALPART (arg2);
1325 tree i2 = TREE_IMAGPART (arg2);
1331 t = build_complex (type,
1332 const_binop (PLUS_EXPR, r1, r2, notrunc),
1333 const_binop (PLUS_EXPR, i1, i2, notrunc));
1337 t = build_complex (type,
1338 const_binop (MINUS_EXPR, r1, r2, notrunc),
1339 const_binop (MINUS_EXPR, i1, i2, notrunc));
/* Complex multiplication: (r1*r2 - i1*i2) + (r1*i2 + i1*r2)i. */
1343 t = build_complex (type,
1344 const_binop (MINUS_EXPR,
1345 const_binop (MULT_EXPR,
1347 const_binop (MULT_EXPR,
1350 const_binop (PLUS_EXPR,
1351 const_binop (MULT_EXPR,
1353 const_binop (MULT_EXPR,
/* Complex division: multiply by the conjugate of arg2 and divide each
   component by |arg2|^2 (integer types use truncating division). */
1361 = const_binop (PLUS_EXPR,
1362 const_binop (MULT_EXPR, r2, r2, notrunc),
1363 const_binop (MULT_EXPR, i2, i2, notrunc),
1366 t = build_complex (type,
1368 (INTEGRAL_TYPE_P (TREE_TYPE (r1))
1369 ? TRUNC_DIV_EXPR : RDIV_EXPR,
1370 const_binop (PLUS_EXPR,
1371 const_binop (MULT_EXPR, r1, r2,
1373 const_binop (MULT_EXPR, i1, i2,
1376 magsquared, notrunc),
1378 (INTEGRAL_TYPE_P (TREE_TYPE (r1))
1379 ? TRUNC_DIV_EXPR : RDIV_EXPR,
1380 const_binop (MINUS_EXPR,
1381 const_binop (MULT_EXPR, i1, r2,
1383 const_binop (MULT_EXPR, r1, i2,
1386 magsquared, notrunc));
1398 /* These are the hash table functions for the hash table of INTEGER_CST
1399 nodes of a sizetype. */
1401 /* Return the hash code for X, an INTEGER_CST. */
1404 size_htab_hash (const void *x)
/* Mix the two value words, the type's address, and the overflow flag so
   that nodes differing in any of them hash differently. */
1408 return (TREE_INT_CST_HIGH (t) ^ TREE_INT_CST_LOW (t)
1409 ^ htab_hash_pointer (TREE_TYPE (t))
1410 ^ (TREE_OVERFLOW (t) << 20));
1413 /* Return nonzero if the value represented by *X (an INTEGER_CST tree node)
1414 is the same as that given by *Y, which is also an INTEGER_CST node. */
1417 size_htab_eq (const void *x, const void *y)
/* Equality must test exactly the fields that size_htab_hash mixes in:
   both value words, the type identity, and the overflow flag. */
1422 return (TREE_INT_CST_HIGH (xt) == TREE_INT_CST_HIGH (yt)
1423 && TREE_INT_CST_LOW (xt) == TREE_INT_CST_LOW (yt)
1424 && TREE_TYPE (xt) == TREE_TYPE (yt)
1425 && TREE_OVERFLOW (xt) == TREE_OVERFLOW (yt));
1428 /* Return an INTEGER_CST with value whose low-order HOST_BITS_PER_WIDE_INT
1429 bits are given by NUMBER and of the sizetype represented by KIND. */
1432 size_int_wide (HOST_WIDE_INT number, enum size_type_kind kind)
/* Map KIND to the corresponding global sizetype and delegate. */
1434 return size_int_type_wide (number, sizetype_tab[(int) kind]);
1437 /* Likewise, but the desired type is specified explicitly. */
/* Scratch INTEGER_CST node reused across calls; GC-rooted via GTY. */
1439 static GTY (()) tree new_const;
/* Hash table interning sizetype INTEGER_CSTs.  Entries survive GC only
   while marked (if_marked), so dead constants can be collected. */
1440 static GTY ((if_marked ("ggc_marked_p"), param_is (union tree_node)))
1444 size_int_type_wide (HOST_WIDE_INT number, tree type)
/* Lazily create the hash table and the initial scratch node. */
1450 size_htab = htab_create_ggc (1024, size_htab_hash, size_htab_eq, NULL);
1451 new_const = make_node (INTEGER_CST);
1454 /* Adjust NEW_CONST to be the constant we want. If it's already in the
1455 hash table, we return the value from the hash table. Otherwise, we
1456 place that in the hash table and make a new node for the next time. */
1457 TREE_INT_CST_LOW (new_const) = number;
/* Sign-extend NUMBER into the high word of the double-word value. */
1458 TREE_INT_CST_HIGH (new_const) = number < 0 ? -1 : 0;
1459 TREE_TYPE (new_const) = type;
1460 TREE_OVERFLOW (new_const) = TREE_CONSTANT_OVERFLOW (new_const)
1461 = force_fit_type (new_const, 0);
1463 slot = htab_find_slot (size_htab, new_const, INSERT);
/* The scratch node was consumed by the insertion; make a fresh one so
   the next call has something to fill in. */
1469 new_const = make_node (INTEGER_CST);
1473 return (tree) *slot;
1476 /* Combine operands ARG0 and ARG1 with arithmetic operation CODE. CODE
1477 is a tree code. The type of the result is taken from the operands.
1478 Both must be the same integer type and it must be a size type.
1479 If the operands are constant, so is the result. */
1482 size_binop (enum tree_code code, tree arg0, tree arg1)
1484 tree type = TREE_TYPE (arg0);
/* Both operands must share one sizetype-flavored integer type. */
1486 if (TREE_CODE (type) != INTEGER_TYPE || ! TYPE_IS_SIZETYPE (type)
1487 || type != TREE_TYPE (arg1))
1490 /* Handle the special case of two integer constants faster. */
1491 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
1493 /* And some specific cases even faster than that. */
1494 if (code == PLUS_EXPR && integer_zerop (arg0))
1496 else if ((code == MINUS_EXPR || code == PLUS_EXPR)
1497 && integer_zerop (arg1))
1499 else if (code == MULT_EXPR && integer_onep (arg0))
1502 /* Handle general case of two integer constants. */
1503 return int_const_binop (code, arg0, arg1, 0);
1506 if (arg0 == error_mark_node || arg1 == error_mark_node)
1507 return error_mark_node;
/* Non-constant operands: build the expression and let fold simplify. */
1509 return fold (build (code, type, arg0, arg1));
1512 /* Given two values, either both of sizetype or both of bitsizetype,
1513 compute the difference between the two values. Return the value
1514 in signed type corresponding to the type of the operands. */
1517 size_diffop (tree arg0, tree arg1)
1519 tree type = TREE_TYPE (arg0);
/* Operands must share one sizetype-flavored integer type. */
1522 if (TREE_CODE (type) != INTEGER_TYPE || ! TYPE_IS_SIZETYPE (type)
1523 || type != TREE_TYPE (arg1))
1526 /* If the type is already signed, just do the simple thing. */
1527 if (! TREE_UNSIGNED (type))
1528 return size_binop (MINUS_EXPR, arg0, arg1);
/* Pick the signed counterpart of the operands' (bit)sizetype. */
1530 ctype = (type == bitsizetype || type == ubitsizetype
1531 ? sbitsizetype : ssizetype);
1533 /* If either operand is not a constant, do the conversions to the signed
1534 type and subtract. The hardware will do the right thing with any
1535 overflow in the subtraction. */
1536 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
1537 return size_binop (MINUS_EXPR, convert (ctype, arg0),
1538 convert (ctype, arg1));
1540 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
1541 Otherwise, subtract the other way, convert to CTYPE (we know that can't
1542 overflow) and negate (which can't either). Special-case a result
1543 of zero while we're here. */
1544 if (tree_int_cst_equal (arg0, arg1))
1545 return convert (ctype, integer_zero_node);
1546 else if (tree_int_cst_lt (arg1, arg0))
1547 return convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
/* arg1 > arg0: compute -(arg1 - arg0) in the signed type. */
1549 return size_binop (MINUS_EXPR, convert (ctype, integer_zero_node),
1550 convert (ctype, size_binop (MINUS_EXPR, arg1, arg0)));
1554 /* Given T, a tree representing type conversion of ARG1, a constant,
1555 return a constant tree representing the result of conversion. */
1558 fold_convert (tree t, tree arg1)
1560 tree type = TREE_TYPE (t);
/* Conversions to pointer or integral types. */
1563 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
1565 if (TREE_CODE (arg1) == INTEGER_CST)
1567 /* If we would build a constant wider than GCC supports,
1568 leave the conversion unfolded. */
1569 if (TYPE_PRECISION (type) > 2 * HOST_BITS_PER_WIDE_INT)
1572 /* If we are trying to make a sizetype for a small integer, use
1573 size_int to pick up cached types to reduce duplicate nodes. */
1574 if (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
1575 && !TREE_CONSTANT_OVERFLOW (arg1)
1576 && compare_tree_int (arg1, 10000) < 0)
1577 return size_int_type_wide (TREE_INT_CST_LOW (arg1), type);
1579 /* Given an integer constant, make new constant with new type,
1580 appropriately sign-extended or truncated. */
1581 t = build_int_2 (TREE_INT_CST_LOW (arg1),
1582 TREE_INT_CST_HIGH (arg1));
1583 TREE_TYPE (t) = type;
1584 /* Indicate an overflow if (1) ARG1 already overflowed,
1585 or (2) force_fit_type indicates an overflow.
1586 Tell force_fit_type that an overflow has already occurred
1587 if ARG1 is a too-large unsigned value and T is signed.
1588 But don't indicate an overflow if converting a pointer. */
1590 = ((force_fit_type (t,
1591 (TREE_INT_CST_HIGH (arg1) < 0
1592 && (TREE_UNSIGNED (type)
1593 < TREE_UNSIGNED (TREE_TYPE (arg1)))))
1594 && ! POINTER_TYPE_P (TREE_TYPE (arg1)))
1595 || TREE_OVERFLOW (arg1));
1596 TREE_CONSTANT_OVERFLOW (t)
1597 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
/* Real-to-integer conversion: truncate toward zero with range check. */
1599 else if (TREE_CODE (arg1) == REAL_CST)
1601 /* Don't initialize these, use assignments.
1602 Initialized local aggregates don't work on old compilers. */
1606 tree type1 = TREE_TYPE (arg1);
1609 x = TREE_REAL_CST (arg1);
1610 l = real_value_from_int_cst (type1, TYPE_MIN_VALUE (type));
/* The target type may lack a maximum value; treat it as unbounded. */
1612 no_upper_bound = (TYPE_MAX_VALUE (type) == NULL);
1613 if (!no_upper_bound)
1614 u = real_value_from_int_cst (type1, TYPE_MAX_VALUE (type));
1616 /* See if X will be in range after truncation towards 0.
1617 To compensate for truncation, move the bounds away from 0,
1618 but reject if X exactly equals the adjusted bounds. */
1619 REAL_ARITHMETIC (l, MINUS_EXPR, l, dconst1);
1620 if (!no_upper_bound)
1621 REAL_ARITHMETIC (u, PLUS_EXPR, u, dconst1);
1622 /* If X is a NaN, use zero instead and show we have an overflow.
1623 Otherwise, range check. */
1624 if (REAL_VALUE_ISNAN (x))
1625 overflow = 1, x = dconst0;
1626 else if (! (REAL_VALUES_LESS (l, x)
1628 && REAL_VALUES_LESS (x, u)))
1632 HOST_WIDE_INT low, high;
/* Convert the (possibly zeroed) real to a two-word integer. */
1633 REAL_VALUE_TO_INT (&low, &high, x);
1634 t = build_int_2 (low, high);
1636 TREE_TYPE (t) = type;
1638 = TREE_OVERFLOW (arg1) | force_fit_type (t, overflow);
1639 TREE_CONSTANT_OVERFLOW (t)
1640 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
1642 TREE_TYPE (t) = type;
/* Conversions to floating-point types. */
1644 else if (TREE_CODE (type) == REAL_TYPE)
1646 if (TREE_CODE (arg1) == INTEGER_CST)
1647 return build_real_from_int_cst (type, arg1);
1648 if (TREE_CODE (arg1) == REAL_CST)
1650 if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1)))
1652 /* We make a copy of ARG1 so that we don't modify an
1653 existing constant tree. */
1654 t = copy_node (arg1);
1655 TREE_TYPE (t) = type;
/* Non-NaN: truncate to the target mode's precision. */
1659 t = build_real (type,
1660 real_value_truncate (TYPE_MODE (type),
1661 TREE_REAL_CST (arg1)));
1664 = TREE_OVERFLOW (arg1) | force_fit_type (t, 0);
1665 TREE_CONSTANT_OVERFLOW (t)
1666 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
1670 TREE_CONSTANT (t) = 1;
1674 /* Return an expr equal to X but certainly not valid as an lvalue. */
1681 /* These things are certainly not lvalues. */
1682 if (TREE_CODE (x) == NON_LVALUE_EXPR
1683 || TREE_CODE (x) == INTEGER_CST
1684 || TREE_CODE (x) == REAL_CST
1685 || TREE_CODE (x) == STRING_CST
1686 || TREE_CODE (x) == ADDR_EXPR)
/* Otherwise wrap X in a NON_LVALUE_EXPR, preserving constness. */
1689 result = build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
1690 TREE_CONSTANT (result) = TREE_CONSTANT (x);
1694 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
1695 Zero means allow extended lvalues. */
1697 int pedantic_lvalues;
1699 /* When pedantic, return an expr equal to X but certainly not valid as a
1700 pedantic lvalue. Otherwise, return X. */
1703 pedantic_non_lvalue (tree x)
/* Only wrap when the front end requested pedantic lvalue checking. */
1705 if (pedantic_lvalues)
1706 return non_lvalue (x);
1711 /* Given a tree comparison code, return the code that is the logical inverse
1712 of the given code. It is not safe to do this for floating-point
1713 comparisons, except for NE_EXPR and EQ_EXPR, because of NaN operands
1714 (e.g. !(a < b) is not a >= b when either operand is a NaN). */
1715 static enum tree_code
1716 invert_tree_comparison (enum tree_code code)
1737 /* Similar, but return the comparison that results if the operands are
1738 swapped. This is safe for floating-point, since swapping operands
1739 (unlike inverting) does not change NaN behavior. */
1740 static enum tree_code
1741 swap_tree_comparison (enum tree_code code)
1762 /* Convert a comparison tree code from an enum tree_code representation
1763 into a compcode bit-based encoding. This function is the inverse of
1764 compcode_to_comparison. */
1767 comparison_to_compcode (enum tree_code code)
1788 /* Convert a compcode bit-based encoding of a comparison operator back
1789 to GCC's enum tree_code representation. This function is the
1790 inverse of comparison_to_compcode. */
1792 static enum tree_code
1793 compcode_to_comparison (int code)
1814 /* Return nonzero if CODE is a tree code that represents a truth value. */
1817 truth_value_p (enum tree_code code)
/* Comparisons (class '<') and the logical operators all yield 0/1. */
1819 return (TREE_CODE_CLASS (code) == '<'
1820 || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
1821 || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
1822 || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
1825 /* Return nonzero if two operands (typically of the same tree node)
1826 are necessarily equal. If either argument has side-effects this
1827 function returns zero.
1829 If ONLY_CONST is nonzero, only return nonzero for constants.
1830 This function tests whether the operands are indistinguishable;
1831 it does not test whether they are equal using C's == operation.
1832 The distinction is important for IEEE floating point, because
1833 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
1834 (2) two NaNs may be indistinguishable, but NaN!=NaN.
1836 If ONLY_CONST is zero, a VAR_DECL is considered equal to itself
1837 even though it may hold multiple values during a function.
1838 This is because a GCC tree node guarantees that nothing else is
1839 executed between the evaluation of its "operands" (which may often
1840 be evaluated in arbitrary order). Hence if the operands themselves
1841 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
1842 same value in each operand/subexpression. Hence a zero value for
1843 ONLY_CONST assumes isochronic (or instantaneous) tree equivalence.
1844 If comparing arbitrary expression trees, such as from different
1845 statements, ONLY_CONST must usually be nonzero. */
1848 operand_equal_p (tree arg0, tree arg1, int only_const)
1852 /* If both types don't have the same signedness, then we can't consider
1853 them equal. We must check this before the STRIP_NOPS calls
1854 because they may change the signedness of the arguments. */
1855 if (TREE_UNSIGNED (TREE_TYPE (arg0)) != TREE_UNSIGNED (TREE_TYPE (arg1)))
1861 if (TREE_CODE (arg0) != TREE_CODE (arg1)
1862 /* This is needed for conversions and for COMPONENT_REF.
1863 Might as well play it safe and always test this. */
1864 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
1865 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
1866 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
1869 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
1870 We don't care about side effects in that case because the SAVE_EXPR
1871 takes care of that for us. In all other cases, two expressions are
1872 equal if they have no side effects. If we have two identical
1873 expressions with side effects that should be treated the same due
1874 to the only side effects being identical SAVE_EXPR's, that will
1875 be detected in the recursive calls below. */
1876 if (arg0 == arg1 && ! only_const
1877 && (TREE_CODE (arg0) == SAVE_EXPR
1878 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
1881 /* Next handle constant cases, those for which we can return 1 even
1882 if ONLY_CONST is set. */
1883 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
1884 switch (TREE_CODE (arg0))
1887 return (! TREE_CONSTANT_OVERFLOW (arg0)
1888 && ! TREE_CONSTANT_OVERFLOW (arg1)
1889 && tree_int_cst_equal (arg0, arg1));
/* Reals compare with bitwise identity, not ==, so -0.0 != 0.0 here. */
1892 return (! TREE_CONSTANT_OVERFLOW (arg0)
1893 && ! TREE_CONSTANT_OVERFLOW (arg1)
1894 && REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
1895 TREE_REAL_CST (arg1)));
/* Vector constants: walk both element chains in parallel. */
1901 if (TREE_CONSTANT_OVERFLOW (arg0)
1902 || TREE_CONSTANT_OVERFLOW (arg1))
1905 v1 = TREE_VECTOR_CST_ELTS (arg0);
1906 v2 = TREE_VECTOR_CST_ELTS (arg1);
1909 if (!operand_equal_p (v1, v2, only_const))
1911 v1 = TREE_CHAIN (v1);
1912 v2 = TREE_CHAIN (v2);
1919 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
1921 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
1925 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
1926 && ! memcmp (TREE_STRING_POINTER (arg0),
1927 TREE_STRING_POINTER (arg1),
1928 TREE_STRING_LENGTH (arg0)));
1931 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
/* Non-constant expressions: dispatch on the tree code class. */
1940 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
1943 /* Two conversions are equal only if signedness and modes match. */
1944 if ((TREE_CODE (arg0) == NOP_EXPR || TREE_CODE (arg0) == CONVERT_EXPR)
1945 && (TREE_UNSIGNED (TREE_TYPE (arg0))
1946 != TREE_UNSIGNED (TREE_TYPE (arg1))))
1949 return operand_equal_p (TREE_OPERAND (arg0, 0),
1950 TREE_OPERAND (arg1, 0), 0);
/* Binary operators: same-position operands first. */
1954 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0)
1955 && operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1),
1959 /* For commutative ops, allow the other order. */
1960 return ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MULT_EXPR
1961 || TREE_CODE (arg0) == MIN_EXPR || TREE_CODE (arg0) == MAX_EXPR
1962 || TREE_CODE (arg0) == BIT_IOR_EXPR
1963 || TREE_CODE (arg0) == BIT_XOR_EXPR
1964 || TREE_CODE (arg0) == BIT_AND_EXPR
1965 || TREE_CODE (arg0) == NE_EXPR || TREE_CODE (arg0) == EQ_EXPR)
1966 && operand_equal_p (TREE_OPERAND (arg0, 0),
1967 TREE_OPERAND (arg1, 1), 0)
1968 && operand_equal_p (TREE_OPERAND (arg0, 1),
1969 TREE_OPERAND (arg1, 0), 0));
1972 /* If either of the pointer (or reference) expressions we are
1973 dereferencing contain a side effect, these cannot be equal. */
1974 if (TREE_SIDE_EFFECTS (arg0)
1975 || TREE_SIDE_EFFECTS (arg1))
1978 switch (TREE_CODE (arg0))
1981 return operand_equal_p (TREE_OPERAND (arg0, 0),
1982 TREE_OPERAND (arg1, 0), 0);
1986 case ARRAY_RANGE_REF:
1987 return (operand_equal_p (TREE_OPERAND (arg0, 0),
1988 TREE_OPERAND (arg1, 0), 0)
1989 && operand_equal_p (TREE_OPERAND (arg0, 1),
1990 TREE_OPERAND (arg1, 1), 0));
1993 return (operand_equal_p (TREE_OPERAND (arg0, 0),
1994 TREE_OPERAND (arg1, 0), 0)
1995 && operand_equal_p (TREE_OPERAND (arg0, 1),
1996 TREE_OPERAND (arg1, 1), 0)
1997 && operand_equal_p (TREE_OPERAND (arg0, 2),
1998 TREE_OPERAND (arg1, 2), 0));
2004 switch (TREE_CODE (arg0))
2007 case TRUTH_NOT_EXPR:
2008 return operand_equal_p (TREE_OPERAND (arg0, 0),
2009 TREE_OPERAND (arg1, 0), 0);
2012 return rtx_equal_p (RTL_EXPR_RTL (arg0), RTL_EXPR_RTL (arg1));
2015 /* If the CALL_EXPRs call different functions, then they
2016 clearly can not be equal. */
2017 if (! operand_equal_p (TREE_OPERAND (arg0, 0),
2018 TREE_OPERAND (arg1, 0), 0))
2021 /* Only consider const functions equivalent. */
2022 fndecl = get_callee_fndecl (arg0);
2023 if (fndecl == NULL_TREE
2024 || ! (flags_from_decl_or_type (fndecl) & ECF_CONST))
2027 /* Now see if all the arguments are the same. operand_equal_p
2028 does not handle TREE_LIST, so we walk the operands here
2029 feeding them to operand_equal_p. */
2030 arg0 = TREE_OPERAND (arg0, 1);
2031 arg1 = TREE_OPERAND (arg1, 1);
2032 while (arg0 && arg1)
2034 if (! operand_equal_p (TREE_VALUE (arg0), TREE_VALUE (arg1), 0))
2037 arg0 = TREE_CHAIN (arg0);
2038 arg1 = TREE_CHAIN (arg1);
2041 /* If we get here and both argument lists are exhausted
2042 then the CALL_EXPRs are equal. */
2043 return ! (arg0 || arg1);
2050 /* Consider __builtin_sqrt equal to sqrt. */
2051 return TREE_CODE (arg0) == FUNCTION_DECL
2052 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2053 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2054 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1);
2061 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2062 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2064 When in doubt, return 0. */
2067 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2069 int unsignedp1, unsignedpo;
2070 tree primarg0, primarg1, primother;
2071 unsigned int correct_width;
/* Exact tree equality is the easy win. */
2073 if (operand_equal_p (arg0, arg1, 0))
/* Only integral operands can have been shortened. */
2076 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2077 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2080 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2081 and see if the inner values are the same. This removes any
2082 signedness comparison, which doesn't matter here. */
2083 primarg0 = arg0, primarg1 = arg1;
2084 STRIP_NOPS (primarg0);
2085 STRIP_NOPS (primarg1);
2086 if (operand_equal_p (primarg0, primarg1, 0))
2089 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2090 actual comparison operand, ARG0.
2092 First throw away any conversions to wider types
2093 already present in the operands. */
2095 primarg1 = get_narrower (arg1, &unsignedp1);
2096 primother = get_narrower (other, &unsignedpo);
2098 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2099 if (unsignedp1 == unsignedpo
2100 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2101 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2103 tree type = TREE_TYPE (arg0);
2105 /* Make sure shorter operand is extended the right way
2106 to match the longer operand. */
2107 primarg1 = convert ((*lang_hooks.types.signed_or_unsigned_type)
2108 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2110 if (operand_equal_p (arg0, convert (type, primarg1), 0))
2117 /* See if ARG is an expression that is either a comparison or is performing
2118 arithmetic on comparisons. The comparisons must only be comparing
2119 two different values, which will be stored in *CVAL1 and *CVAL2; if
2120 they are nonzero it means that some operands have already been found.
2121 No variables may be used anywhere else in the expression except in the
2122 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2123 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2125 If this is true, return 1. Otherwise, return zero. */
2128 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2130 enum tree_code code = TREE_CODE (arg);
2131 char class = TREE_CODE_CLASS (code);
2133 /* We can handle some of the 'e' cases here. */
2134 if (class == 'e' && code == TRUTH_NOT_EXPR)
2136 else if (class == 'e'
2137 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2138 || code == COMPOUND_EXPR))
2141 else if (class == 'e' && code == SAVE_EXPR && SAVE_EXPR_RTL (arg) == 0
2142 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2144 /* If we've already found a CVAL1 or CVAL2, this expression is
2145 too complex to handle. */
2146 if (*cval1 || *cval2)
/* Unary case: recurse into the single operand. */
2156 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
/* Binary case: both operands must themselves qualify. */
2159 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2160 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2161 cval1, cval2, save_p));
2167 if (code == COND_EXPR)
2168 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2169 cval1, cval2, save_p)
2170 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2171 cval1, cval2, save_p)
2172 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2173 cval1, cval2, save_p));
2177 /* First see if we can handle the first operand, then the second. For
2178 the second operand, we know *CVAL1 can't be zero. It must be that
2179 one side of the comparison is each of the values; test for the
2180 case where this isn't true by failing if the two operands
2183 if (operand_equal_p (TREE_OPERAND (arg, 0),
2184 TREE_OPERAND (arg, 1), 0))
/* Record operand 0 as CVAL1 or CVAL2, or match an existing one. */
2188 *cval1 = TREE_OPERAND (arg, 0);
2189 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2191 else if (*cval2 == 0)
2192 *cval2 = TREE_OPERAND (arg, 0);
2193 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
/* Likewise for operand 1. */
2198 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2200 else if (*cval2 == 0)
2201 *cval2 = TREE_OPERAND (arg, 1);
2202 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2214 /* ARG is a tree that is known to contain just arithmetic operations and
2215 comparisons. Evaluate the operations in the tree substituting NEW0 for
2216 any occurrence of OLD0 as an operand of a comparison and likewise for
2220 eval_subst (tree arg, tree old0, tree new0, tree old1, tree new1)
2222 tree type = TREE_TYPE (arg);
2223 enum tree_code code = TREE_CODE (arg);
2224 char class = TREE_CODE_CLASS (code);
2226 /* We can handle some of the 'e' cases here. */
2227 if (class == 'e' && code == TRUTH_NOT_EXPR)
2229 else if (class == 'e'
2230 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
/* Unary: rebuild with the substituted operand and refold. */
2236 return fold (build1 (code, type,
2237 eval_subst (TREE_OPERAND (arg, 0),
2238 old0, new0, old1, new1)));
/* Binary: substitute in both operands and refold. */
2241 return fold (build (code, type,
2242 eval_subst (TREE_OPERAND (arg, 0),
2243 old0, new0, old1, new1),
2244 eval_subst (TREE_OPERAND (arg, 1),
2245 old0, new0, old1, new1)));
2251 return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);
2254 return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);
/* Ternary (e.g. COND_EXPR): substitute in all three operands. */
2257 return fold (build (code, type,
2258 eval_subst (TREE_OPERAND (arg, 0),
2259 old0, new0, old1, new1),
2260 eval_subst (TREE_OPERAND (arg, 1),
2261 old0, new0, old1, new1),
2262 eval_subst (TREE_OPERAND (arg, 2),
2263 old0, new0, old1, new1)));
2267 /* Fall through - ??? */
/* Comparison: this is where OLD0->NEW0 and OLD1->NEW1 apply. */
2271 tree arg0 = TREE_OPERAND (arg, 0);
2272 tree arg1 = TREE_OPERAND (arg, 1);
2274 /* We need to check both for exact equality and tree equality. The
2275 former will be true if the operand has a side-effect. In that
2276 case, we know the operand occurred exactly once. */
2278 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
2280 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
2283 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
2285 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
2288 return fold (build (code, type, arg0, arg1));
2296 /* Return a tree for the case when the result of an expression is RESULT
2297 converted to TYPE and OMITTED was previously an operand of the expression
2298 but is now not needed (e.g., we folded OMITTED * 0).
2300 If OMITTED has side effects, we must evaluate it. Otherwise, just do
2301 the conversion of RESULT to TYPE. */
2304 omit_one_operand (tree type, tree result, tree omitted)
2306 tree t = convert (type, result);
/* Keep OMITTED alive via a COMPOUND_EXPR only if it has side effects. */
2308 if (TREE_SIDE_EFFECTS (omitted))
2309 return build (COMPOUND_EXPR, type, omitted, t);
2311 return non_lvalue (t);
2314 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
2317 pedantic_omit_one_operand (tree type, tree result, tree omitted)
2319 tree t = convert (type, result);
2321 if (TREE_SIDE_EFFECTS (omitted))
2322 return build (COMPOUND_EXPR, type, omitted, t);
2324 return pedantic_non_lvalue (t);
2327 /* Return a simplified tree node for the truth-negation of ARG. This
2328 never alters ARG itself. We assume that ARG is an operation that
2329 returns a truth value (0 or 1). */
2332 invert_truthvalue (tree arg)
2334 tree type = TREE_TYPE (arg);
2335 enum tree_code code = TREE_CODE (arg);
2337 if (code == ERROR_MARK)
2340 /* If this is a comparison, we can simply invert it, except for
2341 floating-point non-equality comparisons, in which case we just
2342 enclose a TRUTH_NOT_EXPR around what we have. */
2344 if (TREE_CODE_CLASS (code) == '<')
2346 if (FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (arg, 0)))
2347 && !flag_unsafe_math_optimizations
2350 return build1 (TRUTH_NOT_EXPR, type, arg);
2352 return build (invert_tree_comparison (code), type,
2353 TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
/* Integer constant: the negation is just zero/one. */
2359 return convert (type, build_int_2 (integer_zerop (arg), 0));
/* De Morgan: !(a & b) == !a | !b and vice versa. */
2361 case TRUTH_AND_EXPR:
2362 return build (TRUTH_OR_EXPR, type,
2363 invert_truthvalue (TREE_OPERAND (arg, 0)),
2364 invert_truthvalue (TREE_OPERAND (arg, 1)));
2367 return build (TRUTH_AND_EXPR, type,
2368 invert_truthvalue (TREE_OPERAND (arg, 0)),
2369 invert_truthvalue (TREE_OPERAND (arg, 1)));
2371 case TRUTH_XOR_EXPR:
2372 /* Here we can invert either operand. We invert the first operand
2373 unless the second operand is a TRUTH_NOT_EXPR in which case our
2374 result is the XOR of the first operand with the inside of the
2375 negation of the second operand. */
2377 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
2378 return build (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
2379 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
2381 return build (TRUTH_XOR_EXPR, type,
2382 invert_truthvalue (TREE_OPERAND (arg, 0)),
2383 TREE_OPERAND (arg, 1));
/* De Morgan for short-circuit forms; evaluation order preserved. */
2385 case TRUTH_ANDIF_EXPR:
2386 return build (TRUTH_ORIF_EXPR, type,
2387 invert_truthvalue (TREE_OPERAND (arg, 0)),
2388 invert_truthvalue (TREE_OPERAND (arg, 1)));
2390 case TRUTH_ORIF_EXPR:
2391 return build (TRUTH_ANDIF_EXPR, type,
2392 invert_truthvalue (TREE_OPERAND (arg, 0)),
2393 invert_truthvalue (TREE_OPERAND (arg, 1)));
/* Double negation cancels. */
2395 case TRUTH_NOT_EXPR:
2396 return TREE_OPERAND (arg, 0);
/* Push the negation into both arms of a conditional. */
2399 return build (COND_EXPR, type, TREE_OPERAND (arg, 0),
2400 invert_truthvalue (TREE_OPERAND (arg, 1)),
2401 invert_truthvalue (TREE_OPERAND (arg, 2)));
2404 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
2405 invert_truthvalue (TREE_OPERAND (arg, 1)));
2407 case WITH_RECORD_EXPR:
2408 return build (WITH_RECORD_EXPR, type,
2409 invert_truthvalue (TREE_OPERAND (arg, 0)),
2410 TREE_OPERAND (arg, 1));
2412 case NON_LVALUE_EXPR:
2413 return invert_truthvalue (TREE_OPERAND (arg, 0));
/* Conversions etc.: invert underneath and rebuild the wrapper. */
2418 return build1 (TREE_CODE (arg), type,
2419 invert_truthvalue (TREE_OPERAND (arg, 0)));
/* (x & 1) can only be inverted when the mask is exactly 1. */
2422 if (!integer_onep (TREE_OPERAND (arg, 1)))
2424 return build (EQ_EXPR, type, arg, convert (type, integer_zero_node));
2427 return build1 (TRUTH_NOT_EXPR, type, arg);
2429 case CLEANUP_POINT_EXPR:
2430 return build1 (CLEANUP_POINT_EXPR, type,
2431 invert_truthvalue (TREE_OPERAND (arg, 0)));
/* Default: only boolean-typed args may be wrapped in TRUTH_NOT_EXPR. */
2436 if (TREE_CODE (TREE_TYPE (arg)) != BOOLEAN_TYPE)
2438 return build1 (TRUTH_NOT_EXPR, type, arg);
2441 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
2442 operands are another bit-wise operation with a common input. If so,
2443 distribute the bit operations to save an operation and possibly two if
2444 constants are involved. For example, convert
2445 (A | B) & (A | C) into A | (B & C)
2446 Further simplification will occur if B and C are constants.
2448 If this optimization cannot be done, 0 will be returned. */
2451 distribute_bit_expr (enum tree_code code, tree type, tree arg0, tree arg1)
/* Both operands must be the same inner bit operation, and that inner
   operation must differ from CODE (otherwise nothing distributes). */
2456 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2457 || TREE_CODE (arg0) == code
2458 || (TREE_CODE (arg0) != BIT_AND_EXPR
2459 && TREE_CODE (arg0) != BIT_IOR_EXPR))
/* Find the shared operand; try all four position combinations. */
2462 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
2464 common = TREE_OPERAND (arg0, 0);
2465 left = TREE_OPERAND (arg0, 1);
2466 right = TREE_OPERAND (arg1, 1);
2468 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
2470 common = TREE_OPERAND (arg0, 0);
2471 left = TREE_OPERAND (arg0, 1);
2472 right = TREE_OPERAND (arg1, 0);
2474 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
2476 common = TREE_OPERAND (arg0, 1);
2477 left = TREE_OPERAND (arg0, 0);
2478 right = TREE_OPERAND (arg1, 1);
2480 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
2482 common = TREE_OPERAND (arg0, 1);
2483 left = TREE_OPERAND (arg0, 0);
2484 right = TREE_OPERAND (arg1, 0);
/* Rebuild as INNER (common, CODE (left, right)) and let fold simplify. */
2489 return fold (build (TREE_CODE (arg0), type, common,
2490 fold (build (code, type, left, right))));
2493 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
2494 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
2497 make_bit_field_ref (tree inner, tree type, int bitsize, int bitpos,
2500 tree result = build (BIT_FIELD_REF, type, inner,
2501 size_int (bitsize), bitsize_int (bitpos));
2503 TREE_UNSIGNED (result) = unsignedp;
2508 /* Optimize a bit-field compare.
2510 There are two cases: First is a compare against a constant and the
2511 second is a comparison of two items where the fields are at the same
2512 bit position relative to the start of a chunk (byte, halfword, word)
2513 large enough to contain it. In these cases we can avoid the shift
2514 implicit in bitfield extractions.
2516 For constants, we emit a compare of the shifted constant with the
2517 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
2518 compared. For two fields at the same position, we do the ANDs with the
2519 similar mask and compare the result of the ANDs.
2521 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
2522 COMPARE_TYPE is the type of the comparison, and LHS and RHS
2523 are the left and right operands of the comparison, respectively.
2525 If the optimization described above can be done, we return the resulting
2526 tree. Otherwise we return zero. */
2529 optimize_bit_field_compare (enum tree_code code, tree compare_type,
2532 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
2533 tree type = TREE_TYPE (lhs);
2534 tree signed_type, unsigned_type;
2535 int const_p = TREE_CODE (rhs) == INTEGER_CST;
2536 enum machine_mode lmode, rmode, nmode;
2537 int lunsignedp, runsignedp;
2538 int lvolatilep = 0, rvolatilep = 0;
2539 tree linner, rinner = NULL_TREE;
2543 /* Get all the information about the extractions being done. If the bit size
2544 is the same as the size of the underlying object, we aren't doing an
2545 extraction at all and so can do nothing. We also don't want to
2546 do anything if the inner expression is a PLACEHOLDER_EXPR since we
2547 then will no longer be able to replace it. */
2548 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
2549 &lunsignedp, &lvolatilep);
2550 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
2551 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
2556 /* If this is not a constant, we can only do something if bit positions,
2557 sizes, and signedness are the same. */
2558 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
2559 &runsignedp, &rvolatilep);
2561 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
2562 || lunsignedp != runsignedp || offset != 0
2563 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
2567 /* See if we can find a mode to refer to this field. We should be able to,
2568 but fail if we can't. */
2569 nmode = get_best_mode (lbitsize, lbitpos,
2570 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
2571 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
2572 TYPE_ALIGN (TREE_TYPE (rinner))),
2573 word_mode, lvolatilep || rvolatilep);
2574 if (nmode == VOIDmode)
2577 /* Set signed and unsigned types of the precision of this mode for the
2579 signed_type = (*lang_hooks.types.type_for_mode) (nmode, 0);
2580 unsigned_type = (*lang_hooks.types.type_for_mode) (nmode, 1);
2582 /* Compute the bit position and size for the new reference and our offset
2583 within it. If the new reference is the same size as the original, we
2584 won't optimize anything, so return zero. */
2585 nbitsize = GET_MODE_BITSIZE (nmode);
2586 nbitpos = lbitpos & ~ (nbitsize - 1);
2588 if (nbitsize == lbitsize)
/* On big-endian targets bit 0 is at the other end of the containing
   unit, so recompute the field's position from that end.  */
2591 if (BYTES_BIG_ENDIAN)
2592 lbitpos = nbitsize - lbitsize - lbitpos;
2594 /* Make the mask to be used against the extracted field. */
/* All-ones constant, truncated to UNSIGNED_TYPE, then shifted so that
   exactly the LBITSIZE bits at LBITPOS are set.  */
2595 mask = build_int_2 (~0, ~0);
2596 TREE_TYPE (mask) = unsigned_type;
2597 force_fit_type (mask, 0);
2598 mask = convert (unsigned_type, mask);
2599 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
2600 mask = const_binop (RSHIFT_EXPR, mask,
2601 size_int (nbitsize - lbitsize - lbitpos), 0);
2604 /* If not comparing with constant, just rework the comparison
2606 return build (code, compare_type,
2607 build (BIT_AND_EXPR, unsigned_type,
2608 make_bit_field_ref (linner, unsigned_type,
2609 nbitsize, nbitpos, 1),
2611 build (BIT_AND_EXPR, unsigned_type,
2612 make_bit_field_ref (rinner, unsigned_type,
2613 nbitsize, nbitpos, 1),
2616 /* Otherwise, we are handling the constant case. See if the constant is too
2617 big for the field. Warn and return a tree for 0 (false) if so. We do
2618 this not only for its own sake, but to avoid having to test for this
2619 error case below. If we didn't, we might generate wrong code.
2621 For unsigned fields, the constant shifted right by the field length should
2622 be all zero. For signed fields, the high-order bits should agree with
2627 if (! integer_zerop (const_binop (RSHIFT_EXPR,
2628 convert (unsigned_type, rhs),
2629 size_int (lbitsize), 0)))
2631 warning ("comparison is always %d due to width of bit-field",
2633 return convert (compare_type,
2635 ? integer_one_node : integer_zero_node));
/* Signed field: the bits above the sign bit must all equal it.  */
2640 tree tem = const_binop (RSHIFT_EXPR, convert (signed_type, rhs),
2641 size_int (lbitsize - 1), 0);
2642 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
2644 warning ("comparison is always %d due to width of bit-field",
2646 return convert (compare_type,
2648 ? integer_one_node : integer_zero_node));
2652 /* Single-bit compares should always be against zero. */
2653 if (lbitsize == 1 && ! integer_zerop (rhs))
2655 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
2656 rhs = convert (type, integer_zero_node);
2659 /* Make a new bitfield reference, shift the constant over the
2660 appropriate number of bits and mask it with the computed mask
2661 (in case this was a signed field). If we changed it, make a new one. */
2662 lhs = make_bit_field_ref (linner, unsigned_type, nbitsize, nbitpos, 1);
2665 TREE_SIDE_EFFECTS (lhs) = 1;
2666 TREE_THIS_VOLATILE (lhs) = 1;
2669 rhs = fold (const_binop (BIT_AND_EXPR,
2670 const_binop (LSHIFT_EXPR,
2671 convert (unsigned_type, rhs),
2672 size_int (lbitpos), 0),
2675 return build (code, compare_type,
2676 build (BIT_AND_EXPR, unsigned_type, lhs, mask),
2680 /* Subroutine for fold_truthop: decode a field reference.
2682 If EXP is a comparison reference, we return the innermost reference.
2684 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
2685 set to the starting bit number.
2687 If the innermost field can be completely contained in a mode-sized
2688 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
2690 *PVOLATILEP is set to 1 if any expression encountered is volatile;
2691 otherwise it is not changed.
2693 *PUNSIGNEDP is set to the signedness of the field.
2695 *PMASK is set to the mask used. This is either contained in a
2696 BIT_AND_EXPR or derived from the width of the field.
2698 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
2700 Return 0 if this is not a component reference or is one that we can't
2701 do anything with. */
2704 decode_field_reference (tree exp, HOST_WIDE_INT *pbitsize,
2705 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
2706 int *punsignedp, int *pvolatilep,
2707 tree *pmask, tree *pand_mask)
2709 tree outer_type = 0;
2711 tree mask, inner, offset;
2713 unsigned int precision;
2715 /* All the optimizations using this function assume integer fields.
2716 There are problems with FP fields since the type_for_size call
2717 below can fail for, e.g., XFmode. */
2718 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
2721 /* We are interested in the bare arrangement of bits, so strip everything
2722 that doesn't affect the machine mode. However, record the type of the
2723 outermost expression if it may matter below. */
2724 if (TREE_CODE (exp) == NOP_EXPR
2725 || TREE_CODE (exp) == CONVERT_EXPR
2726 || TREE_CODE (exp) == NON_LVALUE_EXPR)
2727 outer_type = TREE_TYPE (exp);
/* Peel off an explicit AND mask, if present; it is merged with the
   field-width mask below.  */
2730 if (TREE_CODE (exp) == BIT_AND_EXPR)
2732 and_mask = TREE_OPERAND (exp, 1);
2733 exp = TREE_OPERAND (exp, 0);
2734 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
2735 if (TREE_CODE (and_mask) != INTEGER_CST)
2739 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
2740 punsignedp, pvolatilep);
2741 if ((inner == exp && and_mask == 0)
2742 || *pbitsize < 0 || offset != 0
2743 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
2746 /* If the number of bits in the reference is the same as the bitsize of
2747 the outer type, then the outer type gives the signedness. Otherwise
2748 (in case of a small bitfield) the signedness is unchanged. */
2749 if (outer_type && *pbitsize == tree_low_cst (TYPE_SIZE (outer_type), 1))
2750 *punsignedp = TREE_UNSIGNED (outer_type);
2752 /* Compute the mask to access the bitfield. */
2753 unsigned_type = (*lang_hooks.types.type_for_size) (*pbitsize, 1);
2754 precision = TYPE_PRECISION (unsigned_type);
/* All-ones constant shifted left then right by the same amount, leaving
   exactly *PBITSIZE low-order one bits.  */
2756 mask = build_int_2 (~0, ~0);
2757 TREE_TYPE (mask) = unsigned_type;
2758 force_fit_type (mask, 0);
2759 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
2760 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
2762 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
2764 mask = fold (build (BIT_AND_EXPR, unsigned_type,
2765 convert (unsigned_type, and_mask), mask));
2768 *pand_mask = and_mask;
2772 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
2776 all_ones_mask_p (tree mask, int size)
2778 tree type = TREE_TYPE (mask);
2779 unsigned int precision = TYPE_PRECISION (type);
/* Build an all-ones constant in the signed variant of TYPE, then shift
   it left and back right by PRECISION - SIZE to produce a reference mask
   of exactly SIZE low-order ones, and compare MASK against it.  */
2782 tmask = build_int_2 (~0, ~0);
2783 TREE_TYPE (tmask) = (*lang_hooks.types.signed_type) (type);
2784 force_fit_type (tmask, 0);
2786 tree_int_cst_equal (mask,
2787 const_binop (RSHIFT_EXPR,
2788 const_binop (LSHIFT_EXPR, tmask,
2789 size_int (precision - size),
2791 size_int (precision - size), 0));
2794 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
2795 represents the sign bit of EXP's type. If EXP represents a sign
2796 or zero extension, also test VAL against the unextended type.
2797 The return value is the (sub)expression whose sign bit is VAL,
2798 or NULL_TREE otherwise. */
2801 sign_bit_p (tree exp, tree val)
2803 unsigned HOST_WIDE_INT mask_lo, lo;
2804 HOST_WIDE_INT mask_hi, hi;
2808 /* Tree EXP must have an integral type. */
2809 t = TREE_TYPE (exp);
2810 if (! INTEGRAL_TYPE_P (t))
2813 /* Tree VAL must be an integer constant. */
2814 if (TREE_CODE (val) != INTEGER_CST
2815 || TREE_CONSTANT_OVERFLOW (val))
/* Compute the sign-bit constant (HI:LO) and a mask covering the type's
   WIDTH valid bits (MASK_HI:MASK_LO), split across the two
   HOST_WIDE_INT halves of an INTEGER_CST.  */
2818 width = TYPE_PRECISION (t);
2819 if (width > HOST_BITS_PER_WIDE_INT)
2821 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
2824 mask_hi = ((unsigned HOST_WIDE_INT) -1
2825 >> (2 * HOST_BITS_PER_WIDE_INT - width));
2831 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
2834 mask_lo = ((unsigned HOST_WIDE_INT) -1
2835 >> (HOST_BITS_PER_WIDE_INT - width));
2838 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
2839 treat VAL as if it were unsigned. */
2840 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
2841 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
2844 /* Handle extension from a narrower type. */
2845 if (TREE_CODE (exp) == NOP_EXPR
2846 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
2847 return sign_bit_p (TREE_OPERAND (exp, 0), val);
2852 /* Subroutine for fold_truthop: determine if an operand is simple enough
2853 to be evaluated unconditionally. */
2856 simple_operand_p (tree exp)
2858 /* Strip any conversions that don't change the machine mode. */
2859 while ((TREE_CODE (exp) == NOP_EXPR
2860 || TREE_CODE (exp) == CONVERT_EXPR)
2861 && (TYPE_MODE (TREE_TYPE (exp))
2862 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
2863 exp = TREE_OPERAND (exp, 0);
/* Constants ('c' class) are always simple; the remaining tests accept
   only cheap, safe-to-evaluate declarations.  */
2865 return (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c'
2867 && ! TREE_ADDRESSABLE (exp)
2868 && ! TREE_THIS_VOLATILE (exp)
2869 && ! DECL_NONLOCAL (exp)
2870 /* Don't regard global variables as simple. They may be
2871 allocated in ways unknown to the compiler (shared memory,
2872 #pragma weak, etc). */
2873 && ! TREE_PUBLIC (exp)
2874 && ! DECL_EXTERNAL (exp)
2875 /* Loading a static variable is unduly expensive, but global
2876 registers aren't expensive. */
2877 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
2880 /* The following functions are subroutines to fold_range_test and allow it to
2881 try to change a logical combination of comparisons into a range test.
2884 X == 2 || X == 3 || X == 4 || X == 5
2888 (unsigned) (X - 2) <= 3
2890 We describe each set of comparisons as being either inside or outside
2891 a range, using a variable named like IN_P, and then describe the
2892 range with a lower and upper bound. If one of the bounds is omitted,
2893 it represents either the highest or lowest value of the type.
2895 In the comments below, we represent a range by two numbers in brackets
2896 preceded by a "+" to designate being inside that range, or a "-" to
2897 designate being outside that range, so the condition can be inverted by
2898 flipping the prefix. An omitted bound is represented by a "-". For
2899 example, "- [-, 10]" means being outside the range starting at the lowest
2900 possible value and ending at 10, in other words, being greater than 10.
2901 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
2904 We set up things so that the missing bounds are handled in a consistent
2905 manner so neither a missing bound nor "true" and "false" need to be
2906 handled using a special case. */
2908 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
2909 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
2910 and UPPER1_P are nonzero if the respective argument is an upper bound
2911 and zero for a lower. TYPE, if nonzero, is the type of the result; it
2912 must be specified for a comparison. ARG1 will be converted to ARG0's
2913 type if both are specified. */
2916 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
2917 tree arg1, int upper1_p)
2923 /* If neither arg represents infinity, do the normal operation.
2924 Else, if not a comparison, return infinity. Else handle the special
2925 comparison rules. Note that most of the cases below won't occur, but
2926 are handled for consistency. */
2928 if (arg0 != 0 && arg1 != 0)
2930 tem = fold (build (code, type != 0 ? type : TREE_TYPE (arg0),
2931 arg0, convert (TREE_TYPE (arg0), arg1)));
2933 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
2936 if (TREE_CODE_CLASS (code) != '<')
2939 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
2940 for neither. In real maths, we cannot assume open ended ranges are
2941 the same. But, this is computer arithmetic, where numbers are finite.
2942 We can therefore make the transformation of any unbounded range with
2943 the value Z, Z being greater than any representable number. This permits
2944 us to treat unbounded ranges as equal. */
2945 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
2946 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
/* NOTE(review): the switch case labels are elided in this excerpt; each
   result below appears to correspond to EQ, NE, LT, LE, GT, GE in turn —
   confirm against the unabridged source.  */
2950 result = sgn0 == sgn1;
2953 result = sgn0 != sgn1;
2956 result = sgn0 < sgn1;
2959 result = sgn0 <= sgn1;
2962 result = sgn0 > sgn1;
2965 result = sgn0 >= sgn1;
2971 return convert (type, result ? integer_one_node : integer_zero_node);
2974 /* Given EXP, a logical expression, set the range it is testing into
2975 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
2976 actually being tested. *PLOW and *PHIGH will be made of the same type
2977 as the returned expression. If EXP is not a comparison, we will most
2978 likely not be returning a useful value and range. */
2981 make_range (tree exp, int *pin_p, tree *plow, tree *phigh)
2983 enum tree_code code;
2984 tree arg0 = NULL_TREE, arg1 = NULL_TREE, type = NULL_TREE;
2985 tree orig_type = NULL_TREE;
2987 tree low, high, n_low, n_high;
2989 /* Start with simply saying "EXP != 0" and then look at the code of EXP
2990 and see if we can refine the range. Some of the cases below may not
2991 happen, but it doesn't seem worth worrying about this. We "continue"
2992 the outer loop when we've changed something; otherwise we "break"
2993 the switch, which will "break" the while. */
2995 in_p = 0, low = high = convert (TREE_TYPE (exp), integer_zero_node);
/* Walk down EXP, refining IN_P / [LOW, HIGH] at each step until no
   further refinement is possible.  */
2999 code = TREE_CODE (exp);
3001 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
3003 if (first_rtl_op (code) > 0)
3004 arg0 = TREE_OPERAND (exp, 0);
3005 if (TREE_CODE_CLASS (code) == '<'
3006 || TREE_CODE_CLASS (code) == '1'
3007 || TREE_CODE_CLASS (code) == '2')
3008 type = TREE_TYPE (arg0);
3009 if (TREE_CODE_CLASS (code) == '2'
3010 || TREE_CODE_CLASS (code) == '<'
3011 || (TREE_CODE_CLASS (code) == 'e'
3012 && TREE_CODE_LENGTH (code) > 1))
3013 arg1 = TREE_OPERAND (exp, 1);
3016 /* Set ORIG_TYPE as soon as TYPE is non-null so that we do not
3017 lose a cast by accident. */
3018 if (type != NULL_TREE && orig_type == NULL_TREE)
3023 case TRUTH_NOT_EXPR:
3024 in_p = ! in_p, exp = arg0;
3027 case EQ_EXPR: case NE_EXPR:
3028 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3029 /* We can only do something if the range is testing for zero
3030 and if the second operand is an integer constant. Note that
3031 saying something is "in" the range we make is done by
3032 complementing IN_P since it will set in the initial case of
3033 being not equal to zero; "out" is leaving it alone. */
3034 if (low == 0 || high == 0
3035 || ! integer_zerop (low) || ! integer_zerop (high)
3036 || TREE_CODE (arg1) != INTEGER_CST)
3041 case NE_EXPR: /* - [c, c] */
3044 case EQ_EXPR: /* + [c, c] */
3045 in_p = ! in_p, low = high = arg1;
3047 case GT_EXPR: /* - [-, c] */
3048 low = 0, high = arg1;
3050 case GE_EXPR: /* + [c, -] */
3051 in_p = ! in_p, low = arg1, high = 0;
3053 case LT_EXPR: /* - [c, -] */
3054 low = arg1, high = 0;
3056 case LE_EXPR: /* + [-, c] */
3057 in_p = ! in_p, low = 0, high = arg1;
3065 /* If this is an unsigned comparison, we also know that EXP is
3066 greater than or equal to zero. We base the range tests we make
3067 on that fact, so we record it here so we can parse existing
3069 if (TREE_UNSIGNED (type) && (low == 0 || high == 0))
3071 if (! merge_ranges (&n_in_p, &n_low, &n_high, in_p, low, high,
3072 1, convert (type, integer_zero_node),
3076 in_p = n_in_p, low = n_low, high = n_high;
3078 /* If the high bound is missing, but we
3079 have a low bound, reverse the range so
3080 it goes from zero to the low bound minus 1. */
3081 if (high == 0 && low)
3084 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
3085 integer_one_node, 0);
3086 low = convert (type, integer_zero_node);
3092 /* (-x) IN [a,b] -> x in [-b, -a] */
3093 n_low = range_binop (MINUS_EXPR, type,
3094 convert (type, integer_zero_node), 0, high, 1);
3095 n_high = range_binop (MINUS_EXPR, type,
3096 convert (type, integer_zero_node), 0, low, 0);
3097 low = n_low, high = n_high;
3103 exp = build (MINUS_EXPR, type, negate_expr (arg0),
3104 convert (type, integer_one_node));
3107 case PLUS_EXPR: case MINUS_EXPR:
3108 if (TREE_CODE (arg1) != INTEGER_CST)
3111 /* If EXP is signed, any overflow in the computation is undefined,
3112 so we don't worry about it so long as our computations on
3113 the bounds don't overflow. For unsigned, overflow is defined
3114 and this is exactly the right thing. */
3115 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3116 type, low, 0, arg1, 0);
3117 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3118 type, high, 1, arg1, 0);
3119 if ((n_low != 0 && TREE_OVERFLOW (n_low))
3120 || (n_high != 0 && TREE_OVERFLOW (n_high)))
3123 /* Check for an unsigned range which has wrapped around the maximum
3124 value thus making n_high < n_low, and normalize it. */
3125 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
3127 low = range_binop (PLUS_EXPR, type, n_high, 0,
3128 integer_one_node, 0);
3129 high = range_binop (MINUS_EXPR, type, n_low, 0,
3130 integer_one_node, 0);
3132 /* If the range is of the form +/- [ x+1, x ], we won't
3133 be able to normalize it. But then, it represents the
3134 whole range or the empty set, so make it
3136 if (tree_int_cst_equal (n_low, low)
3137 && tree_int_cst_equal (n_high, high))
3143 low = n_low, high = n_high;
3148 case NOP_EXPR: case NON_LVALUE_EXPR: case CONVERT_EXPR:
3149 if (TYPE_PRECISION (type) > TYPE_PRECISION (orig_type))
3152 if (! INTEGRAL_TYPE_P (type)
3153 || (low != 0 && ! int_fits_type_p (low, type))
3154 || (high != 0 && ! int_fits_type_p (high, type)))
3157 n_low = low, n_high = high;
3160 n_low = convert (type, n_low);
3163 n_high = convert (type, n_high);
3165 /* If we're converting from an unsigned to a signed type,
3166 we will be doing the comparison as unsigned. The tests above
3167 have already verified that LOW and HIGH are both positive.
3169 So we have to make sure that the original unsigned value will
3170 be interpreted as positive. */
3171 if (TREE_UNSIGNED (type) && ! TREE_UNSIGNED (TREE_TYPE (exp)))
3173 tree equiv_type = (*lang_hooks.types.type_for_mode)
3174 (TYPE_MODE (type), 1);
3177 /* A range without an upper bound is, naturally, unbounded.
3178 Since convert would have cropped a very large value, use
3179 the max value for the destination type. */
3181 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
3182 : TYPE_MAX_VALUE (type);
3184 if (TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (exp)))
3185 high_positive = fold (build (RSHIFT_EXPR, type,
3186 convert (type, high_positive),
3187 convert (type, integer_one_node)));
3189 /* If the low bound is specified, "and" the range with the
3190 range for which the original unsigned value will be
3194 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3196 1, convert (type, integer_zero_node),
3200 in_p = (n_in_p == in_p);
3204 /* Otherwise, "or" the range with the range of the input
3205 that will be interpreted as negative. */
3206 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3208 1, convert (type, integer_zero_node),
3212 in_p = (in_p != n_in_p);
3217 low = n_low, high = n_high;
3227 /* If EXP is a constant, we can evaluate whether this is true or false. */
3228 if (TREE_CODE (exp) == INTEGER_CST)
3230 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
3232 && integer_onep (range_binop (LE_EXPR, integer_type_node,
3238 *pin_p = in_p, *plow = low, *phigh = high;
3242 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
3243 type, TYPE, return an expression to test if EXP is in (or out of, depending
3244 on IN_P) the range. */
3247 build_range_check (tree type, tree exp, int in_p, tree low, tree high)
3249 tree etype = TREE_TYPE (exp);
/* For an "out of range" test, build the "in range" test and invert it.  */
3253 && (0 != (value = build_range_check (type, exp, 1, low, high))))
3254 return invert_truthvalue (value);
/* No bounds at all: the range is the whole type, i.e. always true.  */
3256 if (low == 0 && high == 0)
3257 return convert (type, integer_one_node);
/* Only an upper bound: EXP <= HIGH.  */
3260 return fold (build (LE_EXPR, type, exp, high));
/* Only a lower bound: EXP >= LOW.  */
3263 return fold (build (GE_EXPR, type, exp, low));
/* Degenerate single-value range: EXP == LOW.  */
3265 if (operand_equal_p (low, high, 0))
3266 return fold (build (EQ_EXPR, type, exp, low));
/* Range starting at zero: force an unsigned comparison so the single
   test EXP <= HIGH covers both bounds.  */
3268 if (integer_zerop (low))
3270 if (! TREE_UNSIGNED (etype))
3272 etype = (*lang_hooks.types.unsigned_type) (etype);
3273 high = convert (etype, high);
3274 exp = convert (etype, exp);
3276 return build_range_check (type, exp, 1, 0, high);
3279 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
3280 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
3282 unsigned HOST_WIDE_INT lo;
3286 prec = TYPE_PRECISION (etype);
3287 if (prec <= HOST_BITS_PER_WIDE_INT)
3290 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
3294 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
3295 lo = (unsigned HOST_WIDE_INT) -1;
/* HIGH equals the signed maximum of ETYPE: [1, MAX] is just "> 0"
   in the signed variant of the type.  */
3298 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
3300 if (TREE_UNSIGNED (etype))
3302 etype = (*lang_hooks.types.signed_type) (etype);
3303 exp = convert (etype, exp);
3305 return fold (build (GT_EXPR, type, exp,
3306 convert (etype, integer_zero_node)));
/* General case: check EXP - LOW against [0, HIGH - LOW], recursing into
   the zero-low case above.  */
3310 if (0 != (value = const_binop (MINUS_EXPR, high, low, 0))
3311 && ! TREE_OVERFLOW (value))
3312 return build_range_check (type,
3313 fold (build (MINUS_EXPR, etype, exp, low)),
3314 1, convert (etype, integer_zero_node), value);
3319 /* Given two ranges, see if we can merge them into one. Return 1 if we
3320 can, 0 if we can't. Set the output range into the specified parameters. */
3323 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
3324 tree high0, int in1_p, tree low1, tree high1)
/* Compare corresponding bounds once up front; a pair of NULL (infinite)
   bounds counts as equal.  */
3332 int lowequal = ((low0 == 0 && low1 == 0)
3333 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
3334 low0, 0, low1, 0)));
3335 int highequal = ((high0 == 0 && high1 == 0)
3336 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
3337 high0, 1, high1, 1)));
3339 /* Make range 0 be the range that starts first, or ends last if they
3340 start at the same value. Swap them if it isn't. */
3341 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
3344 && integer_onep (range_binop (GT_EXPR, integer_type_node,
3345 high1, 1, high0, 1))))
3347 temp = in0_p, in0_p = in1_p, in1_p = temp;
3348 tem = low0, low0 = low1, low1 = tem;
3349 tem = high0, high0 = high1, high1 = tem;
3352 /* Now flag two cases, whether the ranges are disjoint or whether the
3353 second range is totally subsumed in the first. Note that the tests
3354 below are simplified by the ones above. */
3355 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
3356 high0, 1, low1, 0));
3357 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
3358 high1, 1, high0, 1));
3360 /* We now have four cases, depending on whether we are including or
3361 excluding the two ranges. */
3364 /* If they don't overlap, the result is false. If the second range
3365 is a subset it is the result. Otherwise, the range is from the start
3366 of the second to the end of the first. */
3368 in_p = 0, low = high = 0;
3370 in_p = 1, low = low1, high = high1;
3372 in_p = 1, low = low1, high = high0;
3375 else if (in0_p && ! in1_p)
3377 /* If they don't overlap, the result is the first range. If they are
3378 equal, the result is false. If the second range is a subset of the
3379 first, and the ranges begin at the same place, we go from just after
3380 the end of the first range to the end of the second. If the second
3381 range is not a subset of the first, or if it is a subset and both
3382 ranges end at the same place, the range starts at the start of the
3383 first range and ends just before the second range.
3384 Otherwise, we can't describe this as a single range. */
3386 in_p = 1, low = low0, high = high0;
3387 else if (lowequal && highequal)
3388 in_p = 0, low = high = 0;
3389 else if (subset && lowequal)
3391 in_p = 1, high = high0;
3392 low = range_binop (PLUS_EXPR, NULL_TREE, high1, 0,
3393 integer_one_node, 0);
3395 else if (! subset || highequal)
3397 in_p = 1, low = low0;
3398 high = range_binop (MINUS_EXPR, NULL_TREE, low1, 0,
3399 integer_one_node, 0);
3405 else if (! in0_p && in1_p)
3407 /* If they don't overlap, the result is the second range. If the second
3408 is a subset of the first, the result is false. Otherwise,
3409 the range starts just after the first range and ends at the
3410 end of the second. */
3412 in_p = 1, low = low1, high = high1;
3413 else if (subset || highequal)
3414 in_p = 0, low = high = 0;
3417 in_p = 1, high = high1;
3418 low = range_binop (PLUS_EXPR, NULL_TREE, high0, 1,
3419 integer_one_node, 0);
3425 /* The case where we are excluding both ranges. Here the complex case
3426 is if they don't overlap. In that case, the only time we have a
3427 range is if they are adjacent. If the second is a subset of the
3428 first, the result is the first. Otherwise, the range to exclude
3429 starts at the beginning of the first range and ends at the end of the
3433 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
3434 range_binop (PLUS_EXPR, NULL_TREE,
3436 integer_one_node, 1),
3438 in_p = 0, low = low0, high = high1;
3443 in_p = 0, low = low0, high = high0;
3445 in_p = 0, low = low0, high = high1;
3448 *pin_p = in_p, *plow = low, *phigh = high;
3452 #ifndef RANGE_TEST_NON_SHORT_CIRCUIT
/* Default for targets that do not override it: allow fold_range_test to
   replace a short-circuit combination with a plain logical operation
   whenever branches are considered expensive.  */
3453 #define RANGE_TEST_NON_SHORT_CIRCUIT (BRANCH_COST >= 2)
3456 /* EXP is some logical combination of boolean tests. See if we can
3457 merge it into some range test. Return the new tree if so. */
3460 fold_range_test (tree exp)
3462 int or_op = (TREE_CODE (exp) == TRUTH_ORIF_EXPR
3463 || TREE_CODE (exp) == TRUTH_OR_EXPR);
3464 int in0_p, in1_p, in_p;
3465 tree low0, low1, low, high0, high1, high;
3466 tree lhs = make_range (TREE_OPERAND (exp, 0), &in0_p, &low0, &high0);
3467 tree rhs = make_range (TREE_OPERAND (exp, 1), &in1_p, &low1, &high1);
3470 /* If this is an OR operation, invert both sides; we will invert
3471 again at the end. */
3473 in0_p = ! in0_p, in1_p = ! in1_p;
3475 /* If both expressions are the same, if we can merge the ranges, and we
3476 can build the range test, return it or it inverted. If one of the
3477 ranges is always true or always false, consider it to be the same
3478 expression as the other. */
3479 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
3480 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
3482 && 0 != (tem = (build_range_check (TREE_TYPE (exp),
3484 : rhs != 0 ? rhs : integer_zero_node,
3486 return or_op ? invert_truthvalue (tem) : tem;
3488 /* On machines where the branch cost is expensive, if this is a
3489 short-circuited branch and the underlying object on both sides
3490 is the same, make a non-short-circuit operation. */
3491 else if (RANGE_TEST_NON_SHORT_CIRCUIT
3492 && lhs != 0 && rhs != 0
3493 && (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
3494 || TREE_CODE (exp) == TRUTH_ORIF_EXPR)
3495 && operand_equal_p (lhs, rhs, 0))
3497 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
3498 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
3499 which cases we can't do this. */
3500 if (simple_operand_p (lhs))
3501 return build (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
3502 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
3503 TREE_TYPE (exp), TREE_OPERAND (exp, 0),
3504 TREE_OPERAND (exp, 1));
3506 else if ((*lang_hooks.decls.global_bindings_p) () == 0
3507 && ! CONTAINS_PLACEHOLDER_P (lhs))
/* Evaluate the common operand once via SAVE_EXPR, then rebuild both
   range checks against it.  */
3509 tree common = save_expr (lhs);
3511 if (0 != (lhs = build_range_check (TREE_TYPE (exp), common,
3512 or_op ? ! in0_p : in0_p,
3514 && (0 != (rhs = build_range_check (TREE_TYPE (exp), common,
3515 or_op ? ! in1_p : in1_p,
3517 return build (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
3518 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
3519 TREE_TYPE (exp), lhs, rhs);
3526 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
3527 bit value. Arrange things so the extra bits will be set to zero if and
3528 only if C is signed-extended to its full width. If MASK is nonzero,
3529 it is an INTEGER_CST that should be AND'ed with the extra bits. */
3532 unextend (tree c, int p, int unsignedp, tree mask)
3534 tree type = TREE_TYPE (c);
3535 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
/* A full-width or unsigned constant needs no adjustment.  */
3538 if (p == modesize || unsignedp)
3541 /* We work by getting just the sign bit into the low-order bit, then
3542 into the high-order bit, then sign-extend. We then XOR that value
3544 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
3545 temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
3547 /* We must use a signed type in order to get an arithmetic right shift.
3548 However, we must also avoid introducing accidental overflows, so that
3549 a subsequent call to integer_zerop will work. Hence we must
3550 do the type conversion here. At this point, the constant is either
3551 zero or one, and the conversion to a signed type can never overflow.
3552 We could get an overflow if this conversion is done anywhere else. */
3553 if (TREE_UNSIGNED (type))
3554 temp = convert ((*lang_hooks.types.signed_type) (type), temp);
3556 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
3557 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
/* If a MASK was supplied, restrict the extension bits to it.  */
3559 temp = const_binop (BIT_AND_EXPR, temp, convert (TREE_TYPE (c), mask), 0);
3560 /* If necessary, convert the type back to match the type of C. */
3561 if (TREE_UNSIGNED (type))
3562 temp = convert (type, temp);
3564 return convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
3567 /* Find ways of folding logical expressions of LHS and RHS:
3568 Try to merge two comparisons to the same innermost item.
3569 Look for range tests like "ch >= '0' && ch <= '9'".
3570 Look for combinations of simple terms on machines with expensive branches
3571 and evaluate the RHS unconditionally.
3573 For example, if we have p->a == 2 && p->b == 4 and we can make an
3574 object large enough to span both A and B, we can do this with a comparison
3575 against the object ANDed with the a mask.
3577 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
3578 operations to do this with one comparison.
3580 We check for both normal comparisons and the BIT_AND_EXPRs made this by
3581 function and the one above.
3583 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
3584 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
3586 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
3589 We return the simplified tree or 0 if no optimization is possible. */
/* NOTE(review): sampled extraction — braces, some returns, and several
   statements are missing between the visible lines; comments added here
   describe only what the visible code shows.  */
3592 fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs)
3594 /* If this is the "or" of two comparisons, we can do something if
3595 the comparisons are NE_EXPR. If this is the "and", we can do something
3596 if the comparisons are EQ_EXPR. I.e.,
3597 (a->b == 2 && a->c == 4) can become (a->new == NEW).
3599 WANTED_CODE is this operation code. For single bit fields, we can
3600 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
3601 comparison for one-bit fields. */
3603 enum tree_code wanted_code;
3604 enum tree_code lcode, rcode;
/* ll_/lr_ = operands 0/1 of the left comparison; rl_/rr_ = operands
   0/1 of the right comparison.  The bitsize/bitpos/mode/unsignedp/mask
   groups below are filled in by decode_field_reference for each.  */
3605 tree ll_arg, lr_arg, rl_arg, rr_arg;
3606 tree ll_inner, lr_inner, rl_inner, rr_inner;
3607 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
3608 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
3609 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
3610 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
3611 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
3612 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
3613 enum machine_mode lnmode, rnmode;
3614 tree ll_mask, lr_mask, rl_mask, rr_mask;
3615 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
3616 tree l_const, r_const;
3617 tree lntype, rntype, result;
3618 int first_bit, end_bit;
3621 /* Start by getting the comparison codes. Fail if anything is volatile.
3622 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
3623 it were surrounded with a NE_EXPR. */
3625 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
3628 lcode = TREE_CODE (lhs);
3629 rcode = TREE_CODE (rhs);
/* (x & 1) is canonicalized to (x & 1) != 0 so it merges like a test.  */
3631 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
3632 lcode = NE_EXPR, lhs = build (NE_EXPR, truth_type, lhs, integer_zero_node);
3634 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
3635 rcode = NE_EXPR, rhs = build (NE_EXPR, truth_type, rhs, integer_zero_node);
/* Both operands must be comparisons ('<' tree-code class) to proceed.  */
3637 if (TREE_CODE_CLASS (lcode) != '<' || TREE_CODE_CLASS (rcode) != '<')
/* Normalize the short-circuit forms to their plain counterparts.  */
3640 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
3641 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
3643 ll_arg = TREE_OPERAND (lhs, 0);
3644 lr_arg = TREE_OPERAND (lhs, 1);
3645 rl_arg = TREE_OPERAND (rhs, 0);
3646 rr_arg = TREE_OPERAND (rhs, 1);
3648 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
3649 if (simple_operand_p (ll_arg)
3650 && simple_operand_p (lr_arg)
3651 && !FLOAT_TYPE_P (TREE_TYPE (ll_arg)))
/* Same operands in the same order: combine the compcode bitmasks
   directly (AND for &&, OR for ||).  */
3655 if (operand_equal_p (ll_arg, rl_arg, 0)
3656 && operand_equal_p (lr_arg, rr_arg, 0))
3658 int lcompcode, rcompcode;
3660 lcompcode = comparison_to_compcode (lcode);
3661 rcompcode = comparison_to_compcode (rcode);
3662 compcode = (code == TRUTH_AND_EXPR)
3663 ? lcompcode & rcompcode
3664 : lcompcode | rcompcode;
/* Same operands but swapped on the right: flip the right comparison
   first, then combine as above.  */
3666 else if (operand_equal_p (ll_arg, rr_arg, 0)
3667 && operand_equal_p (lr_arg, rl_arg, 0))
3669 int lcompcode, rcompcode;
3671 rcode = swap_tree_comparison (rcode);
3672 lcompcode = comparison_to_compcode (lcode);
3673 rcompcode = comparison_to_compcode (rcode);
3674 compcode = (code == TRUTH_AND_EXPR)
3675 ? lcompcode & rcompcode
3676 : lcompcode | rcompcode;
/* A combined compcode of TRUE/FALSE folds to a constant; any other
   valid compcode folds to a single comparison; -1 means no merge.  */
3681 if (compcode == COMPCODE_TRUE)
3682 return convert (truth_type, integer_one_node);
3683 else if (compcode == COMPCODE_FALSE)
3684 return convert (truth_type, integer_zero_node);
3685 else if (compcode != -1)
3686 return build (compcode_to_comparison (compcode),
3687 truth_type, ll_arg, lr_arg);
3690 /* If the RHS can be evaluated unconditionally and its operands are
3691 simple, it wins to evaluate the RHS unconditionally on machines
3692 with expensive branches. In this case, this isn't a comparison
3693 that can be merged. Avoid doing this if the RHS is a floating-point
3694 comparison since those can trap. */
3696 if (BRANCH_COST >= 2
3697 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
3698 && simple_operand_p (rl_arg)
3699 && simple_operand_p (rr_arg))
3701 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
3702 if (code == TRUTH_OR_EXPR
3703 && lcode == NE_EXPR && integer_zerop (lr_arg)
3704 && rcode == NE_EXPR && integer_zerop (rr_arg)
3705 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
3706 return build (NE_EXPR, truth_type,
3707 build (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
3711 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
3712 if (code == TRUTH_AND_EXPR
3713 && lcode == EQ_EXPR && integer_zerop (lr_arg)
3714 && rcode == EQ_EXPR && integer_zerop (rr_arg)
3715 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
3716 return build (EQ_EXPR, truth_type,
3717 build (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
/* Otherwise rebuild with the non-short-circuit CODE so the RHS is
   evaluated unconditionally.  */
3721 return build (code, truth_type, lhs, rhs);
3724 /* See if the comparisons can be merged. Then get all the parameters for
/* Only equality comparisons participate in the bit-field merge below.  */
3727 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
3728 || (rcode != EQ_EXPR && rcode != NE_EXPR))
/* Decompose each of the four comparison operands into an inner object
   plus bit position/size/mode/signedness and masks.  */
3732 ll_inner = decode_field_reference (ll_arg,
3733 &ll_bitsize, &ll_bitpos, &ll_mode,
3734 &ll_unsignedp, &volatilep, &ll_mask,
3736 lr_inner = decode_field_reference (lr_arg,
3737 &lr_bitsize, &lr_bitpos, &lr_mode,
3738 &lr_unsignedp, &volatilep, &lr_mask,
3740 rl_inner = decode_field_reference (rl_arg,
3741 &rl_bitsize, &rl_bitpos, &rl_mode,
3742 &rl_unsignedp, &volatilep, &rl_mask,
3744 rr_inner = decode_field_reference (rr_arg,
3745 &rr_bitsize, &rr_bitpos, &rr_mode,
3746 &rr_unsignedp, &volatilep, &rr_mask,
3749 /* It must be true that the inner operation on the lhs of each
3750 comparison must be the same if we are to be able to do anything.
3751 Then see if we have constants. If not, the same must be true for
3753 if (volatilep || ll_inner == 0 || rl_inner == 0
3754 || ! operand_equal_p (ll_inner, rl_inner, 0))
3757 if (TREE_CODE (lr_arg) == INTEGER_CST
3758 && TREE_CODE (rr_arg) == INTEGER_CST)
3759 l_const = lr_arg, r_const = rr_arg;
3760 else if (lr_inner == 0 || rr_inner == 0
3761 || ! operand_equal_p (lr_inner, rr_inner, 0))
3764 l_const = r_const = 0;
3766 /* If either comparison code is not correct for our logical operation,
3767 fail. However, we can convert a one-bit comparison against zero into
3768 the opposite comparison against that bit being set in the field. */
3770 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
3771 if (lcode != wanted_code)
3773 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
3775 /* Make the left operand unsigned, since we are only interested
3776 in the value of one bit. Otherwise we are doing the wrong
3785 /* This is analogous to the code for l_const above. */
3786 if (rcode != wanted_code)
3788 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
3797 /* After this point all optimizations will generate bit-field
3798 references, which we might not want. */
3799 if (! (*lang_hooks.can_use_bit_fields_p) ())
3802 /* See if we can find a mode that contains both fields being compared on
3803 the left. If we can't, fail. Otherwise, update all constants and masks
3804 to be relative to a field of that size. */
3805 first_bit = MIN (ll_bitpos, rl_bitpos);
3806 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
3807 lnmode = get_best_mode (end_bit - first_bit, first_bit,
3808 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
3810 if (lnmode == VOIDmode)
3813 lnbitsize = GET_MODE_BITSIZE (lnmode);
3814 lnbitpos = first_bit & ~ (lnbitsize - 1);
3815 lntype = (*lang_hooks.types.type_for_size) (lnbitsize, 1);
/* xll/xrl are the field positions re-expressed relative to the start
   of the combined field.  */
3816 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
3818 if (BYTES_BIG_ENDIAN)
3820 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
3821 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
/* Shift each mask into its position within the combined field.  */
3824 ll_mask = const_binop (LSHIFT_EXPR, convert (lntype, ll_mask),
3825 size_int (xll_bitpos), 0);
3826 rl_mask = const_binop (LSHIFT_EXPR, convert (lntype, rl_mask),
3827 size_int (xrl_bitpos), 0);
/* (Guard for l_const != 0 is on a line not visible here — confirm.)
   Normalize the constant's extension bits and shift it into place.  */
3831 l_const = convert (lntype, l_const);
3832 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
3833 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
/* If the constant has bits set outside the field's mask, the equality
   can never hold: warn and fold to a constant truth value.  */
3834 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
3835 fold (build1 (BIT_NOT_EXPR,
3839 warning ("comparison is always %d", wanted_code == NE_EXPR);
3841 return convert (truth_type,
3842 wanted_code == NE_EXPR
3843 ? integer_one_node : integer_zero_node);
/* Same treatment for the right-hand constant.  */
3848 r_const = convert (lntype, r_const);
3849 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
3850 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
3851 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
3852 fold (build1 (BIT_NOT_EXPR,
3856 warning ("comparison is always %d", wanted_code == NE_EXPR);
3858 return convert (truth_type,
3859 wanted_code == NE_EXPR
3860 ? integer_one_node : integer_zero_node);
3864 /* If the right sides are not constant, do the same for it. Also,
3865 disallow this optimization if a size or signedness mismatch occurs
3866 between the left and right sides. */
3869 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
3870 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
3871 /* Make sure the two fields on the right
3872 correspond to the left without being swapped. */
3873 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
/* Mirror of the left-hand combined-field computation, for the right.  */
3876 first_bit = MIN (lr_bitpos, rr_bitpos);
3877 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
3878 rnmode = get_best_mode (end_bit - first_bit, first_bit,
3879 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
3881 if (rnmode == VOIDmode)
3884 rnbitsize = GET_MODE_BITSIZE (rnmode);
3885 rnbitpos = first_bit & ~ (rnbitsize - 1);
3886 rntype = (*lang_hooks.types.type_for_size) (rnbitsize, 1);
3887 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
3889 if (BYTES_BIG_ENDIAN)
3891 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
3892 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
3895 lr_mask = const_binop (LSHIFT_EXPR, convert (rntype, lr_mask),
3896 size_int (xlr_bitpos), 0);
3897 rr_mask = const_binop (LSHIFT_EXPR, convert (rntype, rr_mask),
3898 size_int (xrr_bitpos), 0);
3900 /* Make a mask that corresponds to both fields being compared.
3901 Do this for both items being compared. If the operands are the
3902 same size and the bits being compared are in the same position
3903 then we can do this by masking both and comparing the masked
3905 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
3906 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
3907 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
3909 lhs = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
3910 ll_unsignedp || rl_unsignedp)&#x3B;
3911 if (! all_ones_mask_p (ll_mask, lnbitsize))
3912 lhs = build (BIT_AND_EXPR, lntype, lhs, ll_mask);
3914 rhs = make_bit_field_ref (lr_inner, rntype, rnbitsize, rnbitpos,
3915 lr_unsignedp || rr_unsignedp);
3916 if (! all_ones_mask_p (lr_mask, rnbitsize))
3917 rhs = build (BIT_AND_EXPR, rntype, rhs, lr_mask);
3919 return build (wanted_code, truth_type, lhs, rhs);
3922 /* There is still another way we can do something: If both pairs of
3923 fields being compared are adjacent, we may be able to make a wider
3924 field containing them both.
3926 Note that we still must mask the lhs/rhs expressions. Furthermore,
3927 the mask must be shifted to account for the shift done by
3928 make_bit_field_ref. */
3929 if ((ll_bitsize + ll_bitpos == rl_bitpos
3930 && lr_bitsize + lr_bitpos == rr_bitpos)
3931 || (ll_bitpos == rl_bitpos + rl_bitsize
3932 && lr_bitpos == rr_bitpos + rr_bitsize))
3936 lhs = make_bit_field_ref (ll_inner, lntype, ll_bitsize + rl_bitsize,
3937 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
3938 rhs = make_bit_field_ref (lr_inner, rntype, lr_bitsize + rr_bitsize,
3939 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
3941 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
3942 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
3943 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
3944 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);
3946 /* Convert to the smaller type before masking out unwanted bits. */
3948 if (lntype != rntype)
3950 if (lnbitsize > rnbitsize)
3952 lhs = convert (rntype, lhs);
3953 ll_mask = convert (rntype, ll_mask);
3956 else if (lnbitsize < rnbitsize)
3958 rhs = convert (lntype, rhs);
3959 lr_mask = convert (lntype, lr_mask);
/* NOTE(review): `type` below is assigned on a line not visible in this
   extraction (presumably the common narrowed type) — confirm upstream.  */
3964 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
3965 lhs = build (BIT_AND_EXPR, type, lhs, ll_mask);
3967 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
3968 rhs = build (BIT_AND_EXPR, type, rhs, lr_mask);
3970 return build (wanted_code, truth_type, lhs, rhs);
3976 /* Handle the case of comparisons with constants. If there is something in
3977 common between the masks, those bits of the constants must be the same.
3978 If not, the condition is always false. Test for this to avoid generating
3979 incorrect code below. */
3980 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
3981 if (! integer_zerop (result)
3982 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
3983 const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
3985 if (wanted_code == NE_EXPR)
3987 warning ("`or' of unmatched not-equal tests is always 1");
3988 return convert (truth_type, integer_one_node);
3992 warning ("`and' of mutually exclusive equal-tests is always 0");
3993 return convert (truth_type, integer_zero_node);
3997 /* Construct the expression we will return. First get the component
3998 reference we will make. Unless the mask is all ones the width of
3999 that field, perform the mask operation. Then compare with the
4001 result = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
4002 ll_unsignedp || rl_unsignedp);
4004 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
4005 if (! all_ones_mask_p (ll_mask, lnbitsize))
4006 result = build (BIT_AND_EXPR, lntype, result, ll_mask);
4008 return build (wanted_code, truth_type, result,
4009 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
4012 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
/* NOTE(review): sampled extraction — braces, some declarations (e.g.
   `inner`, `minmax_const`), returns, and case labels are missing between
   the visible lines; comments added describe only the visible code.  */
4016 optimize_minmax_comparison (tree t)
4018 tree type = TREE_TYPE (t);
4019 tree arg0 = TREE_OPERAND (t, 0);
4020 enum tree_code op_code;
4021 tree comp_const = TREE_OPERAND (t, 1);
4023 int consts_equal, consts_lt;
/* Strip conversions that don't change the sign, so the MIN/MAX shows.  */
4026 STRIP_SIGN_NOPS (arg0);
4028 op_code = TREE_CODE (arg0);
4029 minmax_const = TREE_OPERAND (arg0, 1);
/* Precompute the two orderings between the MIN/MAX constant and the
   comparison constant; the case analysis below depends only on these.  */
4030 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
4031 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
4032 inner = TREE_OPERAND (arg0, 0);
4034 /* If something does not permit us to optimize, return the original tree. */
4035 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
4036 || TREE_CODE (comp_const) != INTEGER_CST
4037 || TREE_CONSTANT_OVERFLOW (comp_const)
4038 || TREE_CODE (minmax_const) != INTEGER_CST
4039 || TREE_CONSTANT_OVERFLOW (minmax_const))
4042 /* Now handle all the various comparison codes. We only handle EQ_EXPR
4043 and GT_EXPR, doing the rest with recursive calls using logical
4045 switch (TREE_CODE (t))
/* NE/LT/LE: invert, recurse on the inverted comparison, invert back.  */
4047 case NE_EXPR: case LT_EXPR: case LE_EXPR:
4049 invert_truthvalue (optimize_minmax_comparison (invert_truthvalue (t)));
/* (GE case, label not visible here: a >= b is (a == b) || (a > b),
   each half handled recursively.)  */
4053 fold (build (TRUTH_ORIF_EXPR, type,
4054 optimize_minmax_comparison
4055 (build (EQ_EXPR, type, arg0, comp_const)),
4056 optimize_minmax_comparison
4057 (build (GT_EXPR, type, arg0, comp_const))));
/* EQ_EXPR case: the examples use 0 as the MIN/MAX constant, but the
   logic is driven by consts_equal/consts_lt computed above.  */
4060 if (op_code == MAX_EXPR && consts_equal)
4061 /* MAX (X, 0) == 0 -> X <= 0 */
4062 return fold (build (LE_EXPR, type, inner, comp_const));
4064 else if (op_code == MAX_EXPR && consts_lt)
4065 /* MAX (X, 0) == 5 -> X == 5 */
4066 return fold (build (EQ_EXPR, type, inner, comp_const));
4068 else if (op_code == MAX_EXPR)
4069 /* MAX (X, 0) == -1 -> false */
4070 return omit_one_operand (type, integer_zero_node, inner);
4072 else if (consts_equal)
4073 /* MIN (X, 0) == 0 -> X >= 0 */
4074 return fold (build (GE_EXPR, type, inner, comp_const));
4077 /* MIN (X, 0) == 5 -> false */
4078 return omit_one_operand (type, integer_zero_node, inner);
4081 /* MIN (X, 0) == -1 -> X == -1 */
4082 return fold (build (EQ_EXPR, type, inner, comp_const));
/* GT_EXPR case (label not visible in this extraction).  */
4085 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
4086 /* MAX (X, 0) > 0 -> X > 0
4087 MAX (X, 0) > 5 -> X > 5 */
4088 return fold (build (GT_EXPR, type, inner, comp_const));
4090 else if (op_code == MAX_EXPR)
4091 /* MAX (X, 0) > -1 -> true */
4092 return omit_one_operand (type, integer_one_node, inner);
4094 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
4095 /* MIN (X, 0) > 0 -> false
4096 MIN (X, 0) > 5 -> false */
4097 return omit_one_operand (type, integer_zero_node, inner);
4100 /* MIN (X, 0) > -1 -> X > -1 */
4101 return fold (build (GT_EXPR, type, inner, comp_const));
4108 /* T is an integer expression that is being multiplied, divided, or taken a
4109 modulus (CODE says which and what kind of divide or modulus) by a
4110 constant C. See if we can eliminate that operation by folding it with
4111 other operations already in T. WIDE_TYPE, if non-null, is a type that
4112 should be used for the computation if wider than our type.
4114 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
4115 (X * 2) + (Y * 4). We must, however, be assured that either the original
4116 expression would not overflow or that overflow is undefined for the type
4117 in the language in question.
4119 We also canonicalize (X + 7) * 4 into X * 4 + 28 in the hope that either
4120 the machine has a multiply-accumulate insn or that this is part of an
4121 addressing calculation.
4123 If we return a non-null expression, it is an equivalent form of the
4124 original computation, but need not be in the original type. */
/* Depth-limited wrapper around extract_muldiv_1; the counter increment,
   depth check, and decrement are on original lines 4133-4140, not
   visible in this extraction.  */
4127 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type)
4129 /* To avoid exponential search depth, refuse to allow recursion past
4130 three levels. Beyond that (1) it's highly unlikely that we'll find
4131 something interesting and (2) we've probably processed it before
4132 when we built the inner expression. */
4141 ret = extract_muldiv_1 (t, c, code, wide_type);
/* Worker for extract_muldiv; see the block comment above extract_muldiv
   for the contract.  NOTE(review): sampled extraction — the enclosing
   switch statement's opening, several `break`s/returns and some
   declarations (e.g. t1, t2) are on lines not visible here.  */
4148 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type)
4150 tree type = TREE_TYPE (t);
4151 enum tree_code tcode = TREE_CODE (t);
/* CTYPE is WIDE_TYPE when it is strictly wider than T's type, else
   T's own type; all rebuilt expressions are computed in CTYPE.  */
4152 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
4153 > GET_MODE_SIZE (TYPE_MODE (type)))
4154 ? wide_type : type);
4156 int same_p = tcode == code;
4157 tree op0 = NULL_TREE, op1 = NULL_TREE;
4159 /* Don't deal with constants of zero here; they confuse the code below. */
4160 if (integer_zerop (c))
4163 if (TREE_CODE_CLASS (tcode) == '1')
4164 op0 = TREE_OPERAND (t, 0);
4166 if (TREE_CODE_CLASS (tcode) == '2')
4167 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
4169 /* Note that we need not handle conditional operations here since fold
4170 already handles those cases. So just do arithmetic here. */
/* (INTEGER_CST case label is on a line not visible here.)  */
4174 /* For a constant, we can always simplify if we are a multiply
4175 or (for divide and modulus) if it is a multiple of our constant. */
4176 if (code == MULT_EXPR
4177 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
4178 return const_binop (code, convert (ctype, t), convert (ctype, c), 0);
4181 case CONVERT_EXPR: case NON_LVALUE_EXPR: case NOP_EXPR:
4182 /* If op0 is an expression ... */
4183 if ((TREE_CODE_CLASS (TREE_CODE (op0)) == '<'
4184 || TREE_CODE_CLASS (TREE_CODE (op0)) == '1'
4185 || TREE_CODE_CLASS (TREE_CODE (op0)) == '2'
4186 || TREE_CODE_CLASS (TREE_CODE (op0)) == 'e')
4187 /* ... and is unsigned, and its type is smaller than ctype,
4188 then we cannot pass through as widening. */
4189 && ((TREE_UNSIGNED (TREE_TYPE (op0))
4190 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
4191 && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
4192 && (GET_MODE_SIZE (TYPE_MODE (ctype))
4193 > GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0)))))
4194 /* ... or its type is larger than ctype,
4195 then we cannot pass through this truncation. */
4196 || (GET_MODE_SIZE (TYPE_MODE (ctype))
4197 < GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0))))
4198 /* ... or signedness changes for division or modulus,
4199 then we cannot pass through this conversion. */
4200 || (code != MULT_EXPR
4201 && (TREE_UNSIGNED (ctype)
4202 != TREE_UNSIGNED (TREE_TYPE (op0))))))
4205 /* Pass the constant down and see if we can make a simplification. If
4206 we can, replace this expression with the inner simplification for
4207 possible later conversion to our or some other type. */
4208 if ((t2 = convert (TREE_TYPE (op0), c)) != 0
4209 && TREE_CODE (t2) == INTEGER_CST
4210 && ! TREE_CONSTANT_OVERFLOW (t2)
4211 && (0 != (t1 = extract_muldiv (op0, t2, code,
4213 ? ctype : NULL_TREE))))
/* Unary cases: distribute the operation through negation/abs.  */
4217 case NEGATE_EXPR: case ABS_EXPR:
4218 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
4219 return fold (build1 (tcode, ctype, convert (ctype, t1)));
4222 case MIN_EXPR: case MAX_EXPR:
4223 /* If widening the type changes the signedness, then we can't perform
4224 this optimization as that changes the result. */
4225 if (TREE_UNSIGNED (ctype) != TREE_UNSIGNED (type))
4228 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
4229 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0
4230 && (t2 = extract_muldiv (op1, c, code, wide_type)) != 0)
/* Dividing/multiplying by a negative constant flips which operand is
   the minimum, so swap MIN and MAX.  */
4232 if (tree_int_cst_sgn (c) < 0)
4233 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
4235 return fold (build (tcode, ctype, convert (ctype, t1),
4236 convert (ctype, t2)));
4240 case WITH_RECORD_EXPR:
4241 if ((t1 = extract_muldiv (TREE_OPERAND (t, 0), c, code, wide_type)) != 0)
4242 return build (WITH_RECORD_EXPR, TREE_TYPE (t1), t1,
4243 TREE_OPERAND (t, 1));
4246 case LSHIFT_EXPR: case RSHIFT_EXPR:
4247 /* If the second operand is constant, this is a multiplication
4248 or floor division, by a power of two, so we can treat it that
4249 way unless the multiplier or divisor overflows. */
4250 if (TREE_CODE (op1) == INTEGER_CST
4251 /* const_binop may not detect overflow correctly,
4252 so check for it explicitly here. */
4253 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
4254 && TREE_INT_CST_HIGH (op1) == 0
4255 && 0 != (t1 = convert (ctype,
4256 const_binop (LSHIFT_EXPR, size_one_node,
4258 && ! TREE_OVERFLOW (t1))
4259 return extract_muldiv (build (tcode == LSHIFT_EXPR
4260 ? MULT_EXPR : FLOOR_DIV_EXPR,
4261 ctype, convert (ctype, op0), t1),
4262 c, code, wide_type);
4265 case PLUS_EXPR: case MINUS_EXPR:
4266 /* See if we can eliminate the operation on both sides. If we can, we
4267 can return a new PLUS or MINUS. If we can't, the only remaining
4268 cases where we can do anything are if the second operand is a
4270 t1 = extract_muldiv (op0, c, code, wide_type);
4271 t2 = extract_muldiv (op1, c, code, wide_type);
4272 if (t1 != 0 && t2 != 0
4273 && (code == MULT_EXPR
4274 /* If not multiplication, we can only do this if both operands
4275 are divisible by c. */
4276 || (multiple_of_p (ctype, op0, c)
4277 && multiple_of_p (ctype, op1, c))))
4278 return fold (build (tcode, ctype, convert (ctype, t1),
4279 convert (ctype, t2)));
4281 /* If this was a subtraction, negate OP1 and set it to be an addition.
4282 This simplifies the logic below. */
4283 if (tcode == MINUS_EXPR)
4284 tcode = PLUS_EXPR, op1 = negate_expr (op1);
4286 if (TREE_CODE (op1) != INTEGER_CST)
4289 /* If either OP1 or C are negative, this optimization is not safe for
4290 some of the division and remainder types while for others we need
4291 to change the code. */
4292 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
4294 if (code == CEIL_DIV_EXPR)
4295 code = FLOOR_DIV_EXPR;
4296 else if (code == FLOOR_DIV_EXPR)
4297 code = CEIL_DIV_EXPR;
4298 else if (code != MULT_EXPR
4299 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
4303 /* If it's a multiply or a division/modulus operation of a multiple
4304 of our constant, do the operation and verify it doesn't overflow. */
4305 if (code == MULT_EXPR
4306 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
4308 op1 = const_binop (code, convert (ctype, op1), convert (ctype, c), 0);
4309 if (op1 == 0 || TREE_OVERFLOW (op1))
4315 /* If we have an unsigned type is not a sizetype, we cannot widen
4316 the operation since it will change the result if the original
4317 computation overflowed. */
4318 if (TREE_UNSIGNED (ctype)
4319 && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
4323 /* If we were able to eliminate our operation from the first side,
4324 apply our operation to the second side and reform the PLUS. */
4325 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
4326 return fold (build (tcode, ctype, convert (ctype, t1), op1));
4328 /* The last case is if we are a multiply. In that case, we can
4329 apply the distributive law to commute the multiply and addition
4330 if the multiplication of the constants doesn't overflow. */
4331 if (code == MULT_EXPR)
4332 return fold (build (tcode, ctype, fold (build (code, ctype,
4333 convert (ctype, op0),
4334 convert (ctype, c))),
/* (MULT_EXPR/TRUNC_MOD_EXPR case labels are on lines not visible.)  */
4340 /* We have a special case here if we are doing something like
4341 (C * 8) % 4 since we know that's zero. */
4342 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
4343 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
4344 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
4345 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
4346 return omit_one_operand (type, integer_zero_node, op0);
4348 /* ... fall through ... */
4350 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
4351 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
4352 /* If we can extract our operation from the LHS, do so and return a
4353 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
4354 do something only if the second operand is a constant. */
4356 && (t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
4357 return fold (build (tcode, ctype, convert (ctype, t1),
4358 convert (ctype, op1)));
4359 else if (tcode == MULT_EXPR && code == MULT_EXPR
4360 && (t1 = extract_muldiv (op1, c, code, wide_type)) != 0)
4361 return fold (build (tcode, ctype, convert (ctype, op0),
4362 convert (ctype, t1)));
4363 else if (TREE_CODE (op1) != INTEGER_CST)
4366 /* If these are the same operation types, we can associate them
4367 assuming no overflow. */
4369 && 0 != (t1 = const_binop (MULT_EXPR, convert (ctype, op1),
4370 convert (ctype, c), 0))
4371 && ! TREE_OVERFLOW (t1))
4372 return fold (build (tcode, ctype, convert (ctype, op0), t1));
4374 /* If these operations "cancel" each other, we have the main
4375 optimizations of this pass, which occur when either constant is a
4376 multiple of the other, in which case we replace this with either an
4377 operation or CODE or TCODE.
4379 If we have an unsigned type that is not a sizetype, we cannot do
4380 this since it will change the result if the original computation
4382 if ((! TREE_UNSIGNED (ctype)
4383 || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
4385 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
4386 || (tcode == MULT_EXPR
4387 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
4388 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR)))
/* op1 a multiple of c: keep TCODE, scale op1 down by c.  */
4390 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
4391 return fold (build (tcode, ctype, convert (ctype, op0),
4393 const_binop (TRUNC_DIV_EXPR,
/* c a multiple of op1: switch to CODE with the quotient constant.  */
4395 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
4396 return fold (build (code, ctype, convert (ctype, op0),
4398 const_binop (TRUNC_DIV_EXPR,
4410 /* If T contains a COMPOUND_EXPR which was inserted merely to evaluate
4411 S, a SAVE_EXPR, return the expression actually being evaluated. Note
4412 that we may sometimes modify the tree. */
/* NOTE(review): the final `return t;` and closing brace are on original
   lines not visible in this extraction.  This function rewrites T
   IN PLACE (TREE_OPERAND assignments below), as the comment above says.  */
4415 strip_compound_expr (tree t, tree s)
4417 enum tree_code code = TREE_CODE (t);
4419 /* See if this is the COMPOUND_EXPR we want to eliminate. */
4420 if (code == COMPOUND_EXPR && TREE_CODE (TREE_OPERAND (t, 0)) == CONVERT_EXPR
4421 && TREE_OPERAND (TREE_OPERAND (t, 0), 0) == s)
4422 return TREE_OPERAND (t, 1);
4424 /* See if this is a COND_EXPR or a simple arithmetic operator. We
4425 don't bother handling any other types. */
4426 else if (code == COND_EXPR)
/* Recurse into the condition and both arms.  */
4428 TREE_OPERAND (t, 0) = strip_compound_expr (TREE_OPERAND (t, 0), s);
4429 TREE_OPERAND (t, 1) = strip_compound_expr (TREE_OPERAND (t, 1), s);
4430 TREE_OPERAND (t, 2) = strip_compound_expr (TREE_OPERAND (t, 2), s);
4432 else if (TREE_CODE_CLASS (code) == '1')
4433 TREE_OPERAND (t, 0) = strip_compound_expr (TREE_OPERAND (t, 0), s);
4434 else if (TREE_CODE_CLASS (code) == '<'
4435 || TREE_CODE_CLASS (code) == '2')
4437 TREE_OPERAND (t, 0) = strip_compound_expr (TREE_OPERAND (t, 0), s);
4438 TREE_OPERAND (t, 1) = strip_compound_expr (TREE_OPERAND (t, 1), s);
4444 /* Return a node which has the indicated constant VALUE (either 0 or
4445 1), and is of the indicated TYPE. */
/* NOTE(review): the integer_zero_node alternative of the truthvalue
   conversion and the final `return t;` are on lines not visible here.  */
4448 constant_boolean_node (int value, tree type)
/* Fast path: for plain `int` the shared 0/1 nodes can be reused.  */
4450 if (type == integer_type_node)
4451 return value ? integer_one_node : integer_zero_node;
/* Booleans go through the language hook so front ends control the
   representation of true/false.  */
4452 else if (TREE_CODE (type) == BOOLEAN_TYPE)
4453 return (*lang_hooks.truthvalue_conversion) (value ? integer_one_node :
/* Otherwise build a fresh INTEGER_CST of the requested type.  */
4457 tree t = build_int_2 (value, 0);
4459 TREE_TYPE (t) = type;
4464 /* Utility function for the following routine, to see how complex a nesting of
4465 COND_EXPRs can be. EXPR is the expression and LIMIT is a count beyond which
4466 we don't care (to avoid spending too much time on complex expressions.). */
/* NOTE(review): the declarations of ctrue/cfalse and the base-case
   return for non-COND_EXPR nodes are on lines not visible here.  */
4469 count_cond (tree expr, int lim)
4473 if (TREE_CODE (expr) != COND_EXPR)
/* Count both arms, budgeting the false arm with whatever LIM remains
   after the true arm, and clamp the total at LIM.  */
4478 ctrue = count_cond (TREE_OPERAND (expr, 1), lim - 1);
4479 cfalse = count_cond (TREE_OPERAND (expr, 2), lim - 1 - ctrue);
4480 return MIN (lim, 1 + ctrue + cfalse);
4483 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
4484 Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
4485 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
4486 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
4487 COND is the first argument to CODE; otherwise (as in the example
4488 given here), it is the second argument. TYPE is the type of the
4489 original expression. */
/* NOTE(review): physical lines are elided in this excerpt (embedded line
   numbers jump, e.g. 4493->4495); braces and some statements are missing.  */
4492 fold_binary_op_with_conditional_arg (enum tree_code code, tree type,
4493 tree cond, tree arg, int cond_first_p)
4495 tree test, true_value, false_value;
4496 tree lhs = NULL_TREE;
4497 tree rhs = NULL_TREE;
4498 /* In the end, we'll produce a COND_EXPR. Both arms of the
4499 conditional expression will be binary operations. The left-hand
4500 side of the expression to be executed if the condition is true
4501 will be pointed to by TRUE_LHS. Similarly, the right-hand side
4502 of the expression to be executed if the condition is true will be
4503 pointed to by TRUE_RHS. FALSE_LHS and FALSE_RHS are analogous --
4504 but apply to the expression to be executed if the conditional is
4510 /* These are the codes to use for the left-hand side and right-hand
4511 side of the COND_EXPR. Normally, they are the same as CODE. */
4512 enum tree_code lhs_code = code;
4513 enum tree_code rhs_code = code;
4514 /* And these are the types of the expressions. */
4515 tree lhs_type = type;
4516 tree rhs_type = type;
/* Wire up the operand pointers according to COND_FIRST_P: when the
   conditional is the second operand of CODE, ARG sits on the right of
   both arms; otherwise (next group) ARG sits on the left.  The selecting
   if/else lines are elided in this excerpt.  */
4521 true_rhs = false_rhs = &arg;
4522 true_lhs = &true_value;
4523 false_lhs = &false_value;
4527 true_lhs = false_lhs = &arg;
4528 true_rhs = &true_value;
4529 false_rhs = &false_value;
/* Case 1: COND really is a COND_EXPR -- pull out its three operands.  */
4532 if (TREE_CODE (cond) == COND_EXPR)
4534 test = TREE_OPERAND (cond, 0);
4535 true_value = TREE_OPERAND (cond, 1);
4536 false_value = TREE_OPERAND (cond, 2);
4537 /* If this operand throws an expression, then it does not make
4538 sense to try to perform a logical or arithmetic operation
4539 involving it. Instead of building `a + throw 3' for example,
4540 we simply build `a, throw 3'. */
4541 if (VOID_TYPE_P (TREE_TYPE (true_value)))
4545 lhs_code = COMPOUND_EXPR;
4546 lhs_type = void_type_node;
4551 if (VOID_TYPE_P (TREE_TYPE (false_value)))
4555 rhs_code = COMPOUND_EXPR;
4556 rhs_type = void_type_node;
/* Case 2 (else-branch elided here): COND is a comparison; treat it as
   `cond ? 1 : 0' in the comparison's own type.  */
4564 tree testtype = TREE_TYPE (cond);
4566 true_value = convert (testtype, integer_one_node);
4567 false_value = convert (testtype, integer_zero_node);
4570 /* If ARG is complex we want to make sure we only evaluate it once. Though
4571 this is only required if it is volatile, it might be more efficient even
4572 if it is not. However, if we succeed in folding one part to a constant,
4573 we do not need to make this SAVE_EXPR. Since we do this optimization
4574 primarily to see if we do end up with constant and this SAVE_EXPR
4575 interferes with later optimizations, suppressing it when we can is
4578 If we are not in a function, we can't make a SAVE_EXPR, so don't try to
4579 do so. Don't try to see if the result is a constant if an arm is a
4580 COND_EXPR since we get exponential behavior in that case. */
4582 if (saved_expr_p (arg))
/* Tentatively fold each arm without a SAVE_EXPR to see whether either
   folds to a constant; only worth trying when ARG is non-constant, we
   are inside a function, and ARG may have side effects.  */
4584 else if (lhs == 0 && rhs == 0
4585 && !TREE_CONSTANT (arg)
4586 && (*lang_hooks.decls.global_bindings_p) () == 0
4587 && ((TREE_CODE (arg) != VAR_DECL && TREE_CODE (arg) != PARM_DECL)
4588 || TREE_SIDE_EFFECTS (arg)))
4590 if (TREE_CODE (true_value) != COND_EXPR)
4591 lhs = fold (build (lhs_code, lhs_type, *true_lhs, *true_rhs));
4593 if (TREE_CODE (false_value) != COND_EXPR)
4594 rhs = fold (build (rhs_code, rhs_type, *false_lhs, *false_rhs));
/* Neither arm folded to a constant: wrap ARG in a SAVE_EXPR after all
   and redo the arms so ARG is evaluated only once.  */
4596 if ((lhs == 0 || ! TREE_CONSTANT (lhs))
4597 && (rhs == 0 || !TREE_CONSTANT (rhs)))
4599 arg = save_expr (arg);
/* Build any arm not already produced above, then the final COND_EXPR.  */
4606 lhs = fold (build (lhs_code, lhs_type, *true_lhs, *true_rhs));
4608 rhs = fold (build (rhs_code, rhs_type, *false_lhs, *false_rhs));
4610 test = fold (build (COND_EXPR, type, test, lhs, rhs));
/* When ARG was wrapped in a SAVE_EXPR (guard elided in this excerpt),
   evaluate ARG first for its side effects and strip the now-redundant
   compound structure from TEST.  */
4613 return build (COMPOUND_EXPR, type,
4614 convert (void_type_node, arg),
4615 strip_compound_expr (test, arg));
4617 return convert (type, test);
4621 /* Subroutine of fold() that checks for the addition of +/- 0.0.
4623 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
4624 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
4625 ADDEND is the same as X.
4627 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
4628 and finite. The problematic cases are when X is zero, and its mode
4629 has signed zeros. In the case of rounding towards -infinity,
4630 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
4631 modes, X + 0 is not the same as X because -0 + 0 is 0. */
/* NOTE(review): the early-return bodies are elided in this excerpt
   (embedded line numbers jump); each guard below presumably returns
   false/true on its elided line -- confirm in the full source.  */
4634 fold_real_zero_addition_p (tree type, tree addend, int negate)
/* Only +/-0.0 addends are candidates at all.  */
4636 if (!real_zerop (addend))
4639 /* Don't allow the fold with -fsignaling-nans. */
4640 if (HONOR_SNANS (TYPE_MODE (type)))
4643 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
4644 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
4647 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
4648 if (TREE_CODE (addend) == REAL_CST
4649 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
4652 /* The mode has signed zeros, and we have to honor their sign.
4653 In this situation, there is only one case we can return true for.
4654 X - 0 is the same as X unless rounding towards -infinity is
4656 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
4659 /* Subroutine of fold() that checks comparisons of built-in math
4660 functions against real constants.
4662 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
4663 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
4664 is the type of the result and ARG0 and ARG1 are the operands of the
4665 comparison. ARG1 must be a TREE_REAL_CST.
4667 The function returns the constant folded tree if a simplification
4668 can be made, and NULL_TREE otherwise. */
/* NOTE(review): interior lines are elided in this excerpt (embedded line
   numbers jump); several braces, declarations of C/C2, omitted operands
   of calls, and the final return are among the missing lines.  */
4671 fold_mathfn_compare (enum built_in_function fcode, enum tree_code code,
4672 tree type, tree arg0, tree arg1)
/* Only the sqrt family is handled; ARG0 is the CALL_EXPR, so ARG is the
   call's first (and only) argument.  */
4676 if (fcode == BUILT_IN_SQRT
4677 || fcode == BUILT_IN_SQRTF
4678 || fcode == BUILT_IN_SQRTL)
4680 tree arg = TREE_VALUE (TREE_OPERAND (arg0, 1));
4681 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
4683 c = TREE_REAL_CST (arg1);
/* --- y < 0: sqrt(x) is never negative (it is NaN or >= 0).  */
4684 if (REAL_VALUE_NEGATIVE (c))
4686 /* sqrt(x) < y is always false, if y is negative. */
4687 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
4688 return omit_one_operand (type,
4689 convert (type, integer_zero_node),
4692 /* sqrt(x) > y is always true, if y is negative and we
4693 don't care about NaNs, i.e. negative values of x. */
4694 if (code == NE_EXPR || !HONOR_NANS (mode))
4695 return omit_one_operand (type,
4696 convert (type, integer_one_node),
4699 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
4700 return fold (build (GE_EXPR, type, arg,
4701 build_real (TREE_TYPE (arg), dconst0)));
/* --- sqrt(x) >/>= y with y >= 0: square y (exactly, then round to the
   operand mode) and compare x against y*y instead.  */
4703 else if (code == GT_EXPR || code == GE_EXPR)
4707 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
4708 real_convert (&c2, mode, &c2);
4710 if (REAL_VALUE_ISINF (c2))
4712 /* sqrt(x) > y is x == +Inf, when y is very large. */
4713 if (HONOR_INFINITIES (mode))
4714 return fold (build (EQ_EXPR, type, arg,
4715 build_real (TREE_TYPE (arg), c2)));
4717 /* sqrt(x) > y is always false, when y is very large
4718 and we don't care about infinities. */
4719 return omit_one_operand (type,
4720 convert (type, integer_zero_node),
4724 /* sqrt(x) > c is the same as x > c*c. */
4725 return fold (build (code, type, arg,
4726 build_real (TREE_TYPE (arg), c2)));
/* --- sqrt(x) </<= y with y >= 0: same squaring trick, but the rewrite
   must additionally guard against NaN (x < 0) and infinity.  */
4728 else if (code == LT_EXPR || code == LE_EXPR)
4732 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
4733 real_convert (&c2, mode, &c2);
4735 if (REAL_VALUE_ISINF (c2))
4737 /* sqrt(x) < y is always true, when y is a very large
4738 value and we don't care about NaNs or Infinities. */
4739 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
4740 return omit_one_operand (type,
4741 convert (type, integer_one_node),
4744 /* sqrt(x) < y is x != +Inf when y is very large and we
4745 don't care about NaNs. */
4746 if (! HONOR_NANS (mode))
4747 return fold (build (NE_EXPR, type, arg,
4748 build_real (TREE_TYPE (arg), c2)));
4750 /* sqrt(x) < y is x >= 0 when y is very large and we
4751 don't care about Infinities. */
4752 if (! HONOR_INFINITIES (mode))
4753 return fold (build (GE_EXPR, type, arg,
4754 build_real (TREE_TYPE (arg), dconst0)));
4756 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
/* Bail out (elided return) when we cannot make a SAVE_EXPR: either we
   are at global scope or ARG contains a PLACEHOLDER_EXPR.  */
4757 if ((*lang_hooks.decls.global_bindings_p) () != 0
4758 || CONTAINS_PLACEHOLDER_P (arg))
4761 arg = save_expr (arg);
4762 return fold (build (TRUTH_ANDIF_EXPR, type,
4763 fold (build (GE_EXPR, type, arg,
4764 build_real (TREE_TYPE (arg),
4766 fold (build (NE_EXPR, type, arg,
4767 build_real (TREE_TYPE (arg),
4771 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
4772 if (! HONOR_NANS (mode))
4773 return fold (build (code, type, arg,
4774 build_real (TREE_TYPE (arg), c2)));
4776 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
4777 if ((*lang_hooks.decls.global_bindings_p) () == 0
4778 && ! CONTAINS_PLACEHOLDER_P (arg))
4780 arg = save_expr (arg);
4781 return fold (build (TRUTH_ANDIF_EXPR, type,
4782 fold (build (GE_EXPR, type, arg,
4783 build_real (TREE_TYPE (arg),
4785 fold (build (code, type, arg,
4786 build_real (TREE_TYPE (arg),
4795 /* Subroutine of fold() that optimizes comparisons against Infinities,
4796 either +Inf or -Inf.
4798 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
4799 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
4800 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
4802 The function returns the constant folded tree if a simplification
4803 can be made, and NULL_TREE otherwise. */
/* NOTE(review): this excerpt elides the enclosing `switch (code)` and its
   case labels (embedded line numbers jump, e.g. 4818->4823); the comment
   above each group identifies which comparison the group handles.  */
4806 fold_inf_compare (enum tree_code code, tree type, tree arg0, tree arg1)
4808 enum machine_mode mode;
4809 REAL_VALUE_TYPE max;
4813 mode = TYPE_MODE (TREE_TYPE (arg0));
4815 /* For negative infinity swap the sense of the comparison. */
4816 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
4818 code = swap_tree_comparison (code);
/* (case GT_EXPR, elided) x > +Inf.  */
4823 /* x > +Inf is always false, if with ignore sNANs. */
4824 if (HONOR_SNANS (mode))
4826 return omit_one_operand (type,
4827 convert (type, integer_zero_node),
/* (case LE_EXPR, elided) x <= +Inf.  */
4831 /* x <= +Inf is always true, if we don't case about NaNs. */
4832 if (! HONOR_NANS (mode))
4833 return omit_one_operand (type,
4834 convert (type, integer_one_node),
4837 /* x <= +Inf is the same as x == x, i.e. isfinite(x). */
/* A SAVE_EXPR is required since ARG0 is used twice; only possible inside
   a function and when ARG0 has no PLACEHOLDER_EXPR.  */
4838 if ((*lang_hooks.decls.global_bindings_p) () == 0
4839 && ! CONTAINS_PLACEHOLDER_P (arg0))
4841 arg0 = save_expr (arg0);
4842 return fold (build (EQ_EXPR, type, arg0, arg0));
/* (case EQ_EXPR / GE_EXPR, elided) compare against the mode's largest
   finite value instead; NEG flips both the max value and the operator.  */
4848 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
4849 real_maxval (&max, neg, mode);
4850 return fold (build (neg ? LT_EXPR : GT_EXPR, type,
4851 arg0, build_real (TREE_TYPE (arg0), max)));
/* (case LT_EXPR, elided).  */
4854 /* x < +Inf is always equal to x <= DBL_MAX. */
4855 real_maxval (&max, neg, mode);
4856 return fold (build (neg ? GE_EXPR : LE_EXPR, type,
4857 arg0, build_real (TREE_TYPE (arg0), max)));
/* (case NE_EXPR, elided).  Without NaNs, !(x > MAX) is simply x <= MAX;
   with NaNs we must build the explicit logical negation.  */
4860 /* x != +Inf is always equal to !(x > DBL_MAX). */
4861 real_maxval (&max, neg, mode);
4862 if (! HONOR_NANS (mode))
4863 return fold (build (neg ? GE_EXPR : LE_EXPR, type,
4864 arg0, build_real (TREE_TYPE (arg0), max)));
4865 temp = fold (build (neg ? LT_EXPR : GT_EXPR, type,
4866 arg0, build_real (TREE_TYPE (arg0), max)));
4867 return fold (build1 (TRUTH_NOT_EXPR, type, temp));
4876 /* If CODE with arguments ARG0 and ARG1 represents a single bit
4877 equality/inequality test, then return a simplified form of
4878 the test using shifts and logical operations. Otherwise return
4879 NULL. TYPE is the desired result type. */
/* NOTE(review): lines are elided in this excerpt (embedded line numbers
   jump); missing pieces include the RESULT_TYPE parameter line, braces,
   the #else arm of LOAD_EXTEND_OP, and the trailing return.  */
4882 fold_single_bit_test (enum tree_code code, tree arg0, tree arg1,
4885 /* If this is a TRUTH_NOT_EXPR, it may have a single bit test inside
4887 if (code == TRUTH_NOT_EXPR)
4889 code = TREE_CODE (arg0);
4890 if (code != NE_EXPR && code != EQ_EXPR)
4893 /* Extract the arguments of the EQ/NE. */
4894 arg1 = TREE_OPERAND (arg0, 1);
4895 arg0 = TREE_OPERAND (arg0, 0);
4897 /* This requires us to invert the code. */
4898 code = (code == EQ_EXPR ? NE_EXPR : EQ_EXPR);
4901 /* If this is testing a single bit, we can optimize the test. */
/* Recognize the shape (A & C) ==/!= 0 with C a power of two.  */
4902 if ((code == NE_EXPR || code == EQ_EXPR)
4903 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
4904 && integer_pow2p (TREE_OPERAND (arg0, 1)))
4906 tree inner = TREE_OPERAND (arg0, 0);
4907 tree type = TREE_TYPE (arg0);
4908 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
4909 enum machine_mode operand_mode = TYPE_MODE (type);
4911 tree signed_type, unsigned_type;
4914 /* If we have (A & C) != 0 where C is the sign bit of A, convert
4915 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
4916 arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
4917 if (arg00 != NULL_TREE)
/* Force a signed view so the </>= comparison against zero tests the
   sign bit.  */
4919 tree stype = (*lang_hooks.types.signed_type) (TREE_TYPE (arg00));
4920 return fold (build (code == EQ_EXPR ? GE_EXPR : LT_EXPR, result_type,
4921 convert (stype, arg00),
4922 convert (stype, integer_zero_node)));
4925 /* At this point, we know that arg0 is not testing the sign bit. */
/* Sign-bit test that sign_bit_p could not prove: give up (the elided
   line here presumably aborts or bails -- confirm in the full source).  */
4926 if (TYPE_PRECISION (type) - 1 == bitnum)
4929 /* Otherwise we have (A & C) != 0 where C is a single bit,
4930 convert that into ((A >> C2) & 1). Where C2 = log2(C).
4931 Similarly for (A & C) == 0. */
4933 /* If INNER is a right shift of a constant and it plus BITNUM does
4934 not overflow, adjust BITNUM and INNER. */
4935 if (TREE_CODE (inner) == RSHIFT_EXPR
4936 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
4937 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
4938 && bitnum < TYPE_PRECISION (type)
4939 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
4940 bitnum - TYPE_PRECISION (type)))
/* Fold the existing shift into BITNUM so only one shift is emitted.  */
4942 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
4943 inner = TREE_OPERAND (inner, 0);
4946 /* If we are going to be able to omit the AND below, we must do our
4947 operations as unsigned. If we must use the AND, we have a choice.
4948 Normally unsigned is faster, but for some machines signed is. */
4949 #ifdef LOAD_EXTEND_OP
4950 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
4955 signed_type = (*lang_hooks.types.type_for_mode) (operand_mode, 0);
4956 unsigned_type = (*lang_hooks.types.type_for_mode) (operand_mode, 1);
/* Build ((INNER >> BITNUM) [^ 1] & 1) in the chosen signedness.  */
4959 inner = build (RSHIFT_EXPR, ops_unsigned ? unsigned_type : signed_type,
4960 inner, size_int (bitnum));
/* For == 0 the truth value is inverted: XOR with 1 after the shift.  */
4962 if (code == EQ_EXPR)
4963 inner = build (BIT_XOR_EXPR, ops_unsigned ? unsigned_type : signed_type,
4964 inner, integer_one_node);
4966 /* Put the AND last so it can combine with more things. */
4967 inner = build (BIT_AND_EXPR, ops_unsigned ? unsigned_type : signed_type,
4968 inner, integer_one_node);
4970 /* Make sure to return the proper type. */
4971 if (TREE_TYPE (inner) != result_type)
4972 inner = convert (result_type, inner);
4979 /* Perform constant folding and related simplification of EXPR.
4980 The related simplifications include x*1 => x, x*0 => 0, etc.,
4981 and application of the associative law.
4982 NOP_EXPR conversions may be removed freely (as long as we
4983 are careful not to change the C type of the overall expression)
4984 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
4985 but we can constant-fold them if they have constant operands. */
4987 #ifdef ENABLE_FOLD_CHECKING
4988 # define fold(x) fold_1 (x)
4989 static tree fold_1 (tree);
4995 tree t = expr, orig_t;
4996 tree t1 = NULL_TREE;
4998 tree type = TREE_TYPE (expr);
4999 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
5000 enum tree_code code = TREE_CODE (t);
5001 int kind = TREE_CODE_CLASS (code);
5003 /* WINS will be nonzero when the switch is done
5004 if all operands are constant. */
5007 /* Don't try to process an RTL_EXPR since its operands aren't trees.
5008 Likewise for a SAVE_EXPR that's already been evaluated. */
5009 if (code == RTL_EXPR || (code == SAVE_EXPR && SAVE_EXPR_RTL (t) != 0))
5012 /* Return right away if a constant. */
5016 #ifdef MAX_INTEGER_COMPUTATION_MODE
5017 check_max_integer_computation_mode (expr);
5021 if (code == NOP_EXPR || code == FLOAT_EXPR || code == CONVERT_EXPR)
5025 /* Special case for conversion ops that can have fixed point args. */
5026 arg0 = TREE_OPERAND (t, 0);
5028 /* Don't use STRIP_NOPS, because signedness of argument type matters. */
5030 STRIP_SIGN_NOPS (arg0);
5032 if (arg0 != 0 && TREE_CODE (arg0) == COMPLEX_CST)
5033 subop = TREE_REALPART (arg0);
5037 if (subop != 0 && TREE_CODE (subop) != INTEGER_CST
5038 && TREE_CODE (subop) != REAL_CST
5040 /* Note that TREE_CONSTANT isn't enough:
5041 static var addresses are constant but we can't
5042 do arithmetic on them. */
5045 else if (IS_EXPR_CODE_CLASS (kind))
5047 int len = first_rtl_op (code);
5049 for (i = 0; i < len; i++)
5051 tree op = TREE_OPERAND (t, i);
5055 continue; /* Valid for CALL_EXPR, at least. */
5057 if (kind == '<' || code == RSHIFT_EXPR)
5059 /* Signedness matters here. Perhaps we can refine this
5061 STRIP_SIGN_NOPS (op);
5064 /* Strip any conversions that don't change the mode. */
5067 if (TREE_CODE (op) == COMPLEX_CST)
5068 subop = TREE_REALPART (op);
5072 if (TREE_CODE (subop) != INTEGER_CST
5073 && TREE_CODE (subop) != REAL_CST)
5074 /* Note that TREE_CONSTANT isn't enough:
5075 static var addresses are constant but we can't
5076 do arithmetic on them. */
5086 /* If this is a commutative operation, and ARG0 is a constant, move it
5087 to ARG1 to reduce the number of tests below. */
5088 if ((code == PLUS_EXPR || code == MULT_EXPR || code == MIN_EXPR
5089 || code == MAX_EXPR || code == BIT_IOR_EXPR || code == BIT_XOR_EXPR
5090 || code == BIT_AND_EXPR)
5091 && ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) != INTEGER_CST)
5092 || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) != REAL_CST)))
5094 tem = arg0; arg0 = arg1; arg1 = tem;
5098 TREE_OPERAND (t, 0) = arg0;
5099 TREE_OPERAND (t, 1) = arg1;
5102 /* Now WINS is set as described above,
5103 ARG0 is the first operand of EXPR,
5104 and ARG1 is the second operand (if it has more than one operand).
5106 First check for cases where an arithmetic operation is applied to a
5107 compound, conditional, or comparison operation. Push the arithmetic
5108 operation inside the compound or conditional to see if any folding
5109 can then be done. Convert comparison to conditional for this purpose.
5110 The also optimizes non-constant cases that used to be done in
5113 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
5114 one of the operands is a comparison and the other is a comparison, a
5115 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
5116 code below would make the expression more complex. Change it to a
5117 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
5118 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
5120 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
5121 || code == EQ_EXPR || code == NE_EXPR)
5122 && ((truth_value_p (TREE_CODE (arg0))
5123 && (truth_value_p (TREE_CODE (arg1))
5124 || (TREE_CODE (arg1) == BIT_AND_EXPR
5125 && integer_onep (TREE_OPERAND (arg1, 1)))))
5126 || (truth_value_p (TREE_CODE (arg1))
5127 && (truth_value_p (TREE_CODE (arg0))
5128 || (TREE_CODE (arg0) == BIT_AND_EXPR
5129 && integer_onep (TREE_OPERAND (arg0, 1)))))))
5131 t = fold (build (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
5132 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
5136 if (code == EQ_EXPR)
5137 t = invert_truthvalue (t);
5142 if (TREE_CODE_CLASS (code) == '1')
5144 if (TREE_CODE (arg0) == COMPOUND_EXPR)
5145 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
5146 fold (build1 (code, type, TREE_OPERAND (arg0, 1))));
5147 else if (TREE_CODE (arg0) == COND_EXPR)
5149 tree arg01 = TREE_OPERAND (arg0, 1);
5150 tree arg02 = TREE_OPERAND (arg0, 2);
5151 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
5152 arg01 = fold (build1 (code, type, arg01));
5153 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
5154 arg02 = fold (build1 (code, type, arg02));
5155 t = fold (build (COND_EXPR, type, TREE_OPERAND (arg0, 0),
5158 /* If this was a conversion, and all we did was to move into
5159 inside the COND_EXPR, bring it back out. But leave it if
5160 it is a conversion from integer to integer and the
5161 result precision is no wider than a word since such a
5162 conversion is cheap and may be optimized away by combine,
5163 while it couldn't if it were outside the COND_EXPR. Then return
5164 so we don't get into an infinite recursion loop taking the
5165 conversion out and then back in. */
5167 if ((code == NOP_EXPR || code == CONVERT_EXPR
5168 || code == NON_LVALUE_EXPR)
5169 && TREE_CODE (t) == COND_EXPR
5170 && TREE_CODE (TREE_OPERAND (t, 1)) == code
5171 && TREE_CODE (TREE_OPERAND (t, 2)) == code
5172 && ! VOID_TYPE_P (TREE_OPERAND (t, 1))
5173 && ! VOID_TYPE_P (TREE_OPERAND (t, 2))
5174 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0))
5175 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 2), 0)))
5176 && ! (INTEGRAL_TYPE_P (TREE_TYPE (t))
5178 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0))))
5179 && TYPE_PRECISION (TREE_TYPE (t)) <= BITS_PER_WORD))
5180 t = build1 (code, type,
5182 TREE_TYPE (TREE_OPERAND
5183 (TREE_OPERAND (t, 1), 0)),
5184 TREE_OPERAND (t, 0),
5185 TREE_OPERAND (TREE_OPERAND (t, 1), 0),
5186 TREE_OPERAND (TREE_OPERAND (t, 2), 0)));
5189 else if (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<')
5190 return fold (build (COND_EXPR, type, arg0,
5191 fold (build1 (code, type, integer_one_node)),
5192 fold (build1 (code, type, integer_zero_node))));
5194 else if (TREE_CODE_CLASS (code) == '<'
5195 && TREE_CODE (arg0) == COMPOUND_EXPR)
5196 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
5197 fold (build (code, type, TREE_OPERAND (arg0, 1), arg1)));
5198 else if (TREE_CODE_CLASS (code) == '<'
5199 && TREE_CODE (arg1) == COMPOUND_EXPR)
5200 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
5201 fold (build (code, type, arg0, TREE_OPERAND (arg1, 1))));
5202 else if (TREE_CODE_CLASS (code) == '2'
5203 || TREE_CODE_CLASS (code) == '<')
5205 if (TREE_CODE (arg1) == COMPOUND_EXPR
5206 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg1, 0))
5207 && ! TREE_SIDE_EFFECTS (arg0))
5208 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
5209 fold (build (code, type,
5210 arg0, TREE_OPERAND (arg1, 1))));
5211 else if ((TREE_CODE (arg1) == COND_EXPR
5212 || (TREE_CODE_CLASS (TREE_CODE (arg1)) == '<'
5213 && TREE_CODE_CLASS (code) != '<'))
5214 && (TREE_CODE (arg0) != COND_EXPR
5215 || count_cond (arg0, 25) + count_cond (arg1, 25) <= 25)
5216 && (! TREE_SIDE_EFFECTS (arg0)
5217 || ((*lang_hooks.decls.global_bindings_p) () == 0
5218 && ! CONTAINS_PLACEHOLDER_P (arg0))))
5220 fold_binary_op_with_conditional_arg (code, type, arg1, arg0,
5221 /*cond_first_p=*/0);
5222 else if (TREE_CODE (arg0) == COMPOUND_EXPR)
5223 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
5224 fold (build (code, type, TREE_OPERAND (arg0, 1), arg1)));
5225 else if ((TREE_CODE (arg0) == COND_EXPR
5226 || (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
5227 && TREE_CODE_CLASS (code) != '<'))
5228 && (TREE_CODE (arg1) != COND_EXPR
5229 || count_cond (arg0, 25) + count_cond (arg1, 25) <= 25)
5230 && (! TREE_SIDE_EFFECTS (arg1)
5231 || ((*lang_hooks.decls.global_bindings_p) () == 0
5232 && ! CONTAINS_PLACEHOLDER_P (arg1))))
5234 fold_binary_op_with_conditional_arg (code, type, arg0, arg1,
5235 /*cond_first_p=*/1);
5249 return fold (DECL_INITIAL (t));
5254 case FIX_TRUNC_EXPR:
5255 /* Other kinds of FIX are not handled properly by fold_convert. */
5257 if (TREE_TYPE (TREE_OPERAND (t, 0)) == TREE_TYPE (t))
5258 return TREE_OPERAND (t, 0);
5260 /* Handle cases of two conversions in a row. */
5261 if (TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
5262 || TREE_CODE (TREE_OPERAND (t, 0)) == CONVERT_EXPR)
5264 tree inside_type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
5265 tree inter_type = TREE_TYPE (TREE_OPERAND (t, 0));
5266 tree final_type = TREE_TYPE (t);
5267 int inside_int = INTEGRAL_TYPE_P (inside_type);
5268 int inside_ptr = POINTER_TYPE_P (inside_type);
5269 int inside_float = FLOAT_TYPE_P (inside_type);
5270 unsigned int inside_prec = TYPE_PRECISION (inside_type);
5271 int inside_unsignedp = TREE_UNSIGNED (inside_type);
5272 int inter_int = INTEGRAL_TYPE_P (inter_type);
5273 int inter_ptr = POINTER_TYPE_P (inter_type);
5274 int inter_float = FLOAT_TYPE_P (inter_type);
5275 unsigned int inter_prec = TYPE_PRECISION (inter_type);
5276 int inter_unsignedp = TREE_UNSIGNED (inter_type);
5277 int final_int = INTEGRAL_TYPE_P (final_type);
5278 int final_ptr = POINTER_TYPE_P (final_type);
5279 int final_float = FLOAT_TYPE_P (final_type);
5280 unsigned int final_prec = TYPE_PRECISION (final_type);
5281 int final_unsignedp = TREE_UNSIGNED (final_type);
5283 /* In addition to the cases of two conversions in a row
5284 handled below, if we are converting something to its own
5285 type via an object of identical or wider precision, neither
5286 conversion is needed. */
5287 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (final_type)
5288 && ((inter_int && final_int) || (inter_float && final_float))
5289 && inter_prec >= final_prec)
5290 return convert (final_type, TREE_OPERAND (TREE_OPERAND (t, 0), 0));
5292 /* Likewise, if the intermediate and final types are either both
5293 float or both integer, we don't need the middle conversion if
5294 it is wider than the final type and doesn't change the signedness
5295 (for integers). Avoid this if the final type is a pointer
5296 since then we sometimes need the inner conversion. Likewise if
5297 the outer has a precision not equal to the size of its mode. */
5298 if ((((inter_int || inter_ptr) && (inside_int || inside_ptr))
5299 || (inter_float && inside_float))
5300 && inter_prec >= inside_prec
5301 && (inter_float || inter_unsignedp == inside_unsignedp)
5302 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (final_type))
5303 && TYPE_MODE (final_type) == TYPE_MODE (inter_type))
5305 return convert (final_type, TREE_OPERAND (TREE_OPERAND (t, 0), 0));
5307 /* If we have a sign-extension of a zero-extended value, we can
5308 replace that by a single zero-extension. */
5309 if (inside_int && inter_int && final_int
5310 && inside_prec < inter_prec && inter_prec < final_prec
5311 && inside_unsignedp && !inter_unsignedp)
5312 return convert (final_type, TREE_OPERAND (TREE_OPERAND (t, 0), 0));
5314 /* Two conversions in a row are not needed unless:
5315 - some conversion is floating-point (overstrict for now), or
5316 - the intermediate type is narrower than both initial and
5318 - the intermediate type and innermost type differ in signedness,
5319 and the outermost type is wider than the intermediate, or
5320 - the initial type is a pointer type and the precisions of the
5321 intermediate and final types differ, or
5322 - the final type is a pointer type and the precisions of the
5323 initial and intermediate types differ. */
5324 if (! inside_float && ! inter_float && ! final_float
5325 && (inter_prec > inside_prec || inter_prec > final_prec)
5326 && ! (inside_int && inter_int
5327 && inter_unsignedp != inside_unsignedp
5328 && inter_prec < final_prec)
5329 && ((inter_unsignedp && inter_prec > inside_prec)
5330 == (final_unsignedp && final_prec > inter_prec))
5331 && ! (inside_ptr && inter_prec != final_prec)
5332 && ! (final_ptr && inside_prec != inter_prec)
5333 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (final_type))
5334 && TYPE_MODE (final_type) == TYPE_MODE (inter_type))
5336 return convert (final_type, TREE_OPERAND (TREE_OPERAND (t, 0), 0));
5339 if (TREE_CODE (TREE_OPERAND (t, 0)) == MODIFY_EXPR
5340 && TREE_CONSTANT (TREE_OPERAND (TREE_OPERAND (t, 0), 1))
5341 /* Detect assigning a bitfield. */
5342 && !(TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 0)) == COMPONENT_REF
5343 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t, 0), 0), 1))))
5345 /* Don't leave an assignment inside a conversion
5346 unless assigning a bitfield. */
5347 tree prev = TREE_OPERAND (t, 0);
5350 TREE_OPERAND (t, 0) = TREE_OPERAND (prev, 1);
5351 /* First do the assignment, then return converted constant. */
5352 t = build (COMPOUND_EXPR, TREE_TYPE (t), prev, fold (t));
5357 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
5358 constants (if x has signed type, the sign bit cannot be set
5359 in c). This folds extension into the BIT_AND_EXPR. */
5360 if (INTEGRAL_TYPE_P (TREE_TYPE (t))
5361 && TREE_CODE (TREE_TYPE (t)) != BOOLEAN_TYPE
5362 && TREE_CODE (TREE_OPERAND (t, 0)) == BIT_AND_EXPR
5363 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 1)) == INTEGER_CST)
5365 tree and = TREE_OPERAND (t, 0);
5366 tree and0 = TREE_OPERAND (and, 0), and1 = TREE_OPERAND (and, 1);
5369 if (TREE_UNSIGNED (TREE_TYPE (and))
5370 || (TYPE_PRECISION (TREE_TYPE (t))
5371 <= TYPE_PRECISION (TREE_TYPE (and))))
5373 else if (TYPE_PRECISION (TREE_TYPE (and1))
5374 <= HOST_BITS_PER_WIDE_INT
5375 && host_integerp (and1, 1))
5377 unsigned HOST_WIDE_INT cst;
5379 cst = tree_low_cst (and1, 1);
5380 cst &= (HOST_WIDE_INT) -1
5381 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
5382 change = (cst == 0);
5383 #ifdef LOAD_EXTEND_OP
5385 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
5388 tree uns = (*lang_hooks.types.unsigned_type) (TREE_TYPE (and0));
5389 and0 = convert (uns, and0);
5390 and1 = convert (uns, and1);
5395 return fold (build (BIT_AND_EXPR, TREE_TYPE (t),
5396 convert (TREE_TYPE (t), and0),
5397 convert (TREE_TYPE (t), and1)));
5402 if (TREE_CONSTANT (t) != TREE_CONSTANT (arg0))
5406 TREE_CONSTANT (t) = TREE_CONSTANT (arg0);
5410 return fold_convert (t, arg0);
5412 case VIEW_CONVERT_EXPR:
5413 if (TREE_CODE (TREE_OPERAND (t, 0)) == VIEW_CONVERT_EXPR)
5414 return build1 (VIEW_CONVERT_EXPR, type,
5415 TREE_OPERAND (TREE_OPERAND (t, 0), 0));
5419 if (TREE_CODE (arg0) == CONSTRUCTOR
5420 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
5422 tree m = purpose_member (arg1, CONSTRUCTOR_ELTS (arg0));
5429 if (TREE_CONSTANT (t) != wins)
5433 TREE_CONSTANT (t) = wins;
5440 if (TREE_CODE (arg0) == INTEGER_CST)
5442 unsigned HOST_WIDE_INT low;
5444 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
5445 TREE_INT_CST_HIGH (arg0),
5447 t = build_int_2 (low, high);
5448 TREE_TYPE (t) = type;
5450 = (TREE_OVERFLOW (arg0)
5451 | force_fit_type (t, overflow && !TREE_UNSIGNED (type)));
5452 TREE_CONSTANT_OVERFLOW (t)
5453 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg0);
5455 else if (TREE_CODE (arg0) == REAL_CST)
5456 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
5458 else if (TREE_CODE (arg0) == NEGATE_EXPR)
5459 return TREE_OPERAND (arg0, 0);
5460 /* Convert -((double)float) into (double)(-float). */
5461 else if (TREE_CODE (arg0) == NOP_EXPR
5462 && TREE_CODE (type) == REAL_TYPE)
5464 tree targ0 = strip_float_extensions (arg0);
5466 return convert (type, build1 (NEGATE_EXPR, TREE_TYPE (targ0), targ0));
5470 /* Convert - (a - b) to (b - a) for non-floating-point. */
5471 else if (TREE_CODE (arg0) == MINUS_EXPR
5472 && (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
5473 return build (MINUS_EXPR, type, TREE_OPERAND (arg0, 1),
5474 TREE_OPERAND (arg0, 0));
5476 /* Convert -f(x) into f(-x) where f is sin, tan or atan. */
5477 switch (builtin_mathfn_code (arg0))
5486 case BUILT_IN_ATANF:
5487 case BUILT_IN_ATANL:
5488 if (negate_expr_p (TREE_VALUE (TREE_OPERAND (arg0, 1))))
5490 tree fndecl, arg, arglist;
5492 fndecl = get_callee_fndecl (arg0);
5493 arg = TREE_VALUE (TREE_OPERAND (arg0, 1));
5494 arg = fold (build1 (NEGATE_EXPR, type, arg));
5495 arglist = build_tree_list (NULL_TREE, arg);
5496 return build_function_call_expr (fndecl, arglist);
5508 if (TREE_CODE (arg0) == INTEGER_CST)
5510 /* If the value is unsigned, then the absolute value is
5511 the same as the ordinary value. */
5512 if (TREE_UNSIGNED (type))
5514 /* Similarly, if the value is non-negative. */
5515 else if (INT_CST_LT (integer_minus_one_node, arg0))
5517 /* If the value is negative, then the absolute value is
5521 unsigned HOST_WIDE_INT low;
5523 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
5524 TREE_INT_CST_HIGH (arg0),
5526 t = build_int_2 (low, high);
5527 TREE_TYPE (t) = type;
5529 = (TREE_OVERFLOW (arg0)
5530 | force_fit_type (t, overflow));
5531 TREE_CONSTANT_OVERFLOW (t)
5532 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg0);
5535 else if (TREE_CODE (arg0) == REAL_CST)
5537 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
5538 t = build_real (type,
5539 REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
5542 else if (TREE_CODE (arg0) == NEGATE_EXPR)
5543 return fold (build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0)));
5544 /* Convert fabs((double)float) into (double)fabsf(float). */
5545 else if (TREE_CODE (arg0) == NOP_EXPR
5546 && TREE_CODE (type) == REAL_TYPE)
5548 tree targ0 = strip_float_extensions (arg0);
5550 return convert (type, fold (build1 (ABS_EXPR, TREE_TYPE (targ0),
5553 else if (tree_expr_nonnegative_p (arg0))
5558 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
5559 return convert (type, arg0);
5560 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
5561 return build (COMPLEX_EXPR, type,
5562 TREE_OPERAND (arg0, 0),
5563 negate_expr (TREE_OPERAND (arg0, 1)));
5564 else if (TREE_CODE (arg0) == COMPLEX_CST)
5565 return build_complex (type, TREE_REALPART (arg0),
5566 negate_expr (TREE_IMAGPART (arg0)));
5567 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
5568 return fold (build (TREE_CODE (arg0), type,
5569 fold (build1 (CONJ_EXPR, type,
5570 TREE_OPERAND (arg0, 0))),
5571 fold (build1 (CONJ_EXPR,
5572 type, TREE_OPERAND (arg0, 1)))));
5573 else if (TREE_CODE (arg0) == CONJ_EXPR)
5574 return TREE_OPERAND (arg0, 0);
5580 t = build_int_2 (~ TREE_INT_CST_LOW (arg0),
5581 ~ TREE_INT_CST_HIGH (arg0));
5582 TREE_TYPE (t) = type;
5583 force_fit_type (t, 0);
5584 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg0);
5585 TREE_CONSTANT_OVERFLOW (t) = TREE_CONSTANT_OVERFLOW (arg0);
5587 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
5588 return TREE_OPERAND (arg0, 0);
5592 /* A + (-B) -> A - B */
5593 if (TREE_CODE (arg1) == NEGATE_EXPR)
5594 return fold (build (MINUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
5595 /* (-A) + B -> B - A */
5596 if (TREE_CODE (arg0) == NEGATE_EXPR)
5597 return fold (build (MINUS_EXPR, type, arg1, TREE_OPERAND (arg0, 0)));
5598 else if (! FLOAT_TYPE_P (type))
5600 if (integer_zerop (arg1))
5601 return non_lvalue (convert (type, arg0));
5603 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
5604 with a constant, and the two constants have no bits in common,
5605 we should treat this as a BIT_IOR_EXPR since this may produce more
5607 if (TREE_CODE (arg0) == BIT_AND_EXPR
5608 && TREE_CODE (arg1) == BIT_AND_EXPR
5609 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
5610 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
5611 && integer_zerop (const_binop (BIT_AND_EXPR,
5612 TREE_OPERAND (arg0, 1),
5613 TREE_OPERAND (arg1, 1), 0)))
5615 code = BIT_IOR_EXPR;
5619 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
5620 (plus (plus (mult) (mult)) (foo)) so that we can
5621 take advantage of the factoring cases below. */
5622 if ((TREE_CODE (arg0) == PLUS_EXPR
5623 && TREE_CODE (arg1) == MULT_EXPR)
5624 || (TREE_CODE (arg1) == PLUS_EXPR
5625 && TREE_CODE (arg0) == MULT_EXPR))
5627 tree parg0, parg1, parg, marg;
5629 if (TREE_CODE (arg0) == PLUS_EXPR)
5630 parg = arg0, marg = arg1;
5632 parg = arg1, marg = arg0;
5633 parg0 = TREE_OPERAND (parg, 0);
5634 parg1 = TREE_OPERAND (parg, 1);
5638 if (TREE_CODE (parg0) == MULT_EXPR
5639 && TREE_CODE (parg1) != MULT_EXPR)
5640 return fold (build (PLUS_EXPR, type,
5641 fold (build (PLUS_EXPR, type,
5642 convert (type, parg0),
5643 convert (type, marg))),
5644 convert (type, parg1)));
5645 if (TREE_CODE (parg0) != MULT_EXPR
5646 && TREE_CODE (parg1) == MULT_EXPR)
5647 return fold (build (PLUS_EXPR, type,
5648 fold (build (PLUS_EXPR, type,
5649 convert (type, parg1),
5650 convert (type, marg))),
5651 convert (type, parg0)));
5654 if (TREE_CODE (arg0) == MULT_EXPR && TREE_CODE (arg1) == MULT_EXPR)
5656 tree arg00, arg01, arg10, arg11;
5657 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
5659 /* (A * C) + (B * C) -> (A+B) * C.
5660 We are most concerned about the case where C is a constant,
5661 but other combinations show up during loop reduction. Since
5662 it is not difficult, try all four possibilities. */
5664 arg00 = TREE_OPERAND (arg0, 0);
5665 arg01 = TREE_OPERAND (arg0, 1);
5666 arg10 = TREE_OPERAND (arg1, 0);
5667 arg11 = TREE_OPERAND (arg1, 1);
5670 if (operand_equal_p (arg01, arg11, 0))
5671 same = arg01, alt0 = arg00, alt1 = arg10;
5672 else if (operand_equal_p (arg00, arg10, 0))
5673 same = arg00, alt0 = arg01, alt1 = arg11;
5674 else if (operand_equal_p (arg00, arg11, 0))
5675 same = arg00, alt0 = arg01, alt1 = arg10;
5676 else if (operand_equal_p (arg01, arg10, 0))
5677 same = arg01, alt0 = arg00, alt1 = arg11;
5679 /* No identical multiplicands; see if we can find a common
5680 power-of-two factor in non-power-of-two multiplies. This
5681 can help in multi-dimensional array access. */
5682 else if (TREE_CODE (arg01) == INTEGER_CST
5683 && TREE_CODE (arg11) == INTEGER_CST
5684 && TREE_INT_CST_HIGH (arg01) == 0
5685 && TREE_INT_CST_HIGH (arg11) == 0)
5687 HOST_WIDE_INT int01, int11, tmp;
5688 int01 = TREE_INT_CST_LOW (arg01);
5689 int11 = TREE_INT_CST_LOW (arg11);
5691 /* Move min of absolute values to int11. */
5692 if ((int01 >= 0 ? int01 : -int01)
5693 < (int11 >= 0 ? int11 : -int11))
5695 tmp = int01, int01 = int11, int11 = tmp;
5696 alt0 = arg00, arg00 = arg10, arg10 = alt0;
5697 alt0 = arg01, arg01 = arg11, arg11 = alt0;
5700 if (exact_log2 (int11) > 0 && int01 % int11 == 0)
5702 alt0 = fold (build (MULT_EXPR, type, arg00,
5703 build_int_2 (int01 / int11, 0)));
5710 return fold (build (MULT_EXPR, type,
5711 fold (build (PLUS_EXPR, type, alt0, alt1)),
5717 /* See if ARG1 is zero and X + ARG1 reduces to X. */
5718 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
5719 return non_lvalue (convert (type, arg0));
5721 /* Likewise if the operands are reversed. */
5722 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
5723 return non_lvalue (convert (type, arg1));
5725 /* Convert x+x into x*2.0. */
5726 if (operand_equal_p (arg0, arg1, 0)
5727 && SCALAR_FLOAT_TYPE_P (type))
5728 return fold (build (MULT_EXPR, type, arg0,
5729 build_real (type, dconst2)));
5731 /* Convert x*c+x into x*(c+1). */
5732 if (flag_unsafe_math_optimizations
5733 && TREE_CODE (arg0) == MULT_EXPR
5734 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
5735 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg0, 1))
5736 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
5740 c = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
5741 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
5742 return fold (build (MULT_EXPR, type, arg1,
5743 build_real (type, c)));
5746 /* Convert x+x*c into x*(c+1). */
5747 if (flag_unsafe_math_optimizations
5748 && TREE_CODE (arg1) == MULT_EXPR
5749 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST
5750 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg1, 1))
5751 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
5755 c = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
5756 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
5757 return fold (build (MULT_EXPR, type, arg0,
5758 build_real (type, c)));
5761 /* Convert x*c1+x*c2 into x*(c1+c2). */
5762 if (flag_unsafe_math_optimizations
5763 && TREE_CODE (arg0) == MULT_EXPR
5764 && TREE_CODE (arg1) == MULT_EXPR
5765 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
5766 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg0, 1))
5767 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST
5768 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg1, 1))
5769 && operand_equal_p (TREE_OPERAND (arg0, 0),
5770 TREE_OPERAND (arg1, 0), 0))
5772 REAL_VALUE_TYPE c1, c2;
5774 c1 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
5775 c2 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
5776 real_arithmetic (&c1, PLUS_EXPR, &c1, &c2);
5777 return fold (build (MULT_EXPR, type,
5778 TREE_OPERAND (arg0, 0),
5779 build_real (type, c1)));
5784 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
5785 is a rotate of A by C1 bits. */
5786 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
5787 is a rotate of A by B bits. */
5789 enum tree_code code0, code1;
5790 code0 = TREE_CODE (arg0);
5791 code1 = TREE_CODE (arg1);
5792 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
5793 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
5794 && operand_equal_p (TREE_OPERAND (arg0, 0),
5795 TREE_OPERAND (arg1, 0), 0)
5796 && TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
5798 tree tree01, tree11;
5799 enum tree_code code01, code11;
5801 tree01 = TREE_OPERAND (arg0, 1);
5802 tree11 = TREE_OPERAND (arg1, 1);
5803 STRIP_NOPS (tree01);
5804 STRIP_NOPS (tree11);
5805 code01 = TREE_CODE (tree01);
5806 code11 = TREE_CODE (tree11);
5807 if (code01 == INTEGER_CST
5808 && code11 == INTEGER_CST
5809 && TREE_INT_CST_HIGH (tree01) == 0
5810 && TREE_INT_CST_HIGH (tree11) == 0
5811 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
5812 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
5813 return build (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
5814 code0 == LSHIFT_EXPR ? tree01 : tree11);
5815 else if (code11 == MINUS_EXPR)
5817 tree tree110, tree111;
5818 tree110 = TREE_OPERAND (tree11, 0);
5819 tree111 = TREE_OPERAND (tree11, 1);
5820 STRIP_NOPS (tree110);
5821 STRIP_NOPS (tree111);
5822 if (TREE_CODE (tree110) == INTEGER_CST
5823 && 0 == compare_tree_int (tree110,
5825 (TREE_TYPE (TREE_OPERAND
5827 && operand_equal_p (tree01, tree111, 0))
5828 return build ((code0 == LSHIFT_EXPR
5831 type, TREE_OPERAND (arg0, 0), tree01);
5833 else if (code01 == MINUS_EXPR)
5835 tree tree010, tree011;
5836 tree010 = TREE_OPERAND (tree01, 0);
5837 tree011 = TREE_OPERAND (tree01, 1);
5838 STRIP_NOPS (tree010);
5839 STRIP_NOPS (tree011);
5840 if (TREE_CODE (tree010) == INTEGER_CST
5841 && 0 == compare_tree_int (tree010,
5843 (TREE_TYPE (TREE_OPERAND
5845 && operand_equal_p (tree11, tree011, 0))
5846 return build ((code0 != LSHIFT_EXPR
5849 type, TREE_OPERAND (arg0, 0), tree11);
5855 /* In most languages, can't associate operations on floats through
5856 parentheses. Rather than remember where the parentheses were, we
5857 don't associate floats at all, unless the user has specified
5858 -funsafe-math-optimizations. */
5861 && (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
5863 tree var0, con0, lit0, minus_lit0;
5864 tree var1, con1, lit1, minus_lit1;
5866 /* Split both trees into variables, constants, and literals. Then
5867 associate each group together, the constants with literals,
5868 then the result with variables. This increases the chances of
5869 literals being recombined later and of generating relocatable
5870 expressions for the sum of a constant and literal. */
5871 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
5872 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
5873 code == MINUS_EXPR);
5875 /* Only do something if we found more than two objects. Otherwise,
5876 nothing has changed and we risk infinite recursion. */
5877 if (2 < ((var0 != 0) + (var1 != 0)
5878 + (con0 != 0) + (con1 != 0)
5879 + (lit0 != 0) + (lit1 != 0)
5880 + (minus_lit0 != 0) + (minus_lit1 != 0)))
5882 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
5883 if (code == MINUS_EXPR)
5886 var0 = associate_trees (var0, var1, code, type);
5887 con0 = associate_trees (con0, con1, code, type);
5888 lit0 = associate_trees (lit0, lit1, code, type);
5889 minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type);
5891 /* Preserve the MINUS_EXPR if the negative part of the literal is
5892 greater than the positive part. Otherwise, the multiplicative
5893 folding code (i.e extract_muldiv) may be fooled in case
5894 unsigned constants are subtracted, like in the following
5895 example: ((X*2 + 4) - 8U)/2. */
5896 if (minus_lit0 && lit0)
5898 if (TREE_CODE (lit0) == INTEGER_CST
5899 && TREE_CODE (minus_lit0) == INTEGER_CST
5900 && tree_int_cst_lt (lit0, minus_lit0))
5902 minus_lit0 = associate_trees (minus_lit0, lit0,
5908 lit0 = associate_trees (lit0, minus_lit0,
5916 return convert (type, associate_trees (var0, minus_lit0,
5920 con0 = associate_trees (con0, minus_lit0,
5922 return convert (type, associate_trees (var0, con0,
5927 con0 = associate_trees (con0, lit0, code, type);
5928 return convert (type, associate_trees (var0, con0, code, type));
5934 t1 = const_binop (code, arg0, arg1, 0);
5935 if (t1 != NULL_TREE)
5937 /* The return value should always have
5938 the same type as the original expression. */
5939 if (TREE_TYPE (t1) != TREE_TYPE (t))
5940 t1 = convert (TREE_TYPE (t), t1);
5947 /* A - (-B) -> A + B */
5948 if (TREE_CODE (arg1) == NEGATE_EXPR)
5949 return fold (build (PLUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
5950 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
5951 if (TREE_CODE (arg0) == NEGATE_EXPR
5952 && (FLOAT_TYPE_P (type)
5953 || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv))
5954 && negate_expr_p (arg1)
5955 && (! TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
5956 && (! TREE_SIDE_EFFECTS (arg1) || TREE_CONSTANT (arg0)))
5957 return fold (build (MINUS_EXPR, type, negate_expr (arg1),
5958 TREE_OPERAND (arg0, 0)));
5960 if (! FLOAT_TYPE_P (type))
5962 if (! wins && integer_zerop (arg0))
5963 return negate_expr (convert (type, arg1));
5964 if (integer_zerop (arg1))
5965 return non_lvalue (convert (type, arg0));
5967 /* (A * C) - (B * C) -> (A-B) * C. Since we are most concerned
5968 about the case where C is a constant, just try one of the
5969 four possibilities. */
5971 if (TREE_CODE (arg0) == MULT_EXPR && TREE_CODE (arg1) == MULT_EXPR
5972 && operand_equal_p (TREE_OPERAND (arg0, 1),
5973 TREE_OPERAND (arg1, 1), 0))
5974 return fold (build (MULT_EXPR, type,
5975 fold (build (MINUS_EXPR, type,
5976 TREE_OPERAND (arg0, 0),
5977 TREE_OPERAND (arg1, 0))),
5978 TREE_OPERAND (arg0, 1)));
5980 /* Fold A - (A & B) into ~B & A. */
5981 if (!TREE_SIDE_EFFECTS (arg0)
5982 && TREE_CODE (arg1) == BIT_AND_EXPR)
5984 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
5985 return fold (build (BIT_AND_EXPR, type,
5986 fold (build1 (BIT_NOT_EXPR, type,
5987 TREE_OPERAND (arg1, 0))),
5989 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
5990 return fold (build (BIT_AND_EXPR, type,
5991 fold (build1 (BIT_NOT_EXPR, type,
5992 TREE_OPERAND (arg1, 1))),
5997 /* See if ARG1 is zero and X - ARG1 reduces to X. */
5998 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
5999 return non_lvalue (convert (type, arg0));
6001 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
6002 ARG0 is zero and X + ARG0 reduces to X, since that would mean
6003 (-ARG1 + ARG0) reduces to -ARG1. */
6004 else if (!wins && fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
6005 return negate_expr (convert (type, arg1));
6007 /* Fold &x - &x. This can happen from &x.foo - &x.
6008 This is unsafe for certain floats even in non-IEEE formats.
6009 In IEEE, it is unsafe because it does wrong for NaNs.
6010 Also note that operand_equal_p is always false if an operand
6013 if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
6014 && operand_equal_p (arg0, arg1, 0))
6015 return convert (type, integer_zero_node);
6020 /* (-A) * (-B) -> A * B */
6021 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
6022 return fold (build (MULT_EXPR, type,
6023 TREE_OPERAND (arg0, 0),
6024 negate_expr (arg1)));
6025 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
6026 return fold (build (MULT_EXPR, type,
6028 TREE_OPERAND (arg1, 0)));
6030 if (! FLOAT_TYPE_P (type))
6032 if (integer_zerop (arg1))
6033 return omit_one_operand (type, arg1, arg0);
6034 if (integer_onep (arg1))
6035 return non_lvalue (convert (type, arg0));
6037 /* (a * (1 << b)) is (a << b) */
6038 if (TREE_CODE (arg1) == LSHIFT_EXPR
6039 && integer_onep (TREE_OPERAND (arg1, 0)))
6040 return fold (build (LSHIFT_EXPR, type, arg0,
6041 TREE_OPERAND (arg1, 1)));
6042 if (TREE_CODE (arg0) == LSHIFT_EXPR
6043 && integer_onep (TREE_OPERAND (arg0, 0)))
6044 return fold (build (LSHIFT_EXPR, type, arg1,
6045 TREE_OPERAND (arg0, 1)));
6047 if (TREE_CODE (arg1) == INTEGER_CST
6048 && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0),
6049 convert (type, arg1),
6051 return convert (type, tem);
6056 /* Maybe fold x * 0 to 0. The expressions aren't the same
6057 when x is NaN, since x * 0 is also NaN. Nor are they the
6058 same in modes with signed zeros, since multiplying a
6059 negative value by 0 gives -0, not +0. */
6060 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
6061 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
6062 && real_zerop (arg1))
6063 return omit_one_operand (type, arg1, arg0);
6064 /* In IEEE floating point, x*1 is not equivalent to x for snans. */
6065 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
6066 && real_onep (arg1))
6067 return non_lvalue (convert (type, arg0));
6069 /* Transform x * -1.0 into -x. */
6070 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
6071 && real_minus_onep (arg1))
6072 return fold (build1 (NEGATE_EXPR, type, arg0));
6074 /* Convert (C1/X)*C2 into (C1*C2)/X. */
6075 if (flag_unsafe_math_optimizations
6076 && TREE_CODE (arg0) == RDIV_EXPR
6077 && TREE_CODE (arg1) == REAL_CST
6078 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
6080 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
6083 return fold (build (RDIV_EXPR, type, tem,
6084 TREE_OPERAND (arg0, 1)));
6087 if (flag_unsafe_math_optimizations)
6089 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
6090 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
6092 /* Optimizations of sqrt(...)*sqrt(...). */
6093 if ((fcode0 == BUILT_IN_SQRT && fcode1 == BUILT_IN_SQRT)
6094 || (fcode0 == BUILT_IN_SQRTF && fcode1 == BUILT_IN_SQRTF)
6095 || (fcode0 == BUILT_IN_SQRTL && fcode1 == BUILT_IN_SQRTL))
6097 tree sqrtfn, arg, arglist;
6098 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
6099 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
6101 /* Optimize sqrt(x)*sqrt(x) as x. */
6102 if (operand_equal_p (arg00, arg10, 0)
6103 && ! HONOR_SNANS (TYPE_MODE (type)))
6106 /* Optimize sqrt(x)*sqrt(y) as sqrt(x*y). */
6107 sqrtfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
6108 arg = fold (build (MULT_EXPR, type, arg00, arg10));
6109 arglist = build_tree_list (NULL_TREE, arg);
6110 return build_function_call_expr (sqrtfn, arglist);
6113 /* Optimize expN(x)*expN(y) as expN(x+y). */
6114 if (fcode0 == fcode1
6115 && (fcode0 == BUILT_IN_EXP
6116 || fcode0 == BUILT_IN_EXPF
6117 || fcode0 == BUILT_IN_EXPL
6118 || fcode0 == BUILT_IN_EXP2
6119 || fcode0 == BUILT_IN_EXP2F
6120 || fcode0 == BUILT_IN_EXP2L
6121 || fcode0 == BUILT_IN_EXP10
6122 || fcode0 == BUILT_IN_EXP10F
6123 || fcode0 == BUILT_IN_EXP10L
6124 || fcode0 == BUILT_IN_POW10
6125 || fcode0 == BUILT_IN_POW10F
6126 || fcode0 == BUILT_IN_POW10L))
6128 tree expfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
6129 tree arg = build (PLUS_EXPR, type,
6130 TREE_VALUE (TREE_OPERAND (arg0, 1)),
6131 TREE_VALUE (TREE_OPERAND (arg1, 1)));
6132 tree arglist = build_tree_list (NULL_TREE, fold (arg));
6133 return build_function_call_expr (expfn, arglist);
6136 /* Optimizations of pow(...)*pow(...). */
6137 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
6138 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
6139 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
6141 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
6142 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
6144 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
6145 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
6148 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
6149 if (operand_equal_p (arg01, arg11, 0))
6151 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
6152 tree arg = build (MULT_EXPR, type, arg00, arg10);
6153 tree arglist = tree_cons (NULL_TREE, fold (arg),
6154 build_tree_list (NULL_TREE,
6156 return build_function_call_expr (powfn, arglist);
6159 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
6160 if (operand_equal_p (arg00, arg10, 0))
6162 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
6163 tree arg = fold (build (PLUS_EXPR, type, arg01, arg11));
6164 tree arglist = tree_cons (NULL_TREE, arg00,
6165 build_tree_list (NULL_TREE,
6167 return build_function_call_expr (powfn, arglist);
6171 /* Optimize tan(x)*cos(x) as sin(x). */
6172 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
6173 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
6174 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
6175 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
6176 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
6177 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
6178 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
6179 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
6187 sinfn = implicit_built_in_decls[BUILT_IN_SIN];
6191 sinfn = implicit_built_in_decls[BUILT_IN_SINF];
6195 sinfn = implicit_built_in_decls[BUILT_IN_SINL];
6201 if (sinfn != NULL_TREE)
6202 return build_function_call_expr (sinfn,
6203 TREE_OPERAND (arg0, 1));
6206 /* Optimize x*pow(x,c) as pow(x,c+1). */
6207 if (fcode1 == BUILT_IN_POW
6208 || fcode1 == BUILT_IN_POWF
6209 || fcode1 == BUILT_IN_POWL)
6211 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
6212 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
6214 if (TREE_CODE (arg11) == REAL_CST
6215 && ! TREE_CONSTANT_OVERFLOW (arg11)
6216 && operand_equal_p (arg0, arg10, 0))
6218 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
6222 c = TREE_REAL_CST (arg11);
6223 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
6224 arg = build_real (type, c);
6225 arglist = build_tree_list (NULL_TREE, arg);
6226 arglist = tree_cons (NULL_TREE, arg0, arglist);
6227 return build_function_call_expr (powfn, arglist);
6231 /* Optimize pow(x,c)*x as pow(x,c+1). */
6232 if (fcode0 == BUILT_IN_POW
6233 || fcode0 == BUILT_IN_POWF
6234 || fcode0 == BUILT_IN_POWL)
6236 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
6237 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
6239 if (TREE_CODE (arg01) == REAL_CST
6240 && ! TREE_CONSTANT_OVERFLOW (arg01)
6241 && operand_equal_p (arg1, arg00, 0))
6243 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
6247 c = TREE_REAL_CST (arg01);
6248 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
6249 arg = build_real (type, c);
6250 arglist = build_tree_list (NULL_TREE, arg);
6251 arglist = tree_cons (NULL_TREE, arg1, arglist);
6252 return build_function_call_expr (powfn, arglist);
6256 /* Optimize x*x as pow(x,2.0), which is expanded as x*x. */
6258 && operand_equal_p (arg0, arg1, 0))
6262 if (type == double_type_node)
6263 powfn = implicit_built_in_decls[BUILT_IN_POW];
6264 else if (type == float_type_node)
6265 powfn = implicit_built_in_decls[BUILT_IN_POWF];
6266 else if (type == long_double_type_node)
6267 powfn = implicit_built_in_decls[BUILT_IN_POWL];
6273 tree arg = build_real (type, dconst2);
6274 tree arglist = build_tree_list (NULL_TREE, arg);
6275 arglist = tree_cons (NULL_TREE, arg0, arglist);
6276 return build_function_call_expr (powfn, arglist);
6285 if (integer_all_onesp (arg1))
6286 return omit_one_operand (type, arg1, arg0);
6287 if (integer_zerop (arg1))
6288 return non_lvalue (convert (type, arg0));
6289 t1 = distribute_bit_expr (code, type, arg0, arg1);
6290 if (t1 != NULL_TREE)
6293 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
6295 This results in more efficient code for machines without a NAND
6296 instruction. Combine will canonicalize to the first form
6297 which will allow use of NAND instructions provided by the
6298 backend if they exist. */
6299 if (TREE_CODE (arg0) == BIT_NOT_EXPR
6300 && TREE_CODE (arg1) == BIT_NOT_EXPR)
6302 return fold (build1 (BIT_NOT_EXPR, type,
6303 build (BIT_AND_EXPR, type,
6304 TREE_OPERAND (arg0, 0),
6305 TREE_OPERAND (arg1, 0))));
6308 /* See if this can be simplified into a rotate first. If that
6309 is unsuccessful continue in the association code. */
6313 if (integer_zerop (arg1))
6314 return non_lvalue (convert (type, arg0));
6315 if (integer_all_onesp (arg1))
6316 return fold (build1 (BIT_NOT_EXPR, type, arg0));
6318 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
6319 with a constant, and the two constants have no bits in common,
6320 we should treat this as a BIT_IOR_EXPR since this may produce more
6322 if (TREE_CODE (arg0) == BIT_AND_EXPR
6323 && TREE_CODE (arg1) == BIT_AND_EXPR
6324 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
6325 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
6326 && integer_zerop (const_binop (BIT_AND_EXPR,
6327 TREE_OPERAND (arg0, 1),
6328 TREE_OPERAND (arg1, 1), 0)))
6330 code = BIT_IOR_EXPR;
6334 /* See if this can be simplified into a rotate first. If that
6335 is unsuccessful continue in the association code. */
6339 if (integer_all_onesp (arg1))
6340 return non_lvalue (convert (type, arg0));
6341 if (integer_zerop (arg1))
6342 return omit_one_operand (type, arg1, arg0);
6343 t1 = distribute_bit_expr (code, type, arg0, arg1);
6344 if (t1 != NULL_TREE)
6346 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
6347 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
6348 && TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
6351 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
6353 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
6354 && (~TREE_INT_CST_LOW (arg1)
6355 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
6356 return build1 (NOP_EXPR, type, TREE_OPERAND (arg0, 0));
6359 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
6361 This results in more efficient code for machines without a NOR
6362 instruction. Combine will canonicalize to the first form
6363 which will allow use of NOR instructions provided by the
6364 backend if they exist. */
6365 if (TREE_CODE (arg0) == BIT_NOT_EXPR
6366 && TREE_CODE (arg1) == BIT_NOT_EXPR)
6368 return fold (build1 (BIT_NOT_EXPR, type,
6369 build (BIT_IOR_EXPR, type,
6370 TREE_OPERAND (arg0, 0),
6371 TREE_OPERAND (arg1, 0))));
6377 /* Don't touch a floating-point divide by zero unless the mode
6378 of the constant can represent infinity. */
6379 if (TREE_CODE (arg1) == REAL_CST
6380 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
6381 && real_zerop (arg1))
6384 /* (-A) / (-B) -> A / B */
6385 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
6386 return fold (build (RDIV_EXPR, type,
6387 TREE_OPERAND (arg0, 0),
6388 negate_expr (arg1)));
6389 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
6390 return fold (build (RDIV_EXPR, type,
6392 TREE_OPERAND (arg1, 0)));
6394 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
6395 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
6396 && real_onep (arg1))
6397 return non_lvalue (convert (type, arg0));
6399 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
6400 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
6401 && real_minus_onep (arg1))
6402 return non_lvalue (convert (type, negate_expr (arg0)));
6404 /* If ARG1 is a constant, we can convert this to a multiply by the
6405 reciprocal. This does not have the same rounding properties,
6406 so only do this if -funsafe-math-optimizations. We can actually
6407 always safely do it if ARG1 is a power of two, but it's hard to
6408 tell if it is or not in a portable manner. */
6409 if (TREE_CODE (arg1) == REAL_CST)
6411 if (flag_unsafe_math_optimizations
6412 && 0 != (tem = const_binop (code, build_real (type, dconst1),
6414 return fold (build (MULT_EXPR, type, arg0, tem));
6415 /* Find the reciprocal if optimizing and the result is exact. */
6419 r = TREE_REAL_CST (arg1);
6420 if (exact_real_inverse (TYPE_MODE(TREE_TYPE(arg0)), &r))
6422 tem = build_real (type, r);
6423 return fold (build (MULT_EXPR, type, arg0, tem));
6427 /* Convert A/B/C to A/(B*C). */
6428 if (flag_unsafe_math_optimizations
6429 && TREE_CODE (arg0) == RDIV_EXPR)
6430 return fold (build (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
6431 fold (build (MULT_EXPR, type,
6432 TREE_OPERAND (arg0, 1), arg1))));
6434 /* Convert A/(B/C) to (A/B)*C. */
6435 if (flag_unsafe_math_optimizations
6436 && TREE_CODE (arg1) == RDIV_EXPR)
6437 return fold (build (MULT_EXPR, type,
6438 fold (build (RDIV_EXPR, type, arg0,
6439 TREE_OPERAND (arg1, 0))),
6440 TREE_OPERAND (arg1, 1)));
6442 /* Convert C1/(X*C2) into (C1/C2)/X. */
6443 if (flag_unsafe_math_optimizations
6444 && TREE_CODE (arg1) == MULT_EXPR
6445 && TREE_CODE (arg0) == REAL_CST
6446 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
6448 tree tem = const_binop (RDIV_EXPR, arg0,
6449 TREE_OPERAND (arg1, 1), 0);
6451 return fold (build (RDIV_EXPR, type, tem,
6452 TREE_OPERAND (arg1, 0)));
6455 if (flag_unsafe_math_optimizations)
6457 enum built_in_function fcode = builtin_mathfn_code (arg1);
6458 /* Optimize x/expN(y) into x*expN(-y). */
6459 if (fcode == BUILT_IN_EXP
6460 || fcode == BUILT_IN_EXPF
6461 || fcode == BUILT_IN_EXPL
6462 || fcode == BUILT_IN_EXP2
6463 || fcode == BUILT_IN_EXP2F
6464 || fcode == BUILT_IN_EXP2L
6465 || fcode == BUILT_IN_EXP10
6466 || fcode == BUILT_IN_EXP10F
6467 || fcode == BUILT_IN_EXP10L
6468 || fcode == BUILT_IN_POW10
6469 || fcode == BUILT_IN_POW10F
6470 || fcode == BUILT_IN_POW10L)
6472 tree expfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
6473 tree arg = build1 (NEGATE_EXPR, type,
6474 TREE_VALUE (TREE_OPERAND (arg1, 1)));
6475 tree arglist = build_tree_list (NULL_TREE, fold (arg));
6476 arg1 = build_function_call_expr (expfn, arglist);
6477 return fold (build (MULT_EXPR, type, arg0, arg1));
6480 /* Optimize x/pow(y,z) into x*pow(y,-z). */
6481 if (fcode == BUILT_IN_POW
6482 || fcode == BUILT_IN_POWF
6483 || fcode == BUILT_IN_POWL)
6485 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
6486 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
6487 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1, 1)));
6488 tree neg11 = fold (build1 (NEGATE_EXPR, type, arg11));
6489 tree arglist = tree_cons(NULL_TREE, arg10,
6490 build_tree_list (NULL_TREE, neg11));
6491 arg1 = build_function_call_expr (powfn, arglist);
6492 return fold (build (MULT_EXPR, type, arg0, arg1));
6496 if (flag_unsafe_math_optimizations)
6498 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
6499 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
6501 /* Optimize sin(x)/cos(x) as tan(x). */
6502 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
6503 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
6504 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
6505 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
6506 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
6510 if (fcode0 == BUILT_IN_SIN)
6511 tanfn = implicit_built_in_decls[BUILT_IN_TAN];
6512 else if (fcode0 == BUILT_IN_SINF)
6513 tanfn = implicit_built_in_decls[BUILT_IN_TANF];
6514 else if (fcode0 == BUILT_IN_SINL)
6515 tanfn = implicit_built_in_decls[BUILT_IN_TANL];
6519 if (tanfn != NULL_TREE)
6520 return build_function_call_expr (tanfn,
6521 TREE_OPERAND (arg0, 1));
6524 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
6525 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
6526 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
6527 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
6528 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
6529 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
6533 if (fcode0 == BUILT_IN_COS)
6534 tanfn = implicit_built_in_decls[BUILT_IN_TAN];
6535 else if (fcode0 == BUILT_IN_COSF)
6536 tanfn = implicit_built_in_decls[BUILT_IN_TANF];
6537 else if (fcode0 == BUILT_IN_COSL)
6538 tanfn = implicit_built_in_decls[BUILT_IN_TANL];
6542 if (tanfn != NULL_TREE)
6544 tree tmp = TREE_OPERAND (arg0, 1);
6545 tmp = build_function_call_expr (tanfn, tmp);
6546 return fold (build (RDIV_EXPR, type,
6547 build_real (type, dconst1),
6552 /* Optimize pow(x,c)/x as pow(x,c-1). */
6553 if (fcode0 == BUILT_IN_POW
6554 || fcode0 == BUILT_IN_POWF
6555 || fcode0 == BUILT_IN_POWL)
6557 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
6558 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0, 1)));
6559 if (TREE_CODE (arg01) == REAL_CST
6560 && ! TREE_CONSTANT_OVERFLOW (arg01)
6561 && operand_equal_p (arg1, arg00, 0))
6563 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
6567 c = TREE_REAL_CST (arg01);
6568 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
6569 arg = build_real (type, c);
6570 arglist = build_tree_list (NULL_TREE, arg);
6571 arglist = tree_cons (NULL_TREE, arg1, arglist);
6572 return build_function_call_expr (powfn, arglist);
6578 case TRUNC_DIV_EXPR:
6579 case ROUND_DIV_EXPR:
6580 case FLOOR_DIV_EXPR:
6582 case EXACT_DIV_EXPR:
6583 if (integer_onep (arg1))
6584 return non_lvalue (convert (type, arg0));
6585 if (integer_zerop (arg1))
6588 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
6589 operation, EXACT_DIV_EXPR.
6591 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
6592 At one time others generated faster code, it's not clear if they do
6593 after the last round to changes to the DIV code in expmed.c. */
6594 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
6595 && multiple_of_p (type, arg0, arg1))
6596 return fold (build (EXACT_DIV_EXPR, type, arg0, arg1));
6598 if (TREE_CODE (arg1) == INTEGER_CST
6599 && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0), arg1,
6601 return convert (type, tem);
6606 case FLOOR_MOD_EXPR:
6607 case ROUND_MOD_EXPR:
6608 case TRUNC_MOD_EXPR:
6609 if (integer_onep (arg1))
6610 return omit_one_operand (type, integer_zero_node, arg0);
6611 if (integer_zerop (arg1))
6614 if (TREE_CODE (arg1) == INTEGER_CST
6615 && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0), arg1,
6617 return convert (type, tem);
6623 if (integer_all_onesp (arg0))
6624 return omit_one_operand (type, arg0, arg1);
6628 /* Optimize -1 >> x for arithmetic right shifts. */
6629 if (integer_all_onesp (arg0) && ! TREE_UNSIGNED (type))
6630 return omit_one_operand (type, arg0, arg1);
6631 /* ... fall through ... */
6635 if (integer_zerop (arg1))
6636 return non_lvalue (convert (type, arg0));
6637 if (integer_zerop (arg0))
6638 return omit_one_operand (type, arg0, arg1);
6640 /* Since negative shift count is not well-defined,
6641 don't try to compute it in the compiler. */
6642 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
6644 /* Rewrite an LROTATE_EXPR by a constant into an
6645 RROTATE_EXPR by a new constant. */
6646 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
6650 TREE_SET_CODE (t, RROTATE_EXPR);
6651 code = RROTATE_EXPR;
6652 TREE_OPERAND (t, 1) = arg1
6655 convert (TREE_TYPE (arg1),
6656 build_int_2 (GET_MODE_BITSIZE (TYPE_MODE (type)), 0)),
6658 if (tree_int_cst_sgn (arg1) < 0)
6662 /* If we have a rotate of a bit operation with the rotate count and
6663 the second operand of the bit operation both constant,
6664 permute the two operations. */
6665 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
6666 && (TREE_CODE (arg0) == BIT_AND_EXPR
6667 || TREE_CODE (arg0) == BIT_IOR_EXPR
6668 || TREE_CODE (arg0) == BIT_XOR_EXPR)
6669 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
6670 return fold (build (TREE_CODE (arg0), type,
6671 fold (build (code, type,
6672 TREE_OPERAND (arg0, 0), arg1)),
6673 fold (build (code, type,
6674 TREE_OPERAND (arg0, 1), arg1))));
6676 /* Two consecutive rotates adding up to the width of the mode can
6678 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
6679 && TREE_CODE (arg0) == RROTATE_EXPR
6680 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
6681 && TREE_INT_CST_HIGH (arg1) == 0
6682 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
6683 && ((TREE_INT_CST_LOW (arg1)
6684 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
6685 == (unsigned int) GET_MODE_BITSIZE (TYPE_MODE (type))))
6686 return TREE_OPERAND (arg0, 0);
6691 if (operand_equal_p (arg0, arg1, 0))
6692 return omit_one_operand (type, arg0, arg1);
6693 if (INTEGRAL_TYPE_P (type)
6694 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), 1))
6695 return omit_one_operand (type, arg1, arg0);
6699 if (operand_equal_p (arg0, arg1, 0))
6700 return omit_one_operand (type, arg0, arg1);
6701 if (INTEGRAL_TYPE_P (type)
6702 && TYPE_MAX_VALUE (type)
6703 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), 1))
6704 return omit_one_operand (type, arg1, arg0);
6707 case TRUTH_NOT_EXPR:
6708 /* Note that the operand of this must be an int
6709 and its values must be 0 or 1.
6710 ("true" is a fixed value perhaps depending on the language,
6711 but we don't handle values other than 1 correctly yet.) */
6712 tem = invert_truthvalue (arg0);
6713 /* Avoid infinite recursion. */
6714 if (TREE_CODE (tem) == TRUTH_NOT_EXPR)
6716 tem = fold_single_bit_test (code, arg0, arg1, type);
6721 return convert (type, tem);
6723 case TRUTH_ANDIF_EXPR:
6724 /* Note that the operands of this must be ints
6725 and their values must be 0 or 1.
6726 ("true" is a fixed value perhaps depending on the language.) */
6727 /* If first arg is constant zero, return it. */
6728 if (integer_zerop (arg0))
6729 return convert (type, arg0);
6730 case TRUTH_AND_EXPR:
6731 /* If either arg is constant true, drop it. */
6732 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
6733 return non_lvalue (convert (type, arg1));
6734 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
6735 /* Preserve sequence points. */
6736 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
6737 return non_lvalue (convert (type, arg0));
6738 /* If second arg is constant zero, result is zero, but first arg
6739 must be evaluated. */
6740 if (integer_zerop (arg1))
6741 return omit_one_operand (type, arg1, arg0);
6742 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
6743 case will be handled here. */
6744 if (integer_zerop (arg0))
6745 return omit_one_operand (type, arg0, arg1);
6748 /* We only do these simplifications if we are optimizing. */
6752 /* Check for things like (A || B) && (A || C). We can convert this
6753 to A || (B && C). Note that either operator can be any of the four
6754 truth and/or operations and the transformation will still be
6755 valid. Also note that we only care about order for the
6756 ANDIF and ORIF operators. If B contains side effects, this
6757 might change the truth-value of A. */
6758 if (TREE_CODE (arg0) == TREE_CODE (arg1)
6759 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
6760 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
6761 || TREE_CODE (arg0) == TRUTH_AND_EXPR
6762 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
6763 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
6765 tree a00 = TREE_OPERAND (arg0, 0);
6766 tree a01 = TREE_OPERAND (arg0, 1);
6767 tree a10 = TREE_OPERAND (arg1, 0);
6768 tree a11 = TREE_OPERAND (arg1, 1);
6769 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
6770 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
6771 && (code == TRUTH_AND_EXPR
6772 || code == TRUTH_OR_EXPR));
6774 if (operand_equal_p (a00, a10, 0))
6775 return fold (build (TREE_CODE (arg0), type, a00,
6776 fold (build (code, type, a01, a11))));
6777 else if (commutative && operand_equal_p (a00, a11, 0))
6778 return fold (build (TREE_CODE (arg0), type, a00,
6779 fold (build (code, type, a01, a10))));
6780 else if (commutative && operand_equal_p (a01, a10, 0))
6781 return fold (build (TREE_CODE (arg0), type, a01,
6782 fold (build (code, type, a00, a11))));
6784 /* This case if tricky because we must either have commutative
6785 operators or else A10 must not have side-effects. */
6787 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
6788 && operand_equal_p (a01, a11, 0))
6789 return fold (build (TREE_CODE (arg0), type,
6790 fold (build (code, type, a00, a10)),
6794 /* See if we can build a range comparison. */
6795 if (0 != (tem = fold_range_test (t)))
6798 /* Check for the possibility of merging component references. If our
6799 lhs is another similar operation, try to merge its rhs with our
6800 rhs. Then try to merge our lhs and rhs. */
6801 if (TREE_CODE (arg0) == code
6802 && 0 != (tem = fold_truthop (code, type,
6803 TREE_OPERAND (arg0, 1), arg1)))
6804 return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));
6806 if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
6811 case TRUTH_ORIF_EXPR:
6812 /* Note that the operands of this must be ints
6813 and their values must be 0 or true.
6814 ("true" is a fixed value perhaps depending on the language.) */
6815 /* If first arg is constant true, return it. */
6816 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
6817 return convert (type, arg0);
6819 /* If either arg is constant zero, drop it. */
6820 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
6821 return non_lvalue (convert (type, arg1));
6822 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
6823 /* Preserve sequence points. */
6824 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
6825 return non_lvalue (convert (type, arg0));
6826 /* If second arg is constant true, result is true, but we must
6827 evaluate first arg. */
6828 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
6829 return omit_one_operand (type, arg1, arg0);
6830 /* Likewise for first arg, but note this only occurs here for
6832 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
6833 return omit_one_operand (type, arg0, arg1);
6836 case TRUTH_XOR_EXPR:
6837 /* If either arg is constant zero, drop it. */
6838 if (integer_zerop (arg0))
6839 return non_lvalue (convert (type, arg1));
6840 if (integer_zerop (arg1))
6841 return non_lvalue (convert (type, arg0));
6842 /* If either arg is constant true, this is a logical inversion. */
6843 if (integer_onep (arg0))
6844 return non_lvalue (convert (type, invert_truthvalue (arg1)));
6845 if (integer_onep (arg1))
6846 return non_lvalue (convert (type, invert_truthvalue (arg0)));
6855 /* If one arg is a real or integer constant, put it last. */
6856 if ((TREE_CODE (arg0) == INTEGER_CST
6857 && TREE_CODE (arg1) != INTEGER_CST)
6858 || (TREE_CODE (arg0) == REAL_CST
6859 && TREE_CODE (arg0) != REAL_CST))
6863 TREE_OPERAND (t, 0) = arg1;
6864 TREE_OPERAND (t, 1) = arg0;
6865 arg0 = TREE_OPERAND (t, 0);
6866 arg1 = TREE_OPERAND (t, 1);
6867 code = swap_tree_comparison (code);
6868 TREE_SET_CODE (t, code);
6871 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
6873 tree targ0 = strip_float_extensions (arg0);
6874 tree targ1 = strip_float_extensions (arg1);
6875 tree newtype = TREE_TYPE (targ0);
6877 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
6878 newtype = TREE_TYPE (targ1);
6880 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
6881 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
6882 return fold (build (code, type, convert (newtype, targ0),
6883 convert (newtype, targ1)));
6885 /* (-a) CMP (-b) -> b CMP a */
6886 if (TREE_CODE (arg0) == NEGATE_EXPR
6887 && TREE_CODE (arg1) == NEGATE_EXPR)
6888 return fold (build (code, type, TREE_OPERAND (arg1, 0),
6889 TREE_OPERAND (arg0, 0)));
6891 if (TREE_CODE (arg1) == REAL_CST)
6893 REAL_VALUE_TYPE cst;
6894 cst = TREE_REAL_CST (arg1);
6896 /* (-a) CMP CST -> a swap(CMP) (-CST) */
6897 if (TREE_CODE (arg0) == NEGATE_EXPR)
6899 fold (build (swap_tree_comparison (code), type,
6900 TREE_OPERAND (arg0, 0),
6901 build_real (TREE_TYPE (arg1),
6902 REAL_VALUE_NEGATE (cst))));
6904 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
6905 /* a CMP (-0) -> a CMP 0 */
6906 if (REAL_VALUE_MINUS_ZERO (cst))
6907 return fold (build (code, type, arg0,
6908 build_real (TREE_TYPE (arg1), dconst0)));
6910 /* x != NaN is always true, other ops are always false. */
6911 if (REAL_VALUE_ISNAN (cst)
6912 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
6914 t = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
6915 return omit_one_operand (type, convert (type, t), arg0);
6918 /* Fold comparisons against infinity. */
6919 if (REAL_VALUE_ISINF (cst))
6921 tem = fold_inf_compare (code, type, arg0, arg1);
6922 if (tem != NULL_TREE)
6927 /* If this is a comparison of a real constant with a PLUS_EXPR
6928 or a MINUS_EXPR of a real constant, we can convert it into a
6929 comparison with a revised real constant as long as no overflow
6930 occurs when unsafe_math_optimizations are enabled. */
6931 if (flag_unsafe_math_optimizations
6932 && TREE_CODE (arg1) == REAL_CST
6933 && (TREE_CODE (arg0) == PLUS_EXPR
6934 || TREE_CODE (arg0) == MINUS_EXPR)
6935 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
6936 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
6937 ? MINUS_EXPR : PLUS_EXPR,
6938 arg1, TREE_OPERAND (arg0, 1), 0))
6939 && ! TREE_CONSTANT_OVERFLOW (tem))
6940 return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));
6942 /* Likewise, we can simplify a comparison of a real constant with
6943 a MINUS_EXPR whose first operand is also a real constant, i.e.
6944 (c1 - x) < c2 becomes x > c1-c2. */
6945 if (flag_unsafe_math_optimizations
6946 && TREE_CODE (arg1) == REAL_CST
6947 && TREE_CODE (arg0) == MINUS_EXPR
6948 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
6949 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
6951 && ! TREE_CONSTANT_OVERFLOW (tem))
6952 return fold (build (swap_tree_comparison (code), type,
6953 TREE_OPERAND (arg0, 1), tem));
6955 /* Fold comparisons against built-in math functions. */
6956 if (TREE_CODE (arg1) == REAL_CST
6957 && flag_unsafe_math_optimizations
6958 && ! flag_errno_math)
6960 enum built_in_function fcode = builtin_mathfn_code (arg0);
6962 if (fcode != END_BUILTINS)
6964 tem = fold_mathfn_compare (fcode, code, type, arg0, arg1);
6965 if (tem != NULL_TREE)
6971 /* Convert foo++ == CONST into ++foo == CONST + INCR.
6972 First, see if one arg is constant; find the constant arg
6973 and the other one. */
6975 tree constop = 0, varop = NULL_TREE;
6976 int constopnum = -1;
6978 if (TREE_CONSTANT (arg1))
6979 constopnum = 1, constop = arg1, varop = arg0;
6980 if (TREE_CONSTANT (arg0))
6981 constopnum = 0, constop = arg0, varop = arg1;
6983 if (constop && TREE_CODE (varop) == POSTINCREMENT_EXPR)
6985 /* This optimization is invalid for ordered comparisons
6986 if CONST+INCR overflows or if foo+incr might overflow.
6987 This optimization is invalid for floating point due to rounding.
6988 For pointer types we assume overflow doesn't happen. */
6989 if (POINTER_TYPE_P (TREE_TYPE (varop))
6990 || (! FLOAT_TYPE_P (TREE_TYPE (varop))
6991 && (code == EQ_EXPR || code == NE_EXPR)))
6994 = fold (build (PLUS_EXPR, TREE_TYPE (varop),
6995 constop, TREE_OPERAND (varop, 1)));
6997 /* Do not overwrite the current varop to be a preincrement,
6998 create a new node so that we won't confuse our caller who
6999 might create trees and throw them away, reusing the
7000 arguments that they passed to build. This shows up in
7001 the THEN or ELSE parts of ?: being postincrements. */
7002 varop = build (PREINCREMENT_EXPR, TREE_TYPE (varop),
7003 TREE_OPERAND (varop, 0),
7004 TREE_OPERAND (varop, 1));
7006 /* If VAROP is a reference to a bitfield, we must mask
7007 the constant by the width of the field. */
7008 if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
7009 && DECL_BIT_FIELD(TREE_OPERAND
7010 (TREE_OPERAND (varop, 0), 1)))
7013 = TREE_INT_CST_LOW (DECL_SIZE
7015 (TREE_OPERAND (varop, 0), 1)));
7016 tree mask, unsigned_type;
7017 unsigned int precision;
7018 tree folded_compare;
7020 /* First check whether the comparison would come out
7021 always the same. If we don't do that we would
7022 change the meaning with the masking. */
7023 if (constopnum == 0)
7024 folded_compare = fold (build (code, type, constop,
7025 TREE_OPERAND (varop, 0)));
7027 folded_compare = fold (build (code, type,
7028 TREE_OPERAND (varop, 0),
7030 if (integer_zerop (folded_compare)
7031 || integer_onep (folded_compare))
7032 return omit_one_operand (type, folded_compare, varop);
7034 unsigned_type = (*lang_hooks.types.type_for_size)(size, 1);
7035 precision = TYPE_PRECISION (unsigned_type);
7036 mask = build_int_2 (~0, ~0);
7037 TREE_TYPE (mask) = unsigned_type;
7038 force_fit_type (mask, 0);
7039 mask = const_binop (RSHIFT_EXPR, mask,
7040 size_int (precision - size), 0);
7041 newconst = fold (build (BIT_AND_EXPR,
7042 TREE_TYPE (varop), newconst,
7043 convert (TREE_TYPE (varop),
7047 t = build (code, type,
7048 (constopnum == 0) ? newconst : varop,
7049 (constopnum == 1) ? newconst : varop);
7053 else if (constop && TREE_CODE (varop) == POSTDECREMENT_EXPR)
7055 if (POINTER_TYPE_P (TREE_TYPE (varop))
7056 || (! FLOAT_TYPE_P (TREE_TYPE (varop))
7057 && (code == EQ_EXPR || code == NE_EXPR)))
7060 = fold (build (MINUS_EXPR, TREE_TYPE (varop),
7061 constop, TREE_OPERAND (varop, 1)));
7063 /* Do not overwrite the current varop to be a predecrement,
7064 create a new node so that we won't confuse our caller who
7065 might create trees and throw them away, reusing the
7066 arguments that they passed to build. This shows up in
7067 the THEN or ELSE parts of ?: being postdecrements. */
7068 varop = build (PREDECREMENT_EXPR, TREE_TYPE (varop),
7069 TREE_OPERAND (varop, 0),
7070 TREE_OPERAND (varop, 1));
7072 if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
7073 && DECL_BIT_FIELD(TREE_OPERAND
7074 (TREE_OPERAND (varop, 0), 1)))
7077 = TREE_INT_CST_LOW (DECL_SIZE
7079 (TREE_OPERAND (varop, 0), 1)));
7080 tree mask, unsigned_type;
7081 unsigned int precision;
7082 tree folded_compare;
7084 if (constopnum == 0)
7085 folded_compare = fold (build (code, type, constop,
7086 TREE_OPERAND (varop, 0)));
7088 folded_compare = fold (build (code, type,
7089 TREE_OPERAND (varop, 0),
7091 if (integer_zerop (folded_compare)
7092 || integer_onep (folded_compare))
7093 return omit_one_operand (type, folded_compare, varop);
7095 unsigned_type = (*lang_hooks.types.type_for_size)(size, 1);
7096 precision = TYPE_PRECISION (unsigned_type);
7097 mask = build_int_2 (~0, ~0);
7098 TREE_TYPE (mask) = TREE_TYPE (varop);
7099 force_fit_type (mask, 0);
7100 mask = const_binop (RSHIFT_EXPR, mask,
7101 size_int (precision - size), 0);
7102 newconst = fold (build (BIT_AND_EXPR,
7103 TREE_TYPE (varop), newconst,
7104 convert (TREE_TYPE (varop),
7108 t = build (code, type,
7109 (constopnum == 0) ? newconst : varop,
7110 (constopnum == 1) ? newconst : varop);
7116 /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
7117 This transformation affects the cases which are handled in later
7118 optimizations involving comparisons with non-negative constants. */
7119 if (TREE_CODE (arg1) == INTEGER_CST
7120 && TREE_CODE (arg0) != INTEGER_CST
7121 && tree_int_cst_sgn (arg1) > 0)
7127 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
7128 t = build (code, type, TREE_OPERAND (t, 0), arg1);
7133 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
7134 t = build (code, type, TREE_OPERAND (t, 0), arg1);
7142 /* Comparisons with the highest or lowest possible integer of
7143 the specified size will have known values. */
7145 int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg1)));
7147 if (TREE_CODE (arg1) == INTEGER_CST
7148 && ! TREE_CONSTANT_OVERFLOW (arg1)
7149 && width <= HOST_BITS_PER_WIDE_INT
7150 && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
7151 || POINTER_TYPE_P (TREE_TYPE (arg1))))
7153 unsigned HOST_WIDE_INT signed_max;
7154 unsigned HOST_WIDE_INT max, min;
7156 signed_max = ((unsigned HOST_WIDE_INT) 1 << (width - 1)) - 1;
7158 if (TREE_UNSIGNED (TREE_TYPE (arg1)))
7160 max = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
7166 min = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
7169 if (TREE_INT_CST_HIGH (arg1) == 0
7170 && TREE_INT_CST_LOW (arg1) == max)
7174 return omit_one_operand (type,
7175 convert (type, integer_zero_node),
7181 TREE_SET_CODE (t, EQ_EXPR);
7184 return omit_one_operand (type,
7185 convert (type, integer_one_node),
7191 TREE_SET_CODE (t, NE_EXPR);
7194 /* The GE_EXPR and LT_EXPR cases above are not normally
7195 reached because of previous transformations. */
7200 else if (TREE_INT_CST_HIGH (arg1) == 0
7201 && TREE_INT_CST_LOW (arg1) == max - 1)
7206 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
7207 t = build (code, type, TREE_OPERAND (t, 0), arg1);
7211 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
7212 t = build (code, type, TREE_OPERAND (t, 0), arg1);
7217 else if (TREE_INT_CST_HIGH (arg1) == (min ? -1 : 0)
7218 && TREE_INT_CST_LOW (arg1) == min)
7222 return omit_one_operand (type,
7223 convert (type, integer_zero_node),
7229 TREE_SET_CODE (t, EQ_EXPR);
7233 return omit_one_operand (type,
7234 convert (type, integer_one_node),
7240 TREE_SET_CODE (t, NE_EXPR);
7246 else if (TREE_INT_CST_HIGH (arg1) == (min ? -1 : 0)
7247 && TREE_INT_CST_LOW (arg1) == min + 1)
7252 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
7253 t = build (code, type, TREE_OPERAND (t, 0), arg1);
7257 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
7258 t = build (code, type, TREE_OPERAND (t, 0), arg1);
7264 else if (TREE_INT_CST_HIGH (arg1) == 0
7265 && TREE_INT_CST_LOW (arg1) == signed_max
7266 && TREE_UNSIGNED (TREE_TYPE (arg1))
7267 /* signed_type does not work on pointer types. */
7268 && INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
7270 /* The following case also applies to X < signed_max+1
7271 and X >= signed_max+1 because previous transformations. */
7272 if (code == LE_EXPR || code == GT_EXPR)
7275 st0 = (*lang_hooks.types.signed_type) (TREE_TYPE (arg0));
7276 st1 = (*lang_hooks.types.signed_type) (TREE_TYPE (arg1));
7278 (build (code == LE_EXPR ? GE_EXPR: LT_EXPR,
7279 type, convert (st0, arg0),
7280 convert (st1, integer_zero_node)));
7286 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
7287 a MINUS_EXPR of a constant, we can convert it into a comparison with
7288 a revised constant as long as no overflow occurs. */
7289 if ((code == EQ_EXPR || code == NE_EXPR)
7290 && TREE_CODE (arg1) == INTEGER_CST
7291 && (TREE_CODE (arg0) == PLUS_EXPR
7292 || TREE_CODE (arg0) == MINUS_EXPR)
7293 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7294 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
7295 ? MINUS_EXPR : PLUS_EXPR,
7296 arg1, TREE_OPERAND (arg0, 1), 0))
7297 && ! TREE_CONSTANT_OVERFLOW (tem))
7298 return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));
7300 /* Similarly for a NEGATE_EXPR. */
7301 else if ((code == EQ_EXPR || code == NE_EXPR)
7302 && TREE_CODE (arg0) == NEGATE_EXPR
7303 && TREE_CODE (arg1) == INTEGER_CST
7304 && 0 != (tem = negate_expr (arg1))
7305 && TREE_CODE (tem) == INTEGER_CST
7306 && ! TREE_CONSTANT_OVERFLOW (tem))
7307 return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));
7309 /* If we have X - Y == 0, we can convert that to X == Y and similarly
7310 for !=. Don't do this for ordered comparisons due to overflow. */
7311 else if ((code == NE_EXPR || code == EQ_EXPR)
7312 && integer_zerop (arg1) && TREE_CODE (arg0) == MINUS_EXPR)
7313 return fold (build (code, type,
7314 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1)));
7316 /* If we are widening one operand of an integer comparison,
7317 see if the other operand is similarly being widened. Perhaps we
7318 can do the comparison in the narrower type. */
7319 else if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
7320 && TREE_CODE (arg0) == NOP_EXPR
7321 && (tem = get_unwidened (arg0, NULL_TREE)) != arg0
7322 && (t1 = get_unwidened (arg1, TREE_TYPE (tem))) != 0
7323 && (TREE_TYPE (t1) == TREE_TYPE (tem)
7324 || (TREE_CODE (t1) == INTEGER_CST
7325 && int_fits_type_p (t1, TREE_TYPE (tem)))))
7326 return fold (build (code, type, tem, convert (TREE_TYPE (tem), t1)));
7328 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
7329 constant, we can simplify it. */
7330 else if (TREE_CODE (arg1) == INTEGER_CST
7331 && (TREE_CODE (arg0) == MIN_EXPR
7332 || TREE_CODE (arg0) == MAX_EXPR)
7333 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
7334 return optimize_minmax_comparison (t);
7336 /* If we are comparing an ABS_EXPR with a constant, we can
7337 convert all the cases into explicit comparisons, but they may
7338 well not be faster than doing the ABS and one comparison.
7339 But ABS (X) <= C is a range comparison, which becomes a subtraction
7340 and a comparison, and is probably faster. */
7341 else if (code == LE_EXPR && TREE_CODE (arg1) == INTEGER_CST
7342 && TREE_CODE (arg0) == ABS_EXPR
7343 && ! TREE_SIDE_EFFECTS (arg0)
7344 && (0 != (tem = negate_expr (arg1)))
7345 && TREE_CODE (tem) == INTEGER_CST
7346 && ! TREE_CONSTANT_OVERFLOW (tem))
7347 return fold (build (TRUTH_ANDIF_EXPR, type,
7348 build (GE_EXPR, type, TREE_OPERAND (arg0, 0), tem),
7349 build (LE_EXPR, type,
7350 TREE_OPERAND (arg0, 0), arg1)));
7352 /* If this is an EQ or NE comparison with zero and ARG0 is
7353 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
7354 two operations, but the latter can be done in one less insn
7355 on machines that have only two-operand insns or on which a
7356 constant cannot be the first operand. */
7357 if (integer_zerop (arg1) && (code == EQ_EXPR || code == NE_EXPR)
7358 && TREE_CODE (arg0) == BIT_AND_EXPR)
7360 if (TREE_CODE (TREE_OPERAND (arg0, 0)) == LSHIFT_EXPR
7361 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 0), 0)))
7363 fold (build (code, type,
7364 build (BIT_AND_EXPR, TREE_TYPE (arg0),
7366 TREE_TYPE (TREE_OPERAND (arg0, 0)),
7367 TREE_OPERAND (arg0, 1),
7368 TREE_OPERAND (TREE_OPERAND (arg0, 0), 1)),
7369 convert (TREE_TYPE (arg0),
7372 else if (TREE_CODE (TREE_OPERAND (arg0, 1)) == LSHIFT_EXPR
7373 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 1), 0)))
7375 fold (build (code, type,
7376 build (BIT_AND_EXPR, TREE_TYPE (arg0),
7378 TREE_TYPE (TREE_OPERAND (arg0, 1)),
7379 TREE_OPERAND (arg0, 0),
7380 TREE_OPERAND (TREE_OPERAND (arg0, 1), 1)),
7381 convert (TREE_TYPE (arg0),
7386 /* If this is an NE or EQ comparison of zero against the result of a
7387 signed MOD operation whose second operand is a power of 2, make
7388 the MOD operation unsigned since it is simpler and equivalent. */
7389 if ((code == NE_EXPR || code == EQ_EXPR)
7390 && integer_zerop (arg1)
7391 && ! TREE_UNSIGNED (TREE_TYPE (arg0))
7392 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
7393 || TREE_CODE (arg0) == CEIL_MOD_EXPR
7394 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
7395 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
7396 && integer_pow2p (TREE_OPERAND (arg0, 1)))
7398 tree newtype = (*lang_hooks.types.unsigned_type) (TREE_TYPE (arg0));
7399 tree newmod = build (TREE_CODE (arg0), newtype,
7400 convert (newtype, TREE_OPERAND (arg0, 0)),
7401 convert (newtype, TREE_OPERAND (arg0, 1)));
7403 return build (code, type, newmod, convert (newtype, arg1));
7406 /* If this is an NE comparison of zero with an AND of one, remove the
7407 comparison since the AND will give the correct value. */
7408 if (code == NE_EXPR && integer_zerop (arg1)
7409 && TREE_CODE (arg0) == BIT_AND_EXPR
7410 && integer_onep (TREE_OPERAND (arg0, 1)))
7411 return convert (type, arg0);
7413 /* If we have (A & C) == C where C is a power of 2, convert this into
7414 (A & C) != 0. Similarly for NE_EXPR. */
7415 if ((code == EQ_EXPR || code == NE_EXPR)
7416 && TREE_CODE (arg0) == BIT_AND_EXPR
7417 && integer_pow2p (TREE_OPERAND (arg0, 1))
7418 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
7419 return fold (build (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
7420 arg0, integer_zero_node));
7422 /* If we have (A & C) != 0 or (A & C) == 0 and C is a power of
7423 2, then fold the expression into shifts and logical operations. */
7424 tem = fold_single_bit_test (code, arg0, arg1, type);
7428 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
7429 Similarly for NE_EXPR. */
7430 if ((code == EQ_EXPR || code == NE_EXPR)
7431 && TREE_CODE (arg0) == BIT_AND_EXPR
7432 && TREE_CODE (arg1) == INTEGER_CST
7433 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
7436 = fold (build (BIT_AND_EXPR, TREE_TYPE (arg0),
7437 arg1, build1 (BIT_NOT_EXPR,
7438 TREE_TYPE (TREE_OPERAND (arg0, 1)),
7439 TREE_OPERAND (arg0, 1))));
7440 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
7441 if (integer_nonzerop (dandnotc))
7442 return omit_one_operand (type, rslt, arg0);
7445 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
7446 Similarly for NE_EXPR. */
7447 if ((code == EQ_EXPR || code == NE_EXPR)
7448 && TREE_CODE (arg0) == BIT_IOR_EXPR
7449 && TREE_CODE (arg1) == INTEGER_CST
7450 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
7453 = fold (build (BIT_AND_EXPR, TREE_TYPE (arg0),
7454 TREE_OPERAND (arg0, 1),
7455 build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1)));
7456 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
7457 if (integer_nonzerop (candnotd))
7458 return omit_one_operand (type, rslt, arg0);
7461 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
7462 and similarly for >= into !=. */
7463 if ((code == LT_EXPR || code == GE_EXPR)
7464 && TREE_UNSIGNED (TREE_TYPE (arg0))
7465 && TREE_CODE (arg1) == LSHIFT_EXPR
7466 && integer_onep (TREE_OPERAND (arg1, 0)))
7467 return build (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
7468 build (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
7469 TREE_OPERAND (arg1, 1)),
7470 convert (TREE_TYPE (arg0), integer_zero_node));
7472 else if ((code == LT_EXPR || code == GE_EXPR)
7473 && TREE_UNSIGNED (TREE_TYPE (arg0))
7474 && (TREE_CODE (arg1) == NOP_EXPR
7475 || TREE_CODE (arg1) == CONVERT_EXPR)
7476 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
7477 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
7479 build (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
7480 convert (TREE_TYPE (arg0),
7481 build (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
7482 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1))),
7483 convert (TREE_TYPE (arg0), integer_zero_node));
7485 /* Simplify comparison of something with itself. (For IEEE
7486 floating-point, we can only do some of these simplifications.) */
7487 if (operand_equal_p (arg0, arg1, 0))
7494 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
7495 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
7496 return constant_boolean_node (1, type);
7500 TREE_SET_CODE (t, code);
7504 /* For NE, we can only do this simplification if integer
7505 or we don't honor IEEE floating point NaNs. */
7506 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
7507 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
7509 /* ... fall through ... */
7512 return constant_boolean_node (0, type);
7518 /* If we are comparing an expression that just has comparisons
7519 of two integer values, arithmetic expressions of those comparisons,
7520 and constants, we can simplify it. There are only three cases
7521 to check: the two values can either be equal, the first can be
7522 greater, or the second can be greater. Fold the expression for
7523 those three values. Since each value must be 0 or 1, we have
7524 eight possibilities, each of which corresponds to the constant 0
7525 or 1 or one of the six possible comparisons.
7527 This handles common cases like (a > b) == 0 but also handles
7528 expressions like ((x > y) - (y > x)) > 0, which supposedly
7529 occur in macroized code. */
7531 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
7533 tree cval1 = 0, cval2 = 0;
7536 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
7537 /* Don't handle degenerate cases here; they should already
7538 have been handled anyway. */
7539 && cval1 != 0 && cval2 != 0
7540 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
7541 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
7542 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
7543 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
7544 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
7545 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
7546 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
7548 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
7549 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
7551 /* We can't just pass T to eval_subst in case cval1 or cval2
7552 was the same as ARG1. */
7555 = fold (build (code, type,
7556 eval_subst (arg0, cval1, maxval, cval2, minval),
7559 = fold (build (code, type,
7560 eval_subst (arg0, cval1, maxval, cval2, maxval),
7563 = fold (build (code, type,
7564 eval_subst (arg0, cval1, minval, cval2, maxval),
7567 /* All three of these results should be 0 or 1. Confirm they
7568 are. Then use those values to select the proper code
7571 if ((integer_zerop (high_result)
7572 || integer_onep (high_result))
7573 && (integer_zerop (equal_result)
7574 || integer_onep (equal_result))
7575 && (integer_zerop (low_result)
7576 || integer_onep (low_result)))
7578 /* Make a 3-bit mask with the high-order bit being the
7579 value for `>', the next for '=', and the low for '<'. */
7580 switch ((integer_onep (high_result) * 4)
7581 + (integer_onep (equal_result) * 2)
7582 + integer_onep (low_result))
7586 return omit_one_operand (type, integer_zero_node, arg0);
7607 return omit_one_operand (type, integer_one_node, arg0);
7610 t = build (code, type, cval1, cval2);
7612 return save_expr (t);
7619 /* If this is a comparison of a field, we may be able to simplify it. */
7620 if (((TREE_CODE (arg0) == COMPONENT_REF
7621 && (*lang_hooks.can_use_bit_fields_p) ())
7622 || TREE_CODE (arg0) == BIT_FIELD_REF)
7623 && (code == EQ_EXPR || code == NE_EXPR)
7624 /* Handle the constant case even without -O
7625 to make sure the warnings are given. */
7626 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
7628 t1 = optimize_bit_field_compare (code, type, arg0, arg1);
7632 /* If this is a comparison of complex values and either or both sides
7633 are a COMPLEX_EXPR or COMPLEX_CST, it is best to split up the
7634 comparisons and join them with a TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR.
7635 This may prevent needless evaluations. */
7636 if ((code == EQ_EXPR || code == NE_EXPR)
7637 && TREE_CODE (TREE_TYPE (arg0)) == COMPLEX_TYPE
7638 && (TREE_CODE (arg0) == COMPLEX_EXPR
7639 || TREE_CODE (arg1) == COMPLEX_EXPR
7640 || TREE_CODE (arg0) == COMPLEX_CST
7641 || TREE_CODE (arg1) == COMPLEX_CST))
7643 tree subtype = TREE_TYPE (TREE_TYPE (arg0));
7644 tree real0, imag0, real1, imag1;
7646 arg0 = save_expr (arg0);
7647 arg1 = save_expr (arg1);
7648 real0 = fold (build1 (REALPART_EXPR, subtype, arg0));
7649 imag0 = fold (build1 (IMAGPART_EXPR, subtype, arg0));
7650 real1 = fold (build1 (REALPART_EXPR, subtype, arg1));
7651 imag1 = fold (build1 (IMAGPART_EXPR, subtype, arg1));
7653 return fold (build ((code == EQ_EXPR ? TRUTH_ANDIF_EXPR
7656 fold (build (code, type, real0, real1)),
7657 fold (build (code, type, imag0, imag1))));
7660 /* Optimize comparisons of strlen vs zero to a compare of the
7661 first character of the string vs zero. To wit,
7662 strlen(ptr) == 0 => *ptr == 0
7663 strlen(ptr) != 0 => *ptr != 0
7664 Other cases should reduce to one of these two (or a constant)
7665 due to the return value of strlen being unsigned. */
7666 if ((code == EQ_EXPR || code == NE_EXPR)
7667 && integer_zerop (arg1)
7668 && TREE_CODE (arg0) == CALL_EXPR)
7670 tree fndecl = get_callee_fndecl (arg0);
7674 && DECL_BUILT_IN (fndecl)
7675 && DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_MD
7676 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
7677 && (arglist = TREE_OPERAND (arg0, 1))
7678 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE
7679 && ! TREE_CHAIN (arglist))
7680 return fold (build (code, type,
7681 build1 (INDIRECT_REF, char_type_node,
7682 TREE_VALUE(arglist)),
7683 integer_zero_node));
7686 /* From here on, the only cases we handle are when the result is
7687 known to be a constant.
7689 To compute GT, swap the arguments and do LT.
7690 To compute GE, do LT and invert the result.
7691 To compute LE, swap the arguments, do LT and invert the result.
7692 To compute NE, do EQ and invert the result.
7694 Therefore, the code below must handle only EQ and LT. */
7696 if (code == LE_EXPR || code == GT_EXPR)
7698 tem = arg0, arg0 = arg1, arg1 = tem;
7699 code = swap_tree_comparison (code);
7702 /* Note that it is safe to invert for real values here because we
7703 will check below in the one case that it matters. */
7707 if (code == NE_EXPR || code == GE_EXPR)
7710 code = invert_tree_comparison (code);
7713 /* Compute a result for LT or EQ if args permit;
7714 otherwise return T. */
7715 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
7717 if (code == EQ_EXPR)
7718 t1 = build_int_2 (tree_int_cst_equal (arg0, arg1), 0);
7720 t1 = build_int_2 ((TREE_UNSIGNED (TREE_TYPE (arg0))
7721 ? INT_CST_LT_UNSIGNED (arg0, arg1)
7722 : INT_CST_LT (arg0, arg1)),
7726 #if 0 /* This is no longer useful, but breaks some real code. */
7727 /* Assume a nonexplicit constant cannot equal an explicit one,
7728 since such code would be undefined anyway.
7729 Exception: on sysvr4, using #pragma weak,
7730 a label can come out as 0. */
7731 else if (TREE_CODE (arg1) == INTEGER_CST
7732 && !integer_zerop (arg1)
7733 && TREE_CONSTANT (arg0)
7734 && TREE_CODE (arg0) == ADDR_EXPR
7736 t1 = build_int_2 (0, 0);
7738 /* Two real constants can be compared explicitly. */
7739 else if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
7741 /* If either operand is a NaN, the result is false with two
7742 exceptions: First, an NE_EXPR is true on NaNs, but that case
7743 is already handled correctly since we will be inverting the
7744 result for NE_EXPR. Second, if we had inverted a LE_EXPR
7745 or a GE_EXPR into a LT_EXPR, we must return true so that it
7746 will be inverted into false. */
7748 if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
7749 || REAL_VALUE_ISNAN (TREE_REAL_CST (arg1)))
7750 t1 = build_int_2 (invert && code == LT_EXPR, 0);
7752 else if (code == EQ_EXPR)
7753 t1 = build_int_2 (REAL_VALUES_EQUAL (TREE_REAL_CST (arg0),
7754 TREE_REAL_CST (arg1)),
7757 t1 = build_int_2 (REAL_VALUES_LESS (TREE_REAL_CST (arg0),
7758 TREE_REAL_CST (arg1)),
7762 if (t1 == NULL_TREE)
7766 TREE_INT_CST_LOW (t1) ^= 1;
7768 TREE_TYPE (t1) = type;
7769 if (TREE_CODE (type) == BOOLEAN_TYPE)
7770 return (*lang_hooks.truthvalue_conversion) (t1);
7774 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
7775 so all simple results must be passed through pedantic_non_lvalue. */
7776 if (TREE_CODE (arg0) == INTEGER_CST)
7777 return pedantic_non_lvalue
7778 (TREE_OPERAND (t, (integer_zerop (arg0) ? 2 : 1)));
7779 else if (operand_equal_p (arg1, TREE_OPERAND (expr, 2), 0))
7780 return pedantic_omit_one_operand (type, arg1, arg0);
7782 /* If the second operand is zero, invert the comparison and swap
7783 the second and third operands. Likewise if the second operand
7784 is constant and the third is not or if the third operand is
7785 equivalent to the first operand of the comparison. */
7787 if (integer_zerop (arg1)
7788 || (TREE_CONSTANT (arg1) && ! TREE_CONSTANT (TREE_OPERAND (t, 2)))
7789 || (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
7790 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
7791 TREE_OPERAND (t, 2),
7792 TREE_OPERAND (arg0, 1))))
7794 /* See if this can be inverted. If it can't, possibly because
7795 it was a floating-point inequality comparison, don't do
7797 tem = invert_truthvalue (arg0);
7799 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
7801 t = build (code, type, tem,
7802 TREE_OPERAND (t, 2), TREE_OPERAND (t, 1));
7804 /* arg1 should be the first argument of the new T. */
7805 arg1 = TREE_OPERAND (t, 1);
7810 /* If we have A op B ? A : C, we may be able to convert this to a
7811 simpler expression, depending on the operation and the values
7812 of B and C. Signed zeros prevent all of these transformations,
7813 for reasons given above each one. */
7815 if (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
7816 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
7817 arg1, TREE_OPERAND (arg0, 1))
7818 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
7820 tree arg2 = TREE_OPERAND (t, 2);
7821 enum tree_code comp_code = TREE_CODE (arg0);
7825 /* If we have A op 0 ? A : -A, consider applying the following
7828 A == 0? A : -A same as -A
7829 A != 0? A : -A same as A
7830 A >= 0? A : -A same as abs (A)
7831 A > 0? A : -A same as abs (A)
7832 A <= 0? A : -A same as -abs (A)
7833 A < 0? A : -A same as -abs (A)
7835 None of these transformations work for modes with signed
7836 zeros. If A is +/-0, the first two transformations will
7837 change the sign of the result (from +0 to -0, or vice
7838 versa). The last four will fix the sign of the result,
7839 even though the original expressions could be positive or
7840 negative, depending on the sign of A.
7842 Note that all these transformations are correct if A is
7843 NaN, since the two alternatives (A and -A) are also NaNs. */
7844 if ((FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 1)))
7845 ? real_zerop (TREE_OPERAND (arg0, 1))
7846 : integer_zerop (TREE_OPERAND (arg0, 1)))
7847 && TREE_CODE (arg2) == NEGATE_EXPR
7848 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
7856 (convert (TREE_TYPE (TREE_OPERAND (t, 1)),
7859 return pedantic_non_lvalue (convert (type, arg1));
7862 if (TREE_UNSIGNED (TREE_TYPE (arg1)))
7863 arg1 = convert ((*lang_hooks.types.signed_type)
7864 (TREE_TYPE (arg1)), arg1);
7865 return pedantic_non_lvalue
7866 (convert (type, fold (build1 (ABS_EXPR,
7867 TREE_TYPE (arg1), arg1))));
7870 if (TREE_UNSIGNED (TREE_TYPE (arg1)))
7871 arg1 = convert ((lang_hooks.types.signed_type)
7872 (TREE_TYPE (arg1)), arg1);
7873 return pedantic_non_lvalue
7874 (negate_expr (convert (type,
7875 fold (build1 (ABS_EXPR,
7882 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
7883 A == 0 ? A : 0 is always 0 unless A is -0. Note that
7884 both transformations are correct when A is NaN: A != 0
7885 is then true, and A == 0 is false. */
7887 if (integer_zerop (TREE_OPERAND (arg0, 1)) && integer_zerop (arg2))
7889 if (comp_code == NE_EXPR)
7890 return pedantic_non_lvalue (convert (type, arg1));
7891 else if (comp_code == EQ_EXPR)
7892 return pedantic_non_lvalue (convert (type, integer_zero_node));
7895 /* Try some transformations of A op B ? A : B.
7897 A == B? A : B same as B
7898 A != B? A : B same as A
7899 A >= B? A : B same as max (A, B)
7900 A > B? A : B same as max (B, A)
7901 A <= B? A : B same as min (A, B)
7902 A < B? A : B same as min (B, A)
7904 As above, these transformations don't work in the presence
7905 of signed zeros. For example, if A and B are zeros of
7906 opposite sign, the first two transformations will change
7907 the sign of the result. In the last four, the original
7908 expressions give different results for (A=+0, B=-0) and
7909 (A=-0, B=+0), but the transformed expressions do not.
7911 The first two transformations are correct if either A or B
7912 is a NaN. In the first transformation, the condition will
7913 be false, and B will indeed be chosen. In the case of the
7914 second transformation, the condition A != B will be true,
7915 and A will be chosen.
7917 The conversions to max() and min() are not correct if B is
7918 a number and A is not. The conditions in the original
7919 expressions will be false, so all four give B. The min()
7920 and max() versions would give a NaN instead. */
7921 if (operand_equal_for_comparison_p (TREE_OPERAND (arg0, 1),
7922 arg2, TREE_OPERAND (arg0, 0)))
7924 tree comp_op0 = TREE_OPERAND (arg0, 0);
7925 tree comp_op1 = TREE_OPERAND (arg0, 1);
7926 tree comp_type = TREE_TYPE (comp_op0);
7928 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
7929 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
7939 return pedantic_non_lvalue (convert (type, arg2));
7941 return pedantic_non_lvalue (convert (type, arg1));
7944 /* In C++ a ?: expression can be an lvalue, so put the
7945 operand which will be used if they are equal first
7946 so that we can convert this back to the
7947 corresponding COND_EXPR. */
7948 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
7949 return pedantic_non_lvalue
7950 (convert (type, fold (build (MIN_EXPR, comp_type,
7951 (comp_code == LE_EXPR
7952 ? comp_op0 : comp_op1),
7953 (comp_code == LE_EXPR
7954 ? comp_op1 : comp_op0)))));
7958 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
7959 return pedantic_non_lvalue
7960 (convert (type, fold (build (MAX_EXPR, comp_type,
7961 (comp_code == GE_EXPR
7962 ? comp_op0 : comp_op1),
7963 (comp_code == GE_EXPR
7964 ? comp_op1 : comp_op0)))));
7971 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
7972 we might still be able to simplify this. For example,
7973 if C1 is one less or one more than C2, this might have started
7974 out as a MIN or MAX and been transformed by this function.
7975 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
7977 if (INTEGRAL_TYPE_P (type)
7978 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7979 && TREE_CODE (arg2) == INTEGER_CST)
7983 /* We can replace A with C1 in this case. */
7984 arg1 = convert (type, TREE_OPERAND (arg0, 1));
7985 t = build (code, type, TREE_OPERAND (t, 0), arg1,
7986 TREE_OPERAND (t, 2));
7990 /* If C1 is C2 + 1, this is min(A, C2). */
7991 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type), 1)
7992 && operand_equal_p (TREE_OPERAND (arg0, 1),
7993 const_binop (PLUS_EXPR, arg2,
7994 integer_one_node, 0), 1))
7995 return pedantic_non_lvalue
7996 (fold (build (MIN_EXPR, type, arg1, arg2)));
8000 /* If C1 is C2 - 1, this is min(A, C2). */
8001 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type), 1)
8002 && operand_equal_p (TREE_OPERAND (arg0, 1),
8003 const_binop (MINUS_EXPR, arg2,
8004 integer_one_node, 0), 1))
8005 return pedantic_non_lvalue
8006 (fold (build (MIN_EXPR, type, arg1, arg2)));
8010 /* If C1 is C2 - 1, this is max(A, C2). */
8011 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type), 1)
8012 && operand_equal_p (TREE_OPERAND (arg0, 1),
8013 const_binop (MINUS_EXPR, arg2,
8014 integer_one_node, 0), 1))
8015 return pedantic_non_lvalue
8016 (fold (build (MAX_EXPR, type, arg1, arg2)));
8020 /* If C1 is C2 + 1, this is max(A, C2). */
8021 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type), 1)
8022 && operand_equal_p (TREE_OPERAND (arg0, 1),
8023 const_binop (PLUS_EXPR, arg2,
8024 integer_one_node, 0), 1))
8025 return pedantic_non_lvalue
8026 (fold (build (MAX_EXPR, type, arg1, arg2)));
8035 /* If the second operand is simpler than the third, swap them
8036 since that produces better jump optimization results. */
8037 if ((TREE_CONSTANT (arg1) || DECL_P (arg1)
8038 || TREE_CODE (arg1) == SAVE_EXPR)
8039 && ! (TREE_CONSTANT (TREE_OPERAND (t, 2))
8040 || DECL_P (TREE_OPERAND (t, 2))
8041 || TREE_CODE (TREE_OPERAND (t, 2)) == SAVE_EXPR))
8043 /* See if this can be inverted. If it can't, possibly because
8044 it was a floating-point inequality comparison, don't do
8046 tem = invert_truthvalue (arg0);
8048 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
8050 t = build (code, type, tem,
8051 TREE_OPERAND (t, 2), TREE_OPERAND (t, 1));
8053 /* arg1 should be the first argument of the new T. */
8054 arg1 = TREE_OPERAND (t, 1);
8059 /* Convert A ? 1 : 0 to simply A. */
8060 if (integer_onep (TREE_OPERAND (t, 1))
8061 && integer_zerop (TREE_OPERAND (t, 2))
8062 /* If we try to convert TREE_OPERAND (t, 0) to our type, the
8063 call to fold will try to move the conversion inside
8064 a COND, which will recurse. In that case, the COND_EXPR
8065 is probably the best choice, so leave it alone. */
8066 && type == TREE_TYPE (arg0))
8067 return pedantic_non_lvalue (arg0);
8069 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
8070 over COND_EXPR in cases such as floating point comparisons. */
8071 if (integer_zerop (TREE_OPERAND (t, 1))
8072 && integer_onep (TREE_OPERAND (t, 2))
8073 && truth_value_p (TREE_CODE (arg0)))
8074 return pedantic_non_lvalue (convert (type,
8075 invert_truthvalue (arg0)));
8077 /* Look for expressions of the form A & 2 ? 2 : 0. The result of this
8078 operation is simply A & 2. */
8080 if (integer_zerop (TREE_OPERAND (t, 2))
8081 && TREE_CODE (arg0) == NE_EXPR
8082 && integer_zerop (TREE_OPERAND (arg0, 1))
8083 && integer_pow2p (arg1)
8084 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
8085 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
8087 return pedantic_non_lvalue (convert (type, TREE_OPERAND (arg0, 0)));
8089 /* Convert A ? B : 0 into A && B if A and B are truth values. */
8090 if (integer_zerop (TREE_OPERAND (t, 2))
8091 && truth_value_p (TREE_CODE (arg0))
8092 && truth_value_p (TREE_CODE (arg1)))
8093 return pedantic_non_lvalue (fold (build (TRUTH_ANDIF_EXPR, type,
8096 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
8097 if (integer_onep (TREE_OPERAND (t, 2))
8098 && truth_value_p (TREE_CODE (arg0))
8099 && truth_value_p (TREE_CODE (arg1)))
8101 /* Only perform transformation if ARG0 is easily inverted. */
8102 tem = invert_truthvalue (arg0);
8103 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
8104 return pedantic_non_lvalue (fold (build (TRUTH_ORIF_EXPR, type,
8111 /* When pedantic, a compound expression can be neither an lvalue
8112 nor an integer constant expression. */
8113 if (TREE_SIDE_EFFECTS (arg0) || pedantic)
8115 /* Don't let (0, 0) be null pointer constant. */
8116 if (integer_zerop (arg1))
8117 return build1 (NOP_EXPR, type, arg1);
8118 return convert (type, arg1);
8122 return build_complex (type, arg0, arg1);
8126 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8128 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
8129 return omit_one_operand (type, TREE_OPERAND (arg0, 0),
8130 TREE_OPERAND (arg0, 1));
8131 else if (TREE_CODE (arg0) == COMPLEX_CST)
8132 return TREE_REALPART (arg0);
8133 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8134 return fold (build (TREE_CODE (arg0), type,
8135 fold (build1 (REALPART_EXPR, type,
8136 TREE_OPERAND (arg0, 0))),
8137 fold (build1 (REALPART_EXPR,
8138 type, TREE_OPERAND (arg0, 1)))));
8142 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8143 return convert (type, integer_zero_node);
8144 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
8145 return omit_one_operand (type, TREE_OPERAND (arg0, 1),
8146 TREE_OPERAND (arg0, 0));
8147 else if (TREE_CODE (arg0) == COMPLEX_CST)
8148 return TREE_IMAGPART (arg0);
8149 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8150 return fold (build (TREE_CODE (arg0), type,
8151 fold (build1 (IMAGPART_EXPR, type,
8152 TREE_OPERAND (arg0, 0))),
8153 fold (build1 (IMAGPART_EXPR, type,
8154 TREE_OPERAND (arg0, 1)))));
8157 /* Pull arithmetic ops out of the CLEANUP_POINT_EXPR where
8159 case CLEANUP_POINT_EXPR:
8160 if (! has_cleanups (arg0))
8161 return TREE_OPERAND (t, 0);
8164 enum tree_code code0 = TREE_CODE (arg0);
8165 int kind0 = TREE_CODE_CLASS (code0);
8166 tree arg00 = TREE_OPERAND (arg0, 0);
8169 if (kind0 == '1' || code0 == TRUTH_NOT_EXPR)
8170 return fold (build1 (code0, type,
8171 fold (build1 (CLEANUP_POINT_EXPR,
8172 TREE_TYPE (arg00), arg00))));
8174 if (kind0 == '<' || kind0 == '2'
8175 || code0 == TRUTH_ANDIF_EXPR || code0 == TRUTH_ORIF_EXPR
8176 || code0 == TRUTH_AND_EXPR || code0 == TRUTH_OR_EXPR
8177 || code0 == TRUTH_XOR_EXPR)
8179 arg01 = TREE_OPERAND (arg0, 1);
8181 if (TREE_CONSTANT (arg00)
8182 || ((code0 == TRUTH_ANDIF_EXPR || code0 == TRUTH_ORIF_EXPR)
8183 && ! has_cleanups (arg00)))
8184 return fold (build (code0, type, arg00,
8185 fold (build1 (CLEANUP_POINT_EXPR,
8186 TREE_TYPE (arg01), arg01))));
8188 if (TREE_CONSTANT (arg01))
8189 return fold (build (code0, type,
8190 fold (build1 (CLEANUP_POINT_EXPR,
8191 TREE_TYPE (arg00), arg00)),
8199 /* Check for a built-in function. */
8200 if (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR
8201 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (expr, 0), 0))
8203 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (expr, 0), 0)))
8205 tree tmp = fold_builtin (expr);
8213 } /* switch (code) */
8216 #ifdef ENABLE_FOLD_CHECKING
8219 static void fold_checksum_tree (tree, struct md5_ctx *, htab_t);
8220 static void fold_check_failed (tree, tree);
8221 void print_fold_checksum (tree);
8223 /* When --enable-checking=fold, compute a digest of expr before
8224 and after the actual call to fold, to verify that fold did not
8225 accidentally change the original expr. */
/* Checking wrapper for fold (built with --enable-checking=fold): an MD5
   digest of EXPR is computed before and after the call to fold_1; if the
   two digests differ, fold_1 illegally modified its argument in place.
   NOTE(review): the function signature and some declarations are elided
   from this partial listing; visible code lines are unchanged.  */
8232 unsigned char checksum_before[16], checksum_after[16];
/* Pointer hash table so shared subtrees are hashed only once.  */
8235 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
8236 md5_init_ctx (&ctx);
8237 fold_checksum_tree (expr, &ctx, ht);
8238 md5_finish_ctx (&ctx, checksum_before);
/* Perform the actual folding.  */
8241 ret = fold_1 (expr);
8243 md5_init_ctx (&ctx);
8244 fold_checksum_tree (expr, &ctx, ht);
8245 md5_finish_ctx (&ctx, checksum_after);
/* Any difference in the digests means fold_1 clobbered EXPR.  */
8248 if (memcmp (checksum_before, checksum_after, 16))
8249 fold_check_failed (expr, ret);
/* Debugging aid: write the MD5 digest of EXPR (as computed by
   fold_checksum_tree) to stderr as 32 hex digits plus a newline.
   NOTE(review): the return-type line and braces are elided from this
   partial listing.  */
8255 print_fold_checksum (tree expr)
8258 unsigned char checksum[16], cnt;
8261 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
8262 md5_init_ctx (&ctx);
8263 fold_checksum_tree (expr, &ctx, ht);
8264 md5_finish_ctx (&ctx, checksum);
/* Emit the 16 digest bytes, two hex digits each.  */
8266 for (cnt = 0; cnt < 16; ++cnt)
8267 fprintf (stderr, "%02x", checksum[cnt]);
8268 putc ('\n', stderr);
/* Abort compilation (via internal_error) because fold modified its input
   tree EXPR in place; RET is the value fold was about to return.  Both
   parameters are currently unused by the diagnostic itself.  */
8272 fold_check_failed (tree expr ATTRIBUTE_UNUSED, tree ret ATTRIBUTE_UNUSED)
8274 internal_error ("fold check: original tree changed by fold")
/* Accumulate an MD5 digest of the tree EXPR into CTX.  HT is a pointer
   hash table used so that each shared subtree is hashed only once.
   Fields that fold is explicitly allowed to modify (noted inline below)
   are neutralized in a local copy before hashing so legitimate changes
   do not trip the checksum comparison.
   NOTE(review): braces, case labels and some statements are elided from
   this partial listing; the visible code lines are unchanged.  */
8278 fold_checksum_tree (tree expr, struct md5_ctx *ctx, htab_t ht)
8281 enum tree_code code;
8282 char buf[sizeof (struct tree_decl)];
/* Sanity check: BUF must be large enough to hold a copy of any of the
   node variants that are memcpy'd into it below.  */
8285 if (sizeof (struct tree_exp) + 5 * sizeof (tree)
8286 > sizeof (struct tree_decl)
8287 || sizeof (struct tree_type) > sizeof (struct tree_decl))
/* Record EXPR in HT; INSERT makes later visits of the same node cheap.  */
8291 slot = htab_find_slot (ht, expr, INSERT);
8295 code = TREE_CODE (expr);
8296 if (code == SAVE_EXPR && SAVE_EXPR_NOPLACEHOLDER (expr))
8298 /* Allow SAVE_EXPR_NOPLACEHOLDER flag to be modified.  */
8299 memcpy (buf, expr, tree_size (expr))
8301 SAVE_EXPR_NOPLACEHOLDER (expr) = 0;
8303 else if (TREE_CODE_CLASS (code) == 'd' && DECL_ASSEMBLER_NAME_SET_P (expr))
8305 /* Allow DECL_ASSEMBLER_NAME to be modified.  */
8306 memcpy (buf, expr, tree_size (expr))
8308 SET_DECL_ASSEMBLER_NAME (expr, NULL);
8310 else if (TREE_CODE_CLASS (code) == 't'
8311 && (TYPE_POINTER_TO (expr) || TYPE_REFERENCE_TO (expr)))
8313 /* Allow TYPE_POINTER_TO and TYPE_REFERENCE_TO to be modified.  */
8314 memcpy (buf, expr, tree_size (expr))
8316 TYPE_POINTER_TO (expr) = NULL;
8317 TYPE_REFERENCE_TO (expr) = NULL;
/* Hash the node's own bytes, then recurse into its type (and, for
   non-type, non-decl nodes, its chain).  */
8319 md5_process_bytes (expr, tree_size (expr), ctx);
8320 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
8321 if (TREE_CODE_CLASS (code) != 't' && TREE_CODE_CLASS (code) != 'd')
8322 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
/* Dispatch on the code class to hash code-specific sub-structure.  */
8323 len = TREE_CODE_LENGTH (code);
8324 switch (TREE_CODE_CLASS (code))
8330 md5_process_bytes (TREE_STRING_POINTER (expr),
8331 TREE_STRING_LENGTH (expr), ctx);
8334 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
8335 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
8338 fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
8348 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
8349 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
8352 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
8353 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
/* A few expression codes hash a different number of operands than
   TREE_CODE_LENGTH reports.  */
8362 case SAVE_EXPR: len = 2; break;
8363 case GOTO_SUBROUTINE_EXPR: len = 0; break;
8364 case RTL_EXPR: len = 0; break;
8365 case WITH_CLEANUP_EXPR: len = 2; break;
8374 for (i = 0; i < len; ++i)
8375 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
/* Declaration nodes: hash the salient DECL_* fields.  */
8378 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
8379 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
8380 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
8381 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
8382 fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
8383 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
8384 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
8385 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
8386 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
8387 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
8388 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
/* Type nodes: hash the salient TYPE_* fields.  */
8391 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
8392 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
8393 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
8394 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
8395 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
8396 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
8397 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
8398 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
8399 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
8400 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
8409 /* Perform constant folding and related simplification of initializer
8410 expression EXPR.  This behaves identically to "fold" but ignores
8411 potential run-time traps and exceptions that fold must preserve.  */
8414 fold_initializer (tree expr)
/* Save the flags that normally make fold preserve run-time traps, so
   they can be restored after folding.  */
8416 int saved_signaling_nans = flag_signaling_nans;
8417 int saved_trapping_math = flag_trapping_math;
8418 int saved_trapv = flag_trapv;
/* Disable trap preservation: initializers are evaluated at compile time,
   so there is no run-time trap to preserve.
   NOTE(review): the line clearing flag_trapv is elided from this
   partial listing.  */
8421 flag_signaling_nans = 0;
8422 flag_trapping_math = 0;
8425 result = fold (expr);
/* Restore the caller-visible flag state.  */
8427 flag_signaling_nans = saved_signaling_nans;
8428 flag_trapping_math = saved_trapping_math;
8429 flag_trapv = saved_trapv;
8434 /* Determine if first argument is a multiple of second argument.  Return 0 if
8435 it is not, or we cannot easily determine it to be.
8437 An example of the sort of thing we care about (at this point; this routine
8438 could surely be made more general, and expanded to do what the *_DIV_EXPR's
8439 fold cases do now) is discovering that
8441 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
8447 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
8449 This code also handles discovering that
8451 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
8453 is a multiple of 8 so we don't have to worry about dealing with a
8456 Note that we *look* inside a SAVE_EXPR only to determine how it was
8457 calculated; it is not safe for fold to do much of anything else with the
8458 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
8459 at run time.  For example, the latter example above *cannot* be implemented
8460 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
8461 evaluation time of the original SAVE_EXPR is not necessarily the same at
8462 the time the new expression is evaluated.  The only optimization of this
8463 sort that would be valid is changing
8465 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
8469 SAVE_EXPR (I) * SAVE_EXPR (J)
8471 (where the same SAVE_EXPR (J) is used in the original and the
8472 transformed version).  */
8475 multiple_of_p (tree type, tree top, tree bottom)
/* Trivial case: anything is a multiple of itself.  */
8477 if (operand_equal_p (top, bottom, 0))
8480 if (TREE_CODE (type) != INTEGER_TYPE)
/* Recurse structurally on TOP.
   NOTE(review): the case labels of this switch are elided from this
   partial listing; the visible code lines are unchanged.  */
8483 switch (TREE_CODE (top))
/* A sum/difference is a multiple if either operand is (for MULT, see
   the &&-form below).  */
8486 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
8487 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
8491 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
8492 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
/* Left shift by a constant: rewrite as 1 << N and recurse, guarding
   against shift counts that would overflow.  */
8495 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
8499 op1 = TREE_OPERAND (top, 1);
8500 /* const_binop may not detect overflow correctly,
8501 so check for it explicitly here.  */
8502 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
8503 > TREE_INT_CST_LOW (op1)
8504 && TREE_INT_CST_HIGH (op1) == 0
8505 && 0 != (t1 = convert (type,
8506 const_binop (LSHIFT_EXPR, size_one_node,
8508 && ! TREE_OVERFLOW (t1))
8509 return multiple_of_p (type, t1, bottom);
8514 /* Can't handle conversions from non-integral or wider integral type.  */
8515 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
8516 || (TYPE_PRECISION (type)
8517 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
8520 /* .. fall through ...  */
/* Narrowing-safe conversion (or SAVE_EXPR): look through it.  */
8523 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
/* Base case: both constants — refuse negative operands in unsigned
   types, otherwise test TOP % BOTTOM == 0.  */
8526 if (TREE_CODE (bottom) != INTEGER_CST
8527 || (TREE_UNSIGNED (type)
8528 && (tree_int_cst_sgn (top) < 0
8529 || tree_int_cst_sgn (bottom) < 0)))
8531 return integer_zerop (const_binop (TRUNC_MOD_EXPR,
8539 /* Return true if `t' is known to be non-negative.  */
/* Dispatches on TREE_CODE (t); conservatively returns false when the
   sign cannot be proven.
   NOTE(review): braces, several case labels and returns are elided from
   this partial listing; the visible code lines are unchanged.  */
8542 tree_expr_nonnegative_p (tree t)
8544 switch (TREE_CODE (t))
8554 /* These are undefined at zero.  This is true even if
8555 C[LT]Z_DEFINED_VALUE_AT_ZERO is set, since what we're
8556 computing here is a user-visible property.  */
/* Integer constant: direct sign test.  */
8560 return tree_int_cst_sgn (t) >= 0;
/* Real constant: direct sign test.  */
8563 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
/* Addition: for floats, nonneg + nonneg is nonneg.  */
8566 if (FLOAT_TYPE_P (TREE_TYPE (t)))
8567 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
8568 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
8570 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
8571 both unsigned and at least 2 bits shorter than the result.  */
8572 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
8573 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
8574 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
8576 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
8577 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
8578 if (TREE_CODE (inner1) == INTEGER_TYPE && TREE_UNSIGNED (inner1)
8579 && TREE_CODE (inner2) == INTEGER_TYPE && TREE_UNSIGNED (inner2))
8581 unsigned int prec = MAX (TYPE_PRECISION (inner1),
8582 TYPE_PRECISION (inner2)) + 1;
8583 return prec < TYPE_PRECISION (TREE_TYPE (t));
/* Multiplication.  */
8589 if (FLOAT_TYPE_P (TREE_TYPE (t)))
8591 /* x * x for floating point x is always non-negative.  */
8592 if (operand_equal_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1), 0))
8594 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
8595 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
8598 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
8599 both unsigned and the sum of their widths is shorter than the result.  */
8600 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
8601 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
8602 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
8604 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
8605 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
8606 if (TREE_CODE (inner1) == INTEGER_TYPE && TREE_UNSIGNED (inner1)
8607 && TREE_CODE (inner2) == INTEGER_TYPE && TREE_UNSIGNED (inner2))
8608 return TYPE_PRECISION (inner1) + TYPE_PRECISION (inner2)
8609 < TYPE_PRECISION (TREE_TYPE (t));
/* Division: nonneg / nonneg is nonneg.  */
8613 case TRUNC_DIV_EXPR:
8615 case FLOOR_DIV_EXPR:
8616 case ROUND_DIV_EXPR:
8617 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
8618 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
/* Modulus: sign follows the dividend.  */
8620 case TRUNC_MOD_EXPR:
8622 case FLOOR_MOD_EXPR:
8623 case ROUND_MOD_EXPR:
8624 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
8627 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
8628 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
/* Conversions: decide from the inner and outer types.  */
8632 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
8633 tree outer_type = TREE_TYPE (t);
8635 if (TREE_CODE (outer_type) == REAL_TYPE)
8637 if (TREE_CODE (inner_type) == REAL_TYPE)
8638 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
8639 if (TREE_CODE (inner_type) == INTEGER_TYPE)
8641 if (TREE_UNSIGNED (inner_type))
8643 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
8646 else if (TREE_CODE (outer_type) == INTEGER_TYPE)
8648 if (TREE_CODE (inner_type) == REAL_TYPE)
8649 return tree_expr_nonnegative_p (TREE_OPERAND (t,0));
8650 if (TREE_CODE (inner_type) == INTEGER_TYPE)
8651 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
8652 && TREE_UNSIGNED (inner_type);
/* NOTE(review): the case labels for the following returns are elided
   from this partial listing.  */
8658 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1))
8659 && tree_expr_nonnegative_p (TREE_OPERAND (t, 2));
8661 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
8663 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
8664 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
8666 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
8667 || tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
8669 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
8671 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
8673 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
8674 case NON_LVALUE_EXPR:
8675 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
8677 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
8679 return rtl_expr_nonnegative_p (RTL_EXPR_RTL (t));
/* Calls to built-in functions whose results are always non-negative
   (cabs*, exp2*, exp10*, pow10*, fabs*, sqrt*, atan*, ceil*, floor*,
   nearbyint*, round*, trunc*, ...).  */
8683 tree fndecl = get_callee_fndecl (t);
8684 tree arglist = TREE_OPERAND (t, 1);
8686 && DECL_BUILT_IN (fndecl)
8687 && DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_MD)
8688 switch (DECL_FUNCTION_CODE (fndecl))
8691 case BUILT_IN_CABSL:
8692 case BUILT_IN_CABSF:
8697 case BUILT_IN_EXP2F:
8698 case BUILT_IN_EXP2L:
8699 case BUILT_IN_EXP10:
8700 case BUILT_IN_EXP10F:
8701 case BUILT_IN_EXP10L:
8702 case BUILT_IN_POW10:
8703 case BUILT_IN_POW10F:
8704 case BUILT_IN_POW10L:
8706 case BUILT_IN_FABSF:
8707 case BUILT_IN_FABSL:
8709 case BUILT_IN_SQRTF:
8710 case BUILT_IN_SQRTL:
8714 case BUILT_IN_ATANF:
8715 case BUILT_IN_ATANL:
8717 case BUILT_IN_CEILF:
8718 case BUILT_IN_CEILL:
8719 case BUILT_IN_FLOOR:
8720 case BUILT_IN_FLOORF:
8721 case BUILT_IN_FLOORL:
8722 case BUILT_IN_NEARBYINT:
8723 case BUILT_IN_NEARBYINTF:
8724 case BUILT_IN_NEARBYINTL:
8725 case BUILT_IN_ROUND:
8726 case BUILT_IN_ROUNDF:
8727 case BUILT_IN_ROUNDL:
8728 case BUILT_IN_TRUNC:
8729 case BUILT_IN_TRUNCF:
8730 case BUILT_IN_TRUNCL:
8731 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
8736 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
8743 /* ... fall through ...  */
8746 if (truth_value_p (TREE_CODE (t)))
8747 /* Truth values evaluate to 0 or 1, which is nonnegative.  */
8751 /* We don't know sign of `t', so be conservative and return false.  */
8755 /* Return true if `r' is known to be non-negative.
8756 Only handles constants at the moment.  */
/* NOTE(review): braces, case labels and some returns are elided from
   this partial listing; the visible code lines are unchanged.  */
8759 rtl_expr_nonnegative_p (rtx r)
8761 switch (GET_CODE (r))
/* Integer constant: direct sign test of the stored value.  */
8764 return INTVAL (r) >= 0;
/* A VOIDmode CONST_DOUBLE holds an integer; its sign lives in the
   high word.  */
8767 if (GET_MODE (r) == VOIDmode)
8768 return CONST_DOUBLE_HIGH (r) >= 0;
/* Constant vector: non-negative only if every element is.  */
8776 units = CONST_VECTOR_NUNITS (r);
8778 for (i = 0; i < units; ++i)
8780 elt = CONST_VECTOR_ELT (r, i);
8781 if (!rtl_expr_nonnegative_p (elt))
8790 /* These are always nonnegative.  */
8798 #include "gt-fold-const.h"