1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
22 /*@@ This file should be rewritten to use an arbitrary precision
23 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
24 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
25 @@ The routines that translate from the ap rep should
26 @@ warn if precision et. al. is lost.
27 @@ This would also make life easier when this technology is used
28 @@ for cross-compilers. */
30 /* The entry points in this file are fold, size_int_wide, size_binop
33 fold takes a tree as argument and returns a simplified tree.
35 size_binop takes a tree code for an arithmetic operation
36 and two operands that are trees, and produces a tree for the
37 result, assuming the type comes from `sizetype'.
39 size_int takes an integer value, and creates a tree constant
40 with type from `sizetype'.
42 force_fit_type takes a constant and prior overflow indicator, and
43 forces the value to fit the type. It returns an overflow indicator. */
47 #include "coretypes.h"
58 #include "langhooks.h"
61 static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
62 static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *);
63 static bool negate_mathfn_p (enum built_in_function);
64 static bool negate_expr_p (tree);
65 static tree negate_expr (tree);
66 static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
67 static tree associate_trees (tree, tree, enum tree_code, tree);
68 static tree int_const_binop (enum tree_code, tree, tree, int);
69 static tree const_binop (enum tree_code, tree, tree, int);
70 static hashval_t size_htab_hash (const void *);
71 static int size_htab_eq (const void *, const void *);
72 static tree fold_convert_const (enum tree_code, tree, tree);
73 static tree fold_convert (tree, tree);
74 static enum tree_code invert_tree_comparison (enum tree_code);
75 static enum tree_code swap_tree_comparison (enum tree_code);
76 static int comparison_to_compcode (enum tree_code);
77 static enum tree_code compcode_to_comparison (int);
78 static int truth_value_p (enum tree_code);
79 static int operand_equal_for_comparison_p (tree, tree, tree);
80 static int twoval_comparison_p (tree, tree *, tree *, int *);
81 static tree eval_subst (tree, tree, tree, tree, tree);
82 static tree pedantic_omit_one_operand (tree, tree, tree);
83 static tree distribute_bit_expr (enum tree_code, tree, tree, tree);
84 static tree make_bit_field_ref (tree, tree, int, int, int);
85 static tree optimize_bit_field_compare (enum tree_code, tree, tree, tree);
86 static tree decode_field_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
87 enum machine_mode *, int *, int *,
89 static int all_ones_mask_p (tree, int);
90 static tree sign_bit_p (tree, tree);
91 static int simple_operand_p (tree);
92 static tree range_binop (enum tree_code, tree, tree, int, tree, int);
93 static tree make_range (tree, int *, tree *, tree *);
94 static tree build_range_check (tree, tree, int, tree, tree);
95 static int merge_ranges (int *, tree *, tree *, int, tree, tree, int, tree,
97 static tree fold_range_test (tree);
98 static tree unextend (tree, int, int, tree);
99 static tree fold_truthop (enum tree_code, tree, tree, tree);
100 static tree optimize_minmax_comparison (tree);
101 static tree extract_muldiv (tree, tree, enum tree_code, tree);
102 static tree extract_muldiv_1 (tree, tree, enum tree_code, tree);
103 static tree strip_compound_expr (tree, tree);
104 static int multiple_of_p (tree, tree, tree);
105 static tree constant_boolean_node (int, tree);
106 static int count_cond (tree, int);
107 static tree fold_binary_op_with_conditional_arg (enum tree_code, tree, tree,
109 static bool fold_real_zero_addition_p (tree, tree, int);
110 static tree fold_mathfn_compare (enum built_in_function, enum tree_code,
112 static tree fold_inf_compare (enum tree_code, tree, tree, tree);
113 static bool reorder_operands_p (tree, tree);
114 static bool tree_swap_operands_p (tree, tree, bool);
116 static tree fold_negate_const (tree, tree);
118 /* The following constants represent a bit based encoding of GCC's
119 comparison operators. This encoding simplifies transformations
120 on relational comparison operators, such as AND and OR. */
121 #define COMPCODE_FALSE 0
122 #define COMPCODE_LT 1
123 #define COMPCODE_EQ 2
124 #define COMPCODE_LE 3
125 #define COMPCODE_GT 4
126 #define COMPCODE_NE 5
127 #define COMPCODE_GE 6
128 #define COMPCODE_TRUE 7
130 /* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
131 overflow. Suppose A, B and SUM have the same respective signs as A1, B1,
132 and SUM1. Then this yields nonzero if overflow occurred during the
135 Overflow occurs if A and B have the same sign, but A and SUM differ in
136 sign. Use `^' to test whether signs differ, and `< 0' to isolate the
138 #define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
140 /* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
141 We do that by representing the two-word integer in 4 words, with only
142 HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
143 number. The value of the word is LOWPART + HIGHPART * BASE. */
146 ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
147 #define HIGHPART(x) \
148 ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
149 #define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)
151 /* Unpack a two-word integer into 4 words.
152 LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
153 WORDS points to the array of HOST_WIDE_INTs. */
156 encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
158 words[0] = LOWPART (low);
159 words[1] = HIGHPART (low);
160 words[2] = LOWPART (hi);
161 words[3] = HIGHPART (hi);
164 /* Pack an array of 4 words into a two-word integer.
165 WORDS points to the array of words.
166 The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces. */
169 decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
172 *low = words[0] + words[1] * BASE;
173 *hi = words[2] + words[3] * BASE;
176 /* Make the integer constant T valid for its type by setting to 0 or 1 all
177 the bits in the constant that don't belong in the type.
179 Return 1 if a signed overflow occurs, 0 otherwise. If OVERFLOW is
180 nonzero, a signed overflow has already occurred in calculating T, so
184 force_fit_type (tree t, int overflow)
186 unsigned HOST_WIDE_INT low;
190 if (TREE_CODE (t) == REAL_CST)
192 /* ??? Used to check for overflow here via CHECK_FLOAT_TYPE.
193 Consider doing it via real_convert now. */
197 else if (TREE_CODE (t) != INTEGER_CST)
200 low = TREE_INT_CST_LOW (t);
201 high = TREE_INT_CST_HIGH (t);
203 if (POINTER_TYPE_P (TREE_TYPE (t))
204 || TREE_CODE (TREE_TYPE (t)) == OFFSET_TYPE)
207 prec = TYPE_PRECISION (TREE_TYPE (t));
209 /* First clear all bits that are beyond the type's precision. */
211 if (prec == 2 * HOST_BITS_PER_WIDE_INT)
213 else if (prec > HOST_BITS_PER_WIDE_INT)
214 TREE_INT_CST_HIGH (t)
215 &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
218 TREE_INT_CST_HIGH (t) = 0;
219 if (prec < HOST_BITS_PER_WIDE_INT)
220 TREE_INT_CST_LOW (t) &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
223 /* Unsigned types do not suffer sign extension or overflow unless they
225 if (TREE_UNSIGNED (TREE_TYPE (t))
226 && ! (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
227 && TYPE_IS_SIZETYPE (TREE_TYPE (t))))
230 /* If the value's sign bit is set, extend the sign. */
231 if (prec != 2 * HOST_BITS_PER_WIDE_INT
232 && (prec > HOST_BITS_PER_WIDE_INT
233 ? 0 != (TREE_INT_CST_HIGH (t)
235 << (prec - HOST_BITS_PER_WIDE_INT - 1)))
236 : 0 != (TREE_INT_CST_LOW (t)
237 & ((unsigned HOST_WIDE_INT) 1 << (prec - 1)))))
239 /* Value is negative:
240 set to 1 all the bits that are outside this type's precision. */
241 if (prec > HOST_BITS_PER_WIDE_INT)
242 TREE_INT_CST_HIGH (t)
243 |= ((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
246 TREE_INT_CST_HIGH (t) = -1;
247 if (prec < HOST_BITS_PER_WIDE_INT)
248 TREE_INT_CST_LOW (t) |= ((unsigned HOST_WIDE_INT) (-1) << prec);
252 /* Return nonzero if signed overflow occurred. */
254 ((overflow | (low ^ TREE_INT_CST_LOW (t)) | (high ^ TREE_INT_CST_HIGH (t)))
258 /* Add two doubleword integers with doubleword result.
259 Each argument is given as two `HOST_WIDE_INT' pieces.
260 One argument is L1 and H1; the other, L2 and H2.
261 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
264 add_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
265 unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
266 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
268 unsigned HOST_WIDE_INT l;
272 h = h1 + h2 + (l < l1);
276 return OVERFLOW_SUM_SIGN (h1, h2, h);
279 /* Negate a doubleword integer with doubleword result.
280 Return nonzero if the operation overflows, assuming it's signed.
281 The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
282 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
285 neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
286 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
292 return (*hv & h1) < 0;
302 /* Multiply two doubleword integers with doubleword result.
303 Return nonzero if the operation overflows, assuming it's signed.
304 Each argument is given as two `HOST_WIDE_INT' pieces.
305 One argument is L1 and H1; the other, L2 and H2.
306 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
309 mul_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
310 unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
311 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
313 HOST_WIDE_INT arg1[4];
314 HOST_WIDE_INT arg2[4];
315 HOST_WIDE_INT prod[4 * 2];
316 unsigned HOST_WIDE_INT carry;
318 unsigned HOST_WIDE_INT toplow, neglow;
319 HOST_WIDE_INT tophigh, neghigh;
321 encode (arg1, l1, h1);
322 encode (arg2, l2, h2);
324 memset (prod, 0, sizeof prod);
326 for (i = 0; i < 4; i++)
329 for (j = 0; j < 4; j++)
332 /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000. */
333 carry += arg1[i] * arg2[j];
334 /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF. */
336 prod[k] = LOWPART (carry);
337 carry = HIGHPART (carry);
342 decode (prod, lv, hv); /* This ignores prod[4] through prod[4*2-1] */
344 /* Check for overflow by calculating the top half of the answer in full;
345 it should agree with the low half's sign bit. */
346 decode (prod + 4, &toplow, &tophigh);
349 neg_double (l2, h2, &neglow, &neghigh);
350 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
354 neg_double (l1, h1, &neglow, &neghigh);
355 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
357 return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
360 /* Shift the doubleword integer in L1, H1 left by COUNT places
361 keeping only PREC bits of result.
362 Shift right if COUNT is negative.
363 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
364 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
367 lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
368 HOST_WIDE_INT count, unsigned int prec,
369 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
371 unsigned HOST_WIDE_INT signmask;
375 rshift_double (l1, h1, -count, prec, lv, hv, arith);
379 if (SHIFT_COUNT_TRUNCATED)
382 if (count >= 2 * HOST_BITS_PER_WIDE_INT)
384 /* Shifting by the host word size is undefined according to the
385 ANSI standard, so we must handle this as a special case. */
389 else if (count >= HOST_BITS_PER_WIDE_INT)
391 *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
396 *hv = (((unsigned HOST_WIDE_INT) h1 << count)
397 | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
401 /* Sign extend all bits that are beyond the precision. */
403 signmask = -((prec > HOST_BITS_PER_WIDE_INT
404 ? ((unsigned HOST_WIDE_INT) *hv
405 >> (prec - HOST_BITS_PER_WIDE_INT - 1))
406 : (*lv >> (prec - 1))) & 1);
408 if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
410 else if (prec >= HOST_BITS_PER_WIDE_INT)
412 *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
413 *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
418 *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
419 *lv |= signmask << prec;
423 /* Shift the doubleword integer in L1, H1 right by COUNT places
424 keeping only PREC bits of result. COUNT must be positive.
425 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
426 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
429 rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
430 HOST_WIDE_INT count, unsigned int prec,
431 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
434 unsigned HOST_WIDE_INT signmask;
437 ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
440 if (SHIFT_COUNT_TRUNCATED)
443 if (count >= 2 * HOST_BITS_PER_WIDE_INT)
445 /* Shifting by the host word size is undefined according to the
446 ANSI standard, so we must handle this as a special case. */
450 else if (count >= HOST_BITS_PER_WIDE_INT)
453 *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
457 *hv = (unsigned HOST_WIDE_INT) h1 >> count;
459 | ((unsigned HOST_WIDE_INT) h1 << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
462 /* Zero / sign extend all bits that are beyond the precision. */
464 if (count >= (HOST_WIDE_INT)prec)
469 else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
471 else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
473 *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
474 *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
479 *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
480 *lv |= signmask << (prec - count);
484 /* Rotate the doubleword integer in L1, H1 left by COUNT places
485 keeping only PREC bits of result.
486 Rotate right if COUNT is negative.
487 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
490 lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
491 HOST_WIDE_INT count, unsigned int prec,
492 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
494 unsigned HOST_WIDE_INT s1l, s2l;
495 HOST_WIDE_INT s1h, s2h;
501 lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
502 rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
507 /* Rotate the doubleword integer in L1, H1 left by COUNT places
508 keeping only PREC bits of result. COUNT must be positive.
509 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
512 rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
513 HOST_WIDE_INT count, unsigned int prec,
514 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
516 unsigned HOST_WIDE_INT s1l, s2l;
517 HOST_WIDE_INT s1h, s2h;
523 rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
524 lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
529 /* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
530 for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
531 CODE is a tree code for a kind of division, one of
532 TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
534 It controls how the quotient is rounded to an integer.
535 Return nonzero if the operation overflows.
536 UNS nonzero says do unsigned division. */
539 div_and_round_double (enum tree_code code, int uns,
540 unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
541 HOST_WIDE_INT hnum_orig,
542 unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
543 HOST_WIDE_INT hden_orig,
544 unsigned HOST_WIDE_INT *lquo,
545 HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
549 HOST_WIDE_INT num[4 + 1]; /* extra element for scaling. */
550 HOST_WIDE_INT den[4], quo[4];
552 unsigned HOST_WIDE_INT work;
553 unsigned HOST_WIDE_INT carry = 0;
554 unsigned HOST_WIDE_INT lnum = lnum_orig;
555 HOST_WIDE_INT hnum = hnum_orig;
556 unsigned HOST_WIDE_INT lden = lden_orig;
557 HOST_WIDE_INT hden = hden_orig;
560 if (hden == 0 && lden == 0)
561 overflow = 1, lden = 1;
563 /* Calculate quotient sign and convert operands to unsigned. */
569 /* (minimum integer) / (-1) is the only overflow case. */
570 if (neg_double (lnum, hnum, &lnum, &hnum)
571 && ((HOST_WIDE_INT) lden & hden) == -1)
577 neg_double (lden, hden, &lden, &hden);
581 if (hnum == 0 && hden == 0)
582 { /* single precision */
584 /* This unsigned division rounds toward zero. */
590 { /* trivial case: dividend < divisor */
591 /* hden != 0 already checked. */
598 memset (quo, 0, sizeof quo);
600 memset (num, 0, sizeof num); /* to zero 9th element */
601 memset (den, 0, sizeof den);
603 encode (num, lnum, hnum);
604 encode (den, lden, hden);
606 /* Special code for when the divisor < BASE. */
607 if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
609 /* hnum != 0 already checked. */
610 for (i = 4 - 1; i >= 0; i--)
612 work = num[i] + carry * BASE;
613 quo[i] = work / lden;
619 /* Full double precision division,
620 with thanks to Don Knuth's "Seminumerical Algorithms". */
621 int num_hi_sig, den_hi_sig;
622 unsigned HOST_WIDE_INT quo_est, scale;
624 /* Find the highest nonzero divisor digit. */
625 for (i = 4 - 1;; i--)
632 /* Insure that the first digit of the divisor is at least BASE/2.
633 This is required by the quotient digit estimation algorithm. */
635 scale = BASE / (den[den_hi_sig] + 1);
637 { /* scale divisor and dividend */
639 for (i = 0; i <= 4 - 1; i++)
641 work = (num[i] * scale) + carry;
642 num[i] = LOWPART (work);
643 carry = HIGHPART (work);
648 for (i = 0; i <= 4 - 1; i++)
650 work = (den[i] * scale) + carry;
651 den[i] = LOWPART (work);
652 carry = HIGHPART (work);
653 if (den[i] != 0) den_hi_sig = i;
660 for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
662 /* Guess the next quotient digit, quo_est, by dividing the first
663 two remaining dividend digits by the high order quotient digit.
664 quo_est is never low and is at most 2 high. */
665 unsigned HOST_WIDE_INT tmp;
667 num_hi_sig = i + den_hi_sig + 1;
668 work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
669 if (num[num_hi_sig] != den[den_hi_sig])
670 quo_est = work / den[den_hi_sig];
674 /* Refine quo_est so it's usually correct, and at most one high. */
675 tmp = work - quo_est * den[den_hi_sig];
677 && (den[den_hi_sig - 1] * quo_est
678 > (tmp * BASE + num[num_hi_sig - 2])))
681 /* Try QUO_EST as the quotient digit, by multiplying the
682 divisor by QUO_EST and subtracting from the remaining dividend.
683 Keep in mind that QUO_EST is the I - 1st digit. */
686 for (j = 0; j <= den_hi_sig; j++)
688 work = quo_est * den[j] + carry;
689 carry = HIGHPART (work);
690 work = num[i + j] - LOWPART (work);
691 num[i + j] = LOWPART (work);
692 carry += HIGHPART (work) != 0;
695 /* If quo_est was high by one, then num[i] went negative and
696 we need to correct things. */
697 if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
700 carry = 0; /* add divisor back in */
701 for (j = 0; j <= den_hi_sig; j++)
703 work = num[i + j] + den[j] + carry;
704 carry = HIGHPART (work);
705 num[i + j] = LOWPART (work);
708 num [num_hi_sig] += carry;
711 /* Store the quotient digit. */
716 decode (quo, lquo, hquo);
719 /* If result is negative, make it so. */
721 neg_double (*lquo, *hquo, lquo, hquo);
723 /* Compute trial remainder: rem = num - (quo * den) */
724 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
725 neg_double (*lrem, *hrem, lrem, hrem);
726 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
731 case TRUNC_MOD_EXPR: /* round toward zero */
732 case EXACT_DIV_EXPR: /* for this one, it shouldn't matter */
736 case FLOOR_MOD_EXPR: /* round toward negative infinity */
737 if (quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio < 0 && rem != 0 */
740 add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
748 case CEIL_MOD_EXPR: /* round toward positive infinity */
749 if (!quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio > 0 && rem != 0 */
751 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
759 case ROUND_MOD_EXPR: /* round to closest integer */
761 unsigned HOST_WIDE_INT labs_rem = *lrem;
762 HOST_WIDE_INT habs_rem = *hrem;
763 unsigned HOST_WIDE_INT labs_den = lden, ltwice;
764 HOST_WIDE_INT habs_den = hden, htwice;
766 /* Get absolute values. */
768 neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
770 neg_double (lden, hden, &labs_den, &habs_den);
772 /* If (2 * abs (lrem) >= abs (lden)) */
773 mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
774 labs_rem, habs_rem, <wice, &htwice);
776 if (((unsigned HOST_WIDE_INT) habs_den
777 < (unsigned HOST_WIDE_INT) htwice)
778 || (((unsigned HOST_WIDE_INT) habs_den
779 == (unsigned HOST_WIDE_INT) htwice)
780 && (labs_den < ltwice)))
784 add_double (*lquo, *hquo,
785 (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
788 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
800 /* Compute true remainder: rem = num - (quo * den) */
801 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
802 neg_double (*lrem, *hrem, lrem, hrem);
803 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
807 /* Return true if built-in mathematical function specified by CODE
808 preserves the sign of it argument, i.e. -f(x) == f(-x). */
811 negate_mathfn_p (enum built_in_function code)
835 /* Determine whether an expression T can be cheaply negated using
836 the function negate_expr. */
839 negate_expr_p (tree t)
841 unsigned HOST_WIDE_INT val;
848 type = TREE_TYPE (t);
851 switch (TREE_CODE (t))
854 if (TREE_UNSIGNED (type) || ! flag_trapv)
857 /* Check that -CST will not overflow type. */
858 prec = TYPE_PRECISION (type);
859 if (prec > HOST_BITS_PER_WIDE_INT)
861 if (TREE_INT_CST_LOW (t) != 0)
863 prec -= HOST_BITS_PER_WIDE_INT;
864 val = TREE_INT_CST_HIGH (t);
867 val = TREE_INT_CST_LOW (t);
868 if (prec < HOST_BITS_PER_WIDE_INT)
869 val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
870 return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
877 return negate_expr_p (TREE_REALPART (t))
878 && negate_expr_p (TREE_IMAGPART (t));
881 if (FLOAT_TYPE_P (type) && !flag_unsafe_math_optimizations)
883 /* -(A + B) -> (-B) - A. */
884 if (negate_expr_p (TREE_OPERAND (t, 1))
885 && reorder_operands_p (TREE_OPERAND (t, 0),
886 TREE_OPERAND (t, 1)))
888 /* -(A + B) -> (-A) - B. */
889 return negate_expr_p (TREE_OPERAND (t, 0));
892 /* We can't turn -(A-B) into B-A when we honor signed zeros. */
893 return (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
894 && reorder_operands_p (TREE_OPERAND (t, 0),
895 TREE_OPERAND (t, 1));
898 if (TREE_UNSIGNED (TREE_TYPE (t)))
904 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
905 return negate_expr_p (TREE_OPERAND (t, 1))
906 || negate_expr_p (TREE_OPERAND (t, 0));
910 /* Negate -((double)float) as (double)(-float). */
911 if (TREE_CODE (type) == REAL_TYPE)
913 tree tem = strip_float_extensions (t);
915 return negate_expr_p (tem);
920 /* Negate -f(x) as f(-x). */
921 if (negate_mathfn_p (builtin_mathfn_code (t)))
922 return negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1)));
926 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
927 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
929 tree op1 = TREE_OPERAND (t, 1);
930 if (TREE_INT_CST_HIGH (op1) == 0
931 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
932 == TREE_INT_CST_LOW (op1))
943 /* Given T, an expression, return the negation of T. Allow for T to be
944 null, in which case return null. */
955 type = TREE_TYPE (t);
958 switch (TREE_CODE (t))
961 tem = fold_negate_const (t, type);
962 if (! TREE_OVERFLOW (tem)
963 || TREE_UNSIGNED (type)
969 tem = fold_negate_const (t, type);
970 /* Two's complement FP formats, such as c4x, may overflow. */
971 if (! TREE_OVERFLOW (tem) || ! flag_trapping_math)
972 return fold_convert (type, tem);
977 tree rpart = negate_expr (TREE_REALPART (t));
978 tree ipart = negate_expr (TREE_IMAGPART (t));
980 if ((TREE_CODE (rpart) == REAL_CST
981 && TREE_CODE (ipart) == REAL_CST)
982 || (TREE_CODE (rpart) == INTEGER_CST
983 && TREE_CODE (ipart) == INTEGER_CST))
984 return build_complex (type, rpart, ipart);
989 return fold_convert (type, TREE_OPERAND (t, 0));
992 if (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
994 /* -(A + B) -> (-B) - A. */
995 if (negate_expr_p (TREE_OPERAND (t, 1))
996 && reorder_operands_p (TREE_OPERAND (t, 0),
997 TREE_OPERAND (t, 1)))
998 return fold_convert (type,
999 fold (build (MINUS_EXPR, TREE_TYPE (t),
1000 negate_expr (TREE_OPERAND (t, 1)),
1001 TREE_OPERAND (t, 0))));
1002 /* -(A + B) -> (-A) - B. */
1003 if (negate_expr_p (TREE_OPERAND (t, 0)))
1004 return fold_convert (type,
1005 fold (build (MINUS_EXPR, TREE_TYPE (t),
1006 negate_expr (TREE_OPERAND (t, 0)),
1007 TREE_OPERAND (t, 1))));
1012 /* - (A - B) -> B - A */
1013 if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
1014 && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
1015 return fold_convert (type,
1016 fold (build (MINUS_EXPR, TREE_TYPE (t),
1017 TREE_OPERAND (t, 1),
1018 TREE_OPERAND (t, 0))));
1022 if (TREE_UNSIGNED (TREE_TYPE (t)))
1028 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
1030 tem = TREE_OPERAND (t, 1);
1031 if (negate_expr_p (tem))
1032 return fold_convert (type,
1033 fold (build (TREE_CODE (t), TREE_TYPE (t),
1034 TREE_OPERAND (t, 0),
1035 negate_expr (tem))));
1036 tem = TREE_OPERAND (t, 0);
1037 if (negate_expr_p (tem))
1038 return fold_convert (type,
1039 fold (build (TREE_CODE (t), TREE_TYPE (t),
1041 TREE_OPERAND (t, 1))));
1046 /* Convert -((double)float) into (double)(-float). */
1047 if (TREE_CODE (type) == REAL_TYPE)
1049 tem = strip_float_extensions (t);
1050 if (tem != t && negate_expr_p (tem))
1051 return fold_convert (type, negate_expr (tem));
1056 /* Negate -f(x) as f(-x). */
1057 if (negate_mathfn_p (builtin_mathfn_code (t))
1058 && negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1))))
1060 tree fndecl, arg, arglist;
1062 fndecl = get_callee_fndecl (t);
1063 arg = negate_expr (TREE_VALUE (TREE_OPERAND (t, 1)));
1064 arglist = build_tree_list (NULL_TREE, arg);
1065 return build_function_call_expr (fndecl, arglist);
1070 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
1071 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
1073 tree op1 = TREE_OPERAND (t, 1);
1074 if (TREE_INT_CST_HIGH (op1) == 0
1075 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
1076 == TREE_INT_CST_LOW (op1))
1078 tree ntype = TREE_UNSIGNED (type)
1079 ? lang_hooks.types.signed_type (type)
1080 : lang_hooks.types.unsigned_type (type);
1081 tree temp = fold_convert (ntype, TREE_OPERAND (t, 0));
1082 temp = fold (build2 (RSHIFT_EXPR, ntype, temp, op1));
1083 return fold_convert (type, temp);
1092 tem = fold (build1 (NEGATE_EXPR, TREE_TYPE (t), t));
1093 return fold_convert (type, tem);
1096 /* Split a tree IN into a constant, literal and variable parts that could be
1097 combined with CODE to make IN. "constant" means an expression with
1098 TREE_CONSTANT but that isn't an actual constant. CODE must be a
1099 commutative arithmetic operation. Store the constant part into *CONP,
1100 the literal in *LITP and return the variable part. If a part isn't
1101 present, set it to null. If the tree does not decompose in this way,
1102 return the entire tree as the variable part and the other parts as null.
1104 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR. In that
1105 case, we negate an operand that was subtracted. Except if it is a
1106 literal for which we use *MINUS_LITP instead.
1108 If NEGATE_P is true, we are negating all of IN, again except a literal
1109 for which we use *MINUS_LITP instead.
1111 If IN is itself a literal or constant, return it as appropriate.
1113 Note that we do not guarantee that any of the three values will be the
1114 same type as IN, but they will have the same signedness and mode. */
1117 split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
1118 tree *minus_litp, int negate_p)
1126 /* Strip any conversions that don't change the machine mode or signedness. */
1127 STRIP_SIGN_NOPS (in);
1129 if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST)
1131 else if (TREE_CODE (in) == code
1132 || (! FLOAT_TYPE_P (TREE_TYPE (in))
1133 /* We can associate addition and subtraction together (even
1134 though the C standard doesn't say so) for integers because
1135 the value is not affected. For reals, the value might be
1136 affected, so we can't. */
1137 && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
1138 || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
1140 tree op0 = TREE_OPERAND (in, 0);
1141 tree op1 = TREE_OPERAND (in, 1);
1142 int neg1_p = TREE_CODE (in) == MINUS_EXPR;
1143 int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;
1145 /* First see if either of the operands is a literal, then a constant. */
1146 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
1147 *litp = op0, op0 = 0;
1148 else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST)
1149 *litp = op1, neg_litp_p = neg1_p, op1 = 0;
1151 if (op0 != 0 && TREE_CONSTANT (op0))
1152 *conp = op0, op0 = 0;
1153 else if (op1 != 0 && TREE_CONSTANT (op1))
1154 *conp = op1, neg_conp_p = neg1_p, op1 = 0;
1156 /* If we haven't dealt with either operand, this is not a case we can
1157 decompose. Otherwise, VAR is either of the ones remaining, if any. */
1158 if (op0 != 0 && op1 != 0)
1163 var = op1, neg_var_p = neg1_p;
1165 /* Now do any needed negations. */
1167 *minus_litp = *litp, *litp = 0;
1169 *conp = negate_expr (*conp);
1171 var = negate_expr (var);
1173 else if (TREE_CONSTANT (in))
1181 *minus_litp = *litp, *litp = 0;
1182 else if (*minus_litp)
1183 *litp = *minus_litp, *minus_litp = 0;
1184 *conp = negate_expr (*conp);
1185 var = negate_expr (var);
1191 /* Re-associate trees split by the above function. T1 and T2 are either
1192 expressions to associate or null. Return the new expression, if any. If
1193 we build an operation, do it in TYPE and with CODE. */
1196 associate_trees (tree t1, tree t2, enum tree_code code, tree type)
1203 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
1204 try to fold this since we will have infinite recursion. But do
1205 deal with any NEGATE_EXPRs. */
1206 if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
1207 || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
1209 if (code == PLUS_EXPR)
1211 if (TREE_CODE (t1) == NEGATE_EXPR)
1212 return build (MINUS_EXPR, type, fold_convert (type, t2),
1213 fold_convert (type, TREE_OPERAND (t1, 0)));
1214 else if (TREE_CODE (t2) == NEGATE_EXPR)
1215 return build (MINUS_EXPR, type, fold_convert (type, t1),
1216 fold_convert (type, TREE_OPERAND (t2, 0)));
1218 return build (code, type, fold_convert (type, t1),
1219 fold_convert (type, t2));
1222 return fold (build (code, type, fold_convert (type, t1),
1223 fold_convert (type, t2)));
1226 /* Combine two integer constants ARG1 and ARG2 under operation CODE
1227 to produce a new constant.
1229 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
1232 int_const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
1234 unsigned HOST_WIDE_INT int1l, int2l;
1235 HOST_WIDE_INT int1h, int2h;
1236 unsigned HOST_WIDE_INT low;
1238 unsigned HOST_WIDE_INT garbagel;
1239 HOST_WIDE_INT garbageh;
1241 tree type = TREE_TYPE (arg1);
1242 int uns = TREE_UNSIGNED (type);
1244 = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
1246 int no_overflow = 0;
1248 int1l = TREE_INT_CST_LOW (arg1);
1249 int1h = TREE_INT_CST_HIGH (arg1);
1250 int2l = TREE_INT_CST_LOW (arg2);
1251 int2h = TREE_INT_CST_HIGH (arg2);
1256 low = int1l | int2l, hi = int1h | int2h;
1260 low = int1l ^ int2l, hi = int1h ^ int2h;
1264 low = int1l & int2l, hi = int1h & int2h;
1270 /* It's unclear from the C standard whether shifts can overflow.
1271 The following code ignores overflow; perhaps a C standard
1272 interpretation ruling is needed. */
1273 lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
1281 lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
1286 overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
1290 neg_double (int2l, int2h, &low, &hi);
1291 add_double (int1l, int1h, low, hi, &low, &hi);
1292 overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
1296 overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
1299 case TRUNC_DIV_EXPR:
1300 case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
1301 case EXACT_DIV_EXPR:
1302 /* This is a shortcut for a common special case. */
1303 if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1304 && ! TREE_CONSTANT_OVERFLOW (arg1)
1305 && ! TREE_CONSTANT_OVERFLOW (arg2)
1306 && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1308 if (code == CEIL_DIV_EXPR)
1311 low = int1l / int2l, hi = 0;
1315 /* ... fall through ... */
1317 case ROUND_DIV_EXPR:
1318 if (int2h == 0 && int2l == 1)
1320 low = int1l, hi = int1h;
1323 if (int1l == int2l && int1h == int2h
1324 && ! (int1l == 0 && int1h == 0))
1329 overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
1330 &low, &hi, &garbagel, &garbageh);
1333 case TRUNC_MOD_EXPR:
1334 case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
1335 /* This is a shortcut for a common special case. */
1336 if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1337 && ! TREE_CONSTANT_OVERFLOW (arg1)
1338 && ! TREE_CONSTANT_OVERFLOW (arg2)
1339 && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1341 if (code == CEIL_MOD_EXPR)
1343 low = int1l % int2l, hi = 0;
1347 /* ... fall through ... */
1349 case ROUND_MOD_EXPR:
1350 overflow = div_and_round_double (code, uns,
1351 int1l, int1h, int2l, int2h,
1352 &garbagel, &garbageh, &low, &hi);
1358 low = (((unsigned HOST_WIDE_INT) int1h
1359 < (unsigned HOST_WIDE_INT) int2h)
1360 || (((unsigned HOST_WIDE_INT) int1h
1361 == (unsigned HOST_WIDE_INT) int2h)
1364 low = (int1h < int2h
1365 || (int1h == int2h && int1l < int2l));
1367 if (low == (code == MIN_EXPR))
1368 low = int1l, hi = int1h;
1370 low = int2l, hi = int2h;
1377 /* If this is for a sizetype, can be represented as one (signed)
1378 HOST_WIDE_INT word, and doesn't overflow, use size_int since it caches
1381 && ((hi == 0 && (HOST_WIDE_INT) low >= 0)
1382 || (hi == -1 && (HOST_WIDE_INT) low < 0))
1383 && overflow == 0 && ! TREE_OVERFLOW (arg1) && ! TREE_OVERFLOW (arg2))
1384 return size_int_type_wide (low, type);
1387 t = build_int_2 (low, hi);
1388 TREE_TYPE (t) = TREE_TYPE (arg1);
1393 ? (!uns || is_sizetype) && overflow
1394 : (force_fit_type (t, (!uns || is_sizetype) && overflow)
1396 | TREE_OVERFLOW (arg1)
1397 | TREE_OVERFLOW (arg2));
1399 /* If we're doing a size calculation, unsigned arithmetic does overflow.
1400 So check if force_fit_type truncated the value. */
1402 && ! TREE_OVERFLOW (t)
1403 && (TREE_INT_CST_HIGH (t) != hi
1404 || TREE_INT_CST_LOW (t) != low))
1405 TREE_OVERFLOW (t) = 1;
1407 TREE_CONSTANT_OVERFLOW (t) = (TREE_OVERFLOW (t)
1408 | TREE_CONSTANT_OVERFLOW (arg1)
1409 | TREE_CONSTANT_OVERFLOW (arg2));
1413 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1414 constant. We assume ARG1 and ARG2 have the same data type, or at least
1415 are the same kind of constant and the same machine mode.
1417 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
1420 const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
1425 if (TREE_CODE (arg1) == INTEGER_CST)
1426 return int_const_binop (code, arg1, arg2, notrunc);
1428 if (TREE_CODE (arg1) == REAL_CST)
1430 enum machine_mode mode;
1433 REAL_VALUE_TYPE value;
1436 d1 = TREE_REAL_CST (arg1);
1437 d2 = TREE_REAL_CST (arg2);
1439 type = TREE_TYPE (arg1);
1440 mode = TYPE_MODE (type);
1442 /* Don't perform operation if we honor signaling NaNs and
1443 either operand is a NaN. */
1444 if (HONOR_SNANS (mode)
1445 && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
1448 /* Don't perform operation if it would raise a division
1449 by zero exception. */
1450 if (code == RDIV_EXPR
1451 && REAL_VALUES_EQUAL (d2, dconst0)
1452 && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
1455 /* If either operand is a NaN, just return it. Otherwise, set up
1456 for floating-point trap; we return an overflow. */
1457 if (REAL_VALUE_ISNAN (d1))
1459 else if (REAL_VALUE_ISNAN (d2))
1462 REAL_ARITHMETIC (value, code, d1, d2);
1464 t = build_real (type, real_value_truncate (mode, value));
1467 = (force_fit_type (t, 0)
1468 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
1469 TREE_CONSTANT_OVERFLOW (t)
1471 | TREE_CONSTANT_OVERFLOW (arg1)
1472 | TREE_CONSTANT_OVERFLOW (arg2);
1475 if (TREE_CODE (arg1) == COMPLEX_CST)
1477 tree type = TREE_TYPE (arg1);
1478 tree r1 = TREE_REALPART (arg1);
1479 tree i1 = TREE_IMAGPART (arg1);
1480 tree r2 = TREE_REALPART (arg2);
1481 tree i2 = TREE_IMAGPART (arg2);
1487 t = build_complex (type,
1488 const_binop (PLUS_EXPR, r1, r2, notrunc),
1489 const_binop (PLUS_EXPR, i1, i2, notrunc));
1493 t = build_complex (type,
1494 const_binop (MINUS_EXPR, r1, r2, notrunc),
1495 const_binop (MINUS_EXPR, i1, i2, notrunc));
1499 t = build_complex (type,
1500 const_binop (MINUS_EXPR,
1501 const_binop (MULT_EXPR,
1503 const_binop (MULT_EXPR,
1506 const_binop (PLUS_EXPR,
1507 const_binop (MULT_EXPR,
1509 const_binop (MULT_EXPR,
1517 = const_binop (PLUS_EXPR,
1518 const_binop (MULT_EXPR, r2, r2, notrunc),
1519 const_binop (MULT_EXPR, i2, i2, notrunc),
1522 t = build_complex (type,
1524 (INTEGRAL_TYPE_P (TREE_TYPE (r1))
1525 ? TRUNC_DIV_EXPR : RDIV_EXPR,
1526 const_binop (PLUS_EXPR,
1527 const_binop (MULT_EXPR, r1, r2,
1529 const_binop (MULT_EXPR, i1, i2,
1532 magsquared, notrunc),
1534 (INTEGRAL_TYPE_P (TREE_TYPE (r1))
1535 ? TRUNC_DIV_EXPR : RDIV_EXPR,
1536 const_binop (MINUS_EXPR,
1537 const_binop (MULT_EXPR, i1, r2,
1539 const_binop (MULT_EXPR, r1, i2,
1542 magsquared, notrunc));
1554 /* These are the hash table functions for the hash table of INTEGER_CST
1555 nodes of a sizetype. */
1557 /* Return the hash code code X, an INTEGER_CST. */
1560 size_htab_hash (const void *x)
1564 return (TREE_INT_CST_HIGH (t) ^ TREE_INT_CST_LOW (t)
1565 ^ htab_hash_pointer (TREE_TYPE (t))
1566 ^ (TREE_OVERFLOW (t) << 20));
1569 /* Return nonzero if the value represented by *X (an INTEGER_CST tree node)
1570 is the same as that given by *Y, which is the same. */
1573 size_htab_eq (const void *x, const void *y)
1578 return (TREE_INT_CST_HIGH (xt) == TREE_INT_CST_HIGH (yt)
1579 && TREE_INT_CST_LOW (xt) == TREE_INT_CST_LOW (yt)
1580 && TREE_TYPE (xt) == TREE_TYPE (yt)
1581 && TREE_OVERFLOW (xt) == TREE_OVERFLOW (yt));
1584 /* Return an INTEGER_CST with value whose low-order HOST_BITS_PER_WIDE_INT
1585 bits are given by NUMBER and of the sizetype represented by KIND. */
1588 size_int_wide (HOST_WIDE_INT number, enum size_type_kind kind)
1590 return size_int_type_wide (number, sizetype_tab[(int) kind]);
1593 /* Likewise, but the desired type is specified explicitly. */
1595 static GTY (()) tree new_const;
1596 static GTY ((if_marked ("ggc_marked_p"), param_is (union tree_node)))
1600 size_int_type_wide (HOST_WIDE_INT number, tree type)
1606 size_htab = htab_create_ggc (1024, size_htab_hash, size_htab_eq, NULL);
1607 new_const = make_node (INTEGER_CST);
1610 /* Adjust NEW_CONST to be the constant we want. If it's already in the
1611 hash table, we return the value from the hash table. Otherwise, we
1612 place that in the hash table and make a new node for the next time. */
1613 TREE_INT_CST_LOW (new_const) = number;
1614 TREE_INT_CST_HIGH (new_const) = number < 0 ? -1 : 0;
1615 TREE_TYPE (new_const) = type;
1616 TREE_OVERFLOW (new_const) = TREE_CONSTANT_OVERFLOW (new_const)
1617 = force_fit_type (new_const, 0);
1619 slot = htab_find_slot (size_htab, new_const, INSERT);
1625 new_const = make_node (INTEGER_CST);
1629 return (tree) *slot;
1632 /* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE
1633 is a tree code. The type of the result is taken from the operands.
1634 Both must be the same type integer type and it must be a size type.
1635 If the operands are constant, so is the result. */
1638 size_binop (enum tree_code code, tree arg0, tree arg1)
1640 tree type = TREE_TYPE (arg0);
1642 if (TREE_CODE (type) != INTEGER_TYPE || ! TYPE_IS_SIZETYPE (type)
1643 || type != TREE_TYPE (arg1))
1646 /* Handle the special case of two integer constants faster. */
1647 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
1649 /* And some specific cases even faster than that. */
1650 if (code == PLUS_EXPR && integer_zerop (arg0))
1652 else if ((code == MINUS_EXPR || code == PLUS_EXPR)
1653 && integer_zerop (arg1))
1655 else if (code == MULT_EXPR && integer_onep (arg0))
1658 /* Handle general case of two integer constants. */
1659 return int_const_binop (code, arg0, arg1, 0);
1662 if (arg0 == error_mark_node || arg1 == error_mark_node)
1663 return error_mark_node;
1665 return fold (build (code, type, arg0, arg1));
1668 /* Given two values, either both of sizetype or both of bitsizetype,
1669 compute the difference between the two values. Return the value
1670 in signed type corresponding to the type of the operands. */
1673 size_diffop (tree arg0, tree arg1)
1675 tree type = TREE_TYPE (arg0);
1678 if (TREE_CODE (type) != INTEGER_TYPE || ! TYPE_IS_SIZETYPE (type)
1679 || type != TREE_TYPE (arg1))
1682 /* If the type is already signed, just do the simple thing. */
1683 if (! TREE_UNSIGNED (type))
1684 return size_binop (MINUS_EXPR, arg0, arg1);
1686 ctype = (type == bitsizetype || type == ubitsizetype
1687 ? sbitsizetype : ssizetype);
1689 /* If either operand is not a constant, do the conversions to the signed
1690 type and subtract. The hardware will do the right thing with any
1691 overflow in the subtraction. */
1692 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
1693 return size_binop (MINUS_EXPR, fold_convert (ctype, arg0),
1694 fold_convert (ctype, arg1));
1696 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
1697 Otherwise, subtract the other way, convert to CTYPE (we know that can't
1698 overflow) and negate (which can't either). Special-case a result
1699 of zero while we're here. */
1700 if (tree_int_cst_equal (arg0, arg1))
1701 return fold_convert (ctype, integer_zero_node);
1702 else if (tree_int_cst_lt (arg1, arg0))
1703 return fold_convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
1705 return size_binop (MINUS_EXPR, fold_convert (ctype, integer_zero_node),
1706 fold_convert (ctype, size_binop (MINUS_EXPR,
1711 /* Attempt to fold type conversion operation CODE of expression ARG1 to
1712 type TYPE. If no simplification can be done return NULL_TREE. */
1715 fold_convert_const (enum tree_code code, tree type, tree arg1)
1720 if (TREE_TYPE (arg1) == type)
1723 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
1725 if (TREE_CODE (arg1) == INTEGER_CST)
1727 /* If we would build a constant wider than GCC supports,
1728 leave the conversion unfolded. */
1729 if (TYPE_PRECISION (type) > 2 * HOST_BITS_PER_WIDE_INT)
1732 /* If we are trying to make a sizetype for a small integer, use
1733 size_int to pick up cached types to reduce duplicate nodes. */
1734 if (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
1735 && !TREE_CONSTANT_OVERFLOW (arg1)
1736 && compare_tree_int (arg1, 10000) < 0)
1737 return size_int_type_wide (TREE_INT_CST_LOW (arg1), type);
1739 /* Given an integer constant, make new constant with new type,
1740 appropriately sign-extended or truncated. */
1741 t = build_int_2 (TREE_INT_CST_LOW (arg1),
1742 TREE_INT_CST_HIGH (arg1));
1743 TREE_TYPE (t) = type;
1744 /* Indicate an overflow if (1) ARG1 already overflowed,
1745 or (2) force_fit_type indicates an overflow.
1746 Tell force_fit_type that an overflow has already occurred
1747 if ARG1 is a too-large unsigned value and T is signed.
1748 But don't indicate an overflow if converting a pointer. */
1750 = ((force_fit_type (t,
1751 (TREE_INT_CST_HIGH (arg1) < 0
1752 && (TREE_UNSIGNED (type)
1753 < TREE_UNSIGNED (TREE_TYPE (arg1)))))
1754 && ! POINTER_TYPE_P (TREE_TYPE (arg1)))
1755 || TREE_OVERFLOW (arg1));
1756 TREE_CONSTANT_OVERFLOW (t)
1757 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
1760 else if (TREE_CODE (arg1) == REAL_CST)
1762 /* The following code implements the floating point to integer
1763 conversion rules required by the Java Language Specification,
1764 that IEEE NaNs are mapped to zero and values that overflow
1765 the target precision saturate, i.e. values greater than
1766 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
1767 are mapped to INT_MIN. These semantics are allowed by the
1768 C and C++ standards that simply state that the behavior of
1769 FP-to-integer conversion is unspecified upon overflow. */
1771 HOST_WIDE_INT high, low;
1774 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
1778 case FIX_TRUNC_EXPR:
1779 real_trunc (&r, VOIDmode, &x);
1783 real_ceil (&r, VOIDmode, &x);
1786 case FIX_FLOOR_EXPR:
1787 real_floor (&r, VOIDmode, &x);
1794 /* If R is NaN, return zero and show we have an overflow. */
1795 if (REAL_VALUE_ISNAN (r))
1802 /* See if R is less than the lower bound or greater than the
1807 tree lt = TYPE_MIN_VALUE (type);
1808 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
1809 if (REAL_VALUES_LESS (r, l))
1812 high = TREE_INT_CST_HIGH (lt);
1813 low = TREE_INT_CST_LOW (lt);
1819 tree ut = TYPE_MAX_VALUE (type);
1822 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
1823 if (REAL_VALUES_LESS (u, r))
1826 high = TREE_INT_CST_HIGH (ut);
1827 low = TREE_INT_CST_LOW (ut);
1833 REAL_VALUE_TO_INT (&low, &high, r);
1835 t = build_int_2 (low, high);
1836 TREE_TYPE (t) = type;
1838 = TREE_OVERFLOW (arg1) | force_fit_type (t, overflow);
1839 TREE_CONSTANT_OVERFLOW (t)
1840 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
1844 else if (TREE_CODE (type) == REAL_TYPE)
1846 if (TREE_CODE (arg1) == INTEGER_CST)
1847 return build_real_from_int_cst (type, arg1);
1848 if (TREE_CODE (arg1) == REAL_CST)
1850 if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1)))
1852 /* We make a copy of ARG1 so that we don't modify an
1853 existing constant tree. */
1854 t = copy_node (arg1);
1855 TREE_TYPE (t) = type;
1859 t = build_real (type,
1860 real_value_truncate (TYPE_MODE (type),
1861 TREE_REAL_CST (arg1)));
1864 = TREE_OVERFLOW (arg1) | force_fit_type (t, 0);
1865 TREE_CONSTANT_OVERFLOW (t)
1866 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
1873 /* Convert expression ARG to type TYPE. Used by the middle-end for
1874 simple conversions in preference to calling the front-end's convert. */
1877 fold_convert (tree type, tree arg)
1879 tree orig = TREE_TYPE (arg);
1885 if (TREE_CODE (arg) == ERROR_MARK
1886 || TREE_CODE (type) == ERROR_MARK
1887 || TREE_CODE (orig) == ERROR_MARK)
1888 return error_mark_node;
1890 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
1891 return fold (build1 (NOP_EXPR, type, arg));
1893 if (INTEGRAL_TYPE_P (type) || POINTER_TYPE_P (type))
1895 if (TREE_CODE (arg) == INTEGER_CST)
1897 tem = fold_convert_const (NOP_EXPR, type, arg);
1898 if (tem != NULL_TREE)
1901 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig))
1902 return fold (build1 (NOP_EXPR, type, arg));
1903 if (TREE_CODE (orig) == COMPLEX_TYPE)
1905 tem = fold (build1 (REALPART_EXPR, TREE_TYPE (orig), arg));
1906 return fold_convert (type, tem);
1908 if (TREE_CODE (orig) == VECTOR_TYPE
1909 && GET_MODE_SIZE (TYPE_MODE (type))
1910 == GET_MODE_SIZE (TYPE_MODE (orig)))
1911 return fold (build1 (NOP_EXPR, type, arg));
1913 else if (TREE_CODE (type) == REAL_TYPE)
1915 if (TREE_CODE (arg) == INTEGER_CST)
1917 tem = fold_convert_const (FLOAT_EXPR, type, arg);
1918 if (tem != NULL_TREE)
1921 else if (TREE_CODE (arg) == REAL_CST)
1923 tem = fold_convert_const (NOP_EXPR, type, arg);
1924 if (tem != NULL_TREE)
1928 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig))
1929 return fold (build1 (FLOAT_EXPR, type, arg));
1930 if (TREE_CODE (orig) == REAL_TYPE)
1931 return fold (build1 (flag_float_store ? CONVERT_EXPR : NOP_EXPR,
1933 if (TREE_CODE (orig) == COMPLEX_TYPE)
1935 tem = fold (build1 (REALPART_EXPR, TREE_TYPE (orig), arg));
1936 return fold_convert (type, tem);
1939 else if (TREE_CODE (type) == COMPLEX_TYPE)
1941 if (INTEGRAL_TYPE_P (orig)
1942 || POINTER_TYPE_P (orig)
1943 || TREE_CODE (orig) == REAL_TYPE)
1944 return build (COMPLEX_EXPR, type,
1945 fold_convert (TREE_TYPE (type), arg),
1946 fold_convert (TREE_TYPE (type), integer_zero_node));
1947 if (TREE_CODE (orig) == COMPLEX_TYPE)
1951 if (TREE_CODE (arg) == COMPLEX_EXPR)
1953 rpart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 0));
1954 ipart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 1));
1955 return fold (build (COMPLEX_EXPR, type, rpart, ipart));
1958 arg = save_expr (arg);
1959 rpart = fold (build1 (REALPART_EXPR, TREE_TYPE (orig), arg));
1960 ipart = fold (build1 (IMAGPART_EXPR, TREE_TYPE (orig), arg));
1961 rpart = fold_convert (TREE_TYPE (type), rpart);
1962 ipart = fold_convert (TREE_TYPE (type), ipart);
1963 return fold (build (COMPLEX_EXPR, type, rpart, ipart));
1966 else if (TREE_CODE (type) == VECTOR_TYPE)
1968 if ((INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig))
1969 && GET_MODE_SIZE (TYPE_MODE (type))
1970 == GET_MODE_SIZE (TYPE_MODE (orig)))
1971 return fold (build1 (NOP_EXPR, type, arg));
1972 if (TREE_CODE (orig) == VECTOR_TYPE
1973 && GET_MODE_SIZE (TYPE_MODE (type))
1974 == GET_MODE_SIZE (TYPE_MODE (orig)))
1975 return fold (build1 (NOP_EXPR, type, arg));
1977 else if (VOID_TYPE_P (type))
1978 return fold (build1 (CONVERT_EXPR, type, arg));
1982 /* Return an expr equal to X but certainly not valid as an lvalue. */
1989 /* These things are certainly not lvalues. */
1990 if (TREE_CODE (x) == NON_LVALUE_EXPR
1991 || TREE_CODE (x) == INTEGER_CST
1992 || TREE_CODE (x) == REAL_CST
1993 || TREE_CODE (x) == STRING_CST
1994 || TREE_CODE (x) == ADDR_EXPR)
1997 result = build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
1998 TREE_CONSTANT (result) = TREE_CONSTANT (x);
2002 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2003 Zero means allow extended lvalues. */
2005 int pedantic_lvalues;
2007 /* When pedantic, return an expr equal to X but certainly not valid as a
2008 pedantic lvalue. Otherwise, return X. */
2011 pedantic_non_lvalue (tree x)
2013 if (pedantic_lvalues)
2014 return non_lvalue (x);
2019 /* Given a tree comparison code, return the code that is the logical inverse
2020 of the given code. It is not safe to do this for floating-point
2021 comparisons, except for NE_EXPR and EQ_EXPR. */
2023 static enum tree_code
2024 invert_tree_comparison (enum tree_code code)
2045 /* Similar, but return the comparison that results if the operands are
2046 swapped. This is safe for floating-point. */
2048 static enum tree_code
2049 swap_tree_comparison (enum tree_code code)
2070 /* Convert a comparison tree code from an enum tree_code representation
2071 into a compcode bit-based encoding. This function is the inverse of
2072 compcode_to_comparison. */
2075 comparison_to_compcode (enum tree_code code)
2096 /* Convert a compcode bit-based encoding of a comparison operator back
2097 to GCC's enum tree_code representation. This function is the
2098 inverse of comparison_to_compcode. */
2100 static enum tree_code
2101 compcode_to_comparison (int code)
2122 /* Return nonzero if CODE is a tree code that represents a truth value. */
2125 truth_value_p (enum tree_code code)
2127 return (TREE_CODE_CLASS (code) == '<'
2128 || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
2129 || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
2130 || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
2133 /* Return nonzero if two operands (typically of the same tree node)
2134 are necessarily equal. If either argument has side-effects this
2135 function returns zero.
2137 If ONLY_CONST is nonzero, only return nonzero for constants.
2138 This function tests whether the operands are indistinguishable;
2139 it does not test whether they are equal using C's == operation.
2140 The distinction is important for IEEE floating point, because
2141 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2142 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2144 If ONLY_CONST is zero, a VAR_DECL is considered equal to itself
2145 even though it may hold multiple values during a function.
2146 This is because a GCC tree node guarantees that nothing else is
2147 executed between the evaluation of its "operands" (which may often
2148 be evaluated in arbitrary order). Hence if the operands themselves
2149 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2150 same value in each operand/subexpression. Hence a zero value for
2151 ONLY_CONST assumes isochronic (or instantaneous) tree equivalence.
2152 If comparing arbitrary expression trees, such as from different
2153 statements, ONLY_CONST must usually be nonzero. */
2156 operand_equal_p (tree arg0, tree arg1, int only_const)
2160 /* If both types don't have the same signedness, then we can't consider
2161 them equal. We must check this before the STRIP_NOPS calls
2162 because they may change the signedness of the arguments. */
2163 if (TREE_UNSIGNED (TREE_TYPE (arg0)) != TREE_UNSIGNED (TREE_TYPE (arg1)))
2169 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2170 /* This is needed for conversions and for COMPONENT_REF.
2171 Might as well play it safe and always test this. */
2172 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2173 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2174 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2177 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2178 We don't care about side effects in that case because the SAVE_EXPR
2179 takes care of that for us. In all other cases, two expressions are
2180 equal if they have no side effects. If we have two identical
2181 expressions with side effects that should be treated the same due
2182 to the only side effects being identical SAVE_EXPR's, that will
2183 be detected in the recursive calls below. */
2184 if (arg0 == arg1 && ! only_const
2185 && (TREE_CODE (arg0) == SAVE_EXPR
2186 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2189 /* Next handle constant cases, those for which we can return 1 even
2190 if ONLY_CONST is set. */
2191 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2192 switch (TREE_CODE (arg0))
2195 return (! TREE_CONSTANT_OVERFLOW (arg0)
2196 && ! TREE_CONSTANT_OVERFLOW (arg1)
2197 && tree_int_cst_equal (arg0, arg1));
2200 return (! TREE_CONSTANT_OVERFLOW (arg0)
2201 && ! TREE_CONSTANT_OVERFLOW (arg1)
2202 && REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2203 TREE_REAL_CST (arg1)));
2209 if (TREE_CONSTANT_OVERFLOW (arg0)
2210 || TREE_CONSTANT_OVERFLOW (arg1))
2213 v1 = TREE_VECTOR_CST_ELTS (arg0);
2214 v2 = TREE_VECTOR_CST_ELTS (arg1);
2217 if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
2220 v1 = TREE_CHAIN (v1);
2221 v2 = TREE_CHAIN (v2);
2228 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2230 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2234 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2235 && ! memcmp (TREE_STRING_POINTER (arg0),
2236 TREE_STRING_POINTER (arg1),
2237 TREE_STRING_LENGTH (arg0)));
2240 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2249 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2252 /* Two conversions are equal only if signedness and modes match. */
2253 if ((TREE_CODE (arg0) == NOP_EXPR || TREE_CODE (arg0) == CONVERT_EXPR)
2254 && (TREE_UNSIGNED (TREE_TYPE (arg0))
2255 != TREE_UNSIGNED (TREE_TYPE (arg1))))
2258 return operand_equal_p (TREE_OPERAND (arg0, 0),
2259 TREE_OPERAND (arg1, 0), 0);
2263 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0)
2264 && operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1),
2268 /* For commutative ops, allow the other order. */
2269 return (commutative_tree_code (TREE_CODE (arg0))
2270 && operand_equal_p (TREE_OPERAND (arg0, 0),
2271 TREE_OPERAND (arg1, 1), 0)
2272 && operand_equal_p (TREE_OPERAND (arg0, 1),
2273 TREE_OPERAND (arg1, 0), 0));
2276 /* If either of the pointer (or reference) expressions we are
2277 dereferencing contain a side effect, these cannot be equal. */
2278 if (TREE_SIDE_EFFECTS (arg0)
2279 || TREE_SIDE_EFFECTS (arg1))
2282 switch (TREE_CODE (arg0))
2285 return operand_equal_p (TREE_OPERAND (arg0, 0),
2286 TREE_OPERAND (arg1, 0), 0);
2290 case ARRAY_RANGE_REF:
2291 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2292 TREE_OPERAND (arg1, 0), 0)
2293 && operand_equal_p (TREE_OPERAND (arg0, 1),
2294 TREE_OPERAND (arg1, 1), 0));
2297 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2298 TREE_OPERAND (arg1, 0), 0)
2299 && operand_equal_p (TREE_OPERAND (arg0, 1),
2300 TREE_OPERAND (arg1, 1), 0)
2301 && operand_equal_p (TREE_OPERAND (arg0, 2),
2302 TREE_OPERAND (arg1, 2), 0));
2308 switch (TREE_CODE (arg0))
2311 case TRUTH_NOT_EXPR:
2312 return operand_equal_p (TREE_OPERAND (arg0, 0),
2313 TREE_OPERAND (arg1, 0), 0);
2316 return rtx_equal_p (RTL_EXPR_RTL (arg0), RTL_EXPR_RTL (arg1));
2319 /* If the CALL_EXPRs call different functions, then they
2320 clearly can not be equal. */
2321 if (! operand_equal_p (TREE_OPERAND (arg0, 0),
2322 TREE_OPERAND (arg1, 0), 0))
2325 /* Only consider const functions equivalent. */
2326 fndecl = get_callee_fndecl (arg0);
2327 if (fndecl == NULL_TREE
2328 || ! (flags_from_decl_or_type (fndecl) & ECF_CONST))
2331 /* Now see if all the arguments are the same. operand_equal_p
2332 does not handle TREE_LIST, so we walk the operands here
2333 feeding them to operand_equal_p. */
2334 arg0 = TREE_OPERAND (arg0, 1);
2335 arg1 = TREE_OPERAND (arg1, 1);
2336 while (arg0 && arg1)
2338 if (! operand_equal_p (TREE_VALUE (arg0), TREE_VALUE (arg1), 0))
2341 arg0 = TREE_CHAIN (arg0);
2342 arg1 = TREE_CHAIN (arg1);
2345 /* If we get here and both argument lists are exhausted
2346 then the CALL_EXPRs are equal. */
2347 return ! (arg0 || arg1);
2354 /* Consider __builtin_sqrt equal to sqrt. */
2355 return TREE_CODE (arg0) == FUNCTION_DECL
2356 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2357 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2358 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1);
2365 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2366 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2368 When in doubt, return 0. */
2371 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2373 int unsignedp1, unsignedpo;
2374 tree primarg0, primarg1, primother;
2375 unsigned int correct_width;
2377 if (operand_equal_p (arg0, arg1, 0))
2380 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2381 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2384 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2385 and see if the inner values are the same. This removes any
2386 signedness comparison, which doesn't matter here. */
2387 primarg0 = arg0, primarg1 = arg1;
2388 STRIP_NOPS (primarg0);
2389 STRIP_NOPS (primarg1);
2390 if (operand_equal_p (primarg0, primarg1, 0))
2393 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2394 actual comparison operand, ARG0.
2396 First throw away any conversions to wider types
2397 already present in the operands. */
2399 primarg1 = get_narrower (arg1, &unsignedp1);
2400 primother = get_narrower (other, &unsignedpo);
2402 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2403 if (unsignedp1 == unsignedpo
2404 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2405 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2407 tree type = TREE_TYPE (arg0);
2409 /* Make sure shorter operand is extended the right way
2410 to match the longer operand. */
2411 primarg1 = fold_convert (lang_hooks.types.signed_or_unsigned_type
2412 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2414 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
2421 /* See if ARG is an expression that is either a comparison or is performing
2422 arithmetic on comparisons. The comparisons must only be comparing
2423 two different values, which will be stored in *CVAL1 and *CVAL2; if
2424 they are nonzero it means that some operands have already been found.
2425 No variables may be used anywhere else in the expression except in the
2426 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2427 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2429 If this is true, return 1. Otherwise, return zero. */
2432 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2434 enum tree_code code = TREE_CODE (arg);
2435 char class = TREE_CODE_CLASS (code);
2437 /* We can handle some of the 'e' cases here. */
2438 if (class == 'e' && code == TRUTH_NOT_EXPR)
2440 else if (class == 'e'
2441 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2442 || code == COMPOUND_EXPR))
2445 else if (class == 'e' && code == SAVE_EXPR && SAVE_EXPR_RTL (arg) == 0
2446 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2448 /* If we've already found a CVAL1 or CVAL2, this expression is
2449 two complex to handle. */
2450 if (*cval1 || *cval2)
2460 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2463 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2464 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2465 cval1, cval2, save_p));
2471 if (code == COND_EXPR)
2472 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2473 cval1, cval2, save_p)
2474 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2475 cval1, cval2, save_p)
2476 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2477 cval1, cval2, save_p));
2481 /* First see if we can handle the first operand, then the second. For
2482 the second operand, we know *CVAL1 can't be zero. It must be that
2483 one side of the comparison is each of the values; test for the
2484 case where this isn't true by failing if the two operands
2487 if (operand_equal_p (TREE_OPERAND (arg, 0),
2488 TREE_OPERAND (arg, 1), 0))
2492 *cval1 = TREE_OPERAND (arg, 0);
2493 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2495 else if (*cval2 == 0)
2496 *cval2 = TREE_OPERAND (arg, 0);
2497 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2502 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2504 else if (*cval2 == 0)
2505 *cval2 = TREE_OPERAND (arg, 1);
2506 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2518 /* ARG is a tree that is known to contain just arithmetic operations and
2519 comparisons. Evaluate the operations in the tree substituting NEW0 for
2520 any occurrence of OLD0 as an operand of a comparison and likewise for
2524 eval_subst (tree arg, tree old0, tree new0, tree old1, tree new1)
2526 tree type = TREE_TYPE (arg);
2527 enum tree_code code = TREE_CODE (arg);
2528 char class = TREE_CODE_CLASS (code);
2530 /* We can handle some of the 'e' cases here. */
2531 if (class == 'e' && code == TRUTH_NOT_EXPR)
2533 else if (class == 'e'
2534 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2540 return fold (build1 (code, type,
2541 eval_subst (TREE_OPERAND (arg, 0),
2542 old0, new0, old1, new1)));
2545 return fold (build (code, type,
2546 eval_subst (TREE_OPERAND (arg, 0),
2547 old0, new0, old1, new1),
2548 eval_subst (TREE_OPERAND (arg, 1),
2549 old0, new0, old1, new1)));
2555 return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);
2558 return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);
2561 return fold (build (code, type,
2562 eval_subst (TREE_OPERAND (arg, 0),
2563 old0, new0, old1, new1),
2564 eval_subst (TREE_OPERAND (arg, 1),
2565 old0, new0, old1, new1),
2566 eval_subst (TREE_OPERAND (arg, 2),
2567 old0, new0, old1, new1)));
2571 /* Fall through - ??? */
2575 tree arg0 = TREE_OPERAND (arg, 0);
2576 tree arg1 = TREE_OPERAND (arg, 1);
2578 /* We need to check both for exact equality and tree equality. The
2579 former will be true if the operand has a side-effect. In that
2580 case, we know the operand occurred exactly once. */
2582 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
2584 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
2587 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
2589 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
2592 return fold (build (code, type, arg0, arg1));
2600 /* Return a tree for the case when the result of an expression is RESULT
2601 converted to TYPE and OMITTED was previously an operand of the expression
2602 but is now not needed (e.g., we folded OMITTED * 0).
2604 If OMITTED has side effects, we must evaluate it. Otherwise, just do
2605 the conversion of RESULT to TYPE. */
2608 omit_one_operand (tree type, tree result, tree omitted)
2610 tree t = fold_convert (type, result);
2612 if (TREE_SIDE_EFFECTS (omitted))
2613 return build (COMPOUND_EXPR, type, omitted, t);
2615 return non_lvalue (t);
2618 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
2621 pedantic_omit_one_operand (tree type, tree result, tree omitted)
2623 tree t = fold_convert (type, result);
2625 if (TREE_SIDE_EFFECTS (omitted))
2626 return build (COMPOUND_EXPR, type, omitted, t);
2628 return pedantic_non_lvalue (t);
2631 /* Return a simplified tree node for the truth-negation of ARG. This
2632 never alters ARG itself. We assume that ARG is an operation that
2633 returns a truth value (0 or 1). */
2636 invert_truthvalue (tree arg)
2638 tree type = TREE_TYPE (arg);
2639 enum tree_code code = TREE_CODE (arg);
2641 if (code == ERROR_MARK)
2644 /* If this is a comparison, we can simply invert it, except for
2645 floating-point non-equality comparisons, in which case we just
2646 enclose a TRUTH_NOT_EXPR around what we have. */
2648 if (TREE_CODE_CLASS (code) == '<')
2650 if (FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (arg, 0)))
2651 && !flag_unsafe_math_optimizations
2654 return build1 (TRUTH_NOT_EXPR, type, arg);
2655 else if (code == UNORDERED_EXPR
2656 || code == ORDERED_EXPR
2657 || code == UNEQ_EXPR
2658 || code == UNLT_EXPR
2659 || code == UNLE_EXPR
2660 || code == UNGT_EXPR
2661 || code == UNGE_EXPR)
2662 return build1 (TRUTH_NOT_EXPR, type, arg);
2664 return build (invert_tree_comparison (code), type,
2665 TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
2671 return fold_convert (type, build_int_2 (integer_zerop (arg), 0));
2673 case TRUTH_AND_EXPR:
2674 return build (TRUTH_OR_EXPR, type,
2675 invert_truthvalue (TREE_OPERAND (arg, 0)),
2676 invert_truthvalue (TREE_OPERAND (arg, 1)));
2679 return build (TRUTH_AND_EXPR, type,
2680 invert_truthvalue (TREE_OPERAND (arg, 0)),
2681 invert_truthvalue (TREE_OPERAND (arg, 1)));
2683 case TRUTH_XOR_EXPR:
2684 /* Here we can invert either operand. We invert the first operand
2685 unless the second operand is a TRUTH_NOT_EXPR in which case our
2686 result is the XOR of the first operand with the inside of the
2687 negation of the second operand. */
2689 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
2690 return build (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
2691 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
2693 return build (TRUTH_XOR_EXPR, type,
2694 invert_truthvalue (TREE_OPERAND (arg, 0)),
2695 TREE_OPERAND (arg, 1));
2697 case TRUTH_ANDIF_EXPR:
2698 return build (TRUTH_ORIF_EXPR, type,
2699 invert_truthvalue (TREE_OPERAND (arg, 0)),
2700 invert_truthvalue (TREE_OPERAND (arg, 1)));
2702 case TRUTH_ORIF_EXPR:
2703 return build (TRUTH_ANDIF_EXPR, type,
2704 invert_truthvalue (TREE_OPERAND (arg, 0)),
2705 invert_truthvalue (TREE_OPERAND (arg, 1)));
2707 case TRUTH_NOT_EXPR:
2708 return TREE_OPERAND (arg, 0);
2711 return build (COND_EXPR, type, TREE_OPERAND (arg, 0),
2712 invert_truthvalue (TREE_OPERAND (arg, 1)),
2713 invert_truthvalue (TREE_OPERAND (arg, 2)));
2716 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
2717 invert_truthvalue (TREE_OPERAND (arg, 1)));
2719 case WITH_RECORD_EXPR:
2720 return build (WITH_RECORD_EXPR, type,
2721 invert_truthvalue (TREE_OPERAND (arg, 0)),
2722 TREE_OPERAND (arg, 1));
2724 case NON_LVALUE_EXPR:
2725 return invert_truthvalue (TREE_OPERAND (arg, 0));
2730 return build1 (TREE_CODE (arg), type,
2731 invert_truthvalue (TREE_OPERAND (arg, 0)));
2734 if (!integer_onep (TREE_OPERAND (arg, 1)))
2736 return build (EQ_EXPR, type, arg,
2737 fold_convert (type, integer_zero_node));
2740 return build1 (TRUTH_NOT_EXPR, type, arg);
2742 case CLEANUP_POINT_EXPR:
2743 return build1 (CLEANUP_POINT_EXPR, type,
2744 invert_truthvalue (TREE_OPERAND (arg, 0)));
2749 if (TREE_CODE (TREE_TYPE (arg)) != BOOLEAN_TYPE)
2751 return build1 (TRUTH_NOT_EXPR, type, arg);
2754 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
2755 operands are another bit-wise operation with a common input. If so,
2756 distribute the bit operations to save an operation and possibly two if
2757 constants are involved. For example, convert
2758 (A | B) & (A | C) into A | (B & C)
2759 Further simplification will occur if B and C are constants.
2761 If this optimization cannot be done, 0 will be returned. */
2764 distribute_bit_expr (enum tree_code code, tree type, tree arg0, tree arg1)
2769 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2770 || TREE_CODE (arg0) == code
2771 || (TREE_CODE (arg0) != BIT_AND_EXPR
2772 && TREE_CODE (arg0) != BIT_IOR_EXPR))
2775 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
2777 common = TREE_OPERAND (arg0, 0);
2778 left = TREE_OPERAND (arg0, 1);
2779 right = TREE_OPERAND (arg1, 1);
2781 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
2783 common = TREE_OPERAND (arg0, 0);
2784 left = TREE_OPERAND (arg0, 1);
2785 right = TREE_OPERAND (arg1, 0);
2787 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
2789 common = TREE_OPERAND (arg0, 1);
2790 left = TREE_OPERAND (arg0, 0);
2791 right = TREE_OPERAND (arg1, 1);
2793 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
2795 common = TREE_OPERAND (arg0, 1);
2796 left = TREE_OPERAND (arg0, 0);
2797 right = TREE_OPERAND (arg1, 0);
2802 return fold (build (TREE_CODE (arg0), type, common,
2803 fold (build (code, type, left, right))));
2806 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
2807 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
2810 make_bit_field_ref (tree inner, tree type, int bitsize, int bitpos,
2813 tree result = build (BIT_FIELD_REF, type, inner,
2814 size_int (bitsize), bitsize_int (bitpos));
2816 TREE_UNSIGNED (result) = unsignedp;
2821 /* Optimize a bit-field compare.
2823 There are two cases: First is a compare against a constant and the
2824 second is a comparison of two items where the fields are at the same
2825 bit position relative to the start of a chunk (byte, halfword, word)
2826 large enough to contain it. In these cases we can avoid the shift
2827 implicit in bitfield extractions.
2829 For constants, we emit a compare of the shifted constant with the
2830 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
2831 compared. For two fields at the same position, we do the ANDs with the
2832 similar mask and compare the result of the ANDs.
2834 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
2835 COMPARE_TYPE is the type of the comparison, and LHS and RHS
2836 are the left and right operands of the comparison, respectively.
2838 If the optimization described above can be done, we return the resulting
2839 tree. Otherwise we return zero. */
2842 optimize_bit_field_compare (enum tree_code code, tree compare_type,
2845 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
2846 tree type = TREE_TYPE (lhs);
2847 tree signed_type, unsigned_type;
2848 int const_p = TREE_CODE (rhs) == INTEGER_CST;
2849 enum machine_mode lmode, rmode, nmode;
2850 int lunsignedp, runsignedp;
2851 int lvolatilep = 0, rvolatilep = 0;
2852 tree linner, rinner = NULL_TREE;
2856 /* Get all the information about the extractions being done. If the bit size
2857 if the same as the size of the underlying object, we aren't doing an
2858 extraction at all and so can do nothing. We also don't want to
2859 do anything if the inner expression is a PLACEHOLDER_EXPR since we
2860 then will no longer be able to replace it. */
2861 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
2862 &lunsignedp, &lvolatilep);
2863 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
2864 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
2869 /* If this is not a constant, we can only do something if bit positions,
2870 sizes, and signedness are the same. */
2871 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
2872 &runsignedp, &rvolatilep);
2874 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
2875 || lunsignedp != runsignedp || offset != 0
2876 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
2880 /* See if we can find a mode to refer to this field. We should be able to,
2881 but fail if we can't. */
2882 nmode = get_best_mode (lbitsize, lbitpos,
2883 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
2884 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
2885 TYPE_ALIGN (TREE_TYPE (rinner))),
2886 word_mode, lvolatilep || rvolatilep);
2887 if (nmode == VOIDmode)
2890 /* Set signed and unsigned types of the precision of this mode for the
2892 signed_type = lang_hooks.types.type_for_mode (nmode, 0);
2893 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
2895 /* Compute the bit position and size for the new reference and our offset
2896 within it. If the new reference is the same size as the original, we
2897 won't optimize anything, so return zero. */
2898 nbitsize = GET_MODE_BITSIZE (nmode);
2899 nbitpos = lbitpos & ~ (nbitsize - 1);
2901 if (nbitsize == lbitsize)
2904 if (BYTES_BIG_ENDIAN)
2905 lbitpos = nbitsize - lbitsize - lbitpos;
2907 /* Make the mask to be used against the extracted field. */
2908 mask = build_int_2 (~0, ~0);
2909 TREE_TYPE (mask) = unsigned_type;
2910 force_fit_type (mask, 0);
2911 mask = fold_convert (unsigned_type, mask);
2912 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
2913 mask = const_binop (RSHIFT_EXPR, mask,
2914 size_int (nbitsize - lbitsize - lbitpos), 0);
2917 /* If not comparing with constant, just rework the comparison
2919 return build (code, compare_type,
2920 build (BIT_AND_EXPR, unsigned_type,
2921 make_bit_field_ref (linner, unsigned_type,
2922 nbitsize, nbitpos, 1),
2924 build (BIT_AND_EXPR, unsigned_type,
2925 make_bit_field_ref (rinner, unsigned_type,
2926 nbitsize, nbitpos, 1),
2929 /* Otherwise, we are handling the constant case. See if the constant is too
2930 big for the field. Warn and return a tree of for 0 (false) if so. We do
2931 this not only for its own sake, but to avoid having to test for this
2932 error case below. If we didn't, we might generate wrong code.
2934 For unsigned fields, the constant shifted right by the field length should
2935 be all zero. For signed fields, the high-order bits should agree with
2940 if (! integer_zerop (const_binop (RSHIFT_EXPR,
2941 fold_convert (unsigned_type, rhs),
2942 size_int (lbitsize), 0)))
2944 warning ("comparison is always %d due to width of bit-field",
2946 return fold_convert (compare_type,
2948 ? integer_one_node : integer_zero_node));
2953 tree tem = const_binop (RSHIFT_EXPR, fold_convert (signed_type, rhs),
2954 size_int (lbitsize - 1), 0);
2955 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
2957 warning ("comparison is always %d due to width of bit-field",
2959 return fold_convert (compare_type,
2961 ? integer_one_node : integer_zero_node));
2965 /* Single-bit compares should always be against zero. */
2966 if (lbitsize == 1 && ! integer_zerop (rhs))
2968 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
2969 rhs = fold_convert (type, integer_zero_node);
2972 /* Make a new bitfield reference, shift the constant over the
2973 appropriate number of bits and mask it with the computed mask
2974 (in case this was a signed field). If we changed it, make a new one. */
2975 lhs = make_bit_field_ref (linner, unsigned_type, nbitsize, nbitpos, 1);
2978 TREE_SIDE_EFFECTS (lhs) = 1;
2979 TREE_THIS_VOLATILE (lhs) = 1;
2982 rhs = fold (const_binop (BIT_AND_EXPR,
2983 const_binop (LSHIFT_EXPR,
2984 fold_convert (unsigned_type, rhs),
2985 size_int (lbitpos), 0),
2988 return build (code, compare_type,
2989 build (BIT_AND_EXPR, unsigned_type, lhs, mask),
2993 /* Subroutine for fold_truthop: decode a field reference.
2995 If EXP is a comparison reference, we return the innermost reference.
2997 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
2998 set to the starting bit number.
3000 If the innermost field can be completely contained in a mode-sized
3001 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3003 *PVOLATILEP is set to 1 if the any expression encountered is volatile;
3004 otherwise it is not changed.
3006 *PUNSIGNEDP is set to the signedness of the field.
3008 *PMASK is set to the mask used. This is either contained in a
3009 BIT_AND_EXPR or derived from the width of the field.
3011 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3013 Return 0 if this is not a component reference or is one that we can't
3014 do anything with. */
3017 decode_field_reference (tree exp, HOST_WIDE_INT *pbitsize,
3018 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3019 int *punsignedp, int *pvolatilep,
3020 tree *pmask, tree *pand_mask)
3022 tree outer_type = 0;
3024 tree mask, inner, offset;
3026 unsigned int precision;
3028 /* All the optimizations using this function assume integer fields.
3029 There are problems with FP fields since the type_for_size call
3030 below can fail for, e.g., XFmode. */
3031 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3034 /* We are interested in the bare arrangement of bits, so strip everything
3035 that doesn't affect the machine mode. However, record the type of the
3036 outermost expression if it may matter below. */
3037 if (TREE_CODE (exp) == NOP_EXPR
3038 || TREE_CODE (exp) == CONVERT_EXPR
3039 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3040 outer_type = TREE_TYPE (exp);
3043 if (TREE_CODE (exp) == BIT_AND_EXPR)
3045 and_mask = TREE_OPERAND (exp, 1);
3046 exp = TREE_OPERAND (exp, 0);
3047 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3048 if (TREE_CODE (and_mask) != INTEGER_CST)
3052 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3053 punsignedp, pvolatilep);
3054 if ((inner == exp && and_mask == 0)
3055 || *pbitsize < 0 || offset != 0
3056 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3059 /* If the number of bits in the reference is the same as the bitsize of
3060 the outer type, then the outer type gives the signedness. Otherwise
3061 (in case of a small bitfield) the signedness is unchanged. */
3062 if (outer_type && *pbitsize == tree_low_cst (TYPE_SIZE (outer_type), 1))
3063 *punsignedp = TREE_UNSIGNED (outer_type);
3065 /* Compute the mask to access the bitfield. */
3066 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3067 precision = TYPE_PRECISION (unsigned_type);
3069 mask = build_int_2 (~0, ~0);
3070 TREE_TYPE (mask) = unsigned_type;
3071 force_fit_type (mask, 0);
3072 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3073 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3075 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3077 mask = fold (build (BIT_AND_EXPR, unsigned_type,
3078 fold_convert (unsigned_type, and_mask), mask));
3081 *pand_mask = and_mask;
3085 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3089 all_ones_mask_p (tree mask, int size)
3091 tree type = TREE_TYPE (mask);
3092 unsigned int precision = TYPE_PRECISION (type);
3095 tmask = build_int_2 (~0, ~0);
3096 TREE_TYPE (tmask) = lang_hooks.types.signed_type (type);
3097 force_fit_type (tmask, 0);
3099 tree_int_cst_equal (mask,
3100 const_binop (RSHIFT_EXPR,
3101 const_binop (LSHIFT_EXPR, tmask,
3102 size_int (precision - size),
3104 size_int (precision - size), 0));
3107 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
3108 represents the sign bit of EXP's type. If EXP represents a sign
3109 or zero extension, also test VAL against the unextended type.
3110 The return value is the (sub)expression whose sign bit is VAL,
3111 or NULL_TREE otherwise. */
3114 sign_bit_p (tree exp, tree val)
3116 unsigned HOST_WIDE_INT mask_lo, lo;
3117 HOST_WIDE_INT mask_hi, hi;
3121 /* Tree EXP must have an integral type. */
3122 t = TREE_TYPE (exp);
3123 if (! INTEGRAL_TYPE_P (t))
3126 /* Tree VAL must be an integer constant. */
3127 if (TREE_CODE (val) != INTEGER_CST
3128 || TREE_CONSTANT_OVERFLOW (val))
3131 width = TYPE_PRECISION (t);
3132 if (width > HOST_BITS_PER_WIDE_INT)
3134 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3137 mask_hi = ((unsigned HOST_WIDE_INT) -1
3138 >> (2 * HOST_BITS_PER_WIDE_INT - width));
3144 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3147 mask_lo = ((unsigned HOST_WIDE_INT) -1
3148 >> (HOST_BITS_PER_WIDE_INT - width));
3151 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3152 treat VAL as if it were unsigned. */
3153 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
3154 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
3157 /* Handle extension from a narrower type. */
3158 if (TREE_CODE (exp) == NOP_EXPR
3159 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3160 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3165 /* Subroutine for fold_truthop: determine if an operand is simple enough
3166 to be evaluated unconditionally. */
3169 simple_operand_p (tree exp)
3171 /* Strip any conversions that don't change the machine mode. */
3172 while ((TREE_CODE (exp) == NOP_EXPR
3173 || TREE_CODE (exp) == CONVERT_EXPR)
3174 && (TYPE_MODE (TREE_TYPE (exp))
3175 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
3176 exp = TREE_OPERAND (exp, 0);
3178 return (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c'
3180 && ! TREE_ADDRESSABLE (exp)
3181 && ! TREE_THIS_VOLATILE (exp)
3182 && ! DECL_NONLOCAL (exp)
3183 /* Don't regard global variables as simple. They may be
3184 allocated in ways unknown to the compiler (shared memory,
3185 #pragma weak, etc). */
3186 && ! TREE_PUBLIC (exp)
3187 && ! DECL_EXTERNAL (exp)
3188 /* Loading a static variable is unduly expensive, but global
3189 registers aren't expensive. */
3190 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3193 /* The following functions are subroutines to fold_range_test and allow it to
3194 try to change a logical combination of comparisons into a range test.
3197 X == 2 || X == 3 || X == 4 || X == 5
3201 (unsigned) (X - 2) <= 3
3203 We describe each set of comparisons as being either inside or outside
3204 a range, using a variable named like IN_P, and then describe the
3205 range with a lower and upper bound. If one of the bounds is omitted,
3206 it represents either the highest or lowest value of the type.
3208 In the comments below, we represent a range by two numbers in brackets
3209 preceded by a "+" to designate being inside that range, or a "-" to
3210 designate being outside that range, so the condition can be inverted by
3211 flipping the prefix. An omitted bound is represented by a "-". For
3212 example, "- [-, 10]" means being outside the range starting at the lowest
3213 possible value and ending at 10, in other words, being greater than 10.
3214 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3217 We set up things so that the missing bounds are handled in a consistent
3218 manner so neither a missing bound nor "true" and "false" need to be
3219 handled using a special case. */
3221 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3222 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3223 and UPPER1_P are nonzero if the respective argument is an upper bound
3224 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3225 must be specified for a comparison. ARG1 will be converted to ARG0's
3226 type if both are specified. */
3229 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3230 tree arg1, int upper1_p)
3236 /* If neither arg represents infinity, do the normal operation.
3237 Else, if not a comparison, return infinity. Else handle the special
3238 comparison rules. Note that most of the cases below won't occur, but
3239 are handled for consistency. */
3241 if (arg0 != 0 && arg1 != 0)
3243 tem = fold (build (code, type != 0 ? type : TREE_TYPE (arg0),
3244 arg0, fold_convert (TREE_TYPE (arg0), arg1)));
3246 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3249 if (TREE_CODE_CLASS (code) != '<')
3252 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3253 for neither. In real maths, we cannot assume open ended ranges are
3254 the same. But, this is computer arithmetic, where numbers are finite.
3255 We can therefore make the transformation of any unbounded range with
3256 the value Z, Z being greater than any representable number. This permits
3257 us to treat unbounded ranges as equal. */
3258 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3259 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3263 result = sgn0 == sgn1;
3266 result = sgn0 != sgn1;
3269 result = sgn0 < sgn1;
3272 result = sgn0 <= sgn1;
3275 result = sgn0 > sgn1;
3278 result = sgn0 >= sgn1;
3284 return fold_convert (type, result ? integer_one_node : integer_zero_node);
3287 /* Given EXP, a logical expression, set the range it is testing into
3288 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
3289 actually being tested. *PLOW and *PHIGH will be made of the same type
3290 as the returned expression. If EXP is not a comparison, we will most
3291 likely not be returning a useful value and range. */
3294 make_range (tree exp, int *pin_p, tree *plow, tree *phigh)
3296 enum tree_code code;
3297 tree arg0 = NULL_TREE, arg1 = NULL_TREE, type = NULL_TREE;
3298 tree orig_type = NULL_TREE;
3300 tree low, high, n_low, n_high;
3302 /* Start with simply saying "EXP != 0" and then look at the code of EXP
3303 and see if we can refine the range. Some of the cases below may not
3304 happen, but it doesn't seem worth worrying about this. We "continue"
3305 the outer loop when we've changed something; otherwise we "break"
3306 the switch, which will "break" the while. */
3309 low = high = fold_convert (TREE_TYPE (exp), integer_zero_node);
3313 code = TREE_CODE (exp);
3315 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
3317 if (first_rtl_op (code) > 0)
3318 arg0 = TREE_OPERAND (exp, 0);
3319 if (TREE_CODE_CLASS (code) == '<'
3320 || TREE_CODE_CLASS (code) == '1'
3321 || TREE_CODE_CLASS (code) == '2')
3322 type = TREE_TYPE (arg0);
3323 if (TREE_CODE_CLASS (code) == '2'
3324 || TREE_CODE_CLASS (code) == '<'
3325 || (TREE_CODE_CLASS (code) == 'e'
3326 && TREE_CODE_LENGTH (code) > 1))
3327 arg1 = TREE_OPERAND (exp, 1);
3330 /* Set ORIG_TYPE as soon as TYPE is non-null so that we do not
3331 lose a cast by accident. */
3332 if (type != NULL_TREE && orig_type == NULL_TREE)
3337 case TRUTH_NOT_EXPR:
3338 in_p = ! in_p, exp = arg0;
3341 case EQ_EXPR: case NE_EXPR:
3342 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3343 /* We can only do something if the range is testing for zero
3344 and if the second operand is an integer constant. Note that
3345 saying something is "in" the range we make is done by
3346 complementing IN_P since it will set in the initial case of
3347 being not equal to zero; "out" is leaving it alone. */
3348 if (low == 0 || high == 0
3349 || ! integer_zerop (low) || ! integer_zerop (high)
3350 || TREE_CODE (arg1) != INTEGER_CST)
3355 case NE_EXPR: /* - [c, c] */
3358 case EQ_EXPR: /* + [c, c] */
3359 in_p = ! in_p, low = high = arg1;
3361 case GT_EXPR: /* - [-, c] */
3362 low = 0, high = arg1;
3364 case GE_EXPR: /* + [c, -] */
3365 in_p = ! in_p, low = arg1, high = 0;
3367 case LT_EXPR: /* - [c, -] */
3368 low = arg1, high = 0;
3370 case LE_EXPR: /* + [-, c] */
3371 in_p = ! in_p, low = 0, high = arg1;
3379 /* If this is an unsigned comparison, we also know that EXP is
3380 greater than or equal to zero. We base the range tests we make
3381 on that fact, so we record it here so we can parse existing
3383 if (TREE_UNSIGNED (type) && (low == 0 || high == 0))
3385 if (! merge_ranges (&n_in_p, &n_low, &n_high, in_p, low, high,
3386 1, fold_convert (type, integer_zero_node),
3390 in_p = n_in_p, low = n_low, high = n_high;
3392 /* If the high bound is missing, but we have a nonzero low
3393 bound, reverse the range so it goes from zero to the low bound
3395 if (high == 0 && low && ! integer_zerop (low))
3398 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
3399 integer_one_node, 0);
3400 low = fold_convert (type, integer_zero_node);
3406 /* (-x) IN [a,b] -> x in [-b, -a] */
3407 n_low = range_binop (MINUS_EXPR, type,
3408 fold_convert (type, integer_zero_node),
3410 n_high = range_binop (MINUS_EXPR, type,
3411 fold_convert (type, integer_zero_node),
3413 low = n_low, high = n_high;
3419 exp = build (MINUS_EXPR, type, negate_expr (arg0),
3420 fold_convert (type, integer_one_node));
3423 case PLUS_EXPR: case MINUS_EXPR:
3424 if (TREE_CODE (arg1) != INTEGER_CST)
3427 /* If EXP is signed, any overflow in the computation is undefined,
3428 so we don't worry about it so long as our computations on
3429 the bounds don't overflow. For unsigned, overflow is defined
3430 and this is exactly the right thing. */
3431 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3432 type, low, 0, arg1, 0);
3433 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3434 type, high, 1, arg1, 0);
3435 if ((n_low != 0 && TREE_OVERFLOW (n_low))
3436 || (n_high != 0 && TREE_OVERFLOW (n_high)))
3439 /* Check for an unsigned range which has wrapped around the maximum
3440 value thus making n_high < n_low, and normalize it. */
3441 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
3443 low = range_binop (PLUS_EXPR, type, n_high, 0,
3444 integer_one_node, 0);
3445 high = range_binop (MINUS_EXPR, type, n_low, 0,
3446 integer_one_node, 0);
3448 /* If the range is of the form +/- [ x+1, x ], we won't
3449 be able to normalize it. But then, it represents the
3450 whole range or the empty set, so make it
3452 if (tree_int_cst_equal (n_low, low)
3453 && tree_int_cst_equal (n_high, high))
3459 low = n_low, high = n_high;
3464 case NOP_EXPR: case NON_LVALUE_EXPR: case CONVERT_EXPR:
3465 if (TYPE_PRECISION (type) > TYPE_PRECISION (orig_type))
3468 if (! INTEGRAL_TYPE_P (type)
3469 || (low != 0 && ! int_fits_type_p (low, type))
3470 || (high != 0 && ! int_fits_type_p (high, type)))
3473 n_low = low, n_high = high;
3476 n_low = fold_convert (type, n_low);
3479 n_high = fold_convert (type, n_high);
3481 /* If we're converting from an unsigned to a signed type,
3482 we will be doing the comparison as unsigned. The tests above
3483 have already verified that LOW and HIGH are both positive.
3485 So we have to make sure that the original unsigned value will
3486 be interpreted as positive. */
3487 if (TREE_UNSIGNED (type) && ! TREE_UNSIGNED (TREE_TYPE (exp)))
3489 tree equiv_type = lang_hooks.types.type_for_mode
3490 (TYPE_MODE (type), 1);
3493 /* A range without an upper bound is, naturally, unbounded.
3494 Since convert would have cropped a very large value, use
3495 the max value for the destination type. */
3497 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
3498 : TYPE_MAX_VALUE (type);
3500 if (TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (exp)))
3501 high_positive = fold (build (RSHIFT_EXPR, type,
3505 integer_one_node)));
3507 /* If the low bound is specified, "and" the range with the
3508 range for which the original unsigned value will be
3512 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3513 1, n_low, n_high, 1,
3514 fold_convert (type, integer_zero_node),
3518 in_p = (n_in_p == in_p);
3522 /* Otherwise, "or" the range with the range of the input
3523 that will be interpreted as negative. */
3524 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3525 0, n_low, n_high, 1,
3526 fold_convert (type, integer_zero_node),
3530 in_p = (in_p != n_in_p);
3535 low = n_low, high = n_high;
3545 /* If EXP is a constant, we can evaluate whether this is true or false. */
3546 if (TREE_CODE (exp) == INTEGER_CST)
3548 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
3550 && integer_onep (range_binop (LE_EXPR, integer_type_node,
3556 *pin_p = in_p, *plow = low, *phigh = high;
3560 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
3561 type, TYPE, return an expression to test if EXP is in (or out of, depending
3562 on IN_P) the range. */
3565 build_range_check (tree type, tree exp, int in_p, tree low, tree high)
3567 tree etype = TREE_TYPE (exp);
3571 && (0 != (value = build_range_check (type, exp, 1, low, high))))
3572 return invert_truthvalue (value);
3574 if (low == 0 && high == 0)
3575 return fold_convert (type, integer_one_node);
3578 return fold (build (LE_EXPR, type, exp, high));
3581 return fold (build (GE_EXPR, type, exp, low));
3583 if (operand_equal_p (low, high, 0))
3584 return fold (build (EQ_EXPR, type, exp, low));
3586 if (integer_zerop (low))
3588 if (! TREE_UNSIGNED (etype))
3590 etype = lang_hooks.types.unsigned_type (etype);
3591 high = fold_convert (etype, high);
3592 exp = fold_convert (etype, exp);
3594 return build_range_check (type, exp, 1, 0, high);
3597 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
3598 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
3600 unsigned HOST_WIDE_INT lo;
3604 prec = TYPE_PRECISION (etype);
3605 if (prec <= HOST_BITS_PER_WIDE_INT)
3608 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
3612 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
3613 lo = (unsigned HOST_WIDE_INT) -1;
3616 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
3618 if (TREE_UNSIGNED (etype))
3620 etype = lang_hooks.types.signed_type (etype);
3621 exp = fold_convert (etype, exp);
3623 return fold (build (GT_EXPR, type, exp,
3624 fold_convert (etype, integer_zero_node)));
3628 if (0 != (value = const_binop (MINUS_EXPR, high, low, 0))
3629 && ! TREE_OVERFLOW (value))
3630 return build_range_check (type,
3631 fold (build (MINUS_EXPR, etype, exp, low)),
3632 1, fold_convert (etype, integer_zero_node),
3638 /* Given two ranges, see if we can merge them into one. Return 1 if we
3639 can, 0 if we can't. Set the output range into the specified parameters. */
3642 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
3643 tree high0, int in1_p, tree low1, tree high1)
3651 int lowequal = ((low0 == 0 && low1 == 0)
3652 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
3653 low0, 0, low1, 0)));
3654 int highequal = ((high0 == 0 && high1 == 0)
3655 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
3656 high0, 1, high1, 1)));
3658 /* Make range 0 be the range that starts first, or ends last if they
3659 start at the same value. Swap them if it isn't. */
3660 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
3663 && integer_onep (range_binop (GT_EXPR, integer_type_node,
3664 high1, 1, high0, 1))))
3666 temp = in0_p, in0_p = in1_p, in1_p = temp;
3667 tem = low0, low0 = low1, low1 = tem;
3668 tem = high0, high0 = high1, high1 = tem;
3671 /* Now flag two cases, whether the ranges are disjoint or whether the
3672 second range is totally subsumed in the first. Note that the tests
3673 below are simplified by the ones above. */
3674 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
3675 high0, 1, low1, 0));
3676 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
3677 high1, 1, high0, 1));
3679 /* We now have four cases, depending on whether we are including or
3680 excluding the two ranges. */
3683 /* If they don't overlap, the result is false. If the second range
3684 is a subset it is the result. Otherwise, the range is from the start
3685 of the second to the end of the first. */
3687 in_p = 0, low = high = 0;
3689 in_p = 1, low = low1, high = high1;
3691 in_p = 1, low = low1, high = high0;
3694 else if (in0_p && ! in1_p)
3696 /* If they don't overlap, the result is the first range. If they are
3697 equal, the result is false. If the second range is a subset of the
3698 first, and the ranges begin at the same place, we go from just after
3699 the end of the first range to the end of the second. If the second
3700 range is not a subset of the first, or if it is a subset and both
3701 ranges end at the same place, the range starts at the start of the
3702 first range and ends just before the second range.
3703 Otherwise, we can't describe this as a single range. */
3705 in_p = 1, low = low0, high = high0;
3706 else if (lowequal && highequal)
3707 in_p = 0, low = high = 0;
3708 else if (subset && lowequal)
3710 in_p = 1, high = high0;
3711 low = range_binop (PLUS_EXPR, NULL_TREE, high1, 0,
3712 integer_one_node, 0);
3714 else if (! subset || highequal)
3716 in_p = 1, low = low0;
3717 high = range_binop (MINUS_EXPR, NULL_TREE, low1, 0,
3718 integer_one_node, 0);
3724 else if (! in0_p && in1_p)
3726 /* If they don't overlap, the result is the second range. If the second
3727 is a subset of the first, the result is false. Otherwise,
3728 the range starts just after the first range and ends at the
3729 end of the second. */
3731 in_p = 1, low = low1, high = high1;
3732 else if (subset || highequal)
3733 in_p = 0, low = high = 0;
3736 in_p = 1, high = high1;
3737 low = range_binop (PLUS_EXPR, NULL_TREE, high0, 1,
3738 integer_one_node, 0);
3744 /* The case where we are excluding both ranges. Here the complex case
3745 is if they don't overlap. In that case, the only time we have a
3746 range is if they are adjacent. If the second is a subset of the
3747 first, the result is the first. Otherwise, the range to exclude
3748 starts at the beginning of the first range and ends at the end of the
3752 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
3753 range_binop (PLUS_EXPR, NULL_TREE,
3755 integer_one_node, 1),
3757 in_p = 0, low = low0, high = high1;
3762 in_p = 0, low = low0, high = high0;
3764 in_p = 0, low = low0, high = high1;
3767 *pin_p = in_p, *plow = low, *phigh = high;
3771 #ifndef RANGE_TEST_NON_SHORT_CIRCUIT
3772 #define RANGE_TEST_NON_SHORT_CIRCUIT (BRANCH_COST >= 2)
3775 /* EXP is some logical combination of boolean tests. See if we can
3776 merge it into some range test. Return the new tree if so. */
3779 fold_range_test (tree exp)
3781 int or_op = (TREE_CODE (exp) == TRUTH_ORIF_EXPR
3782 || TREE_CODE (exp) == TRUTH_OR_EXPR);
3783 int in0_p, in1_p, in_p;
3784 tree low0, low1, low, high0, high1, high;
3785 tree lhs = make_range (TREE_OPERAND (exp, 0), &in0_p, &low0, &high0);
3786 tree rhs = make_range (TREE_OPERAND (exp, 1), &in1_p, &low1, &high1);
3789 /* If this is an OR operation, invert both sides; we will invert
3790 again at the end. */
3792 in0_p = ! in0_p, in1_p = ! in1_p;
3794 /* If both expressions are the same, if we can merge the ranges, and we
3795 can build the range test, return it or it inverted. If one of the
3796 ranges is always true or always false, consider it to be the same
3797 expression as the other. */
3798 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
3799 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
3801 && 0 != (tem = (build_range_check (TREE_TYPE (exp),
3803 : rhs != 0 ? rhs : integer_zero_node,
3805 return or_op ? invert_truthvalue (tem) : tem;
3807 /* On machines where the branch cost is expensive, if this is a
3808 short-circuited branch and the underlying object on both sides
3809 is the same, make a non-short-circuit operation. */
3810 else if (RANGE_TEST_NON_SHORT_CIRCUIT
3811 && lhs != 0 && rhs != 0
3812 && (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
3813 || TREE_CODE (exp) == TRUTH_ORIF_EXPR)
3814 && operand_equal_p (lhs, rhs, 0))
3816 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
3817 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
3818 which cases we can't do this. */
3819 if (simple_operand_p (lhs))
3820 return build (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
3821 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
3822 TREE_TYPE (exp), TREE_OPERAND (exp, 0),
3823 TREE_OPERAND (exp, 1));
3825 else if (lang_hooks.decls.global_bindings_p () == 0
3826 && ! CONTAINS_PLACEHOLDER_P (lhs))
3828 tree common = save_expr (lhs);
3830 if (0 != (lhs = build_range_check (TREE_TYPE (exp), common,
3831 or_op ? ! in0_p : in0_p,
3833 && (0 != (rhs = build_range_check (TREE_TYPE (exp), common,
3834 or_op ? ! in1_p : in1_p,
3836 return build (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
3837 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
3838 TREE_TYPE (exp), lhs, rhs);
3845 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
3846 bit value. Arrange things so the extra bits will be set to zero if and
3847 only if C is signed-extended to its full width. If MASK is nonzero,
3848 it is an INTEGER_CST that should be AND'ed with the extra bits. */
3851 unextend (tree c, int p, int unsignedp, tree mask)
3853 tree type = TREE_TYPE (c);
3854 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
3857 if (p == modesize || unsignedp)
3860 /* We work by getting just the sign bit into the low-order bit, then
3861 into the high-order bit, then sign-extend. We then XOR that value
3863 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
3864 temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
3866 /* We must use a signed type in order to get an arithmetic right shift.
3867 However, we must also avoid introducing accidental overflows, so that
3868 a subsequent call to integer_zerop will work. Hence we must
3869 do the type conversion here. At this point, the constant is either
3870 zero or one, and the conversion to a signed type can never overflow.
3871 We could get an overflow if this conversion is done anywhere else. */
3872 if (TREE_UNSIGNED (type))
3873 temp = fold_convert (lang_hooks.types.signed_type (type), temp);
3875 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
3876 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
3878 temp = const_binop (BIT_AND_EXPR, temp,
3879 fold_convert (TREE_TYPE (c), mask), 0);
3880 /* If necessary, convert the type back to match the type of C. */
3881 if (TREE_UNSIGNED (type))
3882 temp = fold_convert (type, temp);
3884 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
3887 /* Find ways of folding logical expressions of LHS and RHS:
3888 Try to merge two comparisons to the same innermost item.
3889 Look for range tests like "ch >= '0' && ch <= '9'".
3890 Look for combinations of simple terms on machines with expensive branches
3891 and evaluate the RHS unconditionally.
3893 For example, if we have p->a == 2 && p->b == 4 and we can make an
3894 object large enough to span both A and B, we can do this with a comparison
3895 against the object ANDed with the a mask.
3897 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
3898 operations to do this with one comparison.
3900 We check for both normal comparisons and the BIT_AND_EXPRs made this by
3901 function and the one above.
3903 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
3904 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
3906 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
3909 We return the simplified tree or 0 if no optimization is possible. */
3912 fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs)
3914 /* If this is the "or" of two comparisons, we can do something if
3915 the comparisons are NE_EXPR. If this is the "and", we can do something
3916 if the comparisons are EQ_EXPR. I.e.,
3917 (a->b == 2 && a->c == 4) can become (a->new == NEW).
3919 WANTED_CODE is this operation code. For single bit fields, we can
3920 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
3921 comparison for one-bit fields. */
3923 enum tree_code wanted_code;
3924 enum tree_code lcode, rcode;
3925 tree ll_arg, lr_arg, rl_arg, rr_arg;
3926 tree ll_inner, lr_inner, rl_inner, rr_inner;
3927 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
3928 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
3929 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
3930 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
3931 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
3932 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
3933 enum machine_mode lnmode, rnmode;
3934 tree ll_mask, lr_mask, rl_mask, rr_mask;
3935 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
3936 tree l_const, r_const;
3937 tree lntype, rntype, result;
3938 int first_bit, end_bit;
3941 /* Start by getting the comparison codes. Fail if anything is volatile.
3942 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
3943 it were surrounded with a NE_EXPR. */
3945 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
3948 lcode = TREE_CODE (lhs);
3949 rcode = TREE_CODE (rhs);
3951 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
3952 lcode = NE_EXPR, lhs = build (NE_EXPR, truth_type, lhs, integer_zero_node);
3954 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
3955 rcode = NE_EXPR, rhs = build (NE_EXPR, truth_type, rhs, integer_zero_node);
3957 if (TREE_CODE_CLASS (lcode) != '<' || TREE_CODE_CLASS (rcode) != '<')
3960 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
3961 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
3963 ll_arg = TREE_OPERAND (lhs, 0);
3964 lr_arg = TREE_OPERAND (lhs, 1);
3965 rl_arg = TREE_OPERAND (rhs, 0);
3966 rr_arg = TREE_OPERAND (rhs, 1);
3968 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
3969 if (simple_operand_p (ll_arg)
3970 && simple_operand_p (lr_arg)
3971 && !FLOAT_TYPE_P (TREE_TYPE (ll_arg)))
3975 if (operand_equal_p (ll_arg, rl_arg, 0)
3976 && operand_equal_p (lr_arg, rr_arg, 0))
3978 int lcompcode, rcompcode;
3980 lcompcode = comparison_to_compcode (lcode);
3981 rcompcode = comparison_to_compcode (rcode);
3982 compcode = (code == TRUTH_AND_EXPR)
3983 ? lcompcode & rcompcode
3984 : lcompcode | rcompcode;
3986 else if (operand_equal_p (ll_arg, rr_arg, 0)
3987 && operand_equal_p (lr_arg, rl_arg, 0))
3989 int lcompcode, rcompcode;
3991 rcode = swap_tree_comparison (rcode);
3992 lcompcode = comparison_to_compcode (lcode);
3993 rcompcode = comparison_to_compcode (rcode);
3994 compcode = (code == TRUTH_AND_EXPR)
3995 ? lcompcode & rcompcode
3996 : lcompcode | rcompcode;
4001 if (compcode == COMPCODE_TRUE)
4002 return fold_convert (truth_type, integer_one_node);
4003 else if (compcode == COMPCODE_FALSE)
4004 return fold_convert (truth_type, integer_zero_node);
4005 else if (compcode != -1)
4006 return build (compcode_to_comparison (compcode),
4007 truth_type, ll_arg, lr_arg);
4010 /* If the RHS can be evaluated unconditionally and its operands are
4011 simple, it wins to evaluate the RHS unconditionally on machines
4012 with expensive branches. In this case, this isn't a comparison
4013 that can be merged. Avoid doing this if the RHS is a floating-point
4014 comparison since those can trap. */
4016 if (BRANCH_COST >= 2
4017 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
4018 && simple_operand_p (rl_arg)
4019 && simple_operand_p (rr_arg))
4021 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
4022 if (code == TRUTH_OR_EXPR
4023 && lcode == NE_EXPR && integer_zerop (lr_arg)
4024 && rcode == NE_EXPR && integer_zerop (rr_arg)
4025 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
4026 return build (NE_EXPR, truth_type,
4027 build (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
4031 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
4032 if (code == TRUTH_AND_EXPR
4033 && lcode == EQ_EXPR && integer_zerop (lr_arg)
4034 && rcode == EQ_EXPR && integer_zerop (rr_arg)
4035 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
4036 return build (EQ_EXPR, truth_type,
4037 build (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
4041 return build (code, truth_type, lhs, rhs);
4044 /* See if the comparisons can be merged. Then get all the parameters for
4047 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
4048 || (rcode != EQ_EXPR && rcode != NE_EXPR))
4052 ll_inner = decode_field_reference (ll_arg,
4053 &ll_bitsize, &ll_bitpos, &ll_mode,
4054 &ll_unsignedp, &volatilep, &ll_mask,
4056 lr_inner = decode_field_reference (lr_arg,
4057 &lr_bitsize, &lr_bitpos, &lr_mode,
4058 &lr_unsignedp, &volatilep, &lr_mask,
4060 rl_inner = decode_field_reference (rl_arg,
4061 &rl_bitsize, &rl_bitpos, &rl_mode,
4062 &rl_unsignedp, &volatilep, &rl_mask,
4064 rr_inner = decode_field_reference (rr_arg,
4065 &rr_bitsize, &rr_bitpos, &rr_mode,
4066 &rr_unsignedp, &volatilep, &rr_mask,
4069 /* It must be true that the inner operation on the lhs of each
4070 comparison must be the same if we are to be able to do anything.
4071 Then see if we have constants. If not, the same must be true for
4073 if (volatilep || ll_inner == 0 || rl_inner == 0
4074 || ! operand_equal_p (ll_inner, rl_inner, 0))
4077 if (TREE_CODE (lr_arg) == INTEGER_CST
4078 && TREE_CODE (rr_arg) == INTEGER_CST)
4079 l_const = lr_arg, r_const = rr_arg;
4080 else if (lr_inner == 0 || rr_inner == 0
4081 || ! operand_equal_p (lr_inner, rr_inner, 0))
4084 l_const = r_const = 0;
4086 /* If either comparison code is not correct for our logical operation,
4087 fail. However, we can convert a one-bit comparison against zero into
4088 the opposite comparison against that bit being set in the field. */
4090 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
4091 if (lcode != wanted_code)
4093 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
4095 /* Make the left operand unsigned, since we are only interested
4096 in the value of one bit. Otherwise we are doing the wrong
4105 /* This is analogous to the code for l_const above. */
4106 if (rcode != wanted_code)
4108 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
4117 /* After this point all optimizations will generate bit-field
4118 references, which we might not want. */
4119 if (! lang_hooks.can_use_bit_fields_p ())
4122 /* See if we can find a mode that contains both fields being compared on
4123 the left. If we can't, fail. Otherwise, update all constants and masks
4124 to be relative to a field of that size. */
4125 first_bit = MIN (ll_bitpos, rl_bitpos);
4126 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
4127 lnmode = get_best_mode (end_bit - first_bit, first_bit,
4128 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
4130 if (lnmode == VOIDmode)
4133 lnbitsize = GET_MODE_BITSIZE (lnmode);
4134 lnbitpos = first_bit & ~ (lnbitsize - 1);
4135 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
4136 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
4138 if (BYTES_BIG_ENDIAN)
4140 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
4141 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
4144 ll_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, ll_mask),
4145 size_int (xll_bitpos), 0);
4146 rl_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, rl_mask),
4147 size_int (xrl_bitpos), 0);
4151 l_const = fold_convert (lntype, l_const);
4152 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
4153 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
4154 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
4155 fold (build1 (BIT_NOT_EXPR,
4159 warning ("comparison is always %d", wanted_code == NE_EXPR);
4161 return fold_convert (truth_type,
4162 wanted_code == NE_EXPR
4163 ? integer_one_node : integer_zero_node);
4168 r_const = fold_convert (lntype, r_const);
4169 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
4170 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
4171 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
4172 fold (build1 (BIT_NOT_EXPR,
4176 warning ("comparison is always %d", wanted_code == NE_EXPR);
4178 return fold_convert (truth_type,
4179 wanted_code == NE_EXPR
4180 ? integer_one_node : integer_zero_node);
4184 /* If the right sides are not constant, do the same for it. Also,
4185 disallow this optimization if a size or signedness mismatch occurs
4186 between the left and right sides. */
4189 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
4190 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
4191 /* Make sure the two fields on the right
4192 correspond to the left without being swapped. */
4193 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
4196 first_bit = MIN (lr_bitpos, rr_bitpos);
4197 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
4198 rnmode = get_best_mode (end_bit - first_bit, first_bit,
4199 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
4201 if (rnmode == VOIDmode)
4204 rnbitsize = GET_MODE_BITSIZE (rnmode);
4205 rnbitpos = first_bit & ~ (rnbitsize - 1);
4206 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
4207 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
4209 if (BYTES_BIG_ENDIAN)
4211 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
4212 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
4215 lr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, lr_mask),
4216 size_int (xlr_bitpos), 0);
4217 rr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, rr_mask),
4218 size_int (xrr_bitpos), 0);
4220 /* Make a mask that corresponds to both fields being compared.
4221 Do this for both items being compared. If the operands are the
4222 same size and the bits being compared are in the same position
4223 then we can do this by masking both and comparing the masked
4225 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
4226 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
4227 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
4229 lhs = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
4230 ll_unsignedp || rl_unsignedp);
4231 if (! all_ones_mask_p (ll_mask, lnbitsize))
4232 lhs = build (BIT_AND_EXPR, lntype, lhs, ll_mask);
4234 rhs = make_bit_field_ref (lr_inner, rntype, rnbitsize, rnbitpos,
4235 lr_unsignedp || rr_unsignedp);
4236 if (! all_ones_mask_p (lr_mask, rnbitsize))
4237 rhs = build (BIT_AND_EXPR, rntype, rhs, lr_mask);
4239 return build (wanted_code, truth_type, lhs, rhs);
4242 /* There is still another way we can do something: If both pairs of
4243 fields being compared are adjacent, we may be able to make a wider
4244 field containing them both.
4246 Note that we still must mask the lhs/rhs expressions. Furthermore,
4247 the mask must be shifted to account for the shift done by
4248 make_bit_field_ref. */
4249 if ((ll_bitsize + ll_bitpos == rl_bitpos
4250 && lr_bitsize + lr_bitpos == rr_bitpos)
4251 || (ll_bitpos == rl_bitpos + rl_bitsize
4252 && lr_bitpos == rr_bitpos + rr_bitsize))
4256 lhs = make_bit_field_ref (ll_inner, lntype, ll_bitsize + rl_bitsize,
4257 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
4258 rhs = make_bit_field_ref (lr_inner, rntype, lr_bitsize + rr_bitsize,
4259 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
4261 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
4262 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
4263 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
4264 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);
4266 /* Convert to the smaller type before masking out unwanted bits. */
4268 if (lntype != rntype)
4270 if (lnbitsize > rnbitsize)
4272 lhs = fold_convert (rntype, lhs);
4273 ll_mask = fold_convert (rntype, ll_mask);
4276 else if (lnbitsize < rnbitsize)
4278 rhs = fold_convert (lntype, rhs);
4279 lr_mask = fold_convert (lntype, lr_mask);
4284 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
4285 lhs = build (BIT_AND_EXPR, type, lhs, ll_mask);
4287 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
4288 rhs = build (BIT_AND_EXPR, type, rhs, lr_mask);
4290 return build (wanted_code, truth_type, lhs, rhs);
4296 /* Handle the case of comparisons with constants. If there is something in
4297 common between the masks, those bits of the constants must be the same.
4298 If not, the condition is always false. Test for this to avoid generating
4299 incorrect code below. */
4300 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
4301 if (! integer_zerop (result)
4302 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
4303 const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
4305 if (wanted_code == NE_EXPR)
4307 warning ("`or' of unmatched not-equal tests is always 1");
4308 return fold_convert (truth_type, integer_one_node);
4312 warning ("`and' of mutually exclusive equal-tests is always 0");
4313 return fold_convert (truth_type, integer_zero_node);
4317 /* Construct the expression we will return. First get the component
4318 reference we will make. Unless the mask is all ones the width of
4319 that field, perform the mask operation. Then compare with the
4321 result = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
4322 ll_unsignedp || rl_unsignedp);
4324 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
4325 if (! all_ones_mask_p (ll_mask, lnbitsize))
4326 result = build (BIT_AND_EXPR, lntype, result, ll_mask);
4328 return build (wanted_code, truth_type, result,
4329 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
4332 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
4336 optimize_minmax_comparison (tree t)
4338 tree type = TREE_TYPE (t);
4339 tree arg0 = TREE_OPERAND (t, 0);
4340 enum tree_code op_code;
4341 tree comp_const = TREE_OPERAND (t, 1);
4343 int consts_equal, consts_lt;
4346 STRIP_SIGN_NOPS (arg0);
4348 op_code = TREE_CODE (arg0);
4349 minmax_const = TREE_OPERAND (arg0, 1);
4350 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
4351 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
4352 inner = TREE_OPERAND (arg0, 0);
4354 /* If something does not permit us to optimize, return the original tree. */
4355 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
4356 || TREE_CODE (comp_const) != INTEGER_CST
4357 || TREE_CONSTANT_OVERFLOW (comp_const)
4358 || TREE_CODE (minmax_const) != INTEGER_CST
4359 || TREE_CONSTANT_OVERFLOW (minmax_const))
4362 /* Now handle all the various comparison codes. We only handle EQ_EXPR
4363 and GT_EXPR, doing the rest with recursive calls using logical
4365 switch (TREE_CODE (t))
4367 case NE_EXPR: case LT_EXPR: case LE_EXPR:
4369 invert_truthvalue (optimize_minmax_comparison (invert_truthvalue (t)));
4373 fold (build (TRUTH_ORIF_EXPR, type,
4374 optimize_minmax_comparison
4375 (build (EQ_EXPR, type, arg0, comp_const)),
4376 optimize_minmax_comparison
4377 (build (GT_EXPR, type, arg0, comp_const))));
4380 if (op_code == MAX_EXPR && consts_equal)
4381 /* MAX (X, 0) == 0 -> X <= 0 */
4382 return fold (build (LE_EXPR, type, inner, comp_const));
4384 else if (op_code == MAX_EXPR && consts_lt)
4385 /* MAX (X, 0) == 5 -> X == 5 */
4386 return fold (build (EQ_EXPR, type, inner, comp_const));
4388 else if (op_code == MAX_EXPR)
4389 /* MAX (X, 0) == -1 -> false */
4390 return omit_one_operand (type, integer_zero_node, inner);
4392 else if (consts_equal)
4393 /* MIN (X, 0) == 0 -> X >= 0 */
4394 return fold (build (GE_EXPR, type, inner, comp_const));
4397 /* MIN (X, 0) == 5 -> false */
4398 return omit_one_operand (type, integer_zero_node, inner);
4401 /* MIN (X, 0) == -1 -> X == -1 */
4402 return fold (build (EQ_EXPR, type, inner, comp_const));
4405 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
4406 /* MAX (X, 0) > 0 -> X > 0
4407 MAX (X, 0) > 5 -> X > 5 */
4408 return fold (build (GT_EXPR, type, inner, comp_const));
4410 else if (op_code == MAX_EXPR)
4411 /* MAX (X, 0) > -1 -> true */
4412 return omit_one_operand (type, integer_one_node, inner);
4414 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
4415 /* MIN (X, 0) > 0 -> false
4416 MIN (X, 0) > 5 -> false */
4417 return omit_one_operand (type, integer_zero_node, inner);
4420 /* MIN (X, 0) > -1 -> X > -1 */
4421 return fold (build (GT_EXPR, type, inner, comp_const));
4428 /* T is an integer expression that is being multiplied, divided, or taken a
4429 modulus (CODE says which and what kind of divide or modulus) by a
4430 constant C. See if we can eliminate that operation by folding it with
4431 other operations already in T. WIDE_TYPE, if non-null, is a type that
4432 should be used for the computation if wider than our type.
4434 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
4435 (X * 2) + (Y * 4). We must, however, be assured that either the original
4436 expression would not overflow or that overflow is undefined for the type
4437 in the language in question.
4439 We also canonicalize (X + 7) * 4 into X * 4 + 28 in the hope that either
4440 the machine has a multiply-accumulate insn or that this is part of an
4441 addressing calculation.
4443 If we return a non-null expression, it is an equivalent form of the
4444 original computation, but need not be in the original type. */
4447 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type)
4449 /* To avoid exponential search depth, refuse to allow recursion past
4450 three levels. Beyond that (1) it's highly unlikely that we'll find
4451 something interesting and (2) we've probably processed it before
4452 when we built the inner expression. */
4461 ret = extract_muldiv_1 (t, c, code, wide_type);
4468 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type)
4470 tree type = TREE_TYPE (t);
4471 enum tree_code tcode = TREE_CODE (t);
4472 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
4473 > GET_MODE_SIZE (TYPE_MODE (type)))
4474 ? wide_type : type);
4476 int same_p = tcode == code;
4477 tree op0 = NULL_TREE, op1 = NULL_TREE;
4479 /* Don't deal with constants of zero here; they confuse the code below. */
4480 if (integer_zerop (c))
4483 if (TREE_CODE_CLASS (tcode) == '1')
4484 op0 = TREE_OPERAND (t, 0);
4486 if (TREE_CODE_CLASS (tcode) == '2')
4487 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
4489 /* Note that we need not handle conditional operations here since fold
4490 already handles those cases. So just do arithmetic here. */
4494 /* For a constant, we can always simplify if we are a multiply
4495 or (for divide and modulus) if it is a multiple of our constant. */
4496 if (code == MULT_EXPR
4497 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
4498 return const_binop (code, fold_convert (ctype, t),
4499 fold_convert (ctype, c), 0);
4502 case CONVERT_EXPR: case NON_LVALUE_EXPR: case NOP_EXPR:
4503 /* If op0 is an expression ... */
4504 if ((TREE_CODE_CLASS (TREE_CODE (op0)) == '<'
4505 || TREE_CODE_CLASS (TREE_CODE (op0)) == '1'
4506 || TREE_CODE_CLASS (TREE_CODE (op0)) == '2'
4507 || TREE_CODE_CLASS (TREE_CODE (op0)) == 'e')
4508 /* ... and is unsigned, and its type is smaller than ctype,
4509 then we cannot pass through as widening. */
4510 && ((TREE_UNSIGNED (TREE_TYPE (op0))
4511 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
4512 && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
4513 && (GET_MODE_SIZE (TYPE_MODE (ctype))
4514 > GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0)))))
4515 /* ... or its type is larger than ctype,
4516 then we cannot pass through this truncation. */
4517 || (GET_MODE_SIZE (TYPE_MODE (ctype))
4518 < GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0))))
4519 /* ... or signedness changes for division or modulus,
4520 then we cannot pass through this conversion. */
4521 || (code != MULT_EXPR
4522 && (TREE_UNSIGNED (ctype)
4523 != TREE_UNSIGNED (TREE_TYPE (op0))))))
4526 /* Pass the constant down and see if we can make a simplification. If
4527 we can, replace this expression with the inner simplification for
4528 possible later conversion to our or some other type. */
4529 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
4530 && TREE_CODE (t2) == INTEGER_CST
4531 && ! TREE_CONSTANT_OVERFLOW (t2)
4532 && (0 != (t1 = extract_muldiv (op0, t2, code,
4534 ? ctype : NULL_TREE))))
4538 case NEGATE_EXPR: case ABS_EXPR:
4539 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
4540 return fold (build1 (tcode, ctype, fold_convert (ctype, t1)));
4543 case MIN_EXPR: case MAX_EXPR:
4544 /* If widening the type changes the signedness, then we can't perform
4545 this optimization as that changes the result. */
4546 if (TREE_UNSIGNED (ctype) != TREE_UNSIGNED (type))
4549 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
4550 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0
4551 && (t2 = extract_muldiv (op1, c, code, wide_type)) != 0)
4553 if (tree_int_cst_sgn (c) < 0)
4554 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
4556 return fold (build (tcode, ctype, fold_convert (ctype, t1),
4557 fold_convert (ctype, t2)));
4561 case WITH_RECORD_EXPR:
4562 if ((t1 = extract_muldiv (TREE_OPERAND (t, 0), c, code, wide_type)) != 0)
4563 return build (WITH_RECORD_EXPR, TREE_TYPE (t1), t1,
4564 TREE_OPERAND (t, 1));
4567 case LSHIFT_EXPR: case RSHIFT_EXPR:
4568 /* If the second operand is constant, this is a multiplication
4569 or floor division, by a power of two, so we can treat it that
4570 way unless the multiplier or divisor overflows. */
4571 if (TREE_CODE (op1) == INTEGER_CST
4572 /* const_binop may not detect overflow correctly,
4573 so check for it explicitly here. */
4574 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
4575 && TREE_INT_CST_HIGH (op1) == 0
4576 && 0 != (t1 = fold_convert (ctype,
4577 const_binop (LSHIFT_EXPR,
4580 && ! TREE_OVERFLOW (t1))
4581 return extract_muldiv (build (tcode == LSHIFT_EXPR
4582 ? MULT_EXPR : FLOOR_DIV_EXPR,
4583 ctype, fold_convert (ctype, op0), t1),
4584 c, code, wide_type);
4587 case PLUS_EXPR: case MINUS_EXPR:
4588 /* See if we can eliminate the operation on both sides. If we can, we
4589 can return a new PLUS or MINUS. If we can't, the only remaining
4590 cases where we can do anything are if the second operand is a
4592 t1 = extract_muldiv (op0, c, code, wide_type);
4593 t2 = extract_muldiv (op1, c, code, wide_type);
4594 if (t1 != 0 && t2 != 0
4595 && (code == MULT_EXPR
4596 /* If not multiplication, we can only do this if both operands
4597 are divisible by c. */
4598 || (multiple_of_p (ctype, op0, c)
4599 && multiple_of_p (ctype, op1, c))))
4600 return fold (build (tcode, ctype, fold_convert (ctype, t1),
4601 fold_convert (ctype, t2)));
4603 /* If this was a subtraction, negate OP1 and set it to be an addition.
4604 This simplifies the logic below. */
4605 if (tcode == MINUS_EXPR)
4606 tcode = PLUS_EXPR, op1 = negate_expr (op1);
4608 if (TREE_CODE (op1) != INTEGER_CST)
4611 /* If either OP1 or C are negative, this optimization is not safe for
4612 some of the division and remainder types while for others we need
4613 to change the code. */
4614 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
4616 if (code == CEIL_DIV_EXPR)
4617 code = FLOOR_DIV_EXPR;
4618 else if (code == FLOOR_DIV_EXPR)
4619 code = CEIL_DIV_EXPR;
4620 else if (code != MULT_EXPR
4621 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
4625 /* If it's a multiply or a division/modulus operation of a multiple
4626 of our constant, do the operation and verify it doesn't overflow. */
4627 if (code == MULT_EXPR
4628 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
4630 op1 = const_binop (code, fold_convert (ctype, op1),
4631 fold_convert (ctype, c), 0);
4632 /* We allow the constant to overflow with wrapping semantics. */
4634 || (TREE_OVERFLOW (op1) && ! flag_wrapv))
4640 /* If we have an unsigned type is not a sizetype, we cannot widen
4641 the operation since it will change the result if the original
4642 computation overflowed. */
4643 if (TREE_UNSIGNED (ctype)
4644 && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
4648 /* If we were able to eliminate our operation from the first side,
4649 apply our operation to the second side and reform the PLUS. */
4650 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
4651 return fold (build (tcode, ctype, fold_convert (ctype, t1), op1));
4653 /* The last case is if we are a multiply. In that case, we can
4654 apply the distributive law to commute the multiply and addition
4655 if the multiplication of the constants doesn't overflow. */
4656 if (code == MULT_EXPR)
4657 return fold (build (tcode, ctype,
4658 fold (build (code, ctype,
4659 fold_convert (ctype, op0),
4660 fold_convert (ctype, c))),
4666 /* We have a special case here if we are doing something like
4667 (C * 8) % 4 since we know that's zero. */
4668 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
4669 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
4670 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
4671 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
4672 return omit_one_operand (type, integer_zero_node, op0);
4674 /* ... fall through ... */
4676 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
4677 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
4678 /* If we can extract our operation from the LHS, do so and return a
4679 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
4680 do something only if the second operand is a constant. */
4682 && (t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
4683 return fold (build (tcode, ctype, fold_convert (ctype, t1),
4684 fold_convert (ctype, op1)));
4685 else if (tcode == MULT_EXPR && code == MULT_EXPR
4686 && (t1 = extract_muldiv (op1, c, code, wide_type)) != 0)
4687 return fold (build (tcode, ctype, fold_convert (ctype, op0),
4688 fold_convert (ctype, t1)));
4689 else if (TREE_CODE (op1) != INTEGER_CST)
4692 /* If these are the same operation types, we can associate them
4693 assuming no overflow. */
4695 && 0 != (t1 = const_binop (MULT_EXPR, fold_convert (ctype, op1),
4696 fold_convert (ctype, c), 0))
4697 && ! TREE_OVERFLOW (t1))
4698 return fold (build (tcode, ctype, fold_convert (ctype, op0), t1));
4700 /* If these operations "cancel" each other, we have the main
4701 optimizations of this pass, which occur when either constant is a
4702 multiple of the other, in which case we replace this with either an
4703 operation or CODE or TCODE.
4705 If we have an unsigned type that is not a sizetype, we cannot do
4706 this since it will change the result if the original computation
4708 if ((! TREE_UNSIGNED (ctype)
4709 || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
4711 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
4712 || (tcode == MULT_EXPR
4713 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
4714 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR)))
4716 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
4717 return fold (build (tcode, ctype, fold_convert (ctype, op0),
4718 fold_convert (ctype,
4719 const_binop (TRUNC_DIV_EXPR,
4721 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
4722 return fold (build (code, ctype, fold_convert (ctype, op0),
4723 fold_convert (ctype,
4724 const_binop (TRUNC_DIV_EXPR,
4736 /* If T contains a COMPOUND_EXPR which was inserted merely to evaluate
4737 S, a SAVE_EXPR, return the expression actually being evaluated. Note
4738 that we may sometimes modify the tree. */
4741 strip_compound_expr (tree t, tree s)
4743 enum tree_code code = TREE_CODE (t);
4745 /* See if this is the COMPOUND_EXPR we want to eliminate. */
4746 if (code == COMPOUND_EXPR && TREE_CODE (TREE_OPERAND (t, 0)) == CONVERT_EXPR
4747 && TREE_OPERAND (TREE_OPERAND (t, 0), 0) == s)
4748 return TREE_OPERAND (t, 1);
4750 /* See if this is a COND_EXPR or a simple arithmetic operator. We
4751 don't bother handling any other types. */
4752 else if (code == COND_EXPR)
4754 TREE_OPERAND (t, 0) = strip_compound_expr (TREE_OPERAND (t, 0), s);
4755 TREE_OPERAND (t, 1) = strip_compound_expr (TREE_OPERAND (t, 1), s);
4756 TREE_OPERAND (t, 2) = strip_compound_expr (TREE_OPERAND (t, 2), s);
4758 else if (TREE_CODE_CLASS (code) == '1')
4759 TREE_OPERAND (t, 0) = strip_compound_expr (TREE_OPERAND (t, 0), s);
4760 else if (TREE_CODE_CLASS (code) == '<'
4761 || TREE_CODE_CLASS (code) == '2')
4763 TREE_OPERAND (t, 0) = strip_compound_expr (TREE_OPERAND (t, 0), s);
4764 TREE_OPERAND (t, 1) = strip_compound_expr (TREE_OPERAND (t, 1), s);
4770 /* Return a node which has the indicated constant VALUE (either 0 or
4771 1), and is of the indicated TYPE. */
4774 constant_boolean_node (int value, tree type)
4776 if (type == integer_type_node)
4777 return value ? integer_one_node : integer_zero_node;
4778 else if (TREE_CODE (type) == BOOLEAN_TYPE)
4779 return lang_hooks.truthvalue_conversion (value ? integer_one_node
4780 : integer_zero_node);
4783 tree t = build_int_2 (value, 0);
4785 TREE_TYPE (t) = type;
4790 /* Utility function for the following routine, to see how complex a nesting of
4791 COND_EXPRs can be. EXPR is the expression and LIMIT is a count beyond which
4792 we don't care (to avoid spending too much time on complex expressions.). */
4795 count_cond (tree expr, int lim)
4799 if (TREE_CODE (expr) != COND_EXPR)
4804 ctrue = count_cond (TREE_OPERAND (expr, 1), lim - 1);
4805 cfalse = count_cond (TREE_OPERAND (expr, 2), lim - 1 - ctrue);
4806 return MIN (lim, 1 + ctrue + cfalse);
4809 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
4810 Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
4811 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
4812 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
4813 COND is the first argument to CODE; otherwise (as in the example
4814 given here), it is the second argument. TYPE is the type of the
4815 original expression. */
4818 fold_binary_op_with_conditional_arg (enum tree_code code, tree type,
4819 tree cond, tree arg, int cond_first_p)
4821 tree test, true_value, false_value;
4822 tree lhs = NULL_TREE;
4823 tree rhs = NULL_TREE;
4824 /* In the end, we'll produce a COND_EXPR. Both arms of the
4825 conditional expression will be binary operations. The left-hand
4826 side of the expression to be executed if the condition is true
4827 will be pointed to by TRUE_LHS. Similarly, the right-hand side
4828 of the expression to be executed if the condition is true will be
4829 pointed to by TRUE_RHS. FALSE_LHS and FALSE_RHS are analogous --
4830 but apply to the expression to be executed if the conditional is
4836 /* These are the codes to use for the left-hand side and right-hand
4837 side of the COND_EXPR. Normally, they are the same as CODE. */
4838 enum tree_code lhs_code = code;
4839 enum tree_code rhs_code = code;
4840 /* And these are the types of the expressions. */
4841 tree lhs_type = type;
4842 tree rhs_type = type;
4847 true_rhs = false_rhs = &arg;
4848 true_lhs = &true_value;
4849 false_lhs = &false_value;
4853 true_lhs = false_lhs = &arg;
4854 true_rhs = &true_value;
4855 false_rhs = &false_value;
4858 if (TREE_CODE (cond) == COND_EXPR)
4860 test = TREE_OPERAND (cond, 0);
4861 true_value = TREE_OPERAND (cond, 1);
4862 false_value = TREE_OPERAND (cond, 2);
4863 /* If this operand throws an expression, then it does not make
4864 sense to try to perform a logical or arithmetic operation
4865 involving it. Instead of building `a + throw 3' for example,
4866 we simply build `a, throw 3'. */
4867 if (VOID_TYPE_P (TREE_TYPE (true_value)))
4871 lhs_code = COMPOUND_EXPR;
4872 lhs_type = void_type_node;
4877 if (VOID_TYPE_P (TREE_TYPE (false_value)))
4881 rhs_code = COMPOUND_EXPR;
4882 rhs_type = void_type_node;
4890 tree testtype = TREE_TYPE (cond);
4892 true_value = fold_convert (testtype, integer_one_node);
4893 false_value = fold_convert (testtype, integer_zero_node);
4896 /* If ARG is complex we want to make sure we only evaluate it once. Though
4897 this is only required if it is volatile, it might be more efficient even
4898 if it is not. However, if we succeed in folding one part to a constant,
4899 we do not need to make this SAVE_EXPR. Since we do this optimization
4900 primarily to see if we do end up with constant and this SAVE_EXPR
4901 interferes with later optimizations, suppressing it when we can is
4904 If we are not in a function, we can't make a SAVE_EXPR, so don't try to
4905 do so. Don't try to see if the result is a constant if an arm is a
4906 COND_EXPR since we get exponential behavior in that case. */
4908 if (saved_expr_p (arg))
4910 else if (lhs == 0 && rhs == 0
4911 && !TREE_CONSTANT (arg)
4912 && lang_hooks.decls.global_bindings_p () == 0
4913 && ((TREE_CODE (arg) != VAR_DECL && TREE_CODE (arg) != PARM_DECL)
4914 || TREE_SIDE_EFFECTS (arg)))
4916 if (TREE_CODE (true_value) != COND_EXPR)
4917 lhs = fold (build (lhs_code, lhs_type, *true_lhs, *true_rhs));
4919 if (TREE_CODE (false_value) != COND_EXPR)
4920 rhs = fold (build (rhs_code, rhs_type, *false_lhs, *false_rhs));
4922 if ((lhs == 0 || ! TREE_CONSTANT (lhs))
4923 && (rhs == 0 || !TREE_CONSTANT (rhs)))
4925 arg = save_expr (arg);
4927 save = saved_expr_p (arg);
4932 lhs = fold (build (lhs_code, lhs_type, *true_lhs, *true_rhs));
4934 rhs = fold (build (rhs_code, rhs_type, *false_lhs, *false_rhs));
4936 test = fold (build (COND_EXPR, type, test, lhs, rhs));
4938 /* If ARG involves a SAVE_EXPR, we need to ensure it is evaluated
4939 ahead of the COND_EXPR we made. Otherwise we would have it only
4940 evaluated in one branch, with the other branch using the result
4941 but missing the evaluation code. Beware that the save_expr call
4942 above might not return a SAVE_EXPR, so testing the TREE_CODE
4943 of ARG is not enough to decide here. Â */
4945 return build (COMPOUND_EXPR, type,
4946 fold_convert (void_type_node, arg),
4947 strip_compound_expr (test, arg));
4949 return fold_convert (type, test);
4953 /* Subroutine of fold() that checks for the addition of +/- 0.0.
4955 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
4956 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
4957 ADDEND is the same as X.
4959 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
4960 and finite. The problematic cases are when X is zero, and its mode
4961 has signed zeros. In the case of rounding towards -infinity,
4962 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
4963 modes, X + 0 is not the same as X because -0 + 0 is 0. */
4966 fold_real_zero_addition_p (tree type, tree addend, int negate)
4968 if (!real_zerop (addend))
4971 /* Don't allow the fold with -fsignaling-nans. */
4972 if (HONOR_SNANS (TYPE_MODE (type)))
4975 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
4976 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
4979 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
4980 if (TREE_CODE (addend) == REAL_CST
4981 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
4984 /* The mode has signed zeros, and we have to honor their sign.
4985 In this situation, there is only one case we can return true for.
4986 X - 0 is the same as X unless rounding towards -infinity is
4988 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
4991 /* Subroutine of fold() that checks comparisons of built-in math
4992 functions against real constants.
4994 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
4995 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
4996 is the type of the result and ARG0 and ARG1 are the operands of the
4997 comparison. ARG1 must be a TREE_REAL_CST.
4999 The function returns the constant folded tree if a simplification
5000 can be made, and NULL_TREE otherwise. */
5003 fold_mathfn_compare (enum built_in_function fcode, enum tree_code code,
5004 tree type, tree arg0, tree arg1)
5008 if (BUILTIN_SQRT_P (fcode))
5010 tree arg = TREE_VALUE (TREE_OPERAND (arg0, 1));
5011 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
5013 c = TREE_REAL_CST (arg1);
5014 if (REAL_VALUE_NEGATIVE (c))
5016 /* sqrt(x) < y is always false, if y is negative. */
5017 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
5018 return omit_one_operand (type,
5019 fold_convert (type, integer_zero_node),
5022 /* sqrt(x) > y is always true, if y is negative and we
5023 don't care about NaNs, i.e. negative values of x. */
5024 if (code == NE_EXPR || !HONOR_NANS (mode))
5025 return omit_one_operand (type,
5026 fold_convert (type, integer_one_node),
5029 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
5030 return fold (build (GE_EXPR, type, arg,
5031 build_real (TREE_TYPE (arg), dconst0)));
5033 else if (code == GT_EXPR || code == GE_EXPR)
5037 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
5038 real_convert (&c2, mode, &c2);
5040 if (REAL_VALUE_ISINF (c2))
5042 /* sqrt(x) > y is x == +Inf, when y is very large. */
5043 if (HONOR_INFINITIES (mode))
5044 return fold (build (EQ_EXPR, type, arg,
5045 build_real (TREE_TYPE (arg), c2)));
5047 /* sqrt(x) > y is always false, when y is very large
5048 and we don't care about infinities. */
5049 return omit_one_operand (type,
5050 fold_convert (type, integer_zero_node),
5054 /* sqrt(x) > c is the same as x > c*c. */
5055 return fold (build (code, type, arg,
5056 build_real (TREE_TYPE (arg), c2)));
5058 else if (code == LT_EXPR || code == LE_EXPR)
5062 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
5063 real_convert (&c2, mode, &c2);
5065 if (REAL_VALUE_ISINF (c2))
5067 /* sqrt(x) < y is always true, when y is a very large
5068 value and we don't care about NaNs or Infinities. */
5069 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
5070 return omit_one_operand (type,
5071 fold_convert (type, integer_one_node),
5074 /* sqrt(x) < y is x != +Inf when y is very large and we
5075 don't care about NaNs. */
5076 if (! HONOR_NANS (mode))
5077 return fold (build (NE_EXPR, type, arg,
5078 build_real (TREE_TYPE (arg), c2)));
5080 /* sqrt(x) < y is x >= 0 when y is very large and we
5081 don't care about Infinities. */
5082 if (! HONOR_INFINITIES (mode))
5083 return fold (build (GE_EXPR, type, arg,
5084 build_real (TREE_TYPE (arg), dconst0)));
5086 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
5087 if (lang_hooks.decls.global_bindings_p () != 0
5088 || CONTAINS_PLACEHOLDER_P (arg))
5091 arg = save_expr (arg);
5092 return fold (build (TRUTH_ANDIF_EXPR, type,
5093 fold (build (GE_EXPR, type, arg,
5094 build_real (TREE_TYPE (arg),
5096 fold (build (NE_EXPR, type, arg,
5097 build_real (TREE_TYPE (arg),
5101 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
5102 if (! HONOR_NANS (mode))
5103 return fold (build (code, type, arg,
5104 build_real (TREE_TYPE (arg), c2)));
5106 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
5107 if (lang_hooks.decls.global_bindings_p () == 0
5108 && ! CONTAINS_PLACEHOLDER_P (arg))
5110 arg = save_expr (arg);
5111 return fold (build (TRUTH_ANDIF_EXPR, type,
5112 fold (build (GE_EXPR, type, arg,
5113 build_real (TREE_TYPE (arg),
5115 fold (build (code, type, arg,
5116 build_real (TREE_TYPE (arg),
5125 /* Subroutine of fold() that optimizes comparisons against Infinities,
5126 either +Inf or -Inf.
5128 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
5129 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
5130 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
5132 The function returns the constant folded tree if a simplification
5133 can be made, and NULL_TREE otherwise. */
5136 fold_inf_compare (enum tree_code code, tree type, tree arg0, tree arg1)
5138 enum machine_mode mode;
5139 REAL_VALUE_TYPE max;
5143 mode = TYPE_MODE (TREE_TYPE (arg0));
5145 /* For negative infinity swap the sense of the comparison. */
5146 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
5148 code = swap_tree_comparison (code);
5153 /* x > +Inf is always false, if with ignore sNANs. */
5154 if (HONOR_SNANS (mode))
5156 return omit_one_operand (type,
5157 fold_convert (type, integer_zero_node),
5161 /* x <= +Inf is always true, if we don't case about NaNs. */
5162 if (! HONOR_NANS (mode))
5163 return omit_one_operand (type,
5164 fold_convert (type, integer_one_node),
5167 /* x <= +Inf is the same as x == x, i.e. isfinite(x). */
5168 if (lang_hooks.decls.global_bindings_p () == 0
5169 && ! CONTAINS_PLACEHOLDER_P (arg0))
5171 arg0 = save_expr (arg0);
5172 return fold (build (EQ_EXPR, type, arg0, arg0));
5178 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
5179 real_maxval (&max, neg, mode);
5180 return fold (build (neg ? LT_EXPR : GT_EXPR, type,
5181 arg0, build_real (TREE_TYPE (arg0), max)));
5184 /* x < +Inf is always equal to x <= DBL_MAX. */
5185 real_maxval (&max, neg, mode);
5186 return fold (build (neg ? GE_EXPR : LE_EXPR, type,
5187 arg0, build_real (TREE_TYPE (arg0), max)));
5190 /* x != +Inf is always equal to !(x > DBL_MAX). */
5191 real_maxval (&max, neg, mode);
5192 if (! HONOR_NANS (mode))
5193 return fold (build (neg ? GE_EXPR : LE_EXPR, type,
5194 arg0, build_real (TREE_TYPE (arg0), max)));
5195 temp = fold (build (neg ? LT_EXPR : GT_EXPR, type,
5196 arg0, build_real (TREE_TYPE (arg0), max)));
5197 return fold (build1 (TRUTH_NOT_EXPR, type, temp));
5206 /* If CODE with arguments ARG0 and ARG1 represents a single bit
5207 equality/inequality test, then return a simplified form of
5208 the test using shifts and logical operations. Otherwise return
5209 NULL. TYPE is the desired result type. */
5212 fold_single_bit_test (enum tree_code code, tree arg0, tree arg1,
5215 /* If this is a TRUTH_NOT_EXPR, it may have a single bit test inside
5217 if (code == TRUTH_NOT_EXPR)
5219 code = TREE_CODE (arg0);
5220 if (code != NE_EXPR && code != EQ_EXPR)
5223 /* Extract the arguments of the EQ/NE. */
5224 arg1 = TREE_OPERAND (arg0, 1);
5225 arg0 = TREE_OPERAND (arg0, 0);
5227 /* This requires us to invert the code. */
5228 code = (code == EQ_EXPR ? NE_EXPR : EQ_EXPR);
5231 /* If this is testing a single bit, we can optimize the test. */
5232 if ((code == NE_EXPR || code == EQ_EXPR)
5233 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
5234 && integer_pow2p (TREE_OPERAND (arg0, 1)))
5236 tree inner = TREE_OPERAND (arg0, 0);
5237 tree type = TREE_TYPE (arg0);
5238 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
5239 enum machine_mode operand_mode = TYPE_MODE (type);
5241 tree signed_type, unsigned_type, intermediate_type;
5244 /* If we have (A & C) != 0 where C is the sign bit of A, convert
5245 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
5246 arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
5247 if (arg00 != NULL_TREE)
5249 tree stype = lang_hooks.types.signed_type (TREE_TYPE (arg00));
5250 return fold (build (code == EQ_EXPR ? GE_EXPR : LT_EXPR, result_type,
5251 fold_convert (stype, arg00),
5252 fold_convert (stype, integer_zero_node)));
5255 /* At this point, we know that arg0 is not testing the sign bit. */
5256 if (TYPE_PRECISION (type) - 1 == bitnum)
5259 /* Otherwise we have (A & C) != 0 where C is a single bit,
5260 convert that into ((A >> C2) & 1). Where C2 = log2(C).
5261 Similarly for (A & C) == 0. */
5263 /* If INNER is a right shift of a constant and it plus BITNUM does
5264 not overflow, adjust BITNUM and INNER. */
5265 if (TREE_CODE (inner) == RSHIFT_EXPR
5266 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
5267 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
5268 && bitnum < TYPE_PRECISION (type)
5269 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
5270 bitnum - TYPE_PRECISION (type)))
5272 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
5273 inner = TREE_OPERAND (inner, 0);
5276 /* If we are going to be able to omit the AND below, we must do our
5277 operations as unsigned. If we must use the AND, we have a choice.
5278 Normally unsigned is faster, but for some machines signed is. */
5279 #ifdef LOAD_EXTEND_OP
5280 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1);
5285 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
5286 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
5287 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
5288 inner = fold_convert (intermediate_type, inner);
5291 inner = build (RSHIFT_EXPR, intermediate_type,
5292 inner, size_int (bitnum));
5294 if (code == EQ_EXPR)
5295 inner = build (BIT_XOR_EXPR, intermediate_type,
5296 inner, integer_one_node);
5298 /* Put the AND last so it can combine with more things. */
5299 inner = build (BIT_AND_EXPR, intermediate_type,
5300 inner, integer_one_node);
5302 /* Make sure to return the proper type. */
5303 inner = fold_convert (result_type, inner);
5310 /* Check whether we are allowed to reorder operands arg0 and arg1,
5311 such that the evaluation of arg1 occurs before arg0. */
5314 reorder_operands_p (tree arg0, tree arg1)
5316 if (! flag_evaluation_order)
5318 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
5320 return ! TREE_SIDE_EFFECTS (arg0)
5321 && ! TREE_SIDE_EFFECTS (arg1);
5324 /* Test whether it is preferable two swap two operands, ARG0 and
5325 ARG1, for example because ARG0 is an integer constant and ARG1
5326 isn't. If REORDER is true, only recommend swapping if we can
5327 evaluate the operands in reverse order. */
5330 tree_swap_operands_p (tree arg0, tree arg1, bool reorder)
5332 STRIP_SIGN_NOPS (arg0);
5333 STRIP_SIGN_NOPS (arg1);
5335 if (TREE_CODE (arg1) == INTEGER_CST)
5337 if (TREE_CODE (arg0) == INTEGER_CST)
5340 if (TREE_CODE (arg1) == REAL_CST)
5342 if (TREE_CODE (arg0) == REAL_CST)
5345 if (TREE_CODE (arg1) == COMPLEX_CST)
5347 if (TREE_CODE (arg0) == COMPLEX_CST)
5350 if (TREE_CONSTANT (arg1))
5352 if (TREE_CONSTANT (arg0))
5358 if (reorder && flag_evaluation_order
5359 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
5370 /* Perform constant folding and related simplification of EXPR.
5371 The related simplifications include x*1 => x, x*0 => 0, etc.,
5372 and application of the associative law.
5373 NOP_EXPR conversions may be removed freely (as long as we
5374 are careful not to change the C type of the overall expression)
5375 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
5376 but we can constant-fold them if they have constant operands. */
5378 #ifdef ENABLE_FOLD_CHECKING
5379 # define fold(x) fold_1 (x)
5380 static tree fold_1 (tree);
5386 tree t = expr, orig_t;
5387 tree t1 = NULL_TREE;
5389 tree type = TREE_TYPE (expr);
5390 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
5391 enum tree_code code = TREE_CODE (t);
5392 int kind = TREE_CODE_CLASS (code);
5394 /* WINS will be nonzero when the switch is done
5395 if all operands are constant. */
5398 /* Don't try to process an RTL_EXPR since its operands aren't trees.
5399 Likewise for a SAVE_EXPR that's already been evaluated. */
5400 if (code == RTL_EXPR || (code == SAVE_EXPR && SAVE_EXPR_RTL (t) != 0))
5403 /* Return right away if a constant. */
5409 if (code == NOP_EXPR || code == FLOAT_EXPR || code == CONVERT_EXPR)
5413 /* Special case for conversion ops that can have fixed point args. */
5414 arg0 = TREE_OPERAND (t, 0);
5416 /* Don't use STRIP_NOPS, because signedness of argument type matters. */
5418 STRIP_SIGN_NOPS (arg0);
5420 if (arg0 != 0 && TREE_CODE (arg0) == COMPLEX_CST)
5421 subop = TREE_REALPART (arg0);
5425 if (subop != 0 && TREE_CODE (subop) != INTEGER_CST
5426 && TREE_CODE (subop) != REAL_CST)
5427 /* Note that TREE_CONSTANT isn't enough:
5428 static var addresses are constant but we can't
5429 do arithmetic on them. */
5432 else if (IS_EXPR_CODE_CLASS (kind))
5434 int len = first_rtl_op (code);
5436 for (i = 0; i < len; i++)
5438 tree op = TREE_OPERAND (t, i);
5442 continue; /* Valid for CALL_EXPR, at least. */
5444 /* Strip any conversions that don't change the mode. This is
5445 safe for every expression, except for a comparison expression
5446 because its signedness is derived from its operands. So, in
5447 the latter case, only strip conversions that don't change the
5450 Note that this is done as an internal manipulation within the
5451 constant folder, in order to find the simplest representation
5452 of the arguments so that their form can be studied. In any
5453 cases, the appropriate type conversions should be put back in
5454 the tree that will get out of the constant folder. */
5456 STRIP_SIGN_NOPS (op);
5460 if (TREE_CODE (op) == COMPLEX_CST)
5461 subop = TREE_REALPART (op);
5465 if (TREE_CODE (subop) != INTEGER_CST
5466 && TREE_CODE (subop) != REAL_CST)
5467 /* Note that TREE_CONSTANT isn't enough:
5468 static var addresses are constant but we can't
5469 do arithmetic on them. */
5479 /* If this is a commutative operation, and ARG0 is a constant, move it
5480 to ARG1 to reduce the number of tests below. */
5481 if (commutative_tree_code (code)
5482 && tree_swap_operands_p (arg0, arg1, true))
5483 return fold (build (code, type, TREE_OPERAND (t, 1),
5484 TREE_OPERAND (t, 0)));
5486 /* Now WINS is set as described above,
5487 ARG0 is the first operand of EXPR,
5488 and ARG1 is the second operand (if it has more than one operand).
5490 First check for cases where an arithmetic operation is applied to a
5491 compound, conditional, or comparison operation. Push the arithmetic
5492 operation inside the compound or conditional to see if any folding
5493 can then be done. Convert comparison to conditional for this purpose.
5494 The also optimizes non-constant cases that used to be done in
5497 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
5498 one of the operands is a comparison and the other is a comparison, a
5499 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
5500 code below would make the expression more complex. Change it to a
5501 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
5502 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
5504 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
5505 || code == EQ_EXPR || code == NE_EXPR)
5506 && ((truth_value_p (TREE_CODE (arg0))
5507 && (truth_value_p (TREE_CODE (arg1))
5508 || (TREE_CODE (arg1) == BIT_AND_EXPR
5509 && integer_onep (TREE_OPERAND (arg1, 1)))))
5510 || (truth_value_p (TREE_CODE (arg1))
5511 && (truth_value_p (TREE_CODE (arg0))
5512 || (TREE_CODE (arg0) == BIT_AND_EXPR
5513 && integer_onep (TREE_OPERAND (arg0, 1)))))))
5515 t = fold (build (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
5516 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
5520 if (code == EQ_EXPR)
5521 t = invert_truthvalue (t);
5526 if (TREE_CODE_CLASS (code) == '1')
5528 if (TREE_CODE (arg0) == COMPOUND_EXPR)
5529 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
5530 fold (build1 (code, type, TREE_OPERAND (arg0, 1))));
5531 else if (TREE_CODE (arg0) == COND_EXPR)
5533 tree arg01 = TREE_OPERAND (arg0, 1);
5534 tree arg02 = TREE_OPERAND (arg0, 2);
5535 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
5536 arg01 = fold (build1 (code, type, arg01));
5537 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
5538 arg02 = fold (build1 (code, type, arg02));
5539 t = fold (build (COND_EXPR, type, TREE_OPERAND (arg0, 0),
5542 /* If this was a conversion, and all we did was to move into
5543 inside the COND_EXPR, bring it back out. But leave it if
5544 it is a conversion from integer to integer and the
5545 result precision is no wider than a word since such a
5546 conversion is cheap and may be optimized away by combine,
5547 while it couldn't if it were outside the COND_EXPR. Then return
5548 so we don't get into an infinite recursion loop taking the
5549 conversion out and then back in. */
5551 if ((code == NOP_EXPR || code == CONVERT_EXPR
5552 || code == NON_LVALUE_EXPR)
5553 && TREE_CODE (t) == COND_EXPR
5554 && TREE_CODE (TREE_OPERAND (t, 1)) == code
5555 && TREE_CODE (TREE_OPERAND (t, 2)) == code
5556 && ! VOID_TYPE_P (TREE_OPERAND (t, 1))
5557 && ! VOID_TYPE_P (TREE_OPERAND (t, 2))
5558 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0))
5559 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 2), 0)))
5560 && ! (INTEGRAL_TYPE_P (TREE_TYPE (t))
5562 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0))))
5563 && TYPE_PRECISION (TREE_TYPE (t)) <= BITS_PER_WORD))
5564 t = build1 (code, type,
5566 TREE_TYPE (TREE_OPERAND
5567 (TREE_OPERAND (t, 1), 0)),
5568 TREE_OPERAND (t, 0),
5569 TREE_OPERAND (TREE_OPERAND (t, 1), 0),
5570 TREE_OPERAND (TREE_OPERAND (t, 2), 0)));
5573 else if (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<')
5574 return fold (build (COND_EXPR, type, arg0,
5575 fold (build1 (code, type, integer_one_node)),
5576 fold (build1 (code, type, integer_zero_node))));
5578 else if (TREE_CODE_CLASS (code) == '<'
5579 && TREE_CODE (arg0) == COMPOUND_EXPR)
5580 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
5581 fold (build (code, type, TREE_OPERAND (arg0, 1), arg1)));
5582 else if (TREE_CODE_CLASS (code) == '<'
5583 && TREE_CODE (arg1) == COMPOUND_EXPR)
5584 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
5585 fold (build (code, type, arg0, TREE_OPERAND (arg1, 1))));
5586 else if (TREE_CODE_CLASS (code) == '2'
5587 || TREE_CODE_CLASS (code) == '<')
5589 if (TREE_CODE (arg1) == COMPOUND_EXPR
5590 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg1, 0))
5591 && ! TREE_SIDE_EFFECTS (arg0))
5592 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
5593 fold (build (code, type,
5594 arg0, TREE_OPERAND (arg1, 1))));
5595 else if ((TREE_CODE (arg1) == COND_EXPR
5596 || (TREE_CODE_CLASS (TREE_CODE (arg1)) == '<'
5597 && TREE_CODE_CLASS (code) != '<'))
5598 && (TREE_CODE (arg0) != COND_EXPR
5599 || count_cond (arg0, 25) + count_cond (arg1, 25) <= 25)
5600 && (! TREE_SIDE_EFFECTS (arg0)
5601 || (lang_hooks.decls.global_bindings_p () == 0
5602 && ! CONTAINS_PLACEHOLDER_P (arg0))))
5604 fold_binary_op_with_conditional_arg (code, type, arg1, arg0,
5605 /*cond_first_p=*/0);
5606 else if (TREE_CODE (arg0) == COMPOUND_EXPR)
5607 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
5608 fold (build (code, type, TREE_OPERAND (arg0, 1), arg1)));
5609 else if ((TREE_CODE (arg0) == COND_EXPR
5610 || (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
5611 && TREE_CODE_CLASS (code) != '<'))
5612 && (TREE_CODE (arg1) != COND_EXPR
5613 || count_cond (arg0, 25) + count_cond (arg1, 25) <= 25)
5614 && (! TREE_SIDE_EFFECTS (arg1)
5615 || (lang_hooks.decls.global_bindings_p () == 0
5616 && ! CONTAINS_PLACEHOLDER_P (arg1))))
5618 fold_binary_op_with_conditional_arg (code, type, arg0, arg1,
5619 /*cond_first_p=*/1);
5633 return fold (DECL_INITIAL (t));
5638 case FIX_TRUNC_EXPR:
5640 case FIX_FLOOR_EXPR:
5641 if (TREE_TYPE (TREE_OPERAND (t, 0)) == TREE_TYPE (t))
5642 return TREE_OPERAND (t, 0);
5644 /* Handle cases of two conversions in a row. */
5645 if (TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
5646 || TREE_CODE (TREE_OPERAND (t, 0)) == CONVERT_EXPR)
5648 tree inside_type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
5649 tree inter_type = TREE_TYPE (TREE_OPERAND (t, 0));
5650 tree final_type = TREE_TYPE (t);
5651 int inside_int = INTEGRAL_TYPE_P (inside_type);
5652 int inside_ptr = POINTER_TYPE_P (inside_type);
5653 int inside_float = FLOAT_TYPE_P (inside_type);
5654 unsigned int inside_prec = TYPE_PRECISION (inside_type);
5655 int inside_unsignedp = TREE_UNSIGNED (inside_type);
5656 int inter_int = INTEGRAL_TYPE_P (inter_type);
5657 int inter_ptr = POINTER_TYPE_P (inter_type);
5658 int inter_float = FLOAT_TYPE_P (inter_type);
5659 unsigned int inter_prec = TYPE_PRECISION (inter_type);
5660 int inter_unsignedp = TREE_UNSIGNED (inter_type);
5661 int final_int = INTEGRAL_TYPE_P (final_type);
5662 int final_ptr = POINTER_TYPE_P (final_type);
5663 int final_float = FLOAT_TYPE_P (final_type);
5664 unsigned int final_prec = TYPE_PRECISION (final_type);
5665 int final_unsignedp = TREE_UNSIGNED (final_type);
5667 /* In addition to the cases of two conversions in a row
5668 handled below, if we are converting something to its own
5669 type via an object of identical or wider precision, neither
5670 conversion is needed. */
5671 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (final_type)
5672 && ((inter_int && final_int) || (inter_float && final_float))
5673 && inter_prec >= final_prec)
5674 return fold (build1 (code, final_type,
5675 TREE_OPERAND (TREE_OPERAND (t, 0), 0)));
5677 /* Likewise, if the intermediate and final types are either both
5678 float or both integer, we don't need the middle conversion if
5679 it is wider than the final type and doesn't change the signedness
5680 (for integers). Avoid this if the final type is a pointer
5681 since then we sometimes need the inner conversion. Likewise if
5682 the outer has a precision not equal to the size of its mode. */
5683 if ((((inter_int || inter_ptr) && (inside_int || inside_ptr))
5684 || (inter_float && inside_float))
5685 && inter_prec >= inside_prec
5686 && (inter_float || inter_unsignedp == inside_unsignedp)
5687 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (final_type))
5688 && TYPE_MODE (final_type) == TYPE_MODE (inter_type))
5690 return fold (build1 (code, final_type,
5691 TREE_OPERAND (TREE_OPERAND (t, 0), 0)));
5693 /* If we have a sign-extension of a zero-extended value, we can
5694 replace that by a single zero-extension. */
5695 if (inside_int && inter_int && final_int
5696 && inside_prec < inter_prec && inter_prec < final_prec
5697 && inside_unsignedp && !inter_unsignedp)
5698 return fold (build1 (code, final_type,
5699 TREE_OPERAND (TREE_OPERAND (t, 0), 0)));
5701 /* Two conversions in a row are not needed unless:
5702 - some conversion is floating-point (overstrict for now), or
5703 - the intermediate type is narrower than both initial and
5705 - the intermediate type and innermost type differ in signedness,
5706 and the outermost type is wider than the intermediate, or
5707 - the initial type is a pointer type and the precisions of the
5708 intermediate and final types differ, or
5709 - the final type is a pointer type and the precisions of the
5710 initial and intermediate types differ. */
5711 if (! inside_float && ! inter_float && ! final_float
5712 && (inter_prec > inside_prec || inter_prec > final_prec)
5713 && ! (inside_int && inter_int
5714 && inter_unsignedp != inside_unsignedp
5715 && inter_prec < final_prec)
5716 && ((inter_unsignedp && inter_prec > inside_prec)
5717 == (final_unsignedp && final_prec > inter_prec))
5718 && ! (inside_ptr && inter_prec != final_prec)
5719 && ! (final_ptr && inside_prec != inter_prec)
5720 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (final_type))
5721 && TYPE_MODE (final_type) == TYPE_MODE (inter_type))
5723 return fold (build1 (code, final_type,
5724 TREE_OPERAND (TREE_OPERAND (t, 0), 0)));
5727 if (TREE_CODE (TREE_OPERAND (t, 0)) == MODIFY_EXPR
5728 && TREE_CONSTANT (TREE_OPERAND (TREE_OPERAND (t, 0), 1))
5729 /* Detect assigning a bitfield. */
5730 && !(TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 0)) == COMPONENT_REF
5731 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t, 0), 0), 1))))
5733 /* Don't leave an assignment inside a conversion
5734 unless assigning a bitfield. */
5735 tree prev = TREE_OPERAND (t, 0);
5738 TREE_OPERAND (t, 0) = TREE_OPERAND (prev, 1);
5739 /* First do the assignment, then return converted constant. */
5740 t = build (COMPOUND_EXPR, TREE_TYPE (t), prev, fold (t));
5741 TREE_NO_UNUSED_WARNING (t) = 1;
5746 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
5747 constants (if x has signed type, the sign bit cannot be set
5748 in c). This folds extension into the BIT_AND_EXPR. */
5749 if (INTEGRAL_TYPE_P (TREE_TYPE (t))
5750 && TREE_CODE (TREE_TYPE (t)) != BOOLEAN_TYPE
5751 && TREE_CODE (TREE_OPERAND (t, 0)) == BIT_AND_EXPR
5752 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 1)) == INTEGER_CST)
5754 tree and = TREE_OPERAND (t, 0);
5755 tree and0 = TREE_OPERAND (and, 0), and1 = TREE_OPERAND (and, 1);
5758 if (TREE_UNSIGNED (TREE_TYPE (and))
5759 || (TYPE_PRECISION (TREE_TYPE (t))
5760 <= TYPE_PRECISION (TREE_TYPE (and))))
5762 else if (TYPE_PRECISION (TREE_TYPE (and1))
5763 <= HOST_BITS_PER_WIDE_INT
5764 && host_integerp (and1, 1))
5766 unsigned HOST_WIDE_INT cst;
5768 cst = tree_low_cst (and1, 1);
5769 cst &= (HOST_WIDE_INT) -1
5770 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
5771 change = (cst == 0);
5772 #ifdef LOAD_EXTEND_OP
5774 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
5777 tree uns = lang_hooks.types.unsigned_type (TREE_TYPE (and0));
5778 and0 = fold_convert (uns, and0);
5779 and1 = fold_convert (uns, and1);
5784 return fold (build (BIT_AND_EXPR, TREE_TYPE (t),
5785 fold_convert (TREE_TYPE (t), and0),
5786 fold_convert (TREE_TYPE (t), and1)));
5789 tem = fold_convert_const (code, TREE_TYPE (t), arg0);
5790 return tem ? tem : t;
5792 case VIEW_CONVERT_EXPR:
5793 if (TREE_CODE (TREE_OPERAND (t, 0)) == VIEW_CONVERT_EXPR)
5794 return build1 (VIEW_CONVERT_EXPR, type,
5795 TREE_OPERAND (TREE_OPERAND (t, 0), 0));
5799 if (TREE_CODE (arg0) == CONSTRUCTOR
5800 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
5802 tree m = purpose_member (arg1, CONSTRUCTOR_ELTS (arg0));
5809 if (TREE_CONSTANT (t) != wins)
5813 TREE_CONSTANT (t) = wins;
5818 if (negate_expr_p (arg0))
5819 return fold_convert (type, negate_expr (arg0));
5825 if (TREE_CODE (arg0) == INTEGER_CST)
5827 /* If the value is unsigned, then the absolute value is
5828 the same as the ordinary value. */
5829 if (TREE_UNSIGNED (type))
5831 /* Similarly, if the value is non-negative. */
5832 else if (INT_CST_LT (integer_minus_one_node, arg0))
5834 /* If the value is negative, then the absolute value is
5838 unsigned HOST_WIDE_INT low;
5840 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
5841 TREE_INT_CST_HIGH (arg0),
5843 t = build_int_2 (low, high);
5844 TREE_TYPE (t) = type;
5846 = (TREE_OVERFLOW (arg0)
5847 | force_fit_type (t, overflow));
5848 TREE_CONSTANT_OVERFLOW (t)
5849 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg0);
5852 else if (TREE_CODE (arg0) == REAL_CST)
5854 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
5855 t = build_real (type,
5856 REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
5859 else if (TREE_CODE (arg0) == NEGATE_EXPR)
5860 return fold (build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0)));
5861 /* Convert fabs((double)float) into (double)fabsf(float). */
5862 else if (TREE_CODE (arg0) == NOP_EXPR
5863 && TREE_CODE (type) == REAL_TYPE)
5865 tree targ0 = strip_float_extensions (arg0);
5867 return fold_convert (type, fold (build1 (ABS_EXPR,
5871 else if (tree_expr_nonnegative_p (arg0))
5876 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
5877 return fold_convert (type, arg0);
5878 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
5879 return build (COMPLEX_EXPR, type,
5880 TREE_OPERAND (arg0, 0),
5881 negate_expr (TREE_OPERAND (arg0, 1)));
5882 else if (TREE_CODE (arg0) == COMPLEX_CST)
5883 return build_complex (type, TREE_REALPART (arg0),
5884 negate_expr (TREE_IMAGPART (arg0)));
5885 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
5886 return fold (build (TREE_CODE (arg0), type,
5887 fold (build1 (CONJ_EXPR, type,
5888 TREE_OPERAND (arg0, 0))),
5889 fold (build1 (CONJ_EXPR,
5890 type, TREE_OPERAND (arg0, 1)))));
5891 else if (TREE_CODE (arg0) == CONJ_EXPR)
5892 return TREE_OPERAND (arg0, 0);
5898 t = build_int_2 (~ TREE_INT_CST_LOW (arg0),
5899 ~ TREE_INT_CST_HIGH (arg0));
5900 TREE_TYPE (t) = type;
5901 force_fit_type (t, 0);
5902 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg0);
5903 TREE_CONSTANT_OVERFLOW (t) = TREE_CONSTANT_OVERFLOW (arg0);
5905 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
5906 return TREE_OPERAND (arg0, 0);
5910 /* A + (-B) -> A - B */
5911 if (TREE_CODE (arg1) == NEGATE_EXPR)
5912 return fold (build (MINUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
5913 /* (-A) + B -> B - A */
5914 if (TREE_CODE (arg0) == NEGATE_EXPR)
5915 return fold (build (MINUS_EXPR, type, arg1, TREE_OPERAND (arg0, 0)));
5916 else if (! FLOAT_TYPE_P (type))
5918 if (integer_zerop (arg1))
5919 return non_lvalue (fold_convert (type, arg0));
5921 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
5922 with a constant, and the two constants have no bits in common,
5923 we should treat this as a BIT_IOR_EXPR since this may produce more
5925 if (TREE_CODE (arg0) == BIT_AND_EXPR
5926 && TREE_CODE (arg1) == BIT_AND_EXPR
5927 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
5928 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
5929 && integer_zerop (const_binop (BIT_AND_EXPR,
5930 TREE_OPERAND (arg0, 1),
5931 TREE_OPERAND (arg1, 1), 0)))
5933 code = BIT_IOR_EXPR;
5937 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
5938 (plus (plus (mult) (mult)) (foo)) so that we can
5939 take advantage of the factoring cases below. */
5940 if ((TREE_CODE (arg0) == PLUS_EXPR
5941 && TREE_CODE (arg1) == MULT_EXPR)
5942 || (TREE_CODE (arg1) == PLUS_EXPR
5943 && TREE_CODE (arg0) == MULT_EXPR))
5945 tree parg0, parg1, parg, marg;
5947 if (TREE_CODE (arg0) == PLUS_EXPR)
5948 parg = arg0, marg = arg1;
5950 parg = arg1, marg = arg0;
5951 parg0 = TREE_OPERAND (parg, 0);
5952 parg1 = TREE_OPERAND (parg, 1);
5956 if (TREE_CODE (parg0) == MULT_EXPR
5957 && TREE_CODE (parg1) != MULT_EXPR)
5958 return fold (build (PLUS_EXPR, type,
5959 fold (build (PLUS_EXPR, type,
5960 fold_convert (type, parg0),
5961 fold_convert (type, marg))),
5962 fold_convert (type, parg1)));
5963 if (TREE_CODE (parg0) != MULT_EXPR
5964 && TREE_CODE (parg1) == MULT_EXPR)
5965 return fold (build (PLUS_EXPR, type,
5966 fold (build (PLUS_EXPR, type,
5967 fold_convert (type, parg1),
5968 fold_convert (type, marg))),
5969 fold_convert (type, parg0)));
5972 if (TREE_CODE (arg0) == MULT_EXPR && TREE_CODE (arg1) == MULT_EXPR)
5974 tree arg00, arg01, arg10, arg11;
5975 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
5977 /* (A * C) + (B * C) -> (A+B) * C.
5978 We are most concerned about the case where C is a constant,
5979 but other combinations show up during loop reduction. Since
5980 it is not difficult, try all four possibilities. */
5982 arg00 = TREE_OPERAND (arg0, 0);
5983 arg01 = TREE_OPERAND (arg0, 1);
5984 arg10 = TREE_OPERAND (arg1, 0);
5985 arg11 = TREE_OPERAND (arg1, 1);
5988 if (operand_equal_p (arg01, arg11, 0))
5989 same = arg01, alt0 = arg00, alt1 = arg10;
5990 else if (operand_equal_p (arg00, arg10, 0))
5991 same = arg00, alt0 = arg01, alt1 = arg11;
5992 else if (operand_equal_p (arg00, arg11, 0))
5993 same = arg00, alt0 = arg01, alt1 = arg10;
5994 else if (operand_equal_p (arg01, arg10, 0))
5995 same = arg01, alt0 = arg00, alt1 = arg11;
5997 /* No identical multiplicands; see if we can find a common
5998 power-of-two factor in non-power-of-two multiplies. This
5999 can help in multi-dimensional array access. */
6000 else if (TREE_CODE (arg01) == INTEGER_CST
6001 && TREE_CODE (arg11) == INTEGER_CST
6002 && TREE_INT_CST_HIGH (arg01) == 0
6003 && TREE_INT_CST_HIGH (arg11) == 0)
6005 HOST_WIDE_INT int01, int11, tmp;
6006 int01 = TREE_INT_CST_LOW (arg01);
6007 int11 = TREE_INT_CST_LOW (arg11);
6009 /* Move min of absolute values to int11. */
6010 if ((int01 >= 0 ? int01 : -int01)
6011 < (int11 >= 0 ? int11 : -int11))
6013 tmp = int01, int01 = int11, int11 = tmp;
6014 alt0 = arg00, arg00 = arg10, arg10 = alt0;
6015 alt0 = arg01, arg01 = arg11, arg11 = alt0;
6018 if (exact_log2 (int11) > 0 && int01 % int11 == 0)
6020 alt0 = fold (build (MULT_EXPR, type, arg00,
6021 build_int_2 (int01 / int11, 0)));
6028 return fold (build (MULT_EXPR, type,
6029 fold (build (PLUS_EXPR, type, alt0, alt1)),
6035 /* See if ARG1 is zero and X + ARG1 reduces to X. */
6036 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
6037 return non_lvalue (fold_convert (type, arg0));
6039 /* Likewise if the operands are reversed. */
6040 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
6041 return non_lvalue (fold_convert (type, arg1));
6043 /* Convert x+x into x*2.0. */
6044 if (operand_equal_p (arg0, arg1, 0)
6045 && SCALAR_FLOAT_TYPE_P (type))
6046 return fold (build (MULT_EXPR, type, arg0,
6047 build_real (type, dconst2)));
6049 /* Convert x*c+x into x*(c+1). */
6050 if (flag_unsafe_math_optimizations
6051 && TREE_CODE (arg0) == MULT_EXPR
6052 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
6053 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg0, 1))
6054 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
6058 c = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
6059 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
6060 return fold (build (MULT_EXPR, type, arg1,
6061 build_real (type, c)));
6064 /* Convert x+x*c into x*(c+1). */
6065 if (flag_unsafe_math_optimizations
6066 && TREE_CODE (arg1) == MULT_EXPR
6067 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST
6068 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg1, 1))
6069 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
6073 c = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
6074 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
6075 return fold (build (MULT_EXPR, type, arg0,
6076 build_real (type, c)));
6079 /* Convert x*c1+x*c2 into x*(c1+c2). */
6080 if (flag_unsafe_math_optimizations
6081 && TREE_CODE (arg0) == MULT_EXPR
6082 && TREE_CODE (arg1) == MULT_EXPR
6083 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
6084 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg0, 1))
6085 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST
6086 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg1, 1))
6087 && operand_equal_p (TREE_OPERAND (arg0, 0),
6088 TREE_OPERAND (arg1, 0), 0))
6090 REAL_VALUE_TYPE c1, c2;
6092 c1 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
6093 c2 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
6094 real_arithmetic (&c1, PLUS_EXPR, &c1, &c2);
6095 return fold (build (MULT_EXPR, type,
6096 TREE_OPERAND (arg0, 0),
6097 build_real (type, c1)));
6102 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
6103 is a rotate of A by C1 bits. */
6104 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
6105 is a rotate of A by B bits. */
6107 enum tree_code code0, code1;
6108 code0 = TREE_CODE (arg0);
6109 code1 = TREE_CODE (arg1);
6110 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
6111 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
6112 && operand_equal_p (TREE_OPERAND (arg0, 0),
6113 TREE_OPERAND (arg1, 0), 0)
6114 && TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
6116 tree tree01, tree11;
6117 enum tree_code code01, code11;
6119 tree01 = TREE_OPERAND (arg0, 1);
6120 tree11 = TREE_OPERAND (arg1, 1);
6121 STRIP_NOPS (tree01);
6122 STRIP_NOPS (tree11);
6123 code01 = TREE_CODE (tree01);
6124 code11 = TREE_CODE (tree11);
6125 if (code01 == INTEGER_CST
6126 && code11 == INTEGER_CST
6127 && TREE_INT_CST_HIGH (tree01) == 0
6128 && TREE_INT_CST_HIGH (tree11) == 0
6129 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
6130 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
6131 return build (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
6132 code0 == LSHIFT_EXPR ? tree01 : tree11);
6133 else if (code11 == MINUS_EXPR)
6135 tree tree110, tree111;
6136 tree110 = TREE_OPERAND (tree11, 0);
6137 tree111 = TREE_OPERAND (tree11, 1);
6138 STRIP_NOPS (tree110);
6139 STRIP_NOPS (tree111);
6140 if (TREE_CODE (tree110) == INTEGER_CST
6141 && 0 == compare_tree_int (tree110,
6143 (TREE_TYPE (TREE_OPERAND
6145 && operand_equal_p (tree01, tree111, 0))
6146 return build ((code0 == LSHIFT_EXPR
6149 type, TREE_OPERAND (arg0, 0), tree01);
6151 else if (code01 == MINUS_EXPR)
6153 tree tree010, tree011;
6154 tree010 = TREE_OPERAND (tree01, 0);
6155 tree011 = TREE_OPERAND (tree01, 1);
6156 STRIP_NOPS (tree010);
6157 STRIP_NOPS (tree011);
6158 if (TREE_CODE (tree010) == INTEGER_CST
6159 && 0 == compare_tree_int (tree010,
6161 (TREE_TYPE (TREE_OPERAND
6163 && operand_equal_p (tree11, tree011, 0))
6164 return build ((code0 != LSHIFT_EXPR
6167 type, TREE_OPERAND (arg0, 0), tree11);
6173 /* In most languages, can't associate operations on floats through
6174 parentheses. Rather than remember where the parentheses were, we
6175 don't associate floats at all, unless the user has specified
6176 -funsafe-math-optimizations. */
6179 && (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
6181 tree var0, con0, lit0, minus_lit0;
6182 tree var1, con1, lit1, minus_lit1;
6184 /* Split both trees into variables, constants, and literals. Then
6185 associate each group together, the constants with literals,
6186 then the result with variables. This increases the chances of
6187 literals being recombined later and of generating relocatable
6188 expressions for the sum of a constant and literal. */
6189 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
6190 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
6191 code == MINUS_EXPR);
6193 /* Only do something if we found more than two objects. Otherwise,
6194 nothing has changed and we risk infinite recursion. */
6195 if (2 < ((var0 != 0) + (var1 != 0)
6196 + (con0 != 0) + (con1 != 0)
6197 + (lit0 != 0) + (lit1 != 0)
6198 + (minus_lit0 != 0) + (minus_lit1 != 0)))
6200 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
6201 if (code == MINUS_EXPR)
6204 var0 = associate_trees (var0, var1, code, type);
6205 con0 = associate_trees (con0, con1, code, type);
6206 lit0 = associate_trees (lit0, lit1, code, type);
6207 minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type);
6209 /* Preserve the MINUS_EXPR if the negative part of the literal is
6210 greater than the positive part. Otherwise, the multiplicative
6211 folding code (i.e extract_muldiv) may be fooled in case
6212 unsigned constants are subtracted, like in the following
6213 example: ((X*2 + 4) - 8U)/2. */
6214 if (minus_lit0 && lit0)
6216 if (TREE_CODE (lit0) == INTEGER_CST
6217 && TREE_CODE (minus_lit0) == INTEGER_CST
6218 && tree_int_cst_lt (lit0, minus_lit0))
6220 minus_lit0 = associate_trees (minus_lit0, lit0,
6226 lit0 = associate_trees (lit0, minus_lit0,
6234 return fold_convert (type,
6235 associate_trees (var0, minus_lit0,
6239 con0 = associate_trees (con0, minus_lit0,
6241 return fold_convert (type,
6242 associate_trees (var0, con0,
6247 con0 = associate_trees (con0, lit0, code, type);
6248 return fold_convert (type, associate_trees (var0, con0,
6255 t1 = const_binop (code, arg0, arg1, 0);
6256 if (t1 != NULL_TREE)
6258 /* The return value should always have
6259 the same type as the original expression. */
6260 if (TREE_TYPE (t1) != TREE_TYPE (t))
6261 t1 = fold_convert (TREE_TYPE (t), t1);
6268 /* A - (-B) -> A + B */
6269 if (TREE_CODE (arg1) == NEGATE_EXPR)
6270 return fold (build (PLUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
6271 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
6272 if (TREE_CODE (arg0) == NEGATE_EXPR
6273 && (FLOAT_TYPE_P (type)
6274 || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv))
6275 && negate_expr_p (arg1)
6276 && reorder_operands_p (arg0, arg1))
6277 return fold (build (MINUS_EXPR, type, negate_expr (arg1),
6278 TREE_OPERAND (arg0, 0)));
6280 if (! FLOAT_TYPE_P (type))
6282 if (! wins && integer_zerop (arg0))
6283 return negate_expr (fold_convert (type, arg1));
6284 if (integer_zerop (arg1))
6285 return non_lvalue (fold_convert (type, arg0));
6287 /* Fold A - (A & B) into ~B & A. */
6288 if (!TREE_SIDE_EFFECTS (arg0)
6289 && TREE_CODE (arg1) == BIT_AND_EXPR)
6291 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
6292 return fold (build (BIT_AND_EXPR, type,
6293 fold (build1 (BIT_NOT_EXPR, type,
6294 TREE_OPERAND (arg1, 0))),
6296 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
6297 return fold (build (BIT_AND_EXPR, type,
6298 fold (build1 (BIT_NOT_EXPR, type,
6299 TREE_OPERAND (arg1, 1))),
6303 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
6304 any power of 2 minus 1. */
6305 if (TREE_CODE (arg0) == BIT_AND_EXPR
6306 && TREE_CODE (arg1) == BIT_AND_EXPR
6307 && operand_equal_p (TREE_OPERAND (arg0, 0),
6308 TREE_OPERAND (arg1, 0), 0))
6310 tree mask0 = TREE_OPERAND (arg0, 1);
6311 tree mask1 = TREE_OPERAND (arg1, 1);
6312 tree tem = fold (build1 (BIT_NOT_EXPR, type, mask0));
6314 if (operand_equal_p (tem, mask1, 0))
6316 tem = fold (build (BIT_XOR_EXPR, type,
6317 TREE_OPERAND (arg0, 0), mask1));
6318 return fold (build (MINUS_EXPR, type, tem, mask1));
6323 /* See if ARG1 is zero and X - ARG1 reduces to X. */
6324 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
6325 return non_lvalue (fold_convert (type, arg0));
6327 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
6328 ARG0 is zero and X + ARG0 reduces to X, since that would mean
6329 (-ARG1 + ARG0) reduces to -ARG1. */
6330 else if (!wins && fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
6331 return negate_expr (fold_convert (type, arg1));
6333 /* Fold &x - &x. This can happen from &x.foo - &x.
6334 This is unsafe for certain floats even in non-IEEE formats.
6335 In IEEE, it is unsafe because it does wrong for NaNs.
6336 Also note that operand_equal_p is always false if an operand
6339 if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
6340 && operand_equal_p (arg0, arg1, 0))
6341 return fold_convert (type, integer_zero_node);
6343 /* A - B -> A + (-B) if B is easily negatable. */
6344 if (!wins && negate_expr_p (arg1)
6345 && (FLOAT_TYPE_P (type)
6346 || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv)))
6347 return fold (build (PLUS_EXPR, type, arg0, negate_expr (arg1)));
6349 if (TREE_CODE (arg0) == MULT_EXPR
6350 && TREE_CODE (arg1) == MULT_EXPR
6351 && (INTEGRAL_TYPE_P (type) || flag_unsafe_math_optimizations))
6353 /* (A * C) - (B * C) -> (A-B) * C. */
6354 if (operand_equal_p (TREE_OPERAND (arg0, 1),
6355 TREE_OPERAND (arg1, 1), 0))
6356 return fold (build (MULT_EXPR, type,
6357 fold (build (MINUS_EXPR, type,
6358 TREE_OPERAND (arg0, 0),
6359 TREE_OPERAND (arg1, 0))),
6360 TREE_OPERAND (arg0, 1)));
6361 /* (A * C1) - (A * C2) -> A * (C1-C2). */
6362 if (operand_equal_p (TREE_OPERAND (arg0, 0),
6363 TREE_OPERAND (arg1, 0), 0))
6364 return fold (build (MULT_EXPR, type,
6365 TREE_OPERAND (arg0, 0),
6366 fold (build (MINUS_EXPR, type,
6367 TREE_OPERAND (arg0, 1),
6368 TREE_OPERAND (arg1, 1)))));
6374 /* (-A) * (-B) -> A * B */
6375 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
6376 return fold (build (MULT_EXPR, type,
6377 TREE_OPERAND (arg0, 0),
6378 negate_expr (arg1)));
6379 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
6380 return fold (build (MULT_EXPR, type,
6382 TREE_OPERAND (arg1, 0)));
6384 if (! FLOAT_TYPE_P (type))
6386 if (integer_zerop (arg1))
6387 return omit_one_operand (type, arg1, arg0);
6388 if (integer_onep (arg1))
6389 return non_lvalue (fold_convert (type, arg0));
6391 /* (a * (1 << b)) is (a << b) */
6392 if (TREE_CODE (arg1) == LSHIFT_EXPR
6393 && integer_onep (TREE_OPERAND (arg1, 0)))
6394 return fold (build (LSHIFT_EXPR, type, arg0,
6395 TREE_OPERAND (arg1, 1)));
6396 if (TREE_CODE (arg0) == LSHIFT_EXPR
6397 && integer_onep (TREE_OPERAND (arg0, 0)))
6398 return fold (build (LSHIFT_EXPR, type, arg1,
6399 TREE_OPERAND (arg0, 1)));
6401 if (TREE_CODE (arg1) == INTEGER_CST
6402 && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0),
6403 fold_convert (type, arg1),
6405 return fold_convert (type, tem);
6410 /* Maybe fold x * 0 to 0. The expressions aren't the same
6411 when x is NaN, since x * 0 is also NaN. Nor are they the
6412 same in modes with signed zeros, since multiplying a
6413 negative value by 0 gives -0, not +0. */
6414 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
6415 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
6416 && real_zerop (arg1))
6417 return omit_one_operand (type, arg1, arg0);
6418 /* In IEEE floating point, x*1 is not equivalent to x for snans. */
6419 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
6420 && real_onep (arg1))
6421 return non_lvalue (fold_convert (type, arg0));
6423 /* Transform x * -1.0 into -x. */
6424 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
6425 && real_minus_onep (arg1))
6426 return fold (build1 (NEGATE_EXPR, type, arg0));
6428 /* Convert (C1/X)*C2 into (C1*C2)/X. */
6429 if (flag_unsafe_math_optimizations
6430 && TREE_CODE (arg0) == RDIV_EXPR
6431 && TREE_CODE (arg1) == REAL_CST
6432 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
6434 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
6437 return fold (build (RDIV_EXPR, type, tem,
6438 TREE_OPERAND (arg0, 1)));
6441 if (flag_unsafe_math_optimizations)
6443 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
6444 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
6446 /* Optimizations of sqrt(...)*sqrt(...). */
6447 if (fcode0 == fcode1 && BUILTIN_SQRT_P (fcode0))
6449 tree sqrtfn, arg, arglist;
6450 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
6451 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
6453 /* Optimize sqrt(x)*sqrt(x) as x. */
6454 if (operand_equal_p (arg00, arg10, 0)
6455 && ! HONOR_SNANS (TYPE_MODE (type)))
6458 /* Optimize sqrt(x)*sqrt(y) as sqrt(x*y). */
6459 sqrtfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
6460 arg = fold (build (MULT_EXPR, type, arg00, arg10));
6461 arglist = build_tree_list (NULL_TREE, arg);
6462 return build_function_call_expr (sqrtfn, arglist);
6465 /* Optimize expN(x)*expN(y) as expN(x+y). */
6466 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
6468 tree expfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
6469 tree arg = build (PLUS_EXPR, type,
6470 TREE_VALUE (TREE_OPERAND (arg0, 1)),
6471 TREE_VALUE (TREE_OPERAND (arg1, 1)));
6472 tree arglist = build_tree_list (NULL_TREE, fold (arg));
6473 return build_function_call_expr (expfn, arglist);
6476 /* Optimizations of pow(...)*pow(...). */
6477 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
6478 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
6479 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
6481 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
6482 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
6484 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
6485 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
6488 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
6489 if (operand_equal_p (arg01, arg11, 0))
6491 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
6492 tree arg = build (MULT_EXPR, type, arg00, arg10);
6493 tree arglist = tree_cons (NULL_TREE, fold (arg),
6494 build_tree_list (NULL_TREE,
6496 return build_function_call_expr (powfn, arglist);
6499 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
6500 if (operand_equal_p (arg00, arg10, 0))
6502 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
6503 tree arg = fold (build (PLUS_EXPR, type, arg01, arg11));
6504 tree arglist = tree_cons (NULL_TREE, arg00,
6505 build_tree_list (NULL_TREE,
6507 return build_function_call_expr (powfn, arglist);
6511 /* Optimize tan(x)*cos(x) as sin(x). */
6512 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
6513 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
6514 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
6515 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
6516 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
6517 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
6518 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
6519 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
6527 sinfn = implicit_built_in_decls[BUILT_IN_SIN];
6531 sinfn = implicit_built_in_decls[BUILT_IN_SINF];
6535 sinfn = implicit_built_in_decls[BUILT_IN_SINL];
6541 if (sinfn != NULL_TREE)
6542 return build_function_call_expr (sinfn,
6543 TREE_OPERAND (arg0, 1));
6546 /* Optimize x*pow(x,c) as pow(x,c+1). */
6547 if (fcode1 == BUILT_IN_POW
6548 || fcode1 == BUILT_IN_POWF
6549 || fcode1 == BUILT_IN_POWL)
6551 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
6552 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
6554 if (TREE_CODE (arg11) == REAL_CST
6555 && ! TREE_CONSTANT_OVERFLOW (arg11)
6556 && operand_equal_p (arg0, arg10, 0))
6558 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
6562 c = TREE_REAL_CST (arg11);
6563 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
6564 arg = build_real (type, c);
6565 arglist = build_tree_list (NULL_TREE, arg);
6566 arglist = tree_cons (NULL_TREE, arg0, arglist);
6567 return build_function_call_expr (powfn, arglist);
6571 /* Optimize pow(x,c)*x as pow(x,c+1). */
6572 if (fcode0 == BUILT_IN_POW
6573 || fcode0 == BUILT_IN_POWF
6574 || fcode0 == BUILT_IN_POWL)
6576 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
6577 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
6579 if (TREE_CODE (arg01) == REAL_CST
6580 && ! TREE_CONSTANT_OVERFLOW (arg01)
6581 && operand_equal_p (arg1, arg00, 0))
6583 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
6587 c = TREE_REAL_CST (arg01);
6588 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
6589 arg = build_real (type, c);
6590 arglist = build_tree_list (NULL_TREE, arg);
6591 arglist = tree_cons (NULL_TREE, arg1, arglist);
6592 return build_function_call_expr (powfn, arglist);
6596 /* Optimize x*x as pow(x,2.0), which is expanded as x*x. */
6598 && operand_equal_p (arg0, arg1, 0))
6602 if (type == double_type_node)
6603 powfn = implicit_built_in_decls[BUILT_IN_POW];
6604 else if (type == float_type_node)
6605 powfn = implicit_built_in_decls[BUILT_IN_POWF];
6606 else if (type == long_double_type_node)
6607 powfn = implicit_built_in_decls[BUILT_IN_POWL];
6613 tree arg = build_real (type, dconst2);
6614 tree arglist = build_tree_list (NULL_TREE, arg);
6615 arglist = tree_cons (NULL_TREE, arg0, arglist);
6616 return build_function_call_expr (powfn, arglist);
6625 if (integer_all_onesp (arg1))
6626 return omit_one_operand (type, arg1, arg0);
6627 if (integer_zerop (arg1))
6628 return non_lvalue (fold_convert (type, arg0));
6629 if (operand_equal_p (arg0, arg1, 0))
6630 return non_lvalue (fold_convert (type, arg0));
6631 t1 = distribute_bit_expr (code, type, arg0, arg1);
6632 if (t1 != NULL_TREE)
6635 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
6637 This results in more efficient code for machines without a NAND
6638 instruction. Combine will canonicalize to the first form
6639 which will allow use of NAND instructions provided by the
6640 backend if they exist. */
6641 if (TREE_CODE (arg0) == BIT_NOT_EXPR
6642 && TREE_CODE (arg1) == BIT_NOT_EXPR)
6644 return fold (build1 (BIT_NOT_EXPR, type,
6645 build (BIT_AND_EXPR, type,
6646 TREE_OPERAND (arg0, 0),
6647 TREE_OPERAND (arg1, 0))));
6650 /* See if this can be simplified into a rotate first. If that
6651 is unsuccessful continue in the association code. */
6655 if (integer_zerop (arg1))
6656 return non_lvalue (fold_convert (type, arg0));
6657 if (integer_all_onesp (arg1))
6658 return fold (build1 (BIT_NOT_EXPR, type, arg0));
6659 if (operand_equal_p (arg0, arg1, 0))
6660 return omit_one_operand (type, integer_zero_node, arg0);
6662 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
6663 with a constant, and the two constants have no bits in common,
6664 we should treat this as a BIT_IOR_EXPR since this may produce more
6666 if (TREE_CODE (arg0) == BIT_AND_EXPR
6667 && TREE_CODE (arg1) == BIT_AND_EXPR
6668 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
6669 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
6670 && integer_zerop (const_binop (BIT_AND_EXPR,
6671 TREE_OPERAND (arg0, 1),
6672 TREE_OPERAND (arg1, 1), 0)))
6674 code = BIT_IOR_EXPR;
6678 /* See if this can be simplified into a rotate first. If that
6679 is unsuccessful continue in the association code. */
6683 if (integer_all_onesp (arg1))
6684 return non_lvalue (fold_convert (type, arg0));
6685 if (integer_zerop (arg1))
6686 return omit_one_operand (type, arg1, arg0);
6687 if (operand_equal_p (arg0, arg1, 0))
6688 return non_lvalue (fold_convert (type, arg0));
6689 t1 = distribute_bit_expr (code, type, arg0, arg1);
6690 if (t1 != NULL_TREE)
6692 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
6693 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
6694 && TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
6697 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
6699 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
6700 && (~TREE_INT_CST_LOW (arg1)
6701 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
6702 return fold_convert (type, TREE_OPERAND (arg0, 0));
6705 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
6707 This results in more efficient code for machines without a NOR
6708 instruction. Combine will canonicalize to the first form
6709 which will allow use of NOR instructions provided by the
6710 backend if they exist. */
6711 if (TREE_CODE (arg0) == BIT_NOT_EXPR
6712 && TREE_CODE (arg1) == BIT_NOT_EXPR)
6714 return fold (build1 (BIT_NOT_EXPR, type,
6715 build (BIT_IOR_EXPR, type,
6716 TREE_OPERAND (arg0, 0),
6717 TREE_OPERAND (arg1, 0))));
6723 /* Don't touch a floating-point divide by zero unless the mode
6724 of the constant can represent infinity. */
6725 if (TREE_CODE (arg1) == REAL_CST
6726 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
6727 && real_zerop (arg1))
6730 /* (-A) / (-B) -> A / B */
6731 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
6732 return fold (build (RDIV_EXPR, type,
6733 TREE_OPERAND (arg0, 0),
6734 negate_expr (arg1)));
6735 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
6736 return fold (build (RDIV_EXPR, type,
6738 TREE_OPERAND (arg1, 0)));
6740 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
6741 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
6742 && real_onep (arg1))
6743 return non_lvalue (fold_convert (type, arg0));
6745 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
6746 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
6747 && real_minus_onep (arg1))
6748 return non_lvalue (fold_convert (type, negate_expr (arg0)));
6750 /* If ARG1 is a constant, we can convert this to a multiply by the
6751 reciprocal. This does not have the same rounding properties,
6752 so only do this if -funsafe-math-optimizations. We can actually
6753 always safely do it if ARG1 is a power of two, but it's hard to
6754 tell if it is or not in a portable manner. */
6755 if (TREE_CODE (arg1) == REAL_CST)
6757 if (flag_unsafe_math_optimizations
6758 && 0 != (tem = const_binop (code, build_real (type, dconst1),
6760 return fold (build (MULT_EXPR, type, arg0, tem));
6761 /* Find the reciprocal if optimizing and the result is exact. */
6765 r = TREE_REAL_CST (arg1);
6766 if (exact_real_inverse (TYPE_MODE(TREE_TYPE(arg0)), &r))
6768 tem = build_real (type, r);
6769 return fold (build (MULT_EXPR, type, arg0, tem));
6773 /* Convert A/B/C to A/(B*C). */
6774 if (flag_unsafe_math_optimizations
6775 && TREE_CODE (arg0) == RDIV_EXPR)
6776 return fold (build (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
6777 fold (build (MULT_EXPR, type,
6778 TREE_OPERAND (arg0, 1), arg1))));
6780 /* Convert A/(B/C) to (A/B)*C. */
6781 if (flag_unsafe_math_optimizations
6782 && TREE_CODE (arg1) == RDIV_EXPR)
6783 return fold (build (MULT_EXPR, type,
6784 fold (build (RDIV_EXPR, type, arg0,
6785 TREE_OPERAND (arg1, 0))),
6786 TREE_OPERAND (arg1, 1)));
6788 /* Convert C1/(X*C2) into (C1/C2)/X. */
6789 if (flag_unsafe_math_optimizations
6790 && TREE_CODE (arg1) == MULT_EXPR
6791 && TREE_CODE (arg0) == REAL_CST
6792 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
6794 tree tem = const_binop (RDIV_EXPR, arg0,
6795 TREE_OPERAND (arg1, 1), 0);
6797 return fold (build (RDIV_EXPR, type, tem,
6798 TREE_OPERAND (arg1, 0)));
6801 if (flag_unsafe_math_optimizations)
6803 enum built_in_function fcode = builtin_mathfn_code (arg1);
6804 /* Optimize x/expN(y) into x*expN(-y). */
6805 if (BUILTIN_EXPONENT_P (fcode))
6807 tree expfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
6808 tree arg = build1 (NEGATE_EXPR, type,
6809 TREE_VALUE (TREE_OPERAND (arg1, 1)));
6810 tree arglist = build_tree_list (NULL_TREE, fold (arg));
6811 arg1 = build_function_call_expr (expfn, arglist);
6812 return fold (build (MULT_EXPR, type, arg0, arg1));
6815 /* Optimize x/pow(y,z) into x*pow(y,-z). */
6816 if (fcode == BUILT_IN_POW
6817 || fcode == BUILT_IN_POWF
6818 || fcode == BUILT_IN_POWL)
6820 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
6821 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
6822 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1, 1)));
6823 tree neg11 = fold (build1 (NEGATE_EXPR, type, arg11));
6824 tree arglist = tree_cons(NULL_TREE, arg10,
6825 build_tree_list (NULL_TREE, neg11));
6826 arg1 = build_function_call_expr (powfn, arglist);
6827 return fold (build (MULT_EXPR, type, arg0, arg1));
6831 if (flag_unsafe_math_optimizations)
6833 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
6834 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
6836 /* Optimize sin(x)/cos(x) as tan(x). */
6837 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
6838 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
6839 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
6840 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
6841 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
6845 if (fcode0 == BUILT_IN_SIN)
6846 tanfn = implicit_built_in_decls[BUILT_IN_TAN];
6847 else if (fcode0 == BUILT_IN_SINF)
6848 tanfn = implicit_built_in_decls[BUILT_IN_TANF];
6849 else if (fcode0 == BUILT_IN_SINL)
6850 tanfn = implicit_built_in_decls[BUILT_IN_TANL];
6854 if (tanfn != NULL_TREE)
6855 return build_function_call_expr (tanfn,
6856 TREE_OPERAND (arg0, 1));
6859 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
6860 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
6861 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
6862 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
6863 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
6864 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
6868 if (fcode0 == BUILT_IN_COS)
6869 tanfn = implicit_built_in_decls[BUILT_IN_TAN];
6870 else if (fcode0 == BUILT_IN_COSF)
6871 tanfn = implicit_built_in_decls[BUILT_IN_TANF];
6872 else if (fcode0 == BUILT_IN_COSL)
6873 tanfn = implicit_built_in_decls[BUILT_IN_TANL];
6877 if (tanfn != NULL_TREE)
6879 tree tmp = TREE_OPERAND (arg0, 1);
6880 tmp = build_function_call_expr (tanfn, tmp);
6881 return fold (build (RDIV_EXPR, type,
6882 build_real (type, dconst1),
6887 /* Optimize pow(x,c)/x as pow(x,c-1). */
6888 if (fcode0 == BUILT_IN_POW
6889 || fcode0 == BUILT_IN_POWF
6890 || fcode0 == BUILT_IN_POWL)
6892 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
6893 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0, 1)));
6894 if (TREE_CODE (arg01) == REAL_CST
6895 && ! TREE_CONSTANT_OVERFLOW (arg01)
6896 && operand_equal_p (arg1, arg00, 0))
6898 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
6902 c = TREE_REAL_CST (arg01);
6903 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
6904 arg = build_real (type, c);
6905 arglist = build_tree_list (NULL_TREE, arg);
6906 arglist = tree_cons (NULL_TREE, arg1, arglist);
6907 return build_function_call_expr (powfn, arglist);
6913 case TRUNC_DIV_EXPR:
6914 case ROUND_DIV_EXPR:
6915 case FLOOR_DIV_EXPR:
6917 case EXACT_DIV_EXPR:
6918 if (integer_onep (arg1))
6919 return non_lvalue (fold_convert (type, arg0));
6920 if (integer_zerop (arg1))
6923 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
6924 operation, EXACT_DIV_EXPR.
6926 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
6927 At one time others generated faster code, it's not clear if they do
6928 after the last round to changes to the DIV code in expmed.c. */
6929 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
6930 && multiple_of_p (type, arg0, arg1))
6931 return fold (build (EXACT_DIV_EXPR, type, arg0, arg1));
6933 if (TREE_CODE (arg1) == INTEGER_CST
6934 && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0), arg1,
6936 return fold_convert (type, tem);
6941 case FLOOR_MOD_EXPR:
6942 case ROUND_MOD_EXPR:
6943 case TRUNC_MOD_EXPR:
6944 if (integer_onep (arg1))
6945 return omit_one_operand (type, integer_zero_node, arg0);
6946 if (integer_zerop (arg1))
6949 if (TREE_CODE (arg1) == INTEGER_CST
6950 && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0), arg1,
6952 return fold_convert (type, tem);
6958 if (integer_all_onesp (arg0))
6959 return omit_one_operand (type, arg0, arg1);
6963 /* Optimize -1 >> x for arithmetic right shifts. */
6964 if (integer_all_onesp (arg0) && ! TREE_UNSIGNED (type))
6965 return omit_one_operand (type, arg0, arg1);
6966 /* ... fall through ... */
6970 if (integer_zerop (arg1))
6971 return non_lvalue (fold_convert (type, arg0));
6972 if (integer_zerop (arg0))
6973 return omit_one_operand (type, arg0, arg1);
6975 /* Since negative shift count is not well-defined,
6976 don't try to compute it in the compiler. */
6977 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
6979 /* Rewrite an LROTATE_EXPR by a constant into an
6980 RROTATE_EXPR by a new constant. */
6981 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
6983 tree tem = build_int_2 (GET_MODE_BITSIZE (TYPE_MODE (type)), 0);
6984 tem = fold_convert (TREE_TYPE (arg1), tem);
6985 tem = const_binop (MINUS_EXPR, tem, arg1, 0);
6986 return fold (build (RROTATE_EXPR, type, arg0, tem));
6989 /* If we have a rotate of a bit operation with the rotate count and
6990 the second operand of the bit operation both constant,
6991 permute the two operations. */
6992 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
6993 && (TREE_CODE (arg0) == BIT_AND_EXPR
6994 || TREE_CODE (arg0) == BIT_IOR_EXPR
6995 || TREE_CODE (arg0) == BIT_XOR_EXPR)
6996 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
6997 return fold (build (TREE_CODE (arg0), type,
6998 fold (build (code, type,
6999 TREE_OPERAND (arg0, 0), arg1)),
7000 fold (build (code, type,
7001 TREE_OPERAND (arg0, 1), arg1))));
7003 /* Two consecutive rotates adding up to the width of the mode can
7005 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
7006 && TREE_CODE (arg0) == RROTATE_EXPR
7007 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7008 && TREE_INT_CST_HIGH (arg1) == 0
7009 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
7010 && ((TREE_INT_CST_LOW (arg1)
7011 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
7012 == (unsigned int) GET_MODE_BITSIZE (TYPE_MODE (type))))
7013 return TREE_OPERAND (arg0, 0);
7018 if (operand_equal_p (arg0, arg1, 0))
7019 return omit_one_operand (type, arg0, arg1);
7020 if (INTEGRAL_TYPE_P (type)
7021 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), 1))
7022 return omit_one_operand (type, arg1, arg0);
7026 if (operand_equal_p (arg0, arg1, 0))
7027 return omit_one_operand (type, arg0, arg1);
7028 if (INTEGRAL_TYPE_P (type)
7029 && TYPE_MAX_VALUE (type)
7030 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), 1))
7031 return omit_one_operand (type, arg1, arg0);
7034 case TRUTH_NOT_EXPR:
7035 /* Note that the operand of this must be an int
7036 and its values must be 0 or 1.
7037 ("true" is a fixed value perhaps depending on the language,
7038 but we don't handle values other than 1 correctly yet.) */
7039 tem = invert_truthvalue (arg0);
7040 /* Avoid infinite recursion. */
7041 if (TREE_CODE (tem) == TRUTH_NOT_EXPR)
7043 tem = fold_single_bit_test (code, arg0, arg1, type);
7048 return fold_convert (type, tem);
7050 case TRUTH_ANDIF_EXPR:
7051 /* Note that the operands of this must be ints
7052 and their values must be 0 or 1.
7053 ("true" is a fixed value perhaps depending on the language.) */
7054 /* If first arg is constant zero, return it. */
7055 if (integer_zerop (arg0))
7056 return fold_convert (type, arg0);
7057 case TRUTH_AND_EXPR:
7058 /* If either arg is constant true, drop it. */
7059 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
7060 return non_lvalue (fold_convert (type, arg1));
7061 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
7062 /* Preserve sequence points. */
7063 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
7064 return non_lvalue (fold_convert (type, arg0));
7065 /* If second arg is constant zero, result is zero, but first arg
7066 must be evaluated. */
7067 if (integer_zerop (arg1))
7068 return omit_one_operand (type, arg1, arg0);
7069 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
7070 case will be handled here. */
7071 if (integer_zerop (arg0))
7072 return omit_one_operand (type, arg0, arg1);
7075 /* We only do these simplifications if we are optimizing. */
7079 /* Check for things like (A || B) && (A || C). We can convert this
7080 to A || (B && C). Note that either operator can be any of the four
7081 truth and/or operations and the transformation will still be
7082 valid. Also note that we only care about order for the
7083 ANDIF and ORIF operators. If B contains side effects, this
7084 might change the truth-value of A. */
7085 if (TREE_CODE (arg0) == TREE_CODE (arg1)
7086 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
7087 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
7088 || TREE_CODE (arg0) == TRUTH_AND_EXPR
7089 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
7090 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
7092 tree a00 = TREE_OPERAND (arg0, 0);
7093 tree a01 = TREE_OPERAND (arg0, 1);
7094 tree a10 = TREE_OPERAND (arg1, 0);
7095 tree a11 = TREE_OPERAND (arg1, 1);
7096 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
7097 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
7098 && (code == TRUTH_AND_EXPR
7099 || code == TRUTH_OR_EXPR));
7101 if (operand_equal_p (a00, a10, 0))
7102 return fold (build (TREE_CODE (arg0), type, a00,
7103 fold (build (code, type, a01, a11))));
7104 else if (commutative && operand_equal_p (a00, a11, 0))
7105 return fold (build (TREE_CODE (arg0), type, a00,
7106 fold (build (code, type, a01, a10))));
7107 else if (commutative && operand_equal_p (a01, a10, 0))
7108 return fold (build (TREE_CODE (arg0), type, a01,
7109 fold (build (code, type, a00, a11))));
7111 /* This case if tricky because we must either have commutative
7112 operators or else A10 must not have side-effects. */
7114 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
7115 && operand_equal_p (a01, a11, 0))
7116 return fold (build (TREE_CODE (arg0), type,
7117 fold (build (code, type, a00, a10)),
7121 /* See if we can build a range comparison. */
7122 if (0 != (tem = fold_range_test (t)))
7125 /* Check for the possibility of merging component references. If our
7126 lhs is another similar operation, try to merge its rhs with our
7127 rhs. Then try to merge our lhs and rhs. */
7128 if (TREE_CODE (arg0) == code
7129 && 0 != (tem = fold_truthop (code, type,
7130 TREE_OPERAND (arg0, 1), arg1)))
7131 return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));
7133 if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
7138 case TRUTH_ORIF_EXPR:
7139 /* Note that the operands of this must be ints
7140 and their values must be 0 or true.
7141 ("true" is a fixed value perhaps depending on the language.) */
7142 /* If first arg is constant true, return it. */
7143 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
7144 return fold_convert (type, arg0);
7146 /* If either arg is constant zero, drop it. */
7147 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
7148 return non_lvalue (fold_convert (type, arg1));
7149 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
7150 /* Preserve sequence points. */
7151 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
7152 return non_lvalue (fold_convert (type, arg0));
7153 /* If second arg is constant true, result is true, but we must
7154 evaluate first arg. */
7155 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
7156 return omit_one_operand (type, arg1, arg0);
7157 /* Likewise for first arg, but note this only occurs here for
7159 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
7160 return omit_one_operand (type, arg0, arg1);
7163 case TRUTH_XOR_EXPR:
7164 /* If either arg is constant zero, drop it. */
7165 if (integer_zerop (arg0))
7166 return non_lvalue (fold_convert (type, arg1));
7167 if (integer_zerop (arg1))
7168 return non_lvalue (fold_convert (type, arg0));
7169 /* If either arg is constant true, this is a logical inversion. */
7170 if (integer_onep (arg0))
7171 return non_lvalue (fold_convert (type, invert_truthvalue (arg1)));
7172 if (integer_onep (arg1))
7173 return non_lvalue (fold_convert (type, invert_truthvalue (arg0)));
7182 /* If one arg is a real or integer constant, put it last. */
7183 if (tree_swap_operands_p (arg0, arg1, true))
7184 return fold (build (swap_tree_comparison (code), type, arg1, arg0));
7186 /* If this is an equality comparison of the address of a non-weak
7187 object against zero, then we know the result. */
7188 if ((code == EQ_EXPR || code == NE_EXPR)
7189 && TREE_CODE (arg0) == ADDR_EXPR
7190 && DECL_P (TREE_OPERAND (arg0, 0))
7191 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
7192 && integer_zerop (arg1))
7194 if (code == EQ_EXPR)
7195 return integer_zero_node;
7197 return integer_one_node;
7200 /* If this is an equality comparison of the address of two non-weak,
7201 unaliased symbols neither of which are extern (since we do not
7202 have access to attributes for externs), then we know the result. */
7203 if ((code == EQ_EXPR || code == NE_EXPR)
7204 && TREE_CODE (arg0) == ADDR_EXPR
7205 && DECL_P (TREE_OPERAND (arg0, 0))
7206 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
7207 && ! lookup_attribute ("alias",
7208 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
7209 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
7210 && TREE_CODE (arg1) == ADDR_EXPR
7211 && DECL_P (TREE_OPERAND (arg1, 0))
7212 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
7213 && ! lookup_attribute ("alias",
7214 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
7215 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
7217 if (code == EQ_EXPR)
7218 return (operand_equal_p (arg0, arg1, 0)
7219 ? integer_one_node : integer_zero_node);
7221 return (operand_equal_p (arg0, arg1, 0)
7222 ? integer_zero_node : integer_one_node);
7225 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
7227 tree targ0 = strip_float_extensions (arg0);
7228 tree targ1 = strip_float_extensions (arg1);
7229 tree newtype = TREE_TYPE (targ0);
7231 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
7232 newtype = TREE_TYPE (targ1);
7234 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
7235 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
7236 return fold (build (code, type, fold_convert (newtype, targ0),
7237 fold_convert (newtype, targ1)));
7239 /* (-a) CMP (-b) -> b CMP a */
7240 if (TREE_CODE (arg0) == NEGATE_EXPR
7241 && TREE_CODE (arg1) == NEGATE_EXPR)
7242 return fold (build (code, type, TREE_OPERAND (arg1, 0),
7243 TREE_OPERAND (arg0, 0)));
7245 if (TREE_CODE (arg1) == REAL_CST)
7247 REAL_VALUE_TYPE cst;
7248 cst = TREE_REAL_CST (arg1);
7250 /* (-a) CMP CST -> a swap(CMP) (-CST) */
7251 if (TREE_CODE (arg0) == NEGATE_EXPR)
7253 fold (build (swap_tree_comparison (code), type,
7254 TREE_OPERAND (arg0, 0),
7255 build_real (TREE_TYPE (arg1),
7256 REAL_VALUE_NEGATE (cst))));
7258 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
7259 /* a CMP (-0) -> a CMP 0 */
7260 if (REAL_VALUE_MINUS_ZERO (cst))
7261 return fold (build (code, type, arg0,
7262 build_real (TREE_TYPE (arg1), dconst0)));
7264 /* x != NaN is always true, other ops are always false. */
7265 if (REAL_VALUE_ISNAN (cst)
7266 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
7268 t = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
7269 return omit_one_operand (type, fold_convert (type, t), arg0);
7272 /* Fold comparisons against infinity. */
7273 if (REAL_VALUE_ISINF (cst))
7275 tem = fold_inf_compare (code, type, arg0, arg1);
7276 if (tem != NULL_TREE)
7281 /* If this is a comparison of a real constant with a PLUS_EXPR
7282 or a MINUS_EXPR of a real constant, we can convert it into a
7283 comparison with a revised real constant as long as no overflow
7284 occurs when unsafe_math_optimizations are enabled. */
7285 if (flag_unsafe_math_optimizations
7286 && TREE_CODE (arg1) == REAL_CST
7287 && (TREE_CODE (arg0) == PLUS_EXPR
7288 || TREE_CODE (arg0) == MINUS_EXPR)
7289 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
7290 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
7291 ? MINUS_EXPR : PLUS_EXPR,
7292 arg1, TREE_OPERAND (arg0, 1), 0))
7293 && ! TREE_CONSTANT_OVERFLOW (tem))
7294 return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));
7296 /* Likewise, we can simplify a comparison of a real constant with
7297 a MINUS_EXPR whose first operand is also a real constant, i.e.
7298 (c1 - x) < c2 becomes x > c1-c2. */
7299 if (flag_unsafe_math_optimizations
7300 && TREE_CODE (arg1) == REAL_CST
7301 && TREE_CODE (arg0) == MINUS_EXPR
7302 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
7303 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
7305 && ! TREE_CONSTANT_OVERFLOW (tem))
7306 return fold (build (swap_tree_comparison (code), type,
7307 TREE_OPERAND (arg0, 1), tem));
7309 /* Fold comparisons against built-in math functions. */
7310 if (TREE_CODE (arg1) == REAL_CST
7311 && flag_unsafe_math_optimizations
7312 && ! flag_errno_math)
7314 enum built_in_function fcode = builtin_mathfn_code (arg0);
7316 if (fcode != END_BUILTINS)
7318 tem = fold_mathfn_compare (fcode, code, type, arg0, arg1);
7319 if (tem != NULL_TREE)
7325 /* Convert foo++ == CONST into ++foo == CONST + INCR. */
7326 if (TREE_CONSTANT (arg1)
7327 && (TREE_CODE (arg0) == POSTINCREMENT_EXPR
7328 || TREE_CODE (arg0) == POSTDECREMENT_EXPR)
7329 /* This optimization is invalid for ordered comparisons
7330 if CONST+INCR overflows or if foo+incr might overflow.
7331 This optimization is invalid for floating point due to rounding.
7332 For pointer types we assume overflow doesn't happen. */
7333 && (POINTER_TYPE_P (TREE_TYPE (arg0))
7334 || (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
7335 && (code == EQ_EXPR || code == NE_EXPR))))
7337 tree varop, newconst;
7339 if (TREE_CODE (arg0) == POSTINCREMENT_EXPR)
7341 newconst = fold (build2 (PLUS_EXPR, TREE_TYPE (arg0),
7342 arg1, TREE_OPERAND (arg0, 1)));
7343 varop = build2 (PREINCREMENT_EXPR, TREE_TYPE (arg0),
7344 TREE_OPERAND (arg0, 0),
7345 TREE_OPERAND (arg0, 1));
7349 newconst = fold (build2 (MINUS_EXPR, TREE_TYPE (arg0),
7350 arg1, TREE_OPERAND (arg0, 1)));
7351 varop = build2 (PREDECREMENT_EXPR, TREE_TYPE (arg0),
7352 TREE_OPERAND (arg0, 0),
7353 TREE_OPERAND (arg0, 1));
7357 /* If VAROP is a reference to a bitfield, we must mask
7358 the constant by the width of the field. */
7359 if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
7360 && DECL_BIT_FIELD(TREE_OPERAND (TREE_OPERAND (varop, 0), 1)))
7362 tree fielddecl = TREE_OPERAND (TREE_OPERAND (varop, 0), 1);
7363 int size = TREE_INT_CST_LOW (DECL_SIZE (fielddecl));
7364 tree folded_compare;
7367 /* First check whether the comparison would come out
7368 always the same. If we don't do that we would
7369 change the meaning with the masking. */
7370 folded_compare = fold (build2 (code, type,
7371 TREE_OPERAND (varop, 0),
7373 if (integer_zerop (folded_compare)
7374 || integer_onep (folded_compare))
7375 return omit_one_operand (type, folded_compare, varop);
7377 if (size < HOST_BITS_PER_WIDE_INT)
7379 unsigned HOST_WIDE_INT lo = ((unsigned HOST_WIDE_INT) 1
7381 mask = build_int_2 (lo, 0);
7383 else if (size < 2 * HOST_BITS_PER_WIDE_INT)
7385 HOST_WIDE_INT hi = ((HOST_WIDE_INT) 1
7386 << (size - HOST_BITS_PER_WIDE_INT)) - 1;
7387 mask = build_int_2 (~0, hi);
7392 mask = fold_convert (TREE_TYPE (varop), mask);
7393 newconst = fold (build2 (BIT_AND_EXPR, TREE_TYPE (varop),
7398 return fold (build2 (code, type, varop, newconst));
7401 /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
7402 This transformation affects the cases which are handled in later
7403 optimizations involving comparisons with non-negative constants. */
7404 if (TREE_CODE (arg1) == INTEGER_CST
7405 && TREE_CODE (arg0) != INTEGER_CST
7406 && tree_int_cst_sgn (arg1) > 0)
7411 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
7412 return fold (build (GT_EXPR, type, arg0, arg1));
7415 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
7416 return fold (build (LE_EXPR, type, arg0, arg1));
7423 /* Comparisons with the highest or lowest possible integer of
7424 the specified size will have known values. */
7426 int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg1)));
7428 if (TREE_CODE (arg1) == INTEGER_CST
7429 && ! TREE_CONSTANT_OVERFLOW (arg1)
7430 && width <= HOST_BITS_PER_WIDE_INT
7431 && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
7432 || POINTER_TYPE_P (TREE_TYPE (arg1))))
7434 unsigned HOST_WIDE_INT signed_max;
7435 unsigned HOST_WIDE_INT max, min;
7437 signed_max = ((unsigned HOST_WIDE_INT) 1 << (width - 1)) - 1;
7439 if (TREE_UNSIGNED (TREE_TYPE (arg1)))
7441 max = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
7447 min = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
7450 if (TREE_INT_CST_HIGH (arg1) == 0
7451 && TREE_INT_CST_LOW (arg1) == max)
7455 return omit_one_operand (type,
7460 return fold (build (EQ_EXPR, type, arg0, arg1));
7463 return omit_one_operand (type,
7468 return fold (build (NE_EXPR, type, arg0, arg1));
7470 /* The GE_EXPR and LT_EXPR cases above are not normally
7471 reached because of previous transformations. */
7476 else if (TREE_INT_CST_HIGH (arg1) == 0
7477 && TREE_INT_CST_LOW (arg1) == max - 1)
7481 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
7482 return fold (build (EQ_EXPR, type, arg0, arg1));
7484 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
7485 return fold (build (NE_EXPR, type, arg0, arg1));
7489 else if (TREE_INT_CST_HIGH (arg1) == (min ? -1 : 0)
7490 && TREE_INT_CST_LOW (arg1) == min)
7494 return omit_one_operand (type,
7499 return fold (build (EQ_EXPR, type, arg0, arg1));
7502 return omit_one_operand (type,
7507 return fold (build (NE_EXPR, type, arg0, arg1));
7512 else if (TREE_INT_CST_HIGH (arg1) == (min ? -1 : 0)
7513 && TREE_INT_CST_LOW (arg1) == min + 1)
7517 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
7518 return fold (build (NE_EXPR, type, arg0, arg1));
7520 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
7521 return fold (build (EQ_EXPR, type, arg0, arg1));
7526 else if (TREE_INT_CST_HIGH (arg1) == 0
7527 && TREE_INT_CST_LOW (arg1) == signed_max
7528 && TREE_UNSIGNED (TREE_TYPE (arg1))
7529 /* signed_type does not work on pointer types. */
7530 && INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
7532 /* The following case also applies to X < signed_max+1
7533 and X >= signed_max+1 because previous transformations. */
7534 if (code == LE_EXPR || code == GT_EXPR)
7537 st0 = lang_hooks.types.signed_type (TREE_TYPE (arg0));
7538 st1 = lang_hooks.types.signed_type (TREE_TYPE (arg1));
7540 (build (code == LE_EXPR ? GE_EXPR: LT_EXPR,
7541 type, fold_convert (st0, arg0),
7542 fold_convert (st1, integer_zero_node)));
7548 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
7549 a MINUS_EXPR of a constant, we can convert it into a comparison with
7550 a revised constant as long as no overflow occurs. */
7551 if ((code == EQ_EXPR || code == NE_EXPR)
7552 && TREE_CODE (arg1) == INTEGER_CST
7553 && (TREE_CODE (arg0) == PLUS_EXPR
7554 || TREE_CODE (arg0) == MINUS_EXPR)
7555 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7556 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
7557 ? MINUS_EXPR : PLUS_EXPR,
7558 arg1, TREE_OPERAND (arg0, 1), 0))
7559 && ! TREE_CONSTANT_OVERFLOW (tem))
7560 return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));
7562 /* Similarly for a NEGATE_EXPR. */
7563 else if ((code == EQ_EXPR || code == NE_EXPR)
7564 && TREE_CODE (arg0) == NEGATE_EXPR
7565 && TREE_CODE (arg1) == INTEGER_CST
7566 && 0 != (tem = negate_expr (arg1))
7567 && TREE_CODE (tem) == INTEGER_CST
7568 && ! TREE_CONSTANT_OVERFLOW (tem))
7569 return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));
7571 /* If we have X - Y == 0, we can convert that to X == Y and similarly
7572 for !=. Don't do this for ordered comparisons due to overflow. */
7573 else if ((code == NE_EXPR || code == EQ_EXPR)
7574 && integer_zerop (arg1) && TREE_CODE (arg0) == MINUS_EXPR)
7575 return fold (build (code, type,
7576 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1)));
7578 /* If we are widening one operand of an integer comparison,
7579 see if the other operand is similarly being widened. Perhaps we
7580 can do the comparison in the narrower type. */
7581 else if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
7582 && TREE_CODE (arg0) == NOP_EXPR
7583 && (tem = get_unwidened (arg0, NULL_TREE)) != arg0
7584 && (t1 = get_unwidened (arg1, TREE_TYPE (tem))) != 0
7585 && (TREE_TYPE (t1) == TREE_TYPE (tem)
7586 || (TREE_CODE (t1) == INTEGER_CST
7587 && int_fits_type_p (t1, TREE_TYPE (tem)))))
7588 return fold (build (code, type, tem,
7589 fold_convert (TREE_TYPE (tem), t1)));
7591 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
7592 constant, we can simplify it. */
7593 else if (TREE_CODE (arg1) == INTEGER_CST
7594 && (TREE_CODE (arg0) == MIN_EXPR
7595 || TREE_CODE (arg0) == MAX_EXPR)
7596 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
7597 return optimize_minmax_comparison (t);
7599 /* If we are comparing an ABS_EXPR with a constant, we can
7600 convert all the cases into explicit comparisons, but they may
7601 well not be faster than doing the ABS and one comparison.
7602 But ABS (X) <= C is a range comparison, which becomes a subtraction
7603 and a comparison, and is probably faster. */
7604 else if (code == LE_EXPR && TREE_CODE (arg1) == INTEGER_CST
7605 && TREE_CODE (arg0) == ABS_EXPR
7606 && ! TREE_SIDE_EFFECTS (arg0)
7607 && (0 != (tem = negate_expr (arg1)))
7608 && TREE_CODE (tem) == INTEGER_CST
7609 && ! TREE_CONSTANT_OVERFLOW (tem))
7610 return fold (build (TRUTH_ANDIF_EXPR, type,
7611 build (GE_EXPR, type, TREE_OPERAND (arg0, 0), tem),
7612 build (LE_EXPR, type,
7613 TREE_OPERAND (arg0, 0), arg1)));
7615 /* If this is an EQ or NE comparison with zero and ARG0 is
7616 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
7617 two operations, but the latter can be done in one less insn
7618 on machines that have only two-operand insns or on which a
7619 constant cannot be the first operand. */
7620 if (integer_zerop (arg1) && (code == EQ_EXPR || code == NE_EXPR)
7621 && TREE_CODE (arg0) == BIT_AND_EXPR)
7623 if (TREE_CODE (TREE_OPERAND (arg0, 0)) == LSHIFT_EXPR
7624 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 0), 0)))
7626 fold (build (code, type,
7627 build (BIT_AND_EXPR, TREE_TYPE (arg0),
7629 TREE_TYPE (TREE_OPERAND (arg0, 0)),
7630 TREE_OPERAND (arg0, 1),
7631 TREE_OPERAND (TREE_OPERAND (arg0, 0), 1)),
7632 fold_convert (TREE_TYPE (arg0),
7635 else if (TREE_CODE (TREE_OPERAND (arg0, 1)) == LSHIFT_EXPR
7636 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 1), 0)))
7638 fold (build (code, type,
7639 build (BIT_AND_EXPR, TREE_TYPE (arg0),
7641 TREE_TYPE (TREE_OPERAND (arg0, 1)),
7642 TREE_OPERAND (arg0, 0),
7643 TREE_OPERAND (TREE_OPERAND (arg0, 1), 1)),
7644 fold_convert (TREE_TYPE (arg0),
7649 /* If this is an NE or EQ comparison of zero against the result of a
7650 signed MOD operation whose second operand is a power of 2, make
7651 the MOD operation unsigned since it is simpler and equivalent. */
7652 if ((code == NE_EXPR || code == EQ_EXPR)
7653 && integer_zerop (arg1)
7654 && ! TREE_UNSIGNED (TREE_TYPE (arg0))
7655 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
7656 || TREE_CODE (arg0) == CEIL_MOD_EXPR
7657 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
7658 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
7659 && integer_pow2p (TREE_OPERAND (arg0, 1)))
7661 tree newtype = lang_hooks.types.unsigned_type (TREE_TYPE (arg0));
7662 tree newmod = build (TREE_CODE (arg0), newtype,
7663 fold_convert (newtype,
7664 TREE_OPERAND (arg0, 0)),
7665 fold_convert (newtype,
7666 TREE_OPERAND (arg0, 1)));
7668 return build (code, type, newmod, fold_convert (newtype, arg1));
7671 /* If this is an NE comparison of zero with an AND of one, remove the
7672 comparison since the AND will give the correct value. */
7673 if (code == NE_EXPR && integer_zerop (arg1)
7674 && TREE_CODE (arg0) == BIT_AND_EXPR
7675 && integer_onep (TREE_OPERAND (arg0, 1)))
7676 return fold_convert (type, arg0);
7678 /* If we have (A & C) == C where C is a power of 2, convert this into
7679 (A & C) != 0. Similarly for NE_EXPR. */
7680 if ((code == EQ_EXPR || code == NE_EXPR)
7681 && TREE_CODE (arg0) == BIT_AND_EXPR
7682 && integer_pow2p (TREE_OPERAND (arg0, 1))
7683 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
7684 return fold (build (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
7685 arg0, integer_zero_node));
7687 /* If we have (A & C) != 0 or (A & C) == 0 and C is a power of
7688 2, then fold the expression into shifts and logical operations. */
7689 tem = fold_single_bit_test (code, arg0, arg1, type);
7693 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
7694 Similarly for NE_EXPR. */
7695 if ((code == EQ_EXPR || code == NE_EXPR)
7696 && TREE_CODE (arg0) == BIT_AND_EXPR
7697 && TREE_CODE (arg1) == INTEGER_CST
7698 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
7701 = fold (build (BIT_AND_EXPR, TREE_TYPE (arg0),
7702 arg1, build1 (BIT_NOT_EXPR,
7703 TREE_TYPE (TREE_OPERAND (arg0, 1)),
7704 TREE_OPERAND (arg0, 1))));
7705 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
7706 if (integer_nonzerop (dandnotc))
7707 return omit_one_operand (type, rslt, arg0);
7710 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
7711 Similarly for NE_EXPR. */
7712 if ((code == EQ_EXPR || code == NE_EXPR)
7713 && TREE_CODE (arg0) == BIT_IOR_EXPR
7714 && TREE_CODE (arg1) == INTEGER_CST
7715 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
7718 = fold (build (BIT_AND_EXPR, TREE_TYPE (arg0),
7719 TREE_OPERAND (arg0, 1),
7720 build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1)));
7721 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
7722 if (integer_nonzerop (candnotd))
7723 return omit_one_operand (type, rslt, arg0);
7726 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
7727 and similarly for >= into !=. */
7728 if ((code == LT_EXPR || code == GE_EXPR)
7729 && TREE_UNSIGNED (TREE_TYPE (arg0))
7730 && TREE_CODE (arg1) == LSHIFT_EXPR
7731 && integer_onep (TREE_OPERAND (arg1, 0)))
7732 return build (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
7733 build (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
7734 TREE_OPERAND (arg1, 1)),
7735 fold_convert (TREE_TYPE (arg0), integer_zero_node));
7737 else if ((code == LT_EXPR || code == GE_EXPR)
7738 && TREE_UNSIGNED (TREE_TYPE (arg0))
7739 && (TREE_CODE (arg1) == NOP_EXPR
7740 || TREE_CODE (arg1) == CONVERT_EXPR)
7741 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
7742 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
7744 build (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
7745 fold_convert (TREE_TYPE (arg0),
7746 build (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
7747 TREE_OPERAND (TREE_OPERAND (arg1, 0),
7749 fold_convert (TREE_TYPE (arg0), integer_zero_node));
7751 /* Simplify comparison of something with itself. (For IEEE
7752 floating-point, we can only do some of these simplifications.) */
7753 if (operand_equal_p (arg0, arg1, 0))
7758 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
7759 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
7760 return constant_boolean_node (1, type);
7765 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
7766 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
7767 return constant_boolean_node (1, type);
7768 return fold (build (EQ_EXPR, type, arg0, arg1));
7771 /* For NE, we can only do this simplification if integer
7772 or we don't honor IEEE floating point NaNs. */
7773 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
7774 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
7776 /* ... fall through ... */
7779 return constant_boolean_node (0, type);
7785 /* If we are comparing an expression that just has comparisons
7786 of two integer values, arithmetic expressions of those comparisons,
7787 and constants, we can simplify it. There are only three cases
7788 to check: the two values can either be equal, the first can be
7789 greater, or the second can be greater. Fold the expression for
7790 those three values. Since each value must be 0 or 1, we have
7791 eight possibilities, each of which corresponds to the constant 0
7792 or 1 or one of the six possible comparisons.
7794 This handles common cases like (a > b) == 0 but also handles
7795 expressions like ((x > y) - (y > x)) > 0, which supposedly
7796 occur in macroized code. */
7798 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
7800 tree cval1 = 0, cval2 = 0;
7803 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
7804 /* Don't handle degenerate cases here; they should already
7805 have been handled anyway. */
7806 && cval1 != 0 && cval2 != 0
7807 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
7808 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
7809 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
7810 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
7811 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
7812 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
7813 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
7815 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
7816 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
7818 /* We can't just pass T to eval_subst in case cval1 or cval2
7819 was the same as ARG1. */
7822 = fold (build (code, type,
7823 eval_subst (arg0, cval1, maxval, cval2, minval),
7826 = fold (build (code, type,
7827 eval_subst (arg0, cval1, maxval, cval2, maxval),
7830 = fold (build (code, type,
7831 eval_subst (arg0, cval1, minval, cval2, maxval),
7834 /* All three of these results should be 0 or 1. Confirm they
7835 are. Then use those values to select the proper code
7838 if ((integer_zerop (high_result)
7839 || integer_onep (high_result))
7840 && (integer_zerop (equal_result)
7841 || integer_onep (equal_result))
7842 && (integer_zerop (low_result)
7843 || integer_onep (low_result)))
7845 /* Make a 3-bit mask with the high-order bit being the
7846 value for `>', the next for '=', and the low for '<'. */
7847 switch ((integer_onep (high_result) * 4)
7848 + (integer_onep (equal_result) * 2)
7849 + integer_onep (low_result))
7853 return omit_one_operand (type, integer_zero_node, arg0);
7874 return omit_one_operand (type, integer_one_node, arg0);
7877 t = build (code, type, cval1, cval2);
7879 return save_expr (t);
7886 /* If this is a comparison of a field, we may be able to simplify it. */
7887 if (((TREE_CODE (arg0) == COMPONENT_REF
7888 && lang_hooks.can_use_bit_fields_p ())
7889 || TREE_CODE (arg0) == BIT_FIELD_REF)
7890 && (code == EQ_EXPR || code == NE_EXPR)
7891 /* Handle the constant case even without -O
7892 to make sure the warnings are given. */
7893 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
7895 t1 = optimize_bit_field_compare (code, type, arg0, arg1);
7900 /* If this is a comparison of complex values and either or both sides
7901 are a COMPLEX_EXPR or COMPLEX_CST, it is best to split up the
7902 comparisons and join them with a TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR.
7903 This may prevent needless evaluations. */
7904 if ((code == EQ_EXPR || code == NE_EXPR)
7905 && TREE_CODE (TREE_TYPE (arg0)) == COMPLEX_TYPE
7906 && (TREE_CODE (arg0) == COMPLEX_EXPR
7907 || TREE_CODE (arg1) == COMPLEX_EXPR
7908 || TREE_CODE (arg0) == COMPLEX_CST
7909 || TREE_CODE (arg1) == COMPLEX_CST))
7911 tree subtype = TREE_TYPE (TREE_TYPE (arg0));
7912 tree real0, imag0, real1, imag1;
7914 arg0 = save_expr (arg0);
7915 arg1 = save_expr (arg1);
7916 real0 = fold (build1 (REALPART_EXPR, subtype, arg0));
7917 imag0 = fold (build1 (IMAGPART_EXPR, subtype, arg0));
7918 real1 = fold (build1 (REALPART_EXPR, subtype, arg1));
7919 imag1 = fold (build1 (IMAGPART_EXPR, subtype, arg1));
7921 return fold (build ((code == EQ_EXPR ? TRUTH_ANDIF_EXPR
7924 fold (build (code, type, real0, real1)),
7925 fold (build (code, type, imag0, imag1))));
7928 /* Optimize comparisons of strlen vs zero to a compare of the
7929 first character of the string vs zero. To wit,
7930 strlen(ptr) == 0 => *ptr == 0
7931 strlen(ptr) != 0 => *ptr != 0
7932 Other cases should reduce to one of these two (or a constant)
7933 due to the return value of strlen being unsigned. */
7934 if ((code == EQ_EXPR || code == NE_EXPR)
7935 && integer_zerop (arg1)
7936 && TREE_CODE (arg0) == CALL_EXPR)
7938 tree fndecl = get_callee_fndecl (arg0);
7942 && DECL_BUILT_IN (fndecl)
7943 && DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_MD
7944 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
7945 && (arglist = TREE_OPERAND (arg0, 1))
7946 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE
7947 && ! TREE_CHAIN (arglist))
7948 return fold (build (code, type,
7949 build1 (INDIRECT_REF, char_type_node,
7950 TREE_VALUE(arglist)),
7951 integer_zero_node));
7954 /* From here on, the only cases we handle are when the result is
7955 known to be a constant.
7957 To compute GT, swap the arguments and do LT.
7958 To compute GE, do LT and invert the result.
7959 To compute LE, swap the arguments, do LT and invert the result.
7960 To compute NE, do EQ and invert the result.
7962 Therefore, the code below must handle only EQ and LT. */
7964 if (code == LE_EXPR || code == GT_EXPR)
7966 tem = arg0, arg0 = arg1, arg1 = tem;
7967 code = swap_tree_comparison (code);
7970 /* Note that it is safe to invert for real values here because we
7971 will check below in the one case that it matters. */
7975 if (code == NE_EXPR || code == GE_EXPR)
7978 code = invert_tree_comparison (code);
7981 /* Compute a result for LT or EQ if args permit;
7982 otherwise return T. */
7983 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
7985 if (code == EQ_EXPR)
7986 t1 = build_int_2 (tree_int_cst_equal (arg0, arg1), 0);
7988 t1 = build_int_2 ((TREE_UNSIGNED (TREE_TYPE (arg0))
7989 ? INT_CST_LT_UNSIGNED (arg0, arg1)
7990 : INT_CST_LT (arg0, arg1)),
7994 #if 0 /* This is no longer useful, but breaks some real code. */
7995 /* Assume a nonexplicit constant cannot equal an explicit one,
7996 since such code would be undefined anyway.
7997 Exception: on sysvr4, using #pragma weak,
7998 a label can come out as 0. */
7999 else if (TREE_CODE (arg1) == INTEGER_CST
8000 && !integer_zerop (arg1)
8001 && TREE_CONSTANT (arg0)
8002 && TREE_CODE (arg0) == ADDR_EXPR
8004 t1 = build_int_2 (0, 0);
8006 /* Two real constants can be compared explicitly. */
8007 else if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
8009 /* If either operand is a NaN, the result is false with two
8010 exceptions: First, an NE_EXPR is true on NaNs, but that case
8011 is already handled correctly since we will be inverting the
8012 result for NE_EXPR. Second, if we had inverted a LE_EXPR
8013 or a GE_EXPR into a LT_EXPR, we must return true so that it
8014 will be inverted into false. */
8016 if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
8017 || REAL_VALUE_ISNAN (TREE_REAL_CST (arg1)))
8018 t1 = build_int_2 (invert && code == LT_EXPR, 0);
8020 else if (code == EQ_EXPR)
8021 t1 = build_int_2 (REAL_VALUES_EQUAL (TREE_REAL_CST (arg0),
8022 TREE_REAL_CST (arg1)),
8025 t1 = build_int_2 (REAL_VALUES_LESS (TREE_REAL_CST (arg0),
8026 TREE_REAL_CST (arg1)),
8030 if (t1 == NULL_TREE)
8034 TREE_INT_CST_LOW (t1) ^= 1;
8036 TREE_TYPE (t1) = type;
8037 if (TREE_CODE (type) == BOOLEAN_TYPE)
8038 return lang_hooks.truthvalue_conversion (t1);
8042 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
8043 so all simple results must be passed through pedantic_non_lvalue. */
8044 if (TREE_CODE (arg0) == INTEGER_CST)
8046 tem = TREE_OPERAND (t, (integer_zerop (arg0) ? 2 : 1));
8047 /* Only optimize constant conditions when the selected branch
8048 has the same type as the COND_EXPR. This avoids optimizing
8049 away "c ? x : throw", where the throw has a void type. */
8050 if (! VOID_TYPE_P (TREE_TYPE (tem))
8051 || VOID_TYPE_P (TREE_TYPE (t)))
8052 return pedantic_non_lvalue (tem);
8055 if (operand_equal_p (arg1, TREE_OPERAND (expr, 2), 0))
8056 return pedantic_omit_one_operand (type, arg1, arg0);
8058 /* If we have A op B ? A : C, we may be able to convert this to a
8059 simpler expression, depending on the operation and the values
8060 of B and C. Signed zeros prevent all of these transformations,
8061 for reasons given above each one. */
8063 if (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
8064 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
8065 arg1, TREE_OPERAND (arg0, 1))
8066 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
8068 tree arg2 = TREE_OPERAND (t, 2);
8069 enum tree_code comp_code = TREE_CODE (arg0);
8073 /* If we have A op 0 ? A : -A, consider applying the following
8076 A == 0? A : -A same as -A
8077 A != 0? A : -A same as A
8078 A >= 0? A : -A same as abs (A)
8079 A > 0? A : -A same as abs (A)
8080 A <= 0? A : -A same as -abs (A)
8081 A < 0? A : -A same as -abs (A)
8083 None of these transformations work for modes with signed
8084 zeros. If A is +/-0, the first two transformations will
8085 change the sign of the result (from +0 to -0, or vice
8086 versa). The last four will fix the sign of the result,
8087 even though the original expressions could be positive or
8088 negative, depending on the sign of A.
8090 Note that all these transformations are correct if A is
8091 NaN, since the two alternatives (A and -A) are also NaNs. */
8092 if ((FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 1)))
8093 ? real_zerop (TREE_OPERAND (arg0, 1))
8094 : integer_zerop (TREE_OPERAND (arg0, 1)))
8095 && TREE_CODE (arg2) == NEGATE_EXPR
8096 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
8100 tem = fold_convert (TREE_TYPE (TREE_OPERAND (t, 1)), arg1);
8101 tem = fold_convert (type, negate_expr (tem));
8102 return pedantic_non_lvalue (tem);
8104 return pedantic_non_lvalue (fold_convert (type, arg1));
8107 if (TREE_UNSIGNED (TREE_TYPE (arg1)))
8108 arg1 = fold_convert (lang_hooks.types.signed_type
8109 (TREE_TYPE (arg1)), arg1);
8110 arg1 = fold (build1 (ABS_EXPR, TREE_TYPE (arg1), arg1));
8111 return pedantic_non_lvalue (fold_convert (type, arg1));
8114 if (TREE_UNSIGNED (TREE_TYPE (arg1)))
8115 arg1 = fold_convert (lang_hooks.types.signed_type
8116 (TREE_TYPE (arg1)), arg1);
8117 arg1 = fold (build1 (ABS_EXPR, TREE_TYPE (arg1), arg1));
8118 arg1 = negate_expr (fold_convert (type, arg1));
8119 return pedantic_non_lvalue (arg1);
8124 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
8125 A == 0 ? A : 0 is always 0 unless A is -0. Note that
8126 both transformations are correct when A is NaN: A != 0
8127 is then true, and A == 0 is false. */
8129 if (integer_zerop (TREE_OPERAND (arg0, 1)) && integer_zerop (arg2))
8131 if (comp_code == NE_EXPR)
8132 return pedantic_non_lvalue (fold_convert (type, arg1));
8133 else if (comp_code == EQ_EXPR)
8134 return pedantic_non_lvalue (fold_convert (type, integer_zero_node));
8137 /* Try some transformations of A op B ? A : B.
8139 A == B? A : B same as B
8140 A != B? A : B same as A
8141 A >= B? A : B same as max (A, B)
8142 A > B? A : B same as max (B, A)
8143 A <= B? A : B same as min (A, B)
8144 A < B? A : B same as min (B, A)
8146 As above, these transformations don't work in the presence
8147 of signed zeros. For example, if A and B are zeros of
8148 opposite sign, the first two transformations will change
8149 the sign of the result. In the last four, the original
8150 expressions give different results for (A=+0, B=-0) and
8151 (A=-0, B=+0), but the transformed expressions do not.
8153 The first two transformations are correct if either A or B
8154 is a NaN. In the first transformation, the condition will
8155 be false, and B will indeed be chosen. In the case of the
8156 second transformation, the condition A != B will be true,
8157 and A will be chosen.
8159 The conversions to max() and min() are not correct if B is
8160 a number and A is not. The conditions in the original
8161 expressions will be false, so all four give B. The min()
8162 and max() versions would give a NaN instead. */
8163 if (operand_equal_for_comparison_p (TREE_OPERAND (arg0, 1),
8164 arg2, TREE_OPERAND (arg0, 0)))
8166 tree comp_op0 = TREE_OPERAND (arg0, 0);
8167 tree comp_op1 = TREE_OPERAND (arg0, 1);
8168 tree comp_type = TREE_TYPE (comp_op0);
8170 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
8171 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
8181 return pedantic_non_lvalue (fold_convert (type, arg2));
8183 return pedantic_non_lvalue (fold_convert (type, arg1));
8186 /* In C++ a ?: expression can be an lvalue, so put the
8187 operand which will be used if they are equal first
8188 so that we can convert this back to the
8189 corresponding COND_EXPR. */
8190 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
8191 return pedantic_non_lvalue (fold_convert
8192 (type, fold (build (MIN_EXPR, comp_type,
8193 (comp_code == LE_EXPR
8194 ? comp_op0 : comp_op1),
8195 (comp_code == LE_EXPR
8196 ? comp_op1 : comp_op0)))));
8200 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
8201 return pedantic_non_lvalue (fold_convert
8202 (type, fold (build (MAX_EXPR, comp_type,
8203 (comp_code == GE_EXPR
8204 ? comp_op0 : comp_op1),
8205 (comp_code == GE_EXPR
8206 ? comp_op1 : comp_op0)))));
8213 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
8214 we might still be able to simplify this. For example,
8215 if C1 is one less or one more than C2, this might have started
8216 out as a MIN or MAX and been transformed by this function.
8217 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
8219 if (INTEGRAL_TYPE_P (type)
8220 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8221 && TREE_CODE (arg2) == INTEGER_CST)
8225 /* We can replace A with C1 in this case. */
8226 arg1 = fold_convert (type, TREE_OPERAND (arg0, 1));
8227 return fold (build (code, type, TREE_OPERAND (t, 0), arg1,
8228 TREE_OPERAND (t, 2)));
8231 /* If C1 is C2 + 1, this is min(A, C2). */
8232 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type), 1)
8233 && operand_equal_p (TREE_OPERAND (arg0, 1),
8234 const_binop (PLUS_EXPR, arg2,
8235 integer_one_node, 0), 1))
8236 return pedantic_non_lvalue
8237 (fold (build (MIN_EXPR, type, arg1, arg2)));
8241 /* If C1 is C2 - 1, this is min(A, C2). */
8242 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type), 1)
8243 && operand_equal_p (TREE_OPERAND (arg0, 1),
8244 const_binop (MINUS_EXPR, arg2,
8245 integer_one_node, 0), 1))
8246 return pedantic_non_lvalue
8247 (fold (build (MIN_EXPR, type, arg1, arg2)));
8251 /* If C1 is C2 - 1, this is max(A, C2). */
8252 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type), 1)
8253 && operand_equal_p (TREE_OPERAND (arg0, 1),
8254 const_binop (MINUS_EXPR, arg2,
8255 integer_one_node, 0), 1))
8256 return pedantic_non_lvalue
8257 (fold (build (MAX_EXPR, type, arg1, arg2)));
8261 /* If C1 is C2 + 1, this is max(A, C2). */
8262 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type), 1)
8263 && operand_equal_p (TREE_OPERAND (arg0, 1),
8264 const_binop (PLUS_EXPR, arg2,
8265 integer_one_node, 0), 1))
8266 return pedantic_non_lvalue
8267 (fold (build (MAX_EXPR, type, arg1, arg2)));
8276 /* If the second operand is simpler than the third, swap them
8277 since that produces better jump optimization results. */
8278 if (tree_swap_operands_p (TREE_OPERAND (t, 1),
8279 TREE_OPERAND (t, 2), false))
8281 /* See if this can be inverted. If it can't, possibly because
8282 it was a floating-point inequality comparison, don't do
8284 tem = invert_truthvalue (arg0);
8286 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
8287 return fold (build (code, type, tem,
8288 TREE_OPERAND (t, 2), TREE_OPERAND (t, 1)));
8291 /* Convert A ? 1 : 0 to simply A. */
8292 if (integer_onep (TREE_OPERAND (t, 1))
8293 && integer_zerop (TREE_OPERAND (t, 2))
8294 /* If we try to convert TREE_OPERAND (t, 0) to our type, the
8295 call to fold will try to move the conversion inside
8296 a COND, which will recurse. In that case, the COND_EXPR
8297 is probably the best choice, so leave it alone. */
8298 && type == TREE_TYPE (arg0))
8299 return pedantic_non_lvalue (arg0);
8301 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
8302 over COND_EXPR in cases such as floating point comparisons. */
8303 if (integer_zerop (TREE_OPERAND (t, 1))
8304 && integer_onep (TREE_OPERAND (t, 2))
8305 && truth_value_p (TREE_CODE (arg0)))
8306 return pedantic_non_lvalue (fold_convert (type,
8307 invert_truthvalue (arg0)));
8309 /* Look for expressions of the form A & 2 ? 2 : 0. The result of this
8310 operation is simply A & 2. */
8312 if (integer_zerop (TREE_OPERAND (t, 2))
8313 && TREE_CODE (arg0) == NE_EXPR
8314 && integer_zerop (TREE_OPERAND (arg0, 1))
8315 && integer_pow2p (arg1)
8316 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
8317 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
8319 return pedantic_non_lvalue (fold_convert (type,
8320 TREE_OPERAND (arg0, 0)));
8322 /* Convert A ? B : 0 into A && B if A and B are truth values. */
8323 if (integer_zerop (TREE_OPERAND (t, 2))
8324 && truth_value_p (TREE_CODE (arg0))
8325 && truth_value_p (TREE_CODE (arg1)))
8326 return pedantic_non_lvalue (fold (build (TRUTH_ANDIF_EXPR, type,
8329 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
8330 if (integer_onep (TREE_OPERAND (t, 2))
8331 && truth_value_p (TREE_CODE (arg0))
8332 && truth_value_p (TREE_CODE (arg1)))
8334 /* Only perform transformation if ARG0 is easily inverted. */
8335 tem = invert_truthvalue (arg0);
8336 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
8337 return pedantic_non_lvalue (fold (build (TRUTH_ORIF_EXPR, type,
8344 /* When pedantic, a compound expression can be neither an lvalue
8345 nor an integer constant expression. */
8346 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
8348 /* Don't let (0, 0) be null pointer constant. */
8349 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
8350 : fold_convert (type, arg1);
8351 return pedantic_non_lvalue (tem);
8355 return build_complex (type, arg0, arg1);
8359 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8361 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
8362 return omit_one_operand (type, TREE_OPERAND (arg0, 0),
8363 TREE_OPERAND (arg0, 1));
8364 else if (TREE_CODE (arg0) == COMPLEX_CST)
8365 return TREE_REALPART (arg0);
8366 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8367 return fold (build (TREE_CODE (arg0), type,
8368 fold (build1 (REALPART_EXPR, type,
8369 TREE_OPERAND (arg0, 0))),
8370 fold (build1 (REALPART_EXPR,
8371 type, TREE_OPERAND (arg0, 1)))));
8375 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8376 return fold_convert (type, integer_zero_node);
8377 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
8378 return omit_one_operand (type, TREE_OPERAND (arg0, 1),
8379 TREE_OPERAND (arg0, 0));
8380 else if (TREE_CODE (arg0) == COMPLEX_CST)
8381 return TREE_IMAGPART (arg0);
8382 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8383 return fold (build (TREE_CODE (arg0), type,
8384 fold (build1 (IMAGPART_EXPR, type,
8385 TREE_OPERAND (arg0, 0))),
8386 fold (build1 (IMAGPART_EXPR, type,
8387 TREE_OPERAND (arg0, 1)))));
8390 /* Pull arithmetic ops out of the CLEANUP_POINT_EXPR where
8392 case CLEANUP_POINT_EXPR:
8393 if (! has_cleanups (arg0))
8394 return TREE_OPERAND (t, 0);
8397 enum tree_code code0 = TREE_CODE (arg0);
8398 int kind0 = TREE_CODE_CLASS (code0);
8399 tree arg00 = TREE_OPERAND (arg0, 0);
8402 if (kind0 == '1' || code0 == TRUTH_NOT_EXPR)
8403 return fold (build1 (code0, type,
8404 fold (build1 (CLEANUP_POINT_EXPR,
8405 TREE_TYPE (arg00), arg00))));
8407 if (kind0 == '<' || kind0 == '2'
8408 || code0 == TRUTH_ANDIF_EXPR || code0 == TRUTH_ORIF_EXPR
8409 || code0 == TRUTH_AND_EXPR || code0 == TRUTH_OR_EXPR
8410 || code0 == TRUTH_XOR_EXPR)
8412 arg01 = TREE_OPERAND (arg0, 1);
8414 if (TREE_CONSTANT (arg00)
8415 || ((code0 == TRUTH_ANDIF_EXPR || code0 == TRUTH_ORIF_EXPR)
8416 && ! has_cleanups (arg00)))
8417 return fold (build (code0, type, arg00,
8418 fold (build1 (CLEANUP_POINT_EXPR,
8419 TREE_TYPE (arg01), arg01))));
8421 if (TREE_CONSTANT (arg01))
8422 return fold (build (code0, type,
8423 fold (build1 (CLEANUP_POINT_EXPR,
8424 TREE_TYPE (arg00), arg00)),
8432 /* Check for a built-in function. */
8433 if (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR
8434 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (expr, 0), 0))
8436 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (expr, 0), 0)))
8438 tree tmp = fold_builtin (expr);
8446 } /* switch (code) */
8449 #ifdef ENABLE_FOLD_CHECKING
8452 static void fold_checksum_tree (tree, struct md5_ctx *, htab_t);
8453 static void fold_check_failed (tree, tree);
8454 void print_fold_checksum (tree);
8456 /* When --enable-checking=fold, compute a digest of expr before
8457 and after actual fold call to see if fold did not accidentally
8458 change original expr. */
8465 unsigned char checksum_before[16], checksum_after[16];
8468 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
8469 md5_init_ctx (&ctx);
8470 fold_checksum_tree (expr, &ctx, ht);
8471 md5_finish_ctx (&ctx, checksum_before);
8474 ret = fold_1 (expr);
8476 md5_init_ctx (&ctx);
8477 fold_checksum_tree (expr, &ctx, ht);
8478 md5_finish_ctx (&ctx, checksum_after);
8481 if (memcmp (checksum_before, checksum_after, 16))
8482 fold_check_failed (expr, ret);
8488 print_fold_checksum (tree expr)
8491 unsigned char checksum[16], cnt;
8494 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
8495 md5_init_ctx (&ctx);
8496 fold_checksum_tree (expr, &ctx, ht);
8497 md5_finish_ctx (&ctx, checksum);
8499 for (cnt = 0; cnt < 16; ++cnt)
8500 fprintf (stderr, "%02x", checksum[cnt]);
8501 putc ('\n', stderr);
8505 fold_check_failed (tree expr ATTRIBUTE_UNUSED, tree ret ATTRIBUTE_UNUSED)
8507 internal_error ("fold check: original tree changed by fold");
8511 fold_checksum_tree (tree expr, struct md5_ctx *ctx, htab_t ht)
8514 enum tree_code code;
8515 char buf[sizeof (struct tree_decl)];
8518 if (sizeof (struct tree_exp) + 5 * sizeof (tree)
8519 > sizeof (struct tree_decl)
8520 || sizeof (struct tree_type) > sizeof (struct tree_decl))
8524 slot = htab_find_slot (ht, expr, INSERT);
8528 code = TREE_CODE (expr);
8529 if (code == SAVE_EXPR && SAVE_EXPR_NOPLACEHOLDER (expr))
8531 /* Allow SAVE_EXPR_NOPLACEHOLDER flag to be modified. */
8532 memcpy (buf, expr, tree_size (expr));
8534 SAVE_EXPR_NOPLACEHOLDER (expr) = 0;
8536 else if (TREE_CODE_CLASS (code) == 'd' && DECL_ASSEMBLER_NAME_SET_P (expr))
8538 /* Allow DECL_ASSEMBLER_NAME to be modified. */
8539 memcpy (buf, expr, tree_size (expr));
8541 SET_DECL_ASSEMBLER_NAME (expr, NULL);
8543 else if (TREE_CODE_CLASS (code) == 't'
8544 && (TYPE_POINTER_TO (expr) || TYPE_REFERENCE_TO (expr)))
8546 /* Allow TYPE_POINTER_TO and TYPE_REFERENCE_TO to be modified. */
8547 memcpy (buf, expr, tree_size (expr));
8549 TYPE_POINTER_TO (expr) = NULL;
8550 TYPE_REFERENCE_TO (expr) = NULL;
8552 md5_process_bytes (expr, tree_size (expr), ctx);
8553 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
8554 if (TREE_CODE_CLASS (code) != 't' && TREE_CODE_CLASS (code) != 'd')
8555 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
8556 len = TREE_CODE_LENGTH (code);
8557 switch (TREE_CODE_CLASS (code))
8563 md5_process_bytes (TREE_STRING_POINTER (expr),
8564 TREE_STRING_LENGTH (expr), ctx);
8567 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
8568 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
8571 fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
8581 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
8582 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
8585 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
8586 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
8595 case SAVE_EXPR: len = 2; break;
8596 case GOTO_SUBROUTINE_EXPR: len = 0; break;
8597 case RTL_EXPR: len = 0; break;
8598 case WITH_CLEANUP_EXPR: len = 2; break;
8607 for (i = 0; i < len; ++i)
8608 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
8611 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
8612 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
8613 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
8614 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
8615 fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
8616 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
8617 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
8618 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
8619 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
8620 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
8621 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
8624 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
8625 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
8626 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
8627 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
8628 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
8629 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
8630 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
8631 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
8632 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
8633 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
8642 /* Perform constant folding and related simplification of initializer
8643 expression EXPR. This behaves identically to "fold" but ignores
8644 potential run-time traps and exceptions that fold must preserve. */
8647 fold_initializer (tree expr)
8649 int saved_signaling_nans = flag_signaling_nans;
8650 int saved_trapping_math = flag_trapping_math;
8651 int saved_trapv = flag_trapv;
8654 flag_signaling_nans = 0;
8655 flag_trapping_math = 0;
8658 result = fold (expr);
8660 flag_signaling_nans = saved_signaling_nans;
8661 flag_trapping_math = saved_trapping_math;
8662 flag_trapv = saved_trapv;
8667 /* Determine if first argument is a multiple of second argument. Return 0 if
8668 it is not, or we cannot easily determined it to be.
8670 An example of the sort of thing we care about (at this point; this routine
8671 could surely be made more general, and expanded to do what the *_DIV_EXPR's
8672 fold cases do now) is discovering that
8674 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
8680 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
8682 This code also handles discovering that
8684 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
8686 is a multiple of 8 so we don't have to worry about dealing with a
8689 Note that we *look* inside a SAVE_EXPR only to determine how it was
8690 calculated; it is not safe for fold to do much of anything else with the
8691 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
8692 at run time. For example, the latter example above *cannot* be implemented
8693 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
8694 evaluation time of the original SAVE_EXPR is not necessarily the same at
8695 the time the new expression is evaluated. The only optimization of this
8696 sort that would be valid is changing
8698 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
8702 SAVE_EXPR (I) * SAVE_EXPR (J)
8704 (where the same SAVE_EXPR (J) is used in the original and the
8705 transformed version). */
8708 multiple_of_p (tree type, tree top, tree bottom)
8710 if (operand_equal_p (top, bottom, 0))
8713 if (TREE_CODE (type) != INTEGER_TYPE)
8716 switch (TREE_CODE (top))
8719 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
8720 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
8724 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
8725 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
8728 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
8732 op1 = TREE_OPERAND (top, 1);
8733 /* const_binop may not detect overflow correctly,
8734 so check for it explicitly here. */
8735 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
8736 > TREE_INT_CST_LOW (op1)
8737 && TREE_INT_CST_HIGH (op1) == 0
8738 && 0 != (t1 = fold_convert (type,
8739 const_binop (LSHIFT_EXPR,
8742 && ! TREE_OVERFLOW (t1))
8743 return multiple_of_p (type, t1, bottom);
8748 /* Can't handle conversions from non-integral or wider integral type. */
8749 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
8750 || (TYPE_PRECISION (type)
8751 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
8754 /* .. fall through ... */
8757 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
8760 if (TREE_CODE (bottom) != INTEGER_CST
8761 || (TREE_UNSIGNED (type)
8762 && (tree_int_cst_sgn (top) < 0
8763 || tree_int_cst_sgn (bottom) < 0)))
8765 return integer_zerop (const_binop (TRUNC_MOD_EXPR,
8773 /* Return true if `t' is known to be non-negative. */
8776 tree_expr_nonnegative_p (tree t)
8778 switch (TREE_CODE (t))
8784 return tree_int_cst_sgn (t) >= 0;
8787 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
8790 if (FLOAT_TYPE_P (TREE_TYPE (t)))
8791 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
8792 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
8794 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
8795 both unsigned and at least 2 bits shorter than the result. */
8796 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
8797 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
8798 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
8800 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
8801 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
8802 if (TREE_CODE (inner1) == INTEGER_TYPE && TREE_UNSIGNED (inner1)
8803 && TREE_CODE (inner2) == INTEGER_TYPE && TREE_UNSIGNED (inner2))
8805 unsigned int prec = MAX (TYPE_PRECISION (inner1),
8806 TYPE_PRECISION (inner2)) + 1;
8807 return prec < TYPE_PRECISION (TREE_TYPE (t));
8813 if (FLOAT_TYPE_P (TREE_TYPE (t)))
8815 /* x * x for floating point x is always non-negative. */
8816 if (operand_equal_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1), 0))
8818 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
8819 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
8822 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
8823 both unsigned and their total bits is shorter than the result. */
8824 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
8825 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
8826 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
8828 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
8829 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
8830 if (TREE_CODE (inner1) == INTEGER_TYPE && TREE_UNSIGNED (inner1)
8831 && TREE_CODE (inner2) == INTEGER_TYPE && TREE_UNSIGNED (inner2))
8832 return TYPE_PRECISION (inner1) + TYPE_PRECISION (inner2)
8833 < TYPE_PRECISION (TREE_TYPE (t));
8837 case TRUNC_DIV_EXPR:
8839 case FLOOR_DIV_EXPR:
8840 case ROUND_DIV_EXPR:
8841 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
8842 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
8844 case TRUNC_MOD_EXPR:
8846 case FLOOR_MOD_EXPR:
8847 case ROUND_MOD_EXPR:
8848 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
8851 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
8852 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
8856 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
8857 tree outer_type = TREE_TYPE (t);
8859 if (TREE_CODE (outer_type) == REAL_TYPE)
8861 if (TREE_CODE (inner_type) == REAL_TYPE)
8862 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
8863 if (TREE_CODE (inner_type) == INTEGER_TYPE)
8865 if (TREE_UNSIGNED (inner_type))
8867 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
8870 else if (TREE_CODE (outer_type) == INTEGER_TYPE)
8872 if (TREE_CODE (inner_type) == REAL_TYPE)
8873 return tree_expr_nonnegative_p (TREE_OPERAND (t,0));
8874 if (TREE_CODE (inner_type) == INTEGER_TYPE)
8875 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
8876 && TREE_UNSIGNED (inner_type);
8882 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1))
8883 && tree_expr_nonnegative_p (TREE_OPERAND (t, 2));
8885 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
8887 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
8888 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
8890 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
8891 || tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
8893 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
8895 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
8897 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
8898 case NON_LVALUE_EXPR:
8899 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
8901 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
8903 return rtl_expr_nonnegative_p (RTL_EXPR_RTL (t));
8907 tree fndecl = get_callee_fndecl (t);
8908 tree arglist = TREE_OPERAND (t, 1);
8910 && DECL_BUILT_IN (fndecl)
8911 && DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_MD)
8912 switch (DECL_FUNCTION_CODE (fndecl))
8914 #define CASE_BUILTIN_F(BUILT_IN_FN) \
8915 case BUILT_IN_FN: case BUILT_IN_FN##F: case BUILT_IN_FN##L:
8916 #define CASE_BUILTIN_I(BUILT_IN_FN) \
8917 case BUILT_IN_FN: case BUILT_IN_FN##L: case BUILT_IN_FN##LL:
8919 CASE_BUILTIN_F (BUILT_IN_ACOS)
8920 CASE_BUILTIN_F (BUILT_IN_ACOSH)
8921 CASE_BUILTIN_F (BUILT_IN_CABS)
8922 CASE_BUILTIN_F (BUILT_IN_COSH)
8923 CASE_BUILTIN_F (BUILT_IN_ERFC)
8924 CASE_BUILTIN_F (BUILT_IN_EXP)
8925 CASE_BUILTIN_F (BUILT_IN_EXP10)
8926 CASE_BUILTIN_F (BUILT_IN_EXP2)
8927 CASE_BUILTIN_F (BUILT_IN_FABS)
8928 CASE_BUILTIN_F (BUILT_IN_FDIM)
8929 CASE_BUILTIN_F (BUILT_IN_FREXP)
8930 CASE_BUILTIN_F (BUILT_IN_HYPOT)
8931 CASE_BUILTIN_F (BUILT_IN_POW10)
8932 CASE_BUILTIN_F (BUILT_IN_SQRT)
8933 CASE_BUILTIN_I (BUILT_IN_FFS)
8934 CASE_BUILTIN_I (BUILT_IN_PARITY)
8935 CASE_BUILTIN_I (BUILT_IN_POPCOUNT)
8939 CASE_BUILTIN_F (BUILT_IN_ASINH)
8940 CASE_BUILTIN_F (BUILT_IN_ATAN)
8941 CASE_BUILTIN_F (BUILT_IN_ATANH)
8942 CASE_BUILTIN_F (BUILT_IN_CBRT)
8943 CASE_BUILTIN_F (BUILT_IN_CEIL)
8944 CASE_BUILTIN_F (BUILT_IN_ERF)
8945 CASE_BUILTIN_F (BUILT_IN_EXPM1)
8946 CASE_BUILTIN_F (BUILT_IN_FLOOR)
8947 CASE_BUILTIN_F (BUILT_IN_FMOD)
8948 CASE_BUILTIN_F (BUILT_IN_LDEXP)
8949 CASE_BUILTIN_F (BUILT_IN_LLRINT)
8950 CASE_BUILTIN_F (BUILT_IN_LLROUND)
8951 CASE_BUILTIN_F (BUILT_IN_LRINT)
8952 CASE_BUILTIN_F (BUILT_IN_LROUND)
8953 CASE_BUILTIN_F (BUILT_IN_MODF)
8954 CASE_BUILTIN_F (BUILT_IN_NEARBYINT)
8955 CASE_BUILTIN_F (BUILT_IN_POW)
8956 CASE_BUILTIN_F (BUILT_IN_RINT)
8957 CASE_BUILTIN_F (BUILT_IN_ROUND)
8958 CASE_BUILTIN_F (BUILT_IN_SIGNBIT)
8959 CASE_BUILTIN_F (BUILT_IN_SINH)
8960 CASE_BUILTIN_F (BUILT_IN_TANH)
8961 CASE_BUILTIN_F (BUILT_IN_TRUNC)
8962 /* True if the 1st argument is nonnegative. */
8963 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
8965 CASE_BUILTIN_F(BUILT_IN_FMAX)
8966 /* True if the 1st OR 2nd arguments are nonnegative. */
8967 return tree_expr_nonnegative_p (TREE_VALUE (arglist))
8968 || tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
8970 CASE_BUILTIN_F(BUILT_IN_FMIN)
8971 /* True if the 1st AND 2nd arguments are nonnegative. */
8972 return tree_expr_nonnegative_p (TREE_VALUE (arglist))
8973 && tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
8975 CASE_BUILTIN_F(BUILT_IN_COPYSIGN)
8976 /* True if the 2nd argument is nonnegative. */
8977 return tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
8981 #undef CASE_BUILTIN_F
8982 #undef CASE_BUILTIN_I
8986 /* ... fall through ... */
8989 if (truth_value_p (TREE_CODE (t)))
8990 /* Truth values evaluate to 0 or 1, which is nonnegative. */
8994 /* We don't know sign of `t', so be conservative and return false. */
8998 /* Return true if `r' is known to be non-negative.
8999 Only handles constants at the moment. */
9002 rtl_expr_nonnegative_p (rtx r)
9004 switch (GET_CODE (r))
9007 return INTVAL (r) >= 0;
9010 if (GET_MODE (r) == VOIDmode)
9011 return CONST_DOUBLE_HIGH (r) >= 0;
9019 units = CONST_VECTOR_NUNITS (r);
9021 for (i = 0; i < units; ++i)
9023 elt = CONST_VECTOR_ELT (r, i);
9024 if (!rtl_expr_nonnegative_p (elt))
9033 /* These are always nonnegative. */
9041 /* Return the tree for neg (ARG0) when ARG0 is known to be either
9042 an integer constant or real constant.
9044 TYPE is the type of the result. */
9047 fold_negate_const (tree arg0, tree type)
9051 if (TREE_CODE (arg0) == INTEGER_CST)
9053 unsigned HOST_WIDE_INT low;
9055 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
9056 TREE_INT_CST_HIGH (arg0),
9058 t = build_int_2 (low, high);
9059 TREE_TYPE (t) = type;
9061 = (TREE_OVERFLOW (arg0)
9062 | force_fit_type (t, overflow && !TREE_UNSIGNED (type)));
9063 TREE_CONSTANT_OVERFLOW (t)
9064 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg0);
9066 else if (TREE_CODE (arg0) == REAL_CST)
9067 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
9068 #ifdef ENABLE_CHECKING
9076 #include "gt-fold-const.h"