1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
22 /*@@ This file should be rewritten to use an arbitrary precision
23 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
24 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
25 @@ The routines that translate from the ap rep should
26 @@ warn if precision et. al. is lost.
27 @@ This would also make life easier when this technology is used
28 @@ for cross-compilers. */
30 /* The entry points in this file are fold, size_int_wide, size_binop
33 fold takes a tree as argument and returns a simplified tree.
35 size_binop takes a tree code for an arithmetic operation
36 and two operands that are trees, and produces a tree for the
37 result, assuming the type comes from `sizetype'.
39 size_int takes an integer value, and creates a tree constant
40 with type from `sizetype'.
42 force_fit_type takes a constant, an overflowable flag and prior
43 overflow indicators. It forces the value to fit the type and sets
44 TREE_OVERFLOW and TREE_CONSTANT_OVERFLOW as appropriate. */
48 #include "coretypes.h"
59 #include "langhooks.h"
62 /* The following constants represent a bit based encoding of GCC's
63 comparison operators. This encoding simplifies transformations
64 on relational comparison operators, such as AND and OR. */
65 enum comparison_code {
84 static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
85 static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *);
86 static bool negate_mathfn_p (enum built_in_function);
87 static bool negate_expr_p (tree);
88 static tree negate_expr (tree);
89 static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
90 static tree associate_trees (tree, tree, enum tree_code, tree);
91 static tree const_binop (enum tree_code, tree, tree, int);
92 static enum comparison_code comparison_to_compcode (enum tree_code);
93 static enum tree_code compcode_to_comparison (enum comparison_code);
94 static tree combine_comparisons (enum tree_code, enum tree_code,
95 enum tree_code, tree, tree, tree);
96 static int truth_value_p (enum tree_code);
97 static int operand_equal_for_comparison_p (tree, tree, tree);
98 static int twoval_comparison_p (tree, tree *, tree *, int *);
99 static tree eval_subst (tree, tree, tree, tree, tree);
100 static tree pedantic_omit_one_operand (tree, tree, tree);
101 static tree distribute_bit_expr (enum tree_code, tree, tree, tree);
102 static tree make_bit_field_ref (tree, tree, int, int, int);
103 static tree optimize_bit_field_compare (enum tree_code, tree, tree, tree);
104 static tree decode_field_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
105 enum machine_mode *, int *, int *,
107 static int all_ones_mask_p (tree, int);
108 static tree sign_bit_p (tree, tree);
109 static int simple_operand_p (tree);
110 static tree range_binop (enum tree_code, tree, tree, int, tree, int);
111 static tree make_range (tree, int *, tree *, tree *);
112 static tree build_range_check (tree, tree, int, tree, tree);
113 static int merge_ranges (int *, tree *, tree *, int, tree, tree, int, tree,
115 static tree fold_range_test (enum tree_code, tree, tree, tree);
116 static tree fold_cond_expr_with_comparison (tree, tree, tree, tree);
117 static tree unextend (tree, int, int, tree);
118 static tree fold_truthop (enum tree_code, tree, tree, tree);
119 static tree optimize_minmax_comparison (enum tree_code, tree, tree, tree);
120 static tree extract_muldiv (tree, tree, enum tree_code, tree);
121 static tree extract_muldiv_1 (tree, tree, enum tree_code, tree);
122 static int multiple_of_p (tree, tree, tree);
123 static tree fold_binary_op_with_conditional_arg (enum tree_code, tree,
126 static bool fold_real_zero_addition_p (tree, tree, int);
127 static tree fold_mathfn_compare (enum built_in_function, enum tree_code,
129 static tree fold_inf_compare (enum tree_code, tree, tree, tree);
130 static tree fold_div_compare (enum tree_code, tree, tree, tree);
131 static bool reorder_operands_p (tree, tree);
132 static tree fold_negate_const (tree, tree);
133 static tree fold_not_const (tree, tree);
134 static tree fold_relational_const (enum tree_code, tree, tree, tree);
135 static bool tree_expr_nonzero_p (tree);
137 /* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
138 overflow. Suppose A, B and SUM have the same respective signs as A1, B1,
139 and SUM1. Then this yields nonzero if overflow occurred during the
142 Overflow occurs if A and B have the same sign, but A and SUM differ in
143 sign. Use `^' to test whether signs differ, and `< 0' to isolate the
145 #define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
147 /* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
148 We do that by representing the two-word integer in 4 words, with only
149 HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
150 number. The value of the word is LOWPART + HIGHPART * BASE. */
153 ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
154 #define HIGHPART(x) \
155 ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
156 #define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)
158 /* Unpack a two-word integer into 4 words.
159 LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
160 WORDS points to the array of HOST_WIDE_INTs. */
163 encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
165 words[0] = LOWPART (low);
166 words[1] = HIGHPART (low);
167 words[2] = LOWPART (hi);
168 words[3] = HIGHPART (hi);
171 /* Pack an array of 4 words into a two-word integer.
172 WORDS points to the array of words.
173 The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces. */
176 decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
179 *low = words[0] + words[1] * BASE;
180 *hi = words[2] + words[3] * BASE;
183 /* T is an INT_CST node. OVERFLOWABLE indicates if we are interested
184 in overflow of the value, when >0 we are only interested in signed
185 overflow, for <0 we are interested in any overflow. OVERFLOWED
186 indicates whether overflow has already occurred. CONST_OVERFLOWED
187 indicates whether constant overflow has already occurred. We force
188 T's value to be within range of T's type (by setting to 0 or 1 all
189 the bits outside the type's range). We set TREE_OVERFLOWED if,
190 OVERFLOWED is nonzero,
191 or OVERFLOWABLE is >0 and signed overflow occurs
192 or OVERFLOWABLE is <0 and any overflow occurs
193 We set TREE_CONSTANT_OVERFLOWED if,
194 CONST_OVERFLOWED is nonzero
195 or we set TREE_OVERFLOWED.
196 We return either the original T, or a copy. */
199 force_fit_type (tree t, int overflowable,
200 bool overflowed, bool overflowed_const)
202 unsigned HOST_WIDE_INT low;
205 int sign_extended_type;
207 gcc_assert (TREE_CODE (t) == INTEGER_CST);
209 low = TREE_INT_CST_LOW (t);
210 high = TREE_INT_CST_HIGH (t);
212 if (POINTER_TYPE_P (TREE_TYPE (t))
213 || TREE_CODE (TREE_TYPE (t)) == OFFSET_TYPE)
216 prec = TYPE_PRECISION (TREE_TYPE (t));
217 /* Size types *are* sign extended. */
218 sign_extended_type = (!TYPE_UNSIGNED (TREE_TYPE (t))
219 || (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
220 && TYPE_IS_SIZETYPE (TREE_TYPE (t))));
222 /* First clear all bits that are beyond the type's precision. */
224 if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
226 else if (prec > HOST_BITS_PER_WIDE_INT)
227 high &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
231 if (prec < HOST_BITS_PER_WIDE_INT)
232 low &= ~((HOST_WIDE_INT) (-1) << prec);
235 if (!sign_extended_type)
236 /* No sign extension */;
237 else if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
238 /* Correct width already. */;
239 else if (prec > HOST_BITS_PER_WIDE_INT)
241 /* Sign extend top half? */
242 if (high & ((unsigned HOST_WIDE_INT)1
243 << (prec - HOST_BITS_PER_WIDE_INT - 1)))
244 high |= (HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT);
246 else if (prec == HOST_BITS_PER_WIDE_INT)
248 if ((HOST_WIDE_INT)low < 0)
253 /* Sign extend bottom half? */
254 if (low & ((unsigned HOST_WIDE_INT)1 << (prec - 1)))
257 low |= (HOST_WIDE_INT)(-1) << prec;
261 /* If the value changed, return a new node. */
262 if (overflowed || overflowed_const
263 || low != TREE_INT_CST_LOW (t) || high != TREE_INT_CST_HIGH (t))
265 t = build_int_cst_wide (TREE_TYPE (t), low, high);
269 || (overflowable > 0 && sign_extended_type))
272 TREE_OVERFLOW (t) = 1;
273 TREE_CONSTANT_OVERFLOW (t) = 1;
275 else if (overflowed_const)
278 TREE_CONSTANT_OVERFLOW (t) = 1;
285 /* Add two doubleword integers with doubleword result.
286 Each argument is given as two `HOST_WIDE_INT' pieces.
287 One argument is L1 and H1; the other, L2 and H2.
288 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
291 add_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
292 unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
293 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
295 unsigned HOST_WIDE_INT l;
299 h = h1 + h2 + (l < l1);
303 return OVERFLOW_SUM_SIGN (h1, h2, h);
306 /* Negate a doubleword integer with doubleword result.
307 Return nonzero if the operation overflows, assuming it's signed.
308 The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
309 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
312 neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
313 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
319 return (*hv & h1) < 0;
329 /* Multiply two doubleword integers with doubleword result.
330 Return nonzero if the operation overflows, assuming it's signed.
331 Each argument is given as two `HOST_WIDE_INT' pieces.
332 One argument is L1 and H1; the other, L2 and H2.
333 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
336 mul_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
337 unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
338 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
340 HOST_WIDE_INT arg1[4];
341 HOST_WIDE_INT arg2[4];
342 HOST_WIDE_INT prod[4 * 2];
343 unsigned HOST_WIDE_INT carry;
345 unsigned HOST_WIDE_INT toplow, neglow;
346 HOST_WIDE_INT tophigh, neghigh;
348 encode (arg1, l1, h1);
349 encode (arg2, l2, h2);
351 memset (prod, 0, sizeof prod);
353 for (i = 0; i < 4; i++)
356 for (j = 0; j < 4; j++)
359 /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000. */
360 carry += arg1[i] * arg2[j];
361 /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF. */
363 prod[k] = LOWPART (carry);
364 carry = HIGHPART (carry);
369 decode (prod, lv, hv); /* This ignores prod[4] through prod[4*2-1] */
371 /* Check for overflow by calculating the top half of the answer in full;
372 it should agree with the low half's sign bit. */
373 decode (prod + 4, &toplow, &tophigh);
376 neg_double (l2, h2, &neglow, &neghigh);
377 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
381 neg_double (l1, h1, &neglow, &neghigh);
382 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
384 return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
387 /* Shift the doubleword integer in L1, H1 left by COUNT places
388 keeping only PREC bits of result.
389 Shift right if COUNT is negative.
390 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
391 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
394 lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
395 HOST_WIDE_INT count, unsigned int prec,
396 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
398 unsigned HOST_WIDE_INT signmask;
402 rshift_double (l1, h1, -count, prec, lv, hv, arith);
406 if (SHIFT_COUNT_TRUNCATED)
409 if (count >= 2 * HOST_BITS_PER_WIDE_INT)
411 /* Shifting by the host word size is undefined according to the
412 ANSI standard, so we must handle this as a special case. */
416 else if (count >= HOST_BITS_PER_WIDE_INT)
418 *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
423 *hv = (((unsigned HOST_WIDE_INT) h1 << count)
424 | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
428 /* Sign extend all bits that are beyond the precision. */
430 signmask = -((prec > HOST_BITS_PER_WIDE_INT
431 ? ((unsigned HOST_WIDE_INT) *hv
432 >> (prec - HOST_BITS_PER_WIDE_INT - 1))
433 : (*lv >> (prec - 1))) & 1);
435 if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
437 else if (prec >= HOST_BITS_PER_WIDE_INT)
439 *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
440 *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
445 *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
446 *lv |= signmask << prec;
450 /* Shift the doubleword integer in L1, H1 right by COUNT places
451 keeping only PREC bits of result. COUNT must be positive.
452 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
453 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
456 rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
457 HOST_WIDE_INT count, unsigned int prec,
458 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
461 unsigned HOST_WIDE_INT signmask;
464 ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
467 if (SHIFT_COUNT_TRUNCATED)
470 if (count >= 2 * HOST_BITS_PER_WIDE_INT)
472 /* Shifting by the host word size is undefined according to the
473 ANSI standard, so we must handle this as a special case. */
477 else if (count >= HOST_BITS_PER_WIDE_INT)
480 *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
484 *hv = (unsigned HOST_WIDE_INT) h1 >> count;
486 | ((unsigned HOST_WIDE_INT) h1 << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
489 /* Zero / sign extend all bits that are beyond the precision. */
491 if (count >= (HOST_WIDE_INT)prec)
496 else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
498 else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
500 *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
501 *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
506 *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
507 *lv |= signmask << (prec - count);
511 /* Rotate the doubleword integer in L1, H1 left by COUNT places
512 keeping only PREC bits of result.
513 Rotate right if COUNT is negative.
514 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
517 lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
518 HOST_WIDE_INT count, unsigned int prec,
519 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
521 unsigned HOST_WIDE_INT s1l, s2l;
522 HOST_WIDE_INT s1h, s2h;
528 lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
529 rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
534 /* Rotate the doubleword integer in L1, H1 left by COUNT places
535 keeping only PREC bits of result. COUNT must be positive.
536 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
539 rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
540 HOST_WIDE_INT count, unsigned int prec,
541 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
543 unsigned HOST_WIDE_INT s1l, s2l;
544 HOST_WIDE_INT s1h, s2h;
550 rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
551 lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
556 /* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
557 for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
558 CODE is a tree code for a kind of division, one of
559 TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
561 It controls how the quotient is rounded to an integer.
562 Return nonzero if the operation overflows.
563 UNS nonzero says do unsigned division. */
566 div_and_round_double (enum tree_code code, int uns,
567 unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
568 HOST_WIDE_INT hnum_orig,
569 unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
570 HOST_WIDE_INT hden_orig,
571 unsigned HOST_WIDE_INT *lquo,
572 HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
576 HOST_WIDE_INT num[4 + 1]; /* extra element for scaling. */
577 HOST_WIDE_INT den[4], quo[4];
579 unsigned HOST_WIDE_INT work;
580 unsigned HOST_WIDE_INT carry = 0;
581 unsigned HOST_WIDE_INT lnum = lnum_orig;
582 HOST_WIDE_INT hnum = hnum_orig;
583 unsigned HOST_WIDE_INT lden = lden_orig;
584 HOST_WIDE_INT hden = hden_orig;
587 if (hden == 0 && lden == 0)
588 overflow = 1, lden = 1;
590 /* Calculate quotient sign and convert operands to unsigned. */
596 /* (minimum integer) / (-1) is the only overflow case. */
597 if (neg_double (lnum, hnum, &lnum, &hnum)
598 && ((HOST_WIDE_INT) lden & hden) == -1)
604 neg_double (lden, hden, &lden, &hden);
608 if (hnum == 0 && hden == 0)
609 { /* single precision */
611 /* This unsigned division rounds toward zero. */
617 { /* trivial case: dividend < divisor */
618 /* hden != 0 already checked. */
625 memset (quo, 0, sizeof quo);
627 memset (num, 0, sizeof num); /* to zero 9th element */
628 memset (den, 0, sizeof den);
630 encode (num, lnum, hnum);
631 encode (den, lden, hden);
633 /* Special code for when the divisor < BASE. */
634 if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
636 /* hnum != 0 already checked. */
637 for (i = 4 - 1; i >= 0; i--)
639 work = num[i] + carry * BASE;
640 quo[i] = work / lden;
646 /* Full double precision division,
647 with thanks to Don Knuth's "Seminumerical Algorithms". */
648 int num_hi_sig, den_hi_sig;
649 unsigned HOST_WIDE_INT quo_est, scale;
651 /* Find the highest nonzero divisor digit. */
652 for (i = 4 - 1;; i--)
659 /* Insure that the first digit of the divisor is at least BASE/2.
660 This is required by the quotient digit estimation algorithm. */
662 scale = BASE / (den[den_hi_sig] + 1);
664 { /* scale divisor and dividend */
666 for (i = 0; i <= 4 - 1; i++)
668 work = (num[i] * scale) + carry;
669 num[i] = LOWPART (work);
670 carry = HIGHPART (work);
675 for (i = 0; i <= 4 - 1; i++)
677 work = (den[i] * scale) + carry;
678 den[i] = LOWPART (work);
679 carry = HIGHPART (work);
680 if (den[i] != 0) den_hi_sig = i;
687 for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
689 /* Guess the next quotient digit, quo_est, by dividing the first
690 two remaining dividend digits by the high order quotient digit.
691 quo_est is never low and is at most 2 high. */
692 unsigned HOST_WIDE_INT tmp;
694 num_hi_sig = i + den_hi_sig + 1;
695 work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
696 if (num[num_hi_sig] != den[den_hi_sig])
697 quo_est = work / den[den_hi_sig];
701 /* Refine quo_est so it's usually correct, and at most one high. */
702 tmp = work - quo_est * den[den_hi_sig];
704 && (den[den_hi_sig - 1] * quo_est
705 > (tmp * BASE + num[num_hi_sig - 2])))
708 /* Try QUO_EST as the quotient digit, by multiplying the
709 divisor by QUO_EST and subtracting from the remaining dividend.
710 Keep in mind that QUO_EST is the I - 1st digit. */
713 for (j = 0; j <= den_hi_sig; j++)
715 work = quo_est * den[j] + carry;
716 carry = HIGHPART (work);
717 work = num[i + j] - LOWPART (work);
718 num[i + j] = LOWPART (work);
719 carry += HIGHPART (work) != 0;
722 /* If quo_est was high by one, then num[i] went negative and
723 we need to correct things. */
724 if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
727 carry = 0; /* add divisor back in */
728 for (j = 0; j <= den_hi_sig; j++)
730 work = num[i + j] + den[j] + carry;
731 carry = HIGHPART (work);
732 num[i + j] = LOWPART (work);
735 num [num_hi_sig] += carry;
738 /* Store the quotient digit. */
743 decode (quo, lquo, hquo);
746 /* If result is negative, make it so. */
748 neg_double (*lquo, *hquo, lquo, hquo);
750 /* Compute trial remainder: rem = num - (quo * den) */
751 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
752 neg_double (*lrem, *hrem, lrem, hrem);
753 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
758 case TRUNC_MOD_EXPR: /* round toward zero */
759 case EXACT_DIV_EXPR: /* for this one, it shouldn't matter */
763 case FLOOR_MOD_EXPR: /* round toward negative infinity */
764 if (quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio < 0 && rem != 0 */
767 add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
775 case CEIL_MOD_EXPR: /* round toward positive infinity */
776 if (!quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio > 0 && rem != 0 */
778 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
786 case ROUND_MOD_EXPR: /* round to closest integer */
788 unsigned HOST_WIDE_INT labs_rem = *lrem;
789 HOST_WIDE_INT habs_rem = *hrem;
790 unsigned HOST_WIDE_INT labs_den = lden, ltwice;
791 HOST_WIDE_INT habs_den = hden, htwice;
793 /* Get absolute values. */
795 neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
797 neg_double (lden, hden, &labs_den, &habs_den);
799 /* If (2 * abs (lrem) >= abs (lden)) */
800 mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
801 labs_rem, habs_rem, <wice, &htwice);
803 if (((unsigned HOST_WIDE_INT) habs_den
804 < (unsigned HOST_WIDE_INT) htwice)
805 || (((unsigned HOST_WIDE_INT) habs_den
806 == (unsigned HOST_WIDE_INT) htwice)
807 && (labs_den < ltwice)))
811 add_double (*lquo, *hquo,
812 (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
815 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
827 /* Compute true remainder: rem = num - (quo * den) */
828 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
829 neg_double (*lrem, *hrem, lrem, hrem);
830 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
834 /* If ARG2 divides ARG1 with zero remainder, carries out the division
835 of type CODE and returns the quotient.
836 Otherwise returns NULL_TREE. */
839 div_if_zero_remainder (enum tree_code code, tree arg1, tree arg2)
841 unsigned HOST_WIDE_INT int1l, int2l;
842 HOST_WIDE_INT int1h, int2h;
843 unsigned HOST_WIDE_INT quol, reml;
844 HOST_WIDE_INT quoh, remh;
845 tree type = TREE_TYPE (arg1);
846 int uns = TYPE_UNSIGNED (type);
848 int1l = TREE_INT_CST_LOW (arg1);
849 int1h = TREE_INT_CST_HIGH (arg1);
850 int2l = TREE_INT_CST_LOW (arg2);
851 int2h = TREE_INT_CST_HIGH (arg2);
853 div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
854 &quol, &quoh, &reml, &remh);
855 if (remh != 0 || reml != 0)
858 return build_int_cst_wide (type, quol, quoh);
861 /* Return true if built-in mathematical function specified by CODE
862 preserves the sign of it argument, i.e. -f(x) == f(-x). */
865 negate_mathfn_p (enum built_in_function code)
889 /* Check whether we may negate an integer constant T without causing
893 may_negate_without_overflow_p (tree t)
895 unsigned HOST_WIDE_INT val;
899 gcc_assert (TREE_CODE (t) == INTEGER_CST);
901 type = TREE_TYPE (t);
902 if (TYPE_UNSIGNED (type))
905 prec = TYPE_PRECISION (type);
906 if (prec > HOST_BITS_PER_WIDE_INT)
908 if (TREE_INT_CST_LOW (t) != 0)
910 prec -= HOST_BITS_PER_WIDE_INT;
911 val = TREE_INT_CST_HIGH (t);
914 val = TREE_INT_CST_LOW (t);
915 if (prec < HOST_BITS_PER_WIDE_INT)
916 val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
917 return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
920 /* Determine whether an expression T can be cheaply negated using
921 the function negate_expr. */
924 negate_expr_p (tree t)
931 type = TREE_TYPE (t);
934 switch (TREE_CODE (t))
937 if (TYPE_UNSIGNED (type) || ! flag_trapv)
940 /* Check that -CST will not overflow type. */
941 return may_negate_without_overflow_p (t);
948 return negate_expr_p (TREE_REALPART (t))
949 && negate_expr_p (TREE_IMAGPART (t));
952 if (FLOAT_TYPE_P (type) && !flag_unsafe_math_optimizations)
954 /* -(A + B) -> (-B) - A. */
955 if (negate_expr_p (TREE_OPERAND (t, 1))
956 && reorder_operands_p (TREE_OPERAND (t, 0),
957 TREE_OPERAND (t, 1)))
959 /* -(A + B) -> (-A) - B. */
960 return negate_expr_p (TREE_OPERAND (t, 0));
963 /* We can't turn -(A-B) into B-A when we honor signed zeros. */
964 return (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
965 && reorder_operands_p (TREE_OPERAND (t, 0),
966 TREE_OPERAND (t, 1));
969 if (TYPE_UNSIGNED (TREE_TYPE (t)))
975 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
976 return negate_expr_p (TREE_OPERAND (t, 1))
977 || negate_expr_p (TREE_OPERAND (t, 0));
981 /* Negate -((double)float) as (double)(-float). */
982 if (TREE_CODE (type) == REAL_TYPE)
984 tree tem = strip_float_extensions (t);
986 return negate_expr_p (tem);
991 /* Negate -f(x) as f(-x). */
992 if (negate_mathfn_p (builtin_mathfn_code (t)))
993 return negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1)));
997 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
998 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
1000 tree op1 = TREE_OPERAND (t, 1);
1001 if (TREE_INT_CST_HIGH (op1) == 0
1002 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
1003 == TREE_INT_CST_LOW (op1))
1014 /* Given T, an expression, return the negation of T. Allow for T to be
1015 null, in which case return null. */
1018 negate_expr (tree t)
1026 type = TREE_TYPE (t);
1027 STRIP_SIGN_NOPS (t);
1029 switch (TREE_CODE (t))
1032 tem = fold_negate_const (t, type);
1033 if (! TREE_OVERFLOW (tem)
1034 || TYPE_UNSIGNED (type)
1040 tem = fold_negate_const (t, type);
1041 /* Two's complement FP formats, such as c4x, may overflow. */
1042 if (! TREE_OVERFLOW (tem) || ! flag_trapping_math)
1043 return fold_convert (type, tem);
1048 tree rpart = negate_expr (TREE_REALPART (t));
1049 tree ipart = negate_expr (TREE_IMAGPART (t));
1051 if ((TREE_CODE (rpart) == REAL_CST
1052 && TREE_CODE (ipart) == REAL_CST)
1053 || (TREE_CODE (rpart) == INTEGER_CST
1054 && TREE_CODE (ipart) == INTEGER_CST))
1055 return build_complex (type, rpart, ipart);
1060 return fold_convert (type, TREE_OPERAND (t, 0));
1063 if (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
1065 /* -(A + B) -> (-B) - A. */
1066 if (negate_expr_p (TREE_OPERAND (t, 1))
1067 && reorder_operands_p (TREE_OPERAND (t, 0),
1068 TREE_OPERAND (t, 1)))
1070 tem = negate_expr (TREE_OPERAND (t, 1));
1071 tem = fold_build2 (MINUS_EXPR, TREE_TYPE (t),
1072 tem, TREE_OPERAND (t, 0));
1073 return fold_convert (type, tem);
1076 /* -(A + B) -> (-A) - B. */
1077 if (negate_expr_p (TREE_OPERAND (t, 0)))
1079 tem = negate_expr (TREE_OPERAND (t, 0));
1080 tem = fold_build2 (MINUS_EXPR, TREE_TYPE (t),
1081 tem, TREE_OPERAND (t, 1));
1082 return fold_convert (type, tem);
1088 /* - (A - B) -> B - A */
1089 if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
1090 && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
1091 return fold_convert (type,
1092 fold_build2 (MINUS_EXPR, TREE_TYPE (t),
1093 TREE_OPERAND (t, 1),
1094 TREE_OPERAND (t, 0)));
1098 if (TYPE_UNSIGNED (TREE_TYPE (t)))
1104 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
1106 tem = TREE_OPERAND (t, 1);
1107 if (negate_expr_p (tem))
1108 return fold_convert (type,
1109 fold_build2 (TREE_CODE (t), TREE_TYPE (t),
1110 TREE_OPERAND (t, 0),
1111 negate_expr (tem)));
1112 tem = TREE_OPERAND (t, 0);
1113 if (negate_expr_p (tem))
1114 return fold_convert (type,
1115 fold_build2 (TREE_CODE (t), TREE_TYPE (t),
1117 TREE_OPERAND (t, 1)));
1122 /* Convert -((double)float) into (double)(-float). */
1123 if (TREE_CODE (type) == REAL_TYPE)
1125 tem = strip_float_extensions (t);
1126 if (tem != t && negate_expr_p (tem))
1127 return fold_convert (type, negate_expr (tem));
1132 /* Negate -f(x) as f(-x). */
1133 if (negate_mathfn_p (builtin_mathfn_code (t))
1134 && negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1))))
1136 tree fndecl, arg, arglist;
1138 fndecl = get_callee_fndecl (t);
1139 arg = negate_expr (TREE_VALUE (TREE_OPERAND (t, 1)));
1140 arglist = build_tree_list (NULL_TREE, arg);
1141 return build_function_call_expr (fndecl, arglist);
1146 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
1147 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
1149 tree op1 = TREE_OPERAND (t, 1);
1150 if (TREE_INT_CST_HIGH (op1) == 0
1151 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
1152 == TREE_INT_CST_LOW (op1))
1154 tree ntype = TYPE_UNSIGNED (type)
1155 ? lang_hooks.types.signed_type (type)
1156 : lang_hooks.types.unsigned_type (type);
1157 tree temp = fold_convert (ntype, TREE_OPERAND (t, 0));
1158 temp = fold_build2 (RSHIFT_EXPR, ntype, temp, op1);
1159 return fold_convert (type, temp);
1168 tem = fold_build1 (NEGATE_EXPR, TREE_TYPE (t), t);
1169 return fold_convert (type, tem);
1172 /* Split a tree IN into a constant, literal and variable parts that could be
1173 combined with CODE to make IN. "constant" means an expression with
1174 TREE_CONSTANT but that isn't an actual constant. CODE must be a
1175 commutative arithmetic operation. Store the constant part into *CONP,
1176 the literal in *LITP and return the variable part. If a part isn't
1177 present, set it to null. If the tree does not decompose in this way,
1178 return the entire tree as the variable part and the other parts as null.
1180 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR. In that
1181 case, we negate an operand that was subtracted. Except if it is a
1182 literal for which we use *MINUS_LITP instead.
1184 If NEGATE_P is true, we are negating all of IN, again except a literal
1185 for which we use *MINUS_LITP instead.
1187 If IN is itself a literal or constant, return it as appropriate.
1189 Note that we do not guarantee that any of the three values will be the
1190 same type as IN, but they will have the same signedness and mode. */
1193 split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
1194 tree *minus_litp, int negate_p)
1202 /* Strip any conversions that don't change the machine mode or signedness. */
1203 STRIP_SIGN_NOPS (in);
1205 if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST)
1207 else if (TREE_CODE (in) == code
1208 || (! FLOAT_TYPE_P (TREE_TYPE (in))
1209 /* We can associate addition and subtraction together (even
1210 though the C standard doesn't say so) for integers because
1211 the value is not affected. For reals, the value might be
1212 affected, so we can't. */
1213 && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
1214 || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
1216 tree op0 = TREE_OPERAND (in, 0);
1217 tree op1 = TREE_OPERAND (in, 1);
1218 int neg1_p = TREE_CODE (in) == MINUS_EXPR;
1219 int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;
1221 /* First see if either of the operands is a literal, then a constant. */
1222 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
1223 *litp = op0, op0 = 0;
1224 else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST)
1225 *litp = op1, neg_litp_p = neg1_p, op1 = 0;
1227 if (op0 != 0 && TREE_CONSTANT (op0))
1228 *conp = op0, op0 = 0;
1229 else if (op1 != 0 && TREE_CONSTANT (op1))
1230 *conp = op1, neg_conp_p = neg1_p, op1 = 0;
1232 /* If we haven't dealt with either operand, this is not a case we can
1233 decompose. Otherwise, VAR is either of the ones remaining, if any. */
1234 if (op0 != 0 && op1 != 0)
1239 var = op1, neg_var_p = neg1_p;
1241 /* Now do any needed negations. */
1243 *minus_litp = *litp, *litp = 0;
1245 *conp = negate_expr (*conp);
1247 var = negate_expr (var);
1249 else if (TREE_CONSTANT (in))
1257 *minus_litp = *litp, *litp = 0;
1258 else if (*minus_litp)
1259 *litp = *minus_litp, *minus_litp = 0;
1260 *conp = negate_expr (*conp);
1261 var = negate_expr (var);
1267 /* Re-associate trees split by the above function. T1 and T2 are either
1268 expressions to associate or null. Return the new expression, if any. If
1269 we build an operation, do it in TYPE and with CODE. */
1272 associate_trees (tree t1, tree t2, enum tree_code code, tree type)
1279 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
1280 try to fold this since we will have infinite recursion. But do
1281 deal with any NEGATE_EXPRs. */
1282 if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
1283 || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
1285 if (code == PLUS_EXPR)
1287 if (TREE_CODE (t1) == NEGATE_EXPR)
1288 return build2 (MINUS_EXPR, type, fold_convert (type, t2),
1289 fold_convert (type, TREE_OPERAND (t1, 0)));
1290 else if (TREE_CODE (t2) == NEGATE_EXPR)
1291 return build2 (MINUS_EXPR, type, fold_convert (type, t1),
1292 fold_convert (type, TREE_OPERAND (t2, 0)));
1293 else if (integer_zerop (t2))
1294 return fold_convert (type, t1);
1296 else if (code == MINUS_EXPR)
1298 if (integer_zerop (t2))
1299 return fold_convert (type, t1);
1302 return build2 (code, type, fold_convert (type, t1),
1303 fold_convert (type, t2));
1306 return fold_build2 (code, type, fold_convert (type, t1),
1307 fold_convert (type, t2));
1310 /* Combine two integer constants ARG1 and ARG2 under operation CODE
1311 to produce a new constant.
1313 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
1316 int_const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
1318 unsigned HOST_WIDE_INT int1l, int2l;
1319 HOST_WIDE_INT int1h, int2h;
1320 unsigned HOST_WIDE_INT low;
1322 unsigned HOST_WIDE_INT garbagel;
1323 HOST_WIDE_INT garbageh;
1325 tree type = TREE_TYPE (arg1);
1326 int uns = TYPE_UNSIGNED (type);
1328 = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
1331 int1l = TREE_INT_CST_LOW (arg1);
1332 int1h = TREE_INT_CST_HIGH (arg1);
1333 int2l = TREE_INT_CST_LOW (arg2);
1334 int2h = TREE_INT_CST_HIGH (arg2);
1339 low = int1l | int2l, hi = int1h | int2h;
1343 low = int1l ^ int2l, hi = int1h ^ int2h;
1347 low = int1l & int2l, hi = int1h & int2h;
1353 /* It's unclear from the C standard whether shifts can overflow.
1354 The following code ignores overflow; perhaps a C standard
1355 interpretation ruling is needed. */
1356 lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
1363 lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
1368 overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
1372 neg_double (int2l, int2h, &low, &hi);
1373 add_double (int1l, int1h, low, hi, &low, &hi);
1374 overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
1378 overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
1381 case TRUNC_DIV_EXPR:
1382 case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
1383 case EXACT_DIV_EXPR:
1384 /* This is a shortcut for a common special case. */
1385 if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1386 && ! TREE_CONSTANT_OVERFLOW (arg1)
1387 && ! TREE_CONSTANT_OVERFLOW (arg2)
1388 && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1390 if (code == CEIL_DIV_EXPR)
1393 low = int1l / int2l, hi = 0;
1397 /* ... fall through ... */
1399 case ROUND_DIV_EXPR:
1400 if (int2h == 0 && int2l == 1)
1402 low = int1l, hi = int1h;
1405 if (int1l == int2l && int1h == int2h
1406 && ! (int1l == 0 && int1h == 0))
1411 overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
1412 &low, &hi, &garbagel, &garbageh);
1415 case TRUNC_MOD_EXPR:
1416 case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
1417 /* This is a shortcut for a common special case. */
1418 if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1419 && ! TREE_CONSTANT_OVERFLOW (arg1)
1420 && ! TREE_CONSTANT_OVERFLOW (arg2)
1421 && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1423 if (code == CEIL_MOD_EXPR)
1425 low = int1l % int2l, hi = 0;
1429 /* ... fall through ... */
1431 case ROUND_MOD_EXPR:
1432 overflow = div_and_round_double (code, uns,
1433 int1l, int1h, int2l, int2h,
1434 &garbagel, &garbageh, &low, &hi);
1440 low = (((unsigned HOST_WIDE_INT) int1h
1441 < (unsigned HOST_WIDE_INT) int2h)
1442 || (((unsigned HOST_WIDE_INT) int1h
1443 == (unsigned HOST_WIDE_INT) int2h)
1446 low = (int1h < int2h
1447 || (int1h == int2h && int1l < int2l));
1449 if (low == (code == MIN_EXPR))
1450 low = int1l, hi = int1h;
1452 low = int2l, hi = int2h;
1459 t = build_int_cst_wide (TREE_TYPE (arg1), low, hi);
1463 /* Propagate overflow flags ourselves. */
1464 if (((!uns || is_sizetype) && overflow)
1465 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
1468 TREE_OVERFLOW (t) = 1;
1469 TREE_CONSTANT_OVERFLOW (t) = 1;
1471 else if (TREE_CONSTANT_OVERFLOW (arg1) | TREE_CONSTANT_OVERFLOW (arg2))
1474 TREE_CONSTANT_OVERFLOW (t) = 1;
1478 t = force_fit_type (t, 1,
1479 ((!uns || is_sizetype) && overflow)
1480 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2),
1481 TREE_CONSTANT_OVERFLOW (arg1)
1482 | TREE_CONSTANT_OVERFLOW (arg2));
1487 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1488 constant. We assume ARG1 and ARG2 have the same data type, or at least
1489 are the same kind of constant and the same machine mode.
1491 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
1494 const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
1499 if (TREE_CODE (arg1) == INTEGER_CST)
1500 return int_const_binop (code, arg1, arg2, notrunc);
1502 if (TREE_CODE (arg1) == REAL_CST)
1504 enum machine_mode mode;
1507 REAL_VALUE_TYPE value;
1508 REAL_VALUE_TYPE result;
1512 d1 = TREE_REAL_CST (arg1);
1513 d2 = TREE_REAL_CST (arg2);
1515 type = TREE_TYPE (arg1);
1516 mode = TYPE_MODE (type);
1518 /* Don't perform operation if we honor signaling NaNs and
1519 either operand is a NaN. */
1520 if (HONOR_SNANS (mode)
1521 && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
1524 /* Don't perform operation if it would raise a division
1525 by zero exception. */
1526 if (code == RDIV_EXPR
1527 && REAL_VALUES_EQUAL (d2, dconst0)
1528 && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
1531 /* If either operand is a NaN, just return it. Otherwise, set up
1532 for floating-point trap; we return an overflow. */
1533 if (REAL_VALUE_ISNAN (d1))
1535 else if (REAL_VALUE_ISNAN (d2))
1538 inexact = real_arithmetic (&value, code, &d1, &d2);
1539 real_convert (&result, mode, &value);
1541 /* Don't constant fold this floating point operation if the
1542 result may dependent upon the run-time rounding mode and
1543 flag_rounding_math is set, or if GCC's software emulation
1544 is unable to accurately represent the result. */
1546 if ((flag_rounding_math
1547 || (REAL_MODE_FORMAT_COMPOSITE_P (mode)
1548 && !flag_unsafe_math_optimizations))
1549 && (inexact || !real_identical (&result, &value)))
1552 t = build_real (type, result);
1554 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
1555 TREE_CONSTANT_OVERFLOW (t)
1557 | TREE_CONSTANT_OVERFLOW (arg1)
1558 | TREE_CONSTANT_OVERFLOW (arg2);
1561 if (TREE_CODE (arg1) == COMPLEX_CST)
1563 tree type = TREE_TYPE (arg1);
1564 tree r1 = TREE_REALPART (arg1);
1565 tree i1 = TREE_IMAGPART (arg1);
1566 tree r2 = TREE_REALPART (arg2);
1567 tree i2 = TREE_IMAGPART (arg2);
1573 t = build_complex (type,
1574 const_binop (PLUS_EXPR, r1, r2, notrunc),
1575 const_binop (PLUS_EXPR, i1, i2, notrunc));
1579 t = build_complex (type,
1580 const_binop (MINUS_EXPR, r1, r2, notrunc),
1581 const_binop (MINUS_EXPR, i1, i2, notrunc));
1585 t = build_complex (type,
1586 const_binop (MINUS_EXPR,
1587 const_binop (MULT_EXPR,
1589 const_binop (MULT_EXPR,
1592 const_binop (PLUS_EXPR,
1593 const_binop (MULT_EXPR,
1595 const_binop (MULT_EXPR,
1602 tree t1, t2, real, imag;
1604 = const_binop (PLUS_EXPR,
1605 const_binop (MULT_EXPR, r2, r2, notrunc),
1606 const_binop (MULT_EXPR, i2, i2, notrunc),
1609 t1 = const_binop (PLUS_EXPR,
1610 const_binop (MULT_EXPR, r1, r2, notrunc),
1611 const_binop (MULT_EXPR, i1, i2, notrunc),
1613 t2 = const_binop (MINUS_EXPR,
1614 const_binop (MULT_EXPR, i1, r2, notrunc),
1615 const_binop (MULT_EXPR, r1, i2, notrunc),
1618 if (INTEGRAL_TYPE_P (TREE_TYPE (r1)))
1620 real = const_binop (TRUNC_DIV_EXPR, t1, magsquared, notrunc);
1621 imag = const_binop (TRUNC_DIV_EXPR, t2, magsquared, notrunc);
1625 real = const_binop (RDIV_EXPR, t1, magsquared, notrunc);
1626 imag = const_binop (RDIV_EXPR, t2, magsquared, notrunc);
1631 t = build_complex (type, real, imag);
1643 /* Create a size type INT_CST node with NUMBER sign extended. KIND
1644 indicates which particular sizetype to create. */
1647 size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
1649 return build_int_cst (sizetype_tab[(int) kind], number);
1652 /* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE
1653 is a tree code. The type of the result is taken from the operands.
1654 Both must be the same type integer type and it must be a size type.
1655 If the operands are constant, so is the result. */
1658 size_binop (enum tree_code code, tree arg0, tree arg1)
1660 tree type = TREE_TYPE (arg0);
1662 gcc_assert (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
1663 && type == TREE_TYPE (arg1));
1665 /* Handle the special case of two integer constants faster. */
1666 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
1668 /* And some specific cases even faster than that. */
1669 if (code == PLUS_EXPR && integer_zerop (arg0))
1671 else if ((code == MINUS_EXPR || code == PLUS_EXPR)
1672 && integer_zerop (arg1))
1674 else if (code == MULT_EXPR && integer_onep (arg0))
1677 /* Handle general case of two integer constants. */
1678 return int_const_binop (code, arg0, arg1, 0);
1681 if (arg0 == error_mark_node || arg1 == error_mark_node)
1682 return error_mark_node;
1684 return fold_build2 (code, type, arg0, arg1);
1687 /* Given two values, either both of sizetype or both of bitsizetype,
1688 compute the difference between the two values. Return the value
1689 in signed type corresponding to the type of the operands. */
1692 size_diffop (tree arg0, tree arg1)
1694 tree type = TREE_TYPE (arg0);
1697 gcc_assert (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
1698 && type == TREE_TYPE (arg1));
1700 /* If the type is already signed, just do the simple thing. */
1701 if (!TYPE_UNSIGNED (type))
1702 return size_binop (MINUS_EXPR, arg0, arg1);
1704 ctype = type == bitsizetype ? sbitsizetype : ssizetype;
1706 /* If either operand is not a constant, do the conversions to the signed
1707 type and subtract. The hardware will do the right thing with any
1708 overflow in the subtraction. */
1709 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
1710 return size_binop (MINUS_EXPR, fold_convert (ctype, arg0),
1711 fold_convert (ctype, arg1));
1713 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
1714 Otherwise, subtract the other way, convert to CTYPE (we know that can't
1715 overflow) and negate (which can't either). Special-case a result
1716 of zero while we're here. */
1717 if (tree_int_cst_equal (arg0, arg1))
1718 return fold_convert (ctype, integer_zero_node);
1719 else if (tree_int_cst_lt (arg1, arg0))
1720 return fold_convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
1722 return size_binop (MINUS_EXPR, fold_convert (ctype, integer_zero_node),
1723 fold_convert (ctype, size_binop (MINUS_EXPR,
1727 /* A subroutine of fold_convert_const handling conversions of an
1728 INTEGER_CST to another integer type. */
1731 fold_convert_const_int_from_int (tree type, tree arg1)
1735 /* Given an integer constant, make new constant with new type,
1736 appropriately sign-extended or truncated. */
1737 t = build_int_cst_wide (type, TREE_INT_CST_LOW (arg1),
1738 TREE_INT_CST_HIGH (arg1));
1740 t = force_fit_type (t,
1741 /* Don't set the overflow when
1742 converting a pointer */
1743 !POINTER_TYPE_P (TREE_TYPE (arg1)),
1744 (TREE_INT_CST_HIGH (arg1) < 0
1745 && (TYPE_UNSIGNED (type)
1746 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
1747 | TREE_OVERFLOW (arg1),
1748 TREE_CONSTANT_OVERFLOW (arg1));
1753 /* A subroutine of fold_convert_const handling conversions a REAL_CST
1754 to an integer type. */
1757 fold_convert_const_int_from_real (enum tree_code code, tree type, tree arg1)
1762 /* The following code implements the floating point to integer
1763 conversion rules required by the Java Language Specification,
1764 that IEEE NaNs are mapped to zero and values that overflow
1765 the target precision saturate, i.e. values greater than
1766 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
1767 are mapped to INT_MIN. These semantics are allowed by the
1768 C and C++ standards that simply state that the behavior of
1769 FP-to-integer conversion is unspecified upon overflow. */
1771 HOST_WIDE_INT high, low;
1773 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
1777 case FIX_TRUNC_EXPR:
1778 real_trunc (&r, VOIDmode, &x);
1782 real_ceil (&r, VOIDmode, &x);
1785 case FIX_FLOOR_EXPR:
1786 real_floor (&r, VOIDmode, &x);
1789 case FIX_ROUND_EXPR:
1790 real_round (&r, VOIDmode, &x);
1797 /* If R is NaN, return zero and show we have an overflow. */
1798 if (REAL_VALUE_ISNAN (r))
1805 /* See if R is less than the lower bound or greater than the
1810 tree lt = TYPE_MIN_VALUE (type);
1811 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
1812 if (REAL_VALUES_LESS (r, l))
1815 high = TREE_INT_CST_HIGH (lt);
1816 low = TREE_INT_CST_LOW (lt);
1822 tree ut = TYPE_MAX_VALUE (type);
1825 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
1826 if (REAL_VALUES_LESS (u, r))
1829 high = TREE_INT_CST_HIGH (ut);
1830 low = TREE_INT_CST_LOW (ut);
1836 REAL_VALUE_TO_INT (&low, &high, r);
1838 t = build_int_cst_wide (type, low, high);
1840 t = force_fit_type (t, -1, overflow | TREE_OVERFLOW (arg1),
1841 TREE_CONSTANT_OVERFLOW (arg1));
1845 /* A subroutine of fold_convert_const handling conversions a REAL_CST
1846 to another floating point type. */
1849 fold_convert_const_real_from_real (tree type, tree arg1)
1851 REAL_VALUE_TYPE value;
1854 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
1855 t = build_real (type, value);
1857 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
1858 TREE_CONSTANT_OVERFLOW (t)
1859 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
1863 /* Attempt to fold type conversion operation CODE of expression ARG1 to
1864 type TYPE. If no simplification can be done return NULL_TREE. */
1867 fold_convert_const (enum tree_code code, tree type, tree arg1)
1869 if (TREE_TYPE (arg1) == type)
1872 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
1874 if (TREE_CODE (arg1) == INTEGER_CST)
1875 return fold_convert_const_int_from_int (type, arg1);
1876 else if (TREE_CODE (arg1) == REAL_CST)
1877 return fold_convert_const_int_from_real (code, type, arg1);
1879 else if (TREE_CODE (type) == REAL_TYPE)
1881 if (TREE_CODE (arg1) == INTEGER_CST)
1882 return build_real_from_int_cst (type, arg1);
1883 if (TREE_CODE (arg1) == REAL_CST)
1884 return fold_convert_const_real_from_real (type, arg1);
1889 /* Construct a vector of zero elements of vector type TYPE. */
1892 build_zero_vector (tree type)
1897 elem = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
1898 units = TYPE_VECTOR_SUBPARTS (type);
1901 for (i = 0; i < units; i++)
1902 list = tree_cons (NULL_TREE, elem, list);
1903 return build_vector (type, list);
1906 /* Convert expression ARG to type TYPE. Used by the middle-end for
1907 simple conversions in preference to calling the front-end's convert. */
1910 fold_convert (tree type, tree arg)
1912 tree orig = TREE_TYPE (arg);
1918 if (TREE_CODE (arg) == ERROR_MARK
1919 || TREE_CODE (type) == ERROR_MARK
1920 || TREE_CODE (orig) == ERROR_MARK)
1921 return error_mark_node;
1923 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig)
1924 || lang_hooks.types_compatible_p (TYPE_MAIN_VARIANT (type),
1925 TYPE_MAIN_VARIANT (orig)))
1926 return fold_build1 (NOP_EXPR, type, arg);
1928 switch (TREE_CODE (type))
1930 case INTEGER_TYPE: case CHAR_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1931 case POINTER_TYPE: case REFERENCE_TYPE:
1933 if (TREE_CODE (arg) == INTEGER_CST)
1935 tem = fold_convert_const (NOP_EXPR, type, arg);
1936 if (tem != NULL_TREE)
1939 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
1940 || TREE_CODE (orig) == OFFSET_TYPE)
1941 return fold_build1 (NOP_EXPR, type, arg);
1942 if (TREE_CODE (orig) == COMPLEX_TYPE)
1944 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
1945 return fold_convert (type, tem);
1947 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
1948 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
1949 return fold_build1 (NOP_EXPR, type, arg);
1952 if (TREE_CODE (arg) == INTEGER_CST)
1954 tem = fold_convert_const (FLOAT_EXPR, type, arg);
1955 if (tem != NULL_TREE)
1958 else if (TREE_CODE (arg) == REAL_CST)
1960 tem = fold_convert_const (NOP_EXPR, type, arg);
1961 if (tem != NULL_TREE)
1965 switch (TREE_CODE (orig))
1967 case INTEGER_TYPE: case CHAR_TYPE:
1968 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
1969 case POINTER_TYPE: case REFERENCE_TYPE:
1970 return fold_build1 (FLOAT_EXPR, type, arg);
1973 return fold_build1 (flag_float_store ? CONVERT_EXPR : NOP_EXPR,
1977 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
1978 return fold_convert (type, tem);
1985 switch (TREE_CODE (orig))
1987 case INTEGER_TYPE: case CHAR_TYPE:
1988 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
1989 case POINTER_TYPE: case REFERENCE_TYPE:
1991 return build2 (COMPLEX_EXPR, type,
1992 fold_convert (TREE_TYPE (type), arg),
1993 fold_convert (TREE_TYPE (type), integer_zero_node));
1998 if (TREE_CODE (arg) == COMPLEX_EXPR)
2000 rpart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 0));
2001 ipart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 1));
2002 return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
2005 arg = save_expr (arg);
2006 rpart = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2007 ipart = fold_build1 (IMAGPART_EXPR, TREE_TYPE (orig), arg);
2008 rpart = fold_convert (TREE_TYPE (type), rpart);
2009 ipart = fold_convert (TREE_TYPE (type), ipart);
2010 return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
2018 if (integer_zerop (arg))
2019 return build_zero_vector (type);
2020 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2021 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2022 || TREE_CODE (orig) == VECTOR_TYPE);
2023 return fold_build1 (NOP_EXPR, type, arg);
2026 return fold_build1 (CONVERT_EXPR, type, fold_ignored_result (arg));
2033 /* Return false if expr can be assumed not to be an value, true
2037 maybe_lvalue_p (tree x)
2039 /* We only need to wrap lvalue tree codes. */
2040 switch (TREE_CODE (x))
2051 case ALIGN_INDIRECT_REF:
2052 case MISALIGNED_INDIRECT_REF:
2054 case ARRAY_RANGE_REF:
2060 case PREINCREMENT_EXPR:
2061 case PREDECREMENT_EXPR:
2063 case TRY_CATCH_EXPR:
2064 case WITH_CLEANUP_EXPR:
2075 /* Assume the worst for front-end tree codes. */
2076 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2084 /* Return an expr equal to X but certainly not valid as an lvalue. */
2089 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2094 if (! maybe_lvalue_p (x))
2096 return build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
2099 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2100 Zero means allow extended lvalues. */
2102 int pedantic_lvalues;
2104 /* When pedantic, return an expr equal to X but certainly not valid as a
2105 pedantic lvalue. Otherwise, return X. */
2108 pedantic_non_lvalue (tree x)
2110 if (pedantic_lvalues)
2111 return non_lvalue (x);
2116 /* Given a tree comparison code, return the code that is the logical inverse
2117 of the given code. It is not safe to do this for floating-point
2118 comparisons, except for NE_EXPR and EQ_EXPR, so we receive a machine mode
2119 as well: if reversing the comparison is unsafe, return ERROR_MARK. */
2122 invert_tree_comparison (enum tree_code code, bool honor_nans)
2124 if (honor_nans && flag_trapping_math)
2134 return honor_nans ? UNLE_EXPR : LE_EXPR;
2136 return honor_nans ? UNLT_EXPR : LT_EXPR;
2138 return honor_nans ? UNGE_EXPR : GE_EXPR;
2140 return honor_nans ? UNGT_EXPR : GT_EXPR;
2154 return UNORDERED_EXPR;
2155 case UNORDERED_EXPR:
2156 return ORDERED_EXPR;
2162 /* Similar, but return the comparison that results if the operands are
2163 swapped. This is safe for floating-point. */
2166 swap_tree_comparison (enum tree_code code)
2173 case UNORDERED_EXPR:
2199 /* Convert a comparison tree code from an enum tree_code representation
2200 into a compcode bit-based encoding. This function is the inverse of
2201 compcode_to_comparison. */
2203 static enum comparison_code
2204 comparison_to_compcode (enum tree_code code)
2221 return COMPCODE_ORD;
2222 case UNORDERED_EXPR:
2223 return COMPCODE_UNORD;
2225 return COMPCODE_UNLT;
2227 return COMPCODE_UNEQ;
2229 return COMPCODE_UNLE;
2231 return COMPCODE_UNGT;
2233 return COMPCODE_LTGT;
2235 return COMPCODE_UNGE;
2241 /* Convert a compcode bit-based encoding of a comparison operator back
2242 to GCC's enum tree_code representation. This function is the
2243 inverse of comparison_to_compcode. */
2245 static enum tree_code
2246 compcode_to_comparison (enum comparison_code code)
2263 return ORDERED_EXPR;
2264 case COMPCODE_UNORD:
2265 return UNORDERED_EXPR;
2283 /* Return a tree for the comparison which is the combination of
2284 doing the AND or OR (depending on CODE) of the two operations LCODE
2285 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2286 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2287 if this makes the transformation invalid. */
2290 combine_comparisons (enum tree_code code, enum tree_code lcode,
2291 enum tree_code rcode, tree truth_type,
2292 tree ll_arg, tree lr_arg)
2294 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2295 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2296 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2297 enum comparison_code compcode;
2301 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2302 compcode = lcompcode & rcompcode;
2305 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2306 compcode = lcompcode | rcompcode;
2315 /* Eliminate unordered comparisons, as well as LTGT and ORD
2316 which are not used unless the mode has NaNs. */
2317 compcode &= ~COMPCODE_UNORD;
2318 if (compcode == COMPCODE_LTGT)
2319 compcode = COMPCODE_NE;
2320 else if (compcode == COMPCODE_ORD)
2321 compcode = COMPCODE_TRUE;
2323 else if (flag_trapping_math)
2325 /* Check that the original operation and the optimized ones will trap
2326 under the same condition. */
2327 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2328 && (lcompcode != COMPCODE_EQ)
2329 && (lcompcode != COMPCODE_ORD);
2330 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2331 && (rcompcode != COMPCODE_EQ)
2332 && (rcompcode != COMPCODE_ORD);
2333 bool trap = (compcode & COMPCODE_UNORD) == 0
2334 && (compcode != COMPCODE_EQ)
2335 && (compcode != COMPCODE_ORD);
2337 /* In a short-circuited boolean expression the LHS might be
2338 such that the RHS, if evaluated, will never trap. For
2339 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2340 if neither x nor y is NaN. (This is a mixed blessing: for
2341 example, the expression above will never trap, hence
2342 optimizing it to x < y would be invalid). */
2343 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2344 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2347 /* If the comparison was short-circuited, and only the RHS
2348 trapped, we may now generate a spurious trap. */
2350 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2353 /* If we changed the conditions that cause a trap, we lose. */
2354 if ((ltrap || rtrap) != trap)
2358 if (compcode == COMPCODE_TRUE)
2359 return constant_boolean_node (true, truth_type);
2360 else if (compcode == COMPCODE_FALSE)
2361 return constant_boolean_node (false, truth_type);
2363 return fold_build2 (compcode_to_comparison (compcode),
2364 truth_type, ll_arg, lr_arg);
2367 /* Return nonzero if CODE is a tree code that represents a truth value. */
2370 truth_value_p (enum tree_code code)
2372 return (TREE_CODE_CLASS (code) == tcc_comparison
2373 || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
2374 || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
2375 || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
2378 /* Return nonzero if two operands (typically of the same tree node)
2379 are necessarily equal. If either argument has side-effects this
2380 function returns zero. FLAGS modifies behavior as follows:
2382 If OEP_ONLY_CONST is set, only return nonzero for constants.
2383 This function tests whether the operands are indistinguishable;
2384 it does not test whether they are equal using C's == operation.
2385 The distinction is important for IEEE floating point, because
2386 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2387 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2389 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2390 even though it may hold multiple values during a function.
2391 This is because a GCC tree node guarantees that nothing else is
2392 executed between the evaluation of its "operands" (which may often
2393 be evaluated in arbitrary order). Hence if the operands themselves
2394 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2395 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2396 unset means assuming isochronic (or instantaneous) tree equivalence.
2397 Unless comparing arbitrary expression trees, such as from different
2398 statements, this flag can usually be left unset.
2400 If OEP_PURE_SAME is set, then pure functions with identical arguments
2401 are considered the same. It is used when the caller has other ways
2402 to ensure that global memory is unchanged in between. */
2405 operand_equal_p (tree arg0, tree arg1, unsigned int flags)
2407 /* If either is ERROR_MARK, they aren't equal. */
2408 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK)
2411 /* If both types don't have the same signedness, then we can't consider
2412 them equal. We must check this before the STRIP_NOPS calls
2413 because they may change the signedness of the arguments. */
2414 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2420 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2421 /* This is needed for conversions and for COMPONENT_REF.
2422 Might as well play it safe and always test this. */
2423 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2424 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2425 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2428 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2429 We don't care about side effects in that case because the SAVE_EXPR
2430 takes care of that for us. In all other cases, two expressions are
2431 equal if they have no side effects. If we have two identical
2432 expressions with side effects that should be treated the same due
2433 to the only side effects being identical SAVE_EXPR's, that will
2434 be detected in the recursive calls below. */
2435 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2436 && (TREE_CODE (arg0) == SAVE_EXPR
2437 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2440 /* Next handle constant cases, those for which we can return 1 even
2441 if ONLY_CONST is set. */
2442 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2443 switch (TREE_CODE (arg0))
2446 return (! TREE_CONSTANT_OVERFLOW (arg0)
2447 && ! TREE_CONSTANT_OVERFLOW (arg1)
2448 && tree_int_cst_equal (arg0, arg1));
2451 return (! TREE_CONSTANT_OVERFLOW (arg0)
2452 && ! TREE_CONSTANT_OVERFLOW (arg1)
2453 && REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2454 TREE_REAL_CST (arg1)));
2460 if (TREE_CONSTANT_OVERFLOW (arg0)
2461 || TREE_CONSTANT_OVERFLOW (arg1))
2464 v1 = TREE_VECTOR_CST_ELTS (arg0);
2465 v2 = TREE_VECTOR_CST_ELTS (arg1);
2468 if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
2471 v1 = TREE_CHAIN (v1);
2472 v2 = TREE_CHAIN (v2);
2479 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2481 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2485 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2486 && ! memcmp (TREE_STRING_POINTER (arg0),
2487 TREE_STRING_POINTER (arg1),
2488 TREE_STRING_LENGTH (arg0)));
2491 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2497 if (flags & OEP_ONLY_CONST)
2500 /* Define macros to test an operand from arg0 and arg1 for equality and a
2501 variant that allows null and views null as being different from any
2502 non-null value. In the latter case, if either is null, the both
2503 must be; otherwise, do the normal comparison. */
2504 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2505 TREE_OPERAND (arg1, N), flags)
2507 #define OP_SAME_WITH_NULL(N) \
2508 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2509 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2511 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2514 /* Two conversions are equal only if signedness and modes match. */
2515 switch (TREE_CODE (arg0))
2520 case FIX_TRUNC_EXPR:
2521 case FIX_FLOOR_EXPR:
2522 case FIX_ROUND_EXPR:
2523 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2524 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2534 case tcc_comparison:
2536 if (OP_SAME (0) && OP_SAME (1))
2539 /* For commutative ops, allow the other order. */
2540 return (commutative_tree_code (TREE_CODE (arg0))
2541 && operand_equal_p (TREE_OPERAND (arg0, 0),
2542 TREE_OPERAND (arg1, 1), flags)
2543 && operand_equal_p (TREE_OPERAND (arg0, 1),
2544 TREE_OPERAND (arg1, 0), flags));
2547 /* If either of the pointer (or reference) expressions we are
2548 dereferencing contain a side effect, these cannot be equal. */
2549 if (TREE_SIDE_EFFECTS (arg0)
2550 || TREE_SIDE_EFFECTS (arg1))
2553 switch (TREE_CODE (arg0))
2556 case ALIGN_INDIRECT_REF:
2557 case MISALIGNED_INDIRECT_REF:
2563 case ARRAY_RANGE_REF:
2564 /* Operands 2 and 3 may be null. */
2567 && OP_SAME_WITH_NULL (2)
2568 && OP_SAME_WITH_NULL (3));
2571 /* Handle operand 2 the same as for ARRAY_REF. */
2572 return OP_SAME (0) && OP_SAME (1) && OP_SAME_WITH_NULL (2);
2575 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2581 case tcc_expression:
2582 switch (TREE_CODE (arg0))
2585 case TRUTH_NOT_EXPR:
2588 case TRUTH_ANDIF_EXPR:
2589 case TRUTH_ORIF_EXPR:
2590 return OP_SAME (0) && OP_SAME (1);
2592 case TRUTH_AND_EXPR:
2594 case TRUTH_XOR_EXPR:
2595 if (OP_SAME (0) && OP_SAME (1))
2598 /* Otherwise take into account this is a commutative operation. */
2599 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2600 TREE_OPERAND (arg1, 1), flags)
2601 && operand_equal_p (TREE_OPERAND (arg0, 1),
2602 TREE_OPERAND (arg1, 0), flags));
2605 /* If the CALL_EXPRs call different functions, then they
2606 clearly can not be equal. */
2611 unsigned int cef = call_expr_flags (arg0);
2612 if (flags & OEP_PURE_SAME)
2613 cef &= ECF_CONST | ECF_PURE;
2620 /* Now see if all the arguments are the same. operand_equal_p
2621 does not handle TREE_LIST, so we walk the operands here
2622 feeding them to operand_equal_p. */
2623 arg0 = TREE_OPERAND (arg0, 1);
2624 arg1 = TREE_OPERAND (arg1, 1);
2625 while (arg0 && arg1)
2627 if (! operand_equal_p (TREE_VALUE (arg0), TREE_VALUE (arg1),
2631 arg0 = TREE_CHAIN (arg0);
2632 arg1 = TREE_CHAIN (arg1);
2635 /* If we get here and both argument lists are exhausted
2636 then the CALL_EXPRs are equal. */
2637 return ! (arg0 || arg1);
2643 case tcc_declaration:
2644 /* Consider __builtin_sqrt equal to sqrt. */
2645 return (TREE_CODE (arg0) == FUNCTION_DECL
2646 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2647 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2648 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
2655 #undef OP_SAME_WITH_NULL
2658 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2659 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2661 When in doubt, return 0. */
2664 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2666 int unsignedp1, unsignedpo;
2667 tree primarg0, primarg1, primother;
2668 unsigned int correct_width;
2670 if (operand_equal_p (arg0, arg1, 0))
2673 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2674 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2677 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2678 and see if the inner values are the same. This removes any
2679 signedness comparison, which doesn't matter here. */
2680 primarg0 = arg0, primarg1 = arg1;
2681 STRIP_NOPS (primarg0);
2682 STRIP_NOPS (primarg1);
2683 if (operand_equal_p (primarg0, primarg1, 0))
2686 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2687 actual comparison operand, ARG0.
2689 First throw away any conversions to wider types
2690 already present in the operands. */
2692 primarg1 = get_narrower (arg1, &unsignedp1);
2693 primother = get_narrower (other, &unsignedpo);
2695 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2696 if (unsignedp1 == unsignedpo
2697 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2698 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2700 tree type = TREE_TYPE (arg0);
2702 /* Make sure shorter operand is extended the right way
2703 to match the longer operand. */
2704 primarg1 = fold_convert (lang_hooks.types.signed_or_unsigned_type
2705 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2707 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
2714 /* See if ARG is an expression that is either a comparison or is performing
2715 arithmetic on comparisons. The comparisons must only be comparing
2716 two different values, which will be stored in *CVAL1 and *CVAL2; if
2717 they are nonzero it means that some operands have already been found.
2718 No variables may be used anywhere else in the expression except in the
2719 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2720 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2722 If this is true, return 1. Otherwise, return zero. */
2725 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2727 enum tree_code code = TREE_CODE (arg);
2728 enum tree_code_class class = TREE_CODE_CLASS (code);
2730 /* We can handle some of the tcc_expression cases here. */
2731 if (class == tcc_expression && code == TRUTH_NOT_EXPR)
2733 else if (class == tcc_expression
2734 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2735 || code == COMPOUND_EXPR))
2738 else if (class == tcc_expression && code == SAVE_EXPR
2739 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2741 /* If we've already found a CVAL1 or CVAL2, this expression is
2742 two complex to handle. */
2743 if (*cval1 || *cval2)
2753 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2756 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2757 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2758 cval1, cval2, save_p));
2763 case tcc_expression:
2764 if (code == COND_EXPR)
2765 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2766 cval1, cval2, save_p)
2767 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2768 cval1, cval2, save_p)
2769 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2770 cval1, cval2, save_p));
2773 case tcc_comparison:
2774 /* First see if we can handle the first operand, then the second. For
2775 the second operand, we know *CVAL1 can't be zero. It must be that
2776 one side of the comparison is each of the values; test for the
2777 case where this isn't true by failing if the two operands
2780 if (operand_equal_p (TREE_OPERAND (arg, 0),
2781 TREE_OPERAND (arg, 1), 0))
2785 *cval1 = TREE_OPERAND (arg, 0);
2786 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2788 else if (*cval2 == 0)
2789 *cval2 = TREE_OPERAND (arg, 0);
2790 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2795 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2797 else if (*cval2 == 0)
2798 *cval2 = TREE_OPERAND (arg, 1);
2799 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2811 /* ARG is a tree that is known to contain just arithmetic operations and
2812 comparisons. Evaluate the operations in the tree substituting NEW0 for
2813 any occurrence of OLD0 as an operand of a comparison and likewise for
2817 eval_subst (tree arg, tree old0, tree new0, tree old1, tree new1)
2819 tree type = TREE_TYPE (arg);
2820 enum tree_code code = TREE_CODE (arg);
2821 enum tree_code_class class = TREE_CODE_CLASS (code);
2823 /* We can handle some of the tcc_expression cases here. */
2824 if (class == tcc_expression && code == TRUTH_NOT_EXPR)
2826 else if (class == tcc_expression
2827 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2833 return fold_build1 (code, type,
2834 eval_subst (TREE_OPERAND (arg, 0),
2835 old0, new0, old1, new1));
2838 return fold_build2 (code, type,
2839 eval_subst (TREE_OPERAND (arg, 0),
2840 old0, new0, old1, new1),
2841 eval_subst (TREE_OPERAND (arg, 1),
2842 old0, new0, old1, new1));
2844 case tcc_expression:
2848 return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);
2851 return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);
2854 return fold_build3 (code, type,
2855 eval_subst (TREE_OPERAND (arg, 0),
2856 old0, new0, old1, new1),
2857 eval_subst (TREE_OPERAND (arg, 1),
2858 old0, new0, old1, new1),
2859 eval_subst (TREE_OPERAND (arg, 2),
2860 old0, new0, old1, new1));
2864 /* Fall through - ??? */
2866 case tcc_comparison:
2868 tree arg0 = TREE_OPERAND (arg, 0);
2869 tree arg1 = TREE_OPERAND (arg, 1);
2871 /* We need to check both for exact equality and tree equality. The
2872 former will be true if the operand has a side-effect. In that
2873 case, we know the operand occurred exactly once. */
2875 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
2877 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
2880 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
2882 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
2885 return fold_build2 (code, type, arg0, arg1);
2893 /* Return a tree for the case when the result of an expression is RESULT
2894 converted to TYPE and OMITTED was previously an operand of the expression
2895 but is now not needed (e.g., we folded OMITTED * 0).
2897 If OMITTED has side effects, we must evaluate it. Otherwise, just do
2898 the conversion of RESULT to TYPE. */
2901 omit_one_operand (tree type, tree result, tree omitted)
2903 tree t = fold_convert (type, result);
2905 if (TREE_SIDE_EFFECTS (omitted))
2906 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
2908 return non_lvalue (t);
2911 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
2914 pedantic_omit_one_operand (tree type, tree result, tree omitted)
2916 tree t = fold_convert (type, result);
2918 if (TREE_SIDE_EFFECTS (omitted))
2919 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
2921 return pedantic_non_lvalue (t);
2924 /* Return a tree for the case when the result of an expression is RESULT
2925 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
2926 of the expression but are now not needed.
2928 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
2929 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
2930 evaluated before OMITTED2. Otherwise, if neither has side effects,
2931 just do the conversion of RESULT to TYPE. */
2934 omit_two_operands (tree type, tree result, tree omitted1, tree omitted2)
2936 tree t = fold_convert (type, result);
2938 if (TREE_SIDE_EFFECTS (omitted2))
2939 t = build2 (COMPOUND_EXPR, type, omitted2, t);
2940 if (TREE_SIDE_EFFECTS (omitted1))
2941 t = build2 (COMPOUND_EXPR, type, omitted1, t);
2943 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue (t) : t;
2947 /* Return a simplified tree node for the truth-negation of ARG. This
2948 never alters ARG itself. We assume that ARG is an operation that
2949 returns a truth value (0 or 1).
2951 FIXME: one would think we would fold the result, but it causes
2952 problems with the dominator optimizer. */
2954 invert_truthvalue (tree arg)
2956 tree type = TREE_TYPE (arg);
2957 enum tree_code code = TREE_CODE (arg);
2959 if (code == ERROR_MARK)
2962 /* If this is a comparison, we can simply invert it, except for
2963 floating-point non-equality comparisons, in which case we just
2964 enclose a TRUTH_NOT_EXPR around what we have. */
2966 if (TREE_CODE_CLASS (code) == tcc_comparison)
2968 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
2969 if (FLOAT_TYPE_P (op_type)
2970 && flag_trapping_math
2971 && code != ORDERED_EXPR && code != UNORDERED_EXPR
2972 && code != NE_EXPR && code != EQ_EXPR)
2973 return build1 (TRUTH_NOT_EXPR, type, arg);
2976 code = invert_tree_comparison (code,
2977 HONOR_NANS (TYPE_MODE (op_type)));
2978 if (code == ERROR_MARK)
2979 return build1 (TRUTH_NOT_EXPR, type, arg);
2981 return build2 (code, type,
2982 TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
2989 return constant_boolean_node (integer_zerop (arg), type);
2991 case TRUTH_AND_EXPR:
2992 return build2 (TRUTH_OR_EXPR, type,
2993 invert_truthvalue (TREE_OPERAND (arg, 0)),
2994 invert_truthvalue (TREE_OPERAND (arg, 1)));
2997 return build2 (TRUTH_AND_EXPR, type,
2998 invert_truthvalue (TREE_OPERAND (arg, 0)),
2999 invert_truthvalue (TREE_OPERAND (arg, 1)));
3001 case TRUTH_XOR_EXPR:
3002 /* Here we can invert either operand. We invert the first operand
3003 unless the second operand is a TRUTH_NOT_EXPR in which case our
3004 result is the XOR of the first operand with the inside of the
3005 negation of the second operand. */
3007 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3008 return build2 (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3009 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3011 return build2 (TRUTH_XOR_EXPR, type,
3012 invert_truthvalue (TREE_OPERAND (arg, 0)),
3013 TREE_OPERAND (arg, 1));
3015 case TRUTH_ANDIF_EXPR:
3016 return build2 (TRUTH_ORIF_EXPR, type,
3017 invert_truthvalue (TREE_OPERAND (arg, 0)),
3018 invert_truthvalue (TREE_OPERAND (arg, 1)));
3020 case TRUTH_ORIF_EXPR:
3021 return build2 (TRUTH_ANDIF_EXPR, type,
3022 invert_truthvalue (TREE_OPERAND (arg, 0)),
3023 invert_truthvalue (TREE_OPERAND (arg, 1)));
3025 case TRUTH_NOT_EXPR:
3026 return TREE_OPERAND (arg, 0);
3029 return build3 (COND_EXPR, type, TREE_OPERAND (arg, 0),
3030 invert_truthvalue (TREE_OPERAND (arg, 1)),
3031 invert_truthvalue (TREE_OPERAND (arg, 2)));
3034 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
3035 invert_truthvalue (TREE_OPERAND (arg, 1)));
3037 case NON_LVALUE_EXPR:
3038 return invert_truthvalue (TREE_OPERAND (arg, 0));
3041 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3046 return build1 (TREE_CODE (arg), type,
3047 invert_truthvalue (TREE_OPERAND (arg, 0)));
3050 if (!integer_onep (TREE_OPERAND (arg, 1)))
3052 return build2 (EQ_EXPR, type, arg,
3053 fold_convert (type, integer_zero_node));
3056 return build1 (TRUTH_NOT_EXPR, type, arg);
3058 case CLEANUP_POINT_EXPR:
3059 return build1 (CLEANUP_POINT_EXPR, type,
3060 invert_truthvalue (TREE_OPERAND (arg, 0)));
3065 gcc_assert (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE);
3066 return build1 (TRUTH_NOT_EXPR, type, arg);
3069 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3070 operands are another bit-wise operation with a common input. If so,
3071 distribute the bit operations to save an operation and possibly two if
3072 constants are involved. For example, convert
3073 (A | B) & (A | C) into A | (B & C)
3074 Further simplification will occur if B and C are constants.
3076 If this optimization cannot be done, 0 will be returned. */
3079 distribute_bit_expr (enum tree_code code, tree type, tree arg0, tree arg1)
3084 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3085 || TREE_CODE (arg0) == code
3086 || (TREE_CODE (arg0) != BIT_AND_EXPR
3087 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3090 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3092 common = TREE_OPERAND (arg0, 0);
3093 left = TREE_OPERAND (arg0, 1);
3094 right = TREE_OPERAND (arg1, 1);
3096 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3098 common = TREE_OPERAND (arg0, 0);
3099 left = TREE_OPERAND (arg0, 1);
3100 right = TREE_OPERAND (arg1, 0);
3102 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3104 common = TREE_OPERAND (arg0, 1);
3105 left = TREE_OPERAND (arg0, 0);
3106 right = TREE_OPERAND (arg1, 1);
3108 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3110 common = TREE_OPERAND (arg0, 1);
3111 left = TREE_OPERAND (arg0, 0);
3112 right = TREE_OPERAND (arg1, 0);
3117 return fold_build2 (TREE_CODE (arg0), type, common,
3118 fold_build2 (code, type, left, right));
3121 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3122 with code CODE. This optimization is unsafe. */
3124 distribute_real_division (enum tree_code code, tree type, tree arg0, tree arg1)
3126 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3127 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3129 /* (A / C) +- (B / C) -> (A +- B) / C. */
3131 && operand_equal_p (TREE_OPERAND (arg0, 1),
3132 TREE_OPERAND (arg1, 1), 0))
3133 return fold_build2 (mul0 ? MULT_EXPR : RDIV_EXPR, type,
3134 fold_build2 (code, type,
3135 TREE_OPERAND (arg0, 0),
3136 TREE_OPERAND (arg1, 0)),
3137 TREE_OPERAND (arg0, 1));
3139 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3140 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3141 TREE_OPERAND (arg1, 0), 0)
3142 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3143 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3145 REAL_VALUE_TYPE r0, r1;
3146 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3147 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3149 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3151 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3152 real_arithmetic (&r0, code, &r0, &r1);
3153 return fold_build2 (MULT_EXPR, type,
3154 TREE_OPERAND (arg0, 0),
3155 build_real (type, r0));
3161 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3162 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3165 make_bit_field_ref (tree inner, tree type, int bitsize, int bitpos,
3172 tree size = TYPE_SIZE (TREE_TYPE (inner));
3173 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3174 || POINTER_TYPE_P (TREE_TYPE (inner)))
3175 && host_integerp (size, 0)
3176 && tree_low_cst (size, 0) == bitsize)
3177 return fold_convert (type, inner);
3180 result = build3 (BIT_FIELD_REF, type, inner,
3181 size_int (bitsize), bitsize_int (bitpos));
3183 BIT_FIELD_REF_UNSIGNED (result) = unsignedp;
3188 /* Optimize a bit-field compare.
3190 There are two cases: First is a compare against a constant and the
3191 second is a comparison of two items where the fields are at the same
3192 bit position relative to the start of a chunk (byte, halfword, word)
3193 large enough to contain it. In these cases we can avoid the shift
3194 implicit in bitfield extractions.
3196 For constants, we emit a compare of the shifted constant with the
3197 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3198 compared. For two fields at the same position, we do the ANDs with the
3199 similar mask and compare the result of the ANDs.
3201 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3202 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3203 are the left and right operands of the comparison, respectively.
3205 If the optimization described above can be done, we return the resulting
3206 tree. Otherwise we return zero. */
3209 optimize_bit_field_compare (enum tree_code code, tree compare_type,
3212 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3213 tree type = TREE_TYPE (lhs);
3214 tree signed_type, unsigned_type;
3215 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3216 enum machine_mode lmode, rmode, nmode;
3217 int lunsignedp, runsignedp;
3218 int lvolatilep = 0, rvolatilep = 0;
3219 tree linner, rinner = NULL_TREE;
3223 /* Get all the information about the extractions being done. If the bit size
3224 if the same as the size of the underlying object, we aren't doing an
3225 extraction at all and so can do nothing. We also don't want to
3226 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3227 then will no longer be able to replace it. */
3228 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3229 &lunsignedp, &lvolatilep, false);
3230 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3231 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
3236 /* If this is not a constant, we can only do something if bit positions,
3237 sizes, and signedness are the same. */
3238 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3239 &runsignedp, &rvolatilep, false);
3241 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3242 || lunsignedp != runsignedp || offset != 0
3243 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
3247 /* See if we can find a mode to refer to this field. We should be able to,
3248 but fail if we can't. */
3249 nmode = get_best_mode (lbitsize, lbitpos,
3250 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3251 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3252 TYPE_ALIGN (TREE_TYPE (rinner))),
3253 word_mode, lvolatilep || rvolatilep);
3254 if (nmode == VOIDmode)
3257 /* Set signed and unsigned types of the precision of this mode for the
3259 signed_type = lang_hooks.types.type_for_mode (nmode, 0);
3260 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3262 /* Compute the bit position and size for the new reference and our offset
3263 within it. If the new reference is the same size as the original, we
3264 won't optimize anything, so return zero. */
3265 nbitsize = GET_MODE_BITSIZE (nmode);
3266 nbitpos = lbitpos & ~ (nbitsize - 1);
3268 if (nbitsize == lbitsize)
3271 if (BYTES_BIG_ENDIAN)
3272 lbitpos = nbitsize - lbitsize - lbitpos;
3274 /* Make the mask to be used against the extracted field. */
3275 mask = build_int_cst (unsigned_type, -1);
3276 mask = force_fit_type (mask, 0, false, false);
3277 mask = fold_convert (unsigned_type, mask);
3278 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
3279 mask = const_binop (RSHIFT_EXPR, mask,
3280 size_int (nbitsize - lbitsize - lbitpos), 0);
3283 /* If not comparing with constant, just rework the comparison
3285 return build2 (code, compare_type,
3286 build2 (BIT_AND_EXPR, unsigned_type,
3287 make_bit_field_ref (linner, unsigned_type,
3288 nbitsize, nbitpos, 1),
3290 build2 (BIT_AND_EXPR, unsigned_type,
3291 make_bit_field_ref (rinner, unsigned_type,
3292 nbitsize, nbitpos, 1),
3295 /* Otherwise, we are handling the constant case. See if the constant is too
3296 big for the field. Warn and return a tree of for 0 (false) if so. We do
3297 this not only for its own sake, but to avoid having to test for this
3298 error case below. If we didn't, we might generate wrong code.
3300 For unsigned fields, the constant shifted right by the field length should
3301 be all zero. For signed fields, the high-order bits should agree with
3306 if (! integer_zerop (const_binop (RSHIFT_EXPR,
3307 fold_convert (unsigned_type, rhs),
3308 size_int (lbitsize), 0)))
3310 warning (0, "comparison is always %d due to width of bit-field",
3312 return constant_boolean_node (code == NE_EXPR, compare_type);
3317 tree tem = const_binop (RSHIFT_EXPR, fold_convert (signed_type, rhs),
3318 size_int (lbitsize - 1), 0);
3319 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
3321 warning (0, "comparison is always %d due to width of bit-field",
3323 return constant_boolean_node (code == NE_EXPR, compare_type);
3327 /* Single-bit compares should always be against zero. */
3328 if (lbitsize == 1 && ! integer_zerop (rhs))
3330 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3331 rhs = fold_convert (type, integer_zero_node);
3334 /* Make a new bitfield reference, shift the constant over the
3335 appropriate number of bits and mask it with the computed mask
3336 (in case this was a signed field). If we changed it, make a new one. */
3337 lhs = make_bit_field_ref (linner, unsigned_type, nbitsize, nbitpos, 1);
3340 TREE_SIDE_EFFECTS (lhs) = 1;
3341 TREE_THIS_VOLATILE (lhs) = 1;
3344 rhs = fold (const_binop (BIT_AND_EXPR,
3345 const_binop (LSHIFT_EXPR,
3346 fold_convert (unsigned_type, rhs),
3347 size_int (lbitpos), 0),
3350 return build2 (code, compare_type,
3351 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask),
3355 /* Subroutine for fold_truthop: decode a field reference.
3357 If EXP is a comparison reference, we return the innermost reference.
3359 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3360 set to the starting bit number.
3362 If the innermost field can be completely contained in a mode-sized
3363 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3365 *PVOLATILEP is set to 1 if the any expression encountered is volatile;
3366 otherwise it is not changed.
3368 *PUNSIGNEDP is set to the signedness of the field.
3370 *PMASK is set to the mask used. This is either contained in a
3371 BIT_AND_EXPR or derived from the width of the field.
3373 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3375 Return 0 if this is not a component reference or is one that we can't
3376 do anything with. */
3379 decode_field_reference (tree exp, HOST_WIDE_INT *pbitsize,
3380 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3381 int *punsignedp, int *pvolatilep,
3382 tree *pmask, tree *pand_mask)
3384 tree outer_type = 0;
3386 tree mask, inner, offset;
3388 unsigned int precision;
3390 /* All the optimizations using this function assume integer fields.
3391 There are problems with FP fields since the type_for_size call
3392 below can fail for, e.g., XFmode. */
3393 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3396 /* We are interested in the bare arrangement of bits, so strip everything
3397 that doesn't affect the machine mode. However, record the type of the
3398 outermost expression if it may matter below. */
3399 if (TREE_CODE (exp) == NOP_EXPR
3400 || TREE_CODE (exp) == CONVERT_EXPR
3401 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3402 outer_type = TREE_TYPE (exp);
3405 if (TREE_CODE (exp) == BIT_AND_EXPR)
3407 and_mask = TREE_OPERAND (exp, 1);
3408 exp = TREE_OPERAND (exp, 0);
3409 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3410 if (TREE_CODE (and_mask) != INTEGER_CST)
3414 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3415 punsignedp, pvolatilep, false);
3416 if ((inner == exp && and_mask == 0)
3417 || *pbitsize < 0 || offset != 0
3418 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3421 /* If the number of bits in the reference is the same as the bitsize of
3422 the outer type, then the outer type gives the signedness. Otherwise
3423 (in case of a small bitfield) the signedness is unchanged. */
3424 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3425 *punsignedp = TYPE_UNSIGNED (outer_type);
3427 /* Compute the mask to access the bitfield. */
3428 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3429 precision = TYPE_PRECISION (unsigned_type);
3431 mask = build_int_cst (unsigned_type, -1);
3432 mask = force_fit_type (mask, 0, false, false);
3434 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3435 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3437 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3439 mask = fold_build2 (BIT_AND_EXPR, unsigned_type,
3440 fold_convert (unsigned_type, and_mask), mask);
3443 *pand_mask = and_mask;
3447 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3451 all_ones_mask_p (tree mask, int size)
3453 tree type = TREE_TYPE (mask);
3454 unsigned int precision = TYPE_PRECISION (type);
3457 tmask = build_int_cst (lang_hooks.types.signed_type (type), -1);
3458 tmask = force_fit_type (tmask, 0, false, false);
3461 tree_int_cst_equal (mask,
3462 const_binop (RSHIFT_EXPR,
3463 const_binop (LSHIFT_EXPR, tmask,
3464 size_int (precision - size),
3466 size_int (precision - size), 0));
3469 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
3470 represents the sign bit of EXP's type. If EXP represents a sign
3471 or zero extension, also test VAL against the unextended type.
3472 The return value is the (sub)expression whose sign bit is VAL,
3473 or NULL_TREE otherwise. */
3476 sign_bit_p (tree exp, tree val)
3478 unsigned HOST_WIDE_INT mask_lo, lo;
3479 HOST_WIDE_INT mask_hi, hi;
3483 /* Tree EXP must have an integral type. */
3484 t = TREE_TYPE (exp);
3485 if (! INTEGRAL_TYPE_P (t))
3488 /* Tree VAL must be an integer constant. */
3489 if (TREE_CODE (val) != INTEGER_CST
3490 || TREE_CONSTANT_OVERFLOW (val))
3493 width = TYPE_PRECISION (t);
3494 if (width > HOST_BITS_PER_WIDE_INT)
3496 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3499 mask_hi = ((unsigned HOST_WIDE_INT) -1
3500 >> (2 * HOST_BITS_PER_WIDE_INT - width));
3506 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3509 mask_lo = ((unsigned HOST_WIDE_INT) -1
3510 >> (HOST_BITS_PER_WIDE_INT - width));
3513 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3514 treat VAL as if it were unsigned. */
3515 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
3516 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
3519 /* Handle extension from a narrower type. */
3520 if (TREE_CODE (exp) == NOP_EXPR
3521 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3522 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3527 /* Subroutine for fold_truthop: determine if an operand is simple enough
3528 to be evaluated unconditionally. */
3531 simple_operand_p (tree exp)
3533 /* Strip any conversions that don't change the machine mode. */
3536 return (CONSTANT_CLASS_P (exp)
3537 || TREE_CODE (exp) == SSA_NAME
3539 && ! TREE_ADDRESSABLE (exp)
3540 && ! TREE_THIS_VOLATILE (exp)
3541 && ! DECL_NONLOCAL (exp)
3542 /* Don't regard global variables as simple. They may be
3543 allocated in ways unknown to the compiler (shared memory,
3544 #pragma weak, etc). */
3545 && ! TREE_PUBLIC (exp)
3546 && ! DECL_EXTERNAL (exp)
3547 /* Loading a static variable is unduly expensive, but global
3548 registers aren't expensive. */
3549 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3552 /* The following functions are subroutines to fold_range_test and allow it to
3553 try to change a logical combination of comparisons into a range test.
3556 X == 2 || X == 3 || X == 4 || X == 5
3560 (unsigned) (X - 2) <= 3
3562 We describe each set of comparisons as being either inside or outside
3563 a range, using a variable named like IN_P, and then describe the
3564 range with a lower and upper bound. If one of the bounds is omitted,
3565 it represents either the highest or lowest value of the type.
3567 In the comments below, we represent a range by two numbers in brackets
3568 preceded by a "+" to designate being inside that range, or a "-" to
3569 designate being outside that range, so the condition can be inverted by
3570 flipping the prefix. An omitted bound is represented by a "-". For
3571 example, "- [-, 10]" means being outside the range starting at the lowest
3572 possible value and ending at 10, in other words, being greater than 10.
3573 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3576 We set up things so that the missing bounds are handled in a consistent
3577 manner so neither a missing bound nor "true" and "false" need to be
3578 handled using a special case. */
3580 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3581 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3582 and UPPER1_P are nonzero if the respective argument is an upper bound
3583 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3584 must be specified for a comparison. ARG1 will be converted to ARG0's
3585 type if both are specified. */
3588 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3589 tree arg1, int upper1_p)
3595 /* If neither arg represents infinity, do the normal operation.
3596 Else, if not a comparison, return infinity. Else handle the special
3597 comparison rules. Note that most of the cases below won't occur, but
3598 are handled for consistency. */
3600 if (arg0 != 0 && arg1 != 0)
3602 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3603 arg0, fold_convert (TREE_TYPE (arg0), arg1));
3605 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3608 if (TREE_CODE_CLASS (code) != tcc_comparison)
3611 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3612 for neither. In real maths, we cannot assume open ended ranges are
3613 the same. But, this is computer arithmetic, where numbers are finite.
3614 We can therefore make the transformation of any unbounded range with
3615 the value Z, Z being greater than any representable number. This permits
3616 us to treat unbounded ranges as equal. */
3617 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3618 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3622 result = sgn0 == sgn1;
3625 result = sgn0 != sgn1;
3628 result = sgn0 < sgn1;
3631 result = sgn0 <= sgn1;
3634 result = sgn0 > sgn1;
3637 result = sgn0 >= sgn1;
3643 return constant_boolean_node (result, type);
3646 /* Given EXP, a logical expression, set the range it is testing into
3647 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
3648 actually being tested. *PLOW and *PHIGH will be made of the same type
3649 as the returned expression. If EXP is not a comparison, we will most
3650 likely not be returning a useful value and range. */
3653 make_range (tree exp, int *pin_p, tree *plow, tree *phigh)
3655 enum tree_code code;
3656 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
3657 tree exp_type = NULL_TREE, arg0_type = NULL_TREE;
3659 tree low, high, n_low, n_high;
3661 /* Start with simply saying "EXP != 0" and then look at the code of EXP
3662 and see if we can refine the range. Some of the cases below may not
3663 happen, but it doesn't seem worth worrying about this. We "continue"
3664 the outer loop when we've changed something; otherwise we "break"
3665 the switch, which will "break" the while. */
3668 low = high = fold_convert (TREE_TYPE (exp), integer_zero_node);
3672 code = TREE_CODE (exp);
3673 exp_type = TREE_TYPE (exp);
3675 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
3677 if (TREE_CODE_LENGTH (code) > 0)
3678 arg0 = TREE_OPERAND (exp, 0);
3679 if (TREE_CODE_CLASS (code) == tcc_comparison
3680 || TREE_CODE_CLASS (code) == tcc_unary
3681 || TREE_CODE_CLASS (code) == tcc_binary)
3682 arg0_type = TREE_TYPE (arg0);
3683 if (TREE_CODE_CLASS (code) == tcc_binary
3684 || TREE_CODE_CLASS (code) == tcc_comparison
3685 || (TREE_CODE_CLASS (code) == tcc_expression
3686 && TREE_CODE_LENGTH (code) > 1))
3687 arg1 = TREE_OPERAND (exp, 1);
3692 case TRUTH_NOT_EXPR:
3693 in_p = ! in_p, exp = arg0;
3696 case EQ_EXPR: case NE_EXPR:
3697 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3698 /* We can only do something if the range is testing for zero
3699 and if the second operand is an integer constant. Note that
3700 saying something is "in" the range we make is done by
3701 complementing IN_P since it will set in the initial case of
3702 being not equal to zero; "out" is leaving it alone. */
3703 if (low == 0 || high == 0
3704 || ! integer_zerop (low) || ! integer_zerop (high)
3705 || TREE_CODE (arg1) != INTEGER_CST)
3710 case NE_EXPR: /* - [c, c] */
3713 case EQ_EXPR: /* + [c, c] */
3714 in_p = ! in_p, low = high = arg1;
3716 case GT_EXPR: /* - [-, c] */
3717 low = 0, high = arg1;
3719 case GE_EXPR: /* + [c, -] */
3720 in_p = ! in_p, low = arg1, high = 0;
3722 case LT_EXPR: /* - [c, -] */
3723 low = arg1, high = 0;
3725 case LE_EXPR: /* + [-, c] */
3726 in_p = ! in_p, low = 0, high = arg1;
3732 /* If this is an unsigned comparison, we also know that EXP is
3733 greater than or equal to zero. We base the range tests we make
3734 on that fact, so we record it here so we can parse existing
3735 range tests. We test arg0_type since often the return type
3736 of, e.g. EQ_EXPR, is boolean. */
3737 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
3739 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3741 fold_convert (arg0_type, integer_zero_node),
3745 in_p = n_in_p, low = n_low, high = n_high;
3747 /* If the high bound is missing, but we have a nonzero low
3748 bound, reverse the range so it goes from zero to the low bound
3750 if (high == 0 && low && ! integer_zerop (low))
3753 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
3754 integer_one_node, 0);
3755 low = fold_convert (arg0_type, integer_zero_node);
3763 /* (-x) IN [a,b] -> x in [-b, -a] */
3764 n_low = range_binop (MINUS_EXPR, exp_type,
3765 fold_convert (exp_type, integer_zero_node),
3767 n_high = range_binop (MINUS_EXPR, exp_type,
3768 fold_convert (exp_type, integer_zero_node),
3770 low = n_low, high = n_high;
3776 exp = build2 (MINUS_EXPR, exp_type, negate_expr (arg0),
3777 fold_convert (exp_type, integer_one_node));
3780 case PLUS_EXPR: case MINUS_EXPR:
3781 if (TREE_CODE (arg1) != INTEGER_CST)
3784 /* If EXP is signed, any overflow in the computation is undefined,
3785 so we don't worry about it so long as our computations on
3786 the bounds don't overflow. For unsigned, overflow is defined
3787 and this is exactly the right thing. */
3788 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3789 arg0_type, low, 0, arg1, 0);
3790 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3791 arg0_type, high, 1, arg1, 0);
3792 if ((n_low != 0 && TREE_OVERFLOW (n_low))
3793 || (n_high != 0 && TREE_OVERFLOW (n_high)))
3796 /* Check for an unsigned range which has wrapped around the maximum
3797 value thus making n_high < n_low, and normalize it. */
3798 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
3800 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
3801 integer_one_node, 0);
3802 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
3803 integer_one_node, 0);
3805 /* If the range is of the form +/- [ x+1, x ], we won't
3806 be able to normalize it. But then, it represents the
3807 whole range or the empty set, so make it
3809 if (tree_int_cst_equal (n_low, low)
3810 && tree_int_cst_equal (n_high, high))
3816 low = n_low, high = n_high;
3821 case NOP_EXPR: case NON_LVALUE_EXPR: case CONVERT_EXPR:
3822 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
3825 if (! INTEGRAL_TYPE_P (arg0_type)
3826 || (low != 0 && ! int_fits_type_p (low, arg0_type))
3827 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
3830 n_low = low, n_high = high;
3833 n_low = fold_convert (arg0_type, n_low);
3836 n_high = fold_convert (arg0_type, n_high);
3839 /* If we're converting arg0 from an unsigned type, to exp,
3840 a signed type, we will be doing the comparison as unsigned.
3841 The tests above have already verified that LOW and HIGH
3844 So we have to ensure that we will handle large unsigned
3845 values the same way that the current signed bounds treat
3848 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
3851 tree equiv_type = lang_hooks.types.type_for_mode
3852 (TYPE_MODE (arg0_type), 1);
3854 /* A range without an upper bound is, naturally, unbounded.
3855 Since convert would have cropped a very large value, use
3856 the max value for the destination type. */
3858 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
3859 : TYPE_MAX_VALUE (arg0_type);
3861 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
3862 high_positive = fold_build2 (RSHIFT_EXPR, arg0_type,
3863 fold_convert (arg0_type,
3865 fold_convert (arg0_type,
3868 /* If the low bound is specified, "and" the range with the
3869 range for which the original unsigned value will be
3873 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3874 1, n_low, n_high, 1,
3875 fold_convert (arg0_type,
3880 in_p = (n_in_p == in_p);
3884 /* Otherwise, "or" the range with the range of the input
3885 that will be interpreted as negative. */
3886 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3887 0, n_low, n_high, 1,
3888 fold_convert (arg0_type,
3893 in_p = (in_p != n_in_p);
3898 low = n_low, high = n_high;
3908 /* If EXP is a constant, we can evaluate whether this is true or false. */
3909 if (TREE_CODE (exp) == INTEGER_CST)
3911 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
3913 && integer_onep (range_binop (LE_EXPR, integer_type_node,
3919 *pin_p = in_p, *plow = low, *phigh = high;
3923 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
3924 type, TYPE, return an expression to test if EXP is in (or out of, depending
3925 on IN_P) the range. Return 0 if the test couldn't be created. */
3928 build_range_check (tree type, tree exp, int in_p, tree low, tree high)
3930 tree etype = TREE_TYPE (exp);
3935 value = build_range_check (type, exp, 1, low, high);
3937 return invert_truthvalue (value);
3942 if (low == 0 && high == 0)
3943 return fold_convert (type, integer_one_node);
3946 return fold_build2 (LE_EXPR, type, exp,
3947 fold_convert (etype, high));
3950 return fold_build2 (GE_EXPR, type, exp,
3951 fold_convert (etype, low));
3953 if (operand_equal_p (low, high, 0))
3954 return fold_build2 (EQ_EXPR, type, exp,
3955 fold_convert (etype, low));
3957 if (integer_zerop (low))
3959 if (! TYPE_UNSIGNED (etype))
3961 etype = lang_hooks.types.unsigned_type (etype);
3962 high = fold_convert (etype, high);
3963 exp = fold_convert (etype, exp);
3965 return build_range_check (type, exp, 1, 0, high);
3968 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
3969 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
3971 unsigned HOST_WIDE_INT lo;
3975 prec = TYPE_PRECISION (etype);
3976 if (prec <= HOST_BITS_PER_WIDE_INT)
3979 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
3983 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
3984 lo = (unsigned HOST_WIDE_INT) -1;
3987 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
3989 if (TYPE_UNSIGNED (etype))
3991 etype = lang_hooks.types.signed_type (etype);
3992 exp = fold_convert (etype, exp);
3994 return fold_build2 (GT_EXPR, type, exp,
3995 fold_convert (etype, integer_zero_node));
3999 value = const_binop (MINUS_EXPR, high, low, 0);
4000 if (value != 0 && TREE_OVERFLOW (value) && ! TYPE_UNSIGNED (etype))
4002 tree utype, minv, maxv;
4004 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4005 for the type in question, as we rely on this here. */
4006 switch (TREE_CODE (etype))
4011 utype = lang_hooks.types.unsigned_type (etype);
4012 maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
4013 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4014 integer_one_node, 1);
4015 minv = fold_convert (utype, TYPE_MIN_VALUE (etype));
4016 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4020 high = fold_convert (etype, high);
4021 low = fold_convert (etype, low);
4022 exp = fold_convert (etype, exp);
4023 value = const_binop (MINUS_EXPR, high, low, 0);
4031 if (value != 0 && ! TREE_OVERFLOW (value))
4032 return build_range_check (type,
4033 fold_build2 (MINUS_EXPR, etype, exp, low),
4034 1, fold_convert (etype, integer_zero_node),
4040 /* Given two ranges, see if we can merge them into one. Return 1 if we
4041 can, 0 if we can't. Set the output range into the specified parameters. */
4044 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4045 tree high0, int in1_p, tree low1, tree high1)
4053 int lowequal = ((low0 == 0 && low1 == 0)
4054 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4055 low0, 0, low1, 0)));
4056 int highequal = ((high0 == 0 && high1 == 0)
4057 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4058 high0, 1, high1, 1)));
4060 /* Make range 0 be the range that starts first, or ends last if they
4061 start at the same value. Swap them if it isn't. */
4062 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4065 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4066 high1, 1, high0, 1))))
4068 temp = in0_p, in0_p = in1_p, in1_p = temp;
4069 tem = low0, low0 = low1, low1 = tem;
4070 tem = high0, high0 = high1, high1 = tem;
4073 /* Now flag two cases, whether the ranges are disjoint or whether the
4074 second range is totally subsumed in the first. Note that the tests
4075 below are simplified by the ones above. */
4076 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4077 high0, 1, low1, 0));
4078 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4079 high1, 1, high0, 1));
4081 /* We now have four cases, depending on whether we are including or
4082 excluding the two ranges. */
4085 /* If they don't overlap, the result is false. If the second range
4086 is a subset it is the result. Otherwise, the range is from the start
4087 of the second to the end of the first. */
4089 in_p = 0, low = high = 0;
4091 in_p = 1, low = low1, high = high1;
4093 in_p = 1, low = low1, high = high0;
4096 else if (in0_p && ! in1_p)
4098 /* If they don't overlap, the result is the first range. If they are
4099 equal, the result is false. If the second range is a subset of the
4100 first, and the ranges begin at the same place, we go from just after
4101 the end of the first range to the end of the second. If the second
4102 range is not a subset of the first, or if it is a subset and both
4103 ranges end at the same place, the range starts at the start of the
4104 first range and ends just before the second range.
4105 Otherwise, we can't describe this as a single range. */
4107 in_p = 1, low = low0, high = high0;
4108 else if (lowequal && highequal)
4109 in_p = 0, low = high = 0;
4110 else if (subset && lowequal)
4112 in_p = 1, high = high0;
4113 low = range_binop (PLUS_EXPR, NULL_TREE, high1, 0,
4114 integer_one_node, 0);
4116 else if (! subset || highequal)
4118 in_p = 1, low = low0;
4119 high = range_binop (MINUS_EXPR, NULL_TREE, low1, 0,
4120 integer_one_node, 0);
4126 else if (! in0_p && in1_p)
4128 /* If they don't overlap, the result is the second range. If the second
4129 is a subset of the first, the result is false. Otherwise,
4130 the range starts just after the first range and ends at the
4131 end of the second. */
4133 in_p = 1, low = low1, high = high1;
4134 else if (subset || highequal)
4135 in_p = 0, low = high = 0;
4138 in_p = 1, high = high1;
4139 low = range_binop (PLUS_EXPR, NULL_TREE, high0, 1,
4140 integer_one_node, 0);
4146 /* The case where we are excluding both ranges. Here the complex case
4147 is if they don't overlap. In that case, the only time we have a
4148 range is if they are adjacent. If the second is a subset of the
4149 first, the result is the first. Otherwise, the range to exclude
4150 starts at the beginning of the first range and ends at the end of the
4154 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4155 range_binop (PLUS_EXPR, NULL_TREE,
4157 integer_one_node, 1),
4159 in_p = 0, low = low0, high = high1;
4162 /* Canonicalize - [min, x] into - [-, x]. */
4163 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4164 switch (TREE_CODE (TREE_TYPE (low0)))
4167 if (TYPE_PRECISION (TREE_TYPE (low0))
4168 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4173 if (tree_int_cst_equal (low0,
4174 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4178 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4179 && integer_zerop (low0))
4186 /* Canonicalize - [x, max] into - [x, -]. */
4187 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4188 switch (TREE_CODE (TREE_TYPE (high1)))
4191 if (TYPE_PRECISION (TREE_TYPE (high1))
4192 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4197 if (tree_int_cst_equal (high1,
4198 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4202 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4203 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4205 integer_one_node, 1)))
4212 /* The ranges might be also adjacent between the maximum and
4213 minimum values of the given type. For
4214 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4215 return + [x + 1, y - 1]. */
4216 if (low0 == 0 && high1 == 0)
4218 low = range_binop (PLUS_EXPR, NULL_TREE, high0, 1,
4219 integer_one_node, 1);
4220 high = range_binop (MINUS_EXPR, NULL_TREE, low1, 0,
4221 integer_one_node, 0);
4222 if (low == 0 || high == 0)
4232 in_p = 0, low = low0, high = high0;
4234 in_p = 0, low = low0, high = high1;
4237 *pin_p = in_p, *plow = low, *phigh = high;
4242 /* Subroutine of fold, looking inside expressions of the form
4243 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4244 of the COND_EXPR. This function is being used also to optimize
4245 A op B ? C : A, by reversing the comparison first.
4247 Return a folded expression whose code is not a COND_EXPR
4248 anymore, or NULL_TREE if no folding opportunity is found. */
4251 fold_cond_expr_with_comparison (tree type, tree arg0, tree arg1, tree arg2)
4253 enum tree_code comp_code = TREE_CODE (arg0);
4254 tree arg00 = TREE_OPERAND (arg0, 0);
4255 tree arg01 = TREE_OPERAND (arg0, 1);
4256 tree arg1_type = TREE_TYPE (arg1);
4262 /* If we have A op 0 ? A : -A, consider applying the following
4265 A == 0? A : -A same as -A
4266 A != 0? A : -A same as A
4267 A >= 0? A : -A same as abs (A)
4268 A > 0? A : -A same as abs (A)
4269 A <= 0? A : -A same as -abs (A)
4270 A < 0? A : -A same as -abs (A)
4272 None of these transformations work for modes with signed
4273 zeros. If A is +/-0, the first two transformations will
4274 change the sign of the result (from +0 to -0, or vice
4275 versa). The last four will fix the sign of the result,
4276 even though the original expressions could be positive or
4277 negative, depending on the sign of A.
4279 Note that all these transformations are correct if A is
4280 NaN, since the two alternatives (A and -A) are also NaNs. */
4281 if ((FLOAT_TYPE_P (TREE_TYPE (arg01))
4282 ? real_zerop (arg01)
4283 : integer_zerop (arg01))
4284 && ((TREE_CODE (arg2) == NEGATE_EXPR
4285 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4286 /* In the case that A is of the form X-Y, '-A' (arg2) may
4287 have already been folded to Y-X, check for that. */
4288 || (TREE_CODE (arg1) == MINUS_EXPR
4289 && TREE_CODE (arg2) == MINUS_EXPR
4290 && operand_equal_p (TREE_OPERAND (arg1, 0),
4291 TREE_OPERAND (arg2, 1), 0)
4292 && operand_equal_p (TREE_OPERAND (arg1, 1),
4293 TREE_OPERAND (arg2, 0), 0))))
4298 tem = fold_convert (arg1_type, arg1);
4299 return pedantic_non_lvalue (fold_convert (type, negate_expr (tem)));
4302 return pedantic_non_lvalue (fold_convert (type, arg1));
4305 if (flag_trapping_math)
4310 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4311 arg1 = fold_convert (lang_hooks.types.signed_type
4312 (TREE_TYPE (arg1)), arg1);
4313 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
4314 return pedantic_non_lvalue (fold_convert (type, tem));
4317 if (flag_trapping_math)
4321 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4322 arg1 = fold_convert (lang_hooks.types.signed_type
4323 (TREE_TYPE (arg1)), arg1);
4324 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
4325 return negate_expr (fold_convert (type, tem));
4327 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4331 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4332 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4333 both transformations are correct when A is NaN: A != 0
4334 is then true, and A == 0 is false. */
4336 if (integer_zerop (arg01) && integer_zerop (arg2))
4338 if (comp_code == NE_EXPR)
4339 return pedantic_non_lvalue (fold_convert (type, arg1));
4340 else if (comp_code == EQ_EXPR)
4341 return fold_convert (type, integer_zero_node);
4344 /* Try some transformations of A op B ? A : B.
4346 A == B? A : B same as B
4347 A != B? A : B same as A
4348 A >= B? A : B same as max (A, B)
4349 A > B? A : B same as max (B, A)
4350 A <= B? A : B same as min (A, B)
4351 A < B? A : B same as min (B, A)
4353 As above, these transformations don't work in the presence
4354 of signed zeros. For example, if A and B are zeros of
4355 opposite sign, the first two transformations will change
4356 the sign of the result. In the last four, the original
4357 expressions give different results for (A=+0, B=-0) and
4358 (A=-0, B=+0), but the transformed expressions do not.
4360 The first two transformations are correct if either A or B
4361 is a NaN. In the first transformation, the condition will
4362 be false, and B will indeed be chosen. In the case of the
4363 second transformation, the condition A != B will be true,
4364 and A will be chosen.
4366 The conversions to max() and min() are not correct if B is
4367 a number and A is not. The conditions in the original
4368 expressions will be false, so all four give B. The min()
4369 and max() versions would give a NaN instead. */
4370 if (operand_equal_for_comparison_p (arg01, arg2, arg00)
4371 /* Avoid these transformations if the COND_EXPR may be used
4372 as an lvalue in the C++ front-end. PR c++/19199. */
4374 || strcmp (lang_hooks.name, "GNU C++") != 0
4375 || ! maybe_lvalue_p (arg1)
4376 || ! maybe_lvalue_p (arg2)))
4378 tree comp_op0 = arg00;
4379 tree comp_op1 = arg01;
4380 tree comp_type = TREE_TYPE (comp_op0);
4382 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4383 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4393 return pedantic_non_lvalue (fold_convert (type, arg2));
4395 return pedantic_non_lvalue (fold_convert (type, arg1));
4400 /* In C++ a ?: expression can be an lvalue, so put the
4401 operand which will be used if they are equal first
4402 so that we can convert this back to the
4403 corresponding COND_EXPR. */
4404 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4406 comp_op0 = fold_convert (comp_type, comp_op0);
4407 comp_op1 = fold_convert (comp_type, comp_op1);
4408 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
4409 ? fold_build2 (MIN_EXPR, comp_type, comp_op0, comp_op1)
4410 : fold_build2 (MIN_EXPR, comp_type, comp_op1, comp_op0);
4411 return pedantic_non_lvalue (fold_convert (type, tem));
4418 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4420 comp_op0 = fold_convert (comp_type, comp_op0);
4421 comp_op1 = fold_convert (comp_type, comp_op1);
4422 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
4423 ? fold_build2 (MAX_EXPR, comp_type, comp_op0, comp_op1)
4424 : fold_build2 (MAX_EXPR, comp_type, comp_op1, comp_op0);
4425 return pedantic_non_lvalue (fold_convert (type, tem));
4429 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4430 return pedantic_non_lvalue (fold_convert (type, arg2));
4433 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4434 return pedantic_non_lvalue (fold_convert (type, arg1));
4437 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4442 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4443 we might still be able to simplify this. For example,
4444 if C1 is one less or one more than C2, this might have started
4445 out as a MIN or MAX and been transformed by this function.
4446 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
4448 if (INTEGRAL_TYPE_P (type)
4449 && TREE_CODE (arg01) == INTEGER_CST
4450 && TREE_CODE (arg2) == INTEGER_CST)
4454 /* We can replace A with C1 in this case. */
4455 arg1 = fold_convert (type, arg01);
4456 return fold_build3 (COND_EXPR, type, arg0, arg1, arg2);
4459 /* If C1 is C2 + 1, this is min(A, C2). */
4460 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4462 && operand_equal_p (arg01,
4463 const_binop (PLUS_EXPR, arg2,
4464 integer_one_node, 0),
4466 return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
4471 /* If C1 is C2 - 1, this is min(A, C2). */
4472 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4474 && operand_equal_p (arg01,
4475 const_binop (MINUS_EXPR, arg2,
4476 integer_one_node, 0),
4478 return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
4483 /* If C1 is C2 - 1, this is max(A, C2). */
4484 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4486 && operand_equal_p (arg01,
4487 const_binop (MINUS_EXPR, arg2,
4488 integer_one_node, 0),
4490 return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
4495 /* If C1 is C2 + 1, this is max(A, C2). */
4496 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4498 && operand_equal_p (arg01,
4499 const_binop (PLUS_EXPR, arg2,
4500 integer_one_node, 0),
4502 return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
4516 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
4517 #define LOGICAL_OP_NON_SHORT_CIRCUIT (BRANCH_COST >= 2)
4520 /* EXP is some logical combination of boolean tests. See if we can
4521 merge it into some range test. Return the new tree if so. */
4524 fold_range_test (enum tree_code code, tree type, tree op0, tree op1)
4526 int or_op = (code == TRUTH_ORIF_EXPR
4527 || code == TRUTH_OR_EXPR);
4528 int in0_p, in1_p, in_p;
4529 tree low0, low1, low, high0, high1, high;
4530 tree lhs = make_range (op0, &in0_p, &low0, &high0);
4531 tree rhs = make_range (op1, &in1_p, &low1, &high1);
4534 /* If this is an OR operation, invert both sides; we will invert
4535 again at the end. */
4537 in0_p = ! in0_p, in1_p = ! in1_p;
4539 /* If both expressions are the same, if we can merge the ranges, and we
4540 can build the range test, return it or it inverted. If one of the
4541 ranges is always true or always false, consider it to be the same
4542 expression as the other. */
4543 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
4544 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
4546 && 0 != (tem = (build_range_check (type,
4548 : rhs != 0 ? rhs : integer_zero_node,
4550 return or_op ? invert_truthvalue (tem) : tem;
4552 /* On machines where the branch cost is expensive, if this is a
4553 short-circuited branch and the underlying object on both sides
4554 is the same, make a non-short-circuit operation. */
4555 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
4556 && lhs != 0 && rhs != 0
4557 && (code == TRUTH_ANDIF_EXPR
4558 || code == TRUTH_ORIF_EXPR)
4559 && operand_equal_p (lhs, rhs, 0))
4561 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
4562 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
4563 which cases we can't do this. */
4564 if (simple_operand_p (lhs))
4565 return build2 (code == TRUTH_ANDIF_EXPR
4566 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4569 else if (lang_hooks.decls.global_bindings_p () == 0
4570 && ! CONTAINS_PLACEHOLDER_P (lhs))
4572 tree common = save_expr (lhs);
4574 if (0 != (lhs = build_range_check (type, common,
4575 or_op ? ! in0_p : in0_p,
4577 && (0 != (rhs = build_range_check (type, common,
4578 or_op ? ! in1_p : in1_p,
4580 return build2 (code == TRUTH_ANDIF_EXPR
4581 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4589 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
4590 bit value. Arrange things so the extra bits will be set to zero if and
4591 only if C is signed-extended to its full width. If MASK is nonzero,
4592 it is an INTEGER_CST that should be AND'ed with the extra bits. */
4595 unextend (tree c, int p, int unsignedp, tree mask)
4597 tree type = TREE_TYPE (c);
4598 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
4601 if (p == modesize || unsignedp)
4604 /* We work by getting just the sign bit into the low-order bit, then
4605 into the high-order bit, then sign-extend. We then XOR that value
4607 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
4608 temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
4610 /* We must use a signed type in order to get an arithmetic right shift.
4611 However, we must also avoid introducing accidental overflows, so that
4612 a subsequent call to integer_zerop will work. Hence we must
4613 do the type conversion here. At this point, the constant is either
4614 zero or one, and the conversion to a signed type can never overflow.
4615 We could get an overflow if this conversion is done anywhere else. */
4616 if (TYPE_UNSIGNED (type))
4617 temp = fold_convert (lang_hooks.types.signed_type (type), temp);
4619 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
4620 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
4622 temp = const_binop (BIT_AND_EXPR, temp,
4623 fold_convert (TREE_TYPE (c), mask), 0);
4624 /* If necessary, convert the type back to match the type of C. */
4625 if (TYPE_UNSIGNED (type))
4626 temp = fold_convert (type, temp);
4628 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
4631 /* Find ways of folding logical expressions of LHS and RHS:
4632 Try to merge two comparisons to the same innermost item.
4633 Look for range tests like "ch >= '0' && ch <= '9'".
4634 Look for combinations of simple terms on machines with expensive branches
4635 and evaluate the RHS unconditionally.
4637 For example, if we have p->a == 2 && p->b == 4 and we can make an
4638 object large enough to span both A and B, we can do this with a comparison
4639 against the object ANDed with the a mask.
4641 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
4642 operations to do this with one comparison.
4644 We check for both normal comparisons and the BIT_AND_EXPRs made this by
4645 function and the one above.
4647 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
4648 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
4650 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
4653 We return the simplified tree or 0 if no optimization is possible. */
4656 fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs)
4658 /* If this is the "or" of two comparisons, we can do something if
4659 the comparisons are NE_EXPR. If this is the "and", we can do something
4660 if the comparisons are EQ_EXPR. I.e.,
4661 (a->b == 2 && a->c == 4) can become (a->new == NEW).
4663 WANTED_CODE is this operation code. For single bit fields, we can
4664 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
4665 comparison for one-bit fields. */
4667 enum tree_code wanted_code;
4668 enum tree_code lcode, rcode;
4669 tree ll_arg, lr_arg, rl_arg, rr_arg;
4670 tree ll_inner, lr_inner, rl_inner, rr_inner;
4671 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
4672 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
4673 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
4674 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
4675 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
4676 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
4677 enum machine_mode lnmode, rnmode;
4678 tree ll_mask, lr_mask, rl_mask, rr_mask;
4679 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
4680 tree l_const, r_const;
4681 tree lntype, rntype, result;
4682 int first_bit, end_bit;
4685 /* Start by getting the comparison codes. Fail if anything is volatile.
4686 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
4687 it were surrounded with a NE_EXPR. */
4689 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
4692 lcode = TREE_CODE (lhs);
4693 rcode = TREE_CODE (rhs);
4695 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
4697 lhs = build2 (NE_EXPR, truth_type, lhs,
4698 fold_convert (TREE_TYPE (lhs), integer_zero_node));
4702 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
4704 rhs = build2 (NE_EXPR, truth_type, rhs,
4705 fold_convert (TREE_TYPE (rhs), integer_zero_node));
4709 if (TREE_CODE_CLASS (lcode) != tcc_comparison
4710 || TREE_CODE_CLASS (rcode) != tcc_comparison)
4713 ll_arg = TREE_OPERAND (lhs, 0);
4714 lr_arg = TREE_OPERAND (lhs, 1);
4715 rl_arg = TREE_OPERAND (rhs, 0);
4716 rr_arg = TREE_OPERAND (rhs, 1);
4718 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
4719 if (simple_operand_p (ll_arg)
4720 && simple_operand_p (lr_arg))
4723 if (operand_equal_p (ll_arg, rl_arg, 0)
4724 && operand_equal_p (lr_arg, rr_arg, 0))
4726 result = combine_comparisons (code, lcode, rcode,
4727 truth_type, ll_arg, lr_arg);
4731 else if (operand_equal_p (ll_arg, rr_arg, 0)
4732 && operand_equal_p (lr_arg, rl_arg, 0))
4734 result = combine_comparisons (code, lcode,
4735 swap_tree_comparison (rcode),
4736 truth_type, ll_arg, lr_arg);
4742 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
4743 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
4745 /* If the RHS can be evaluated unconditionally and its operands are
4746 simple, it wins to evaluate the RHS unconditionally on machines
4747 with expensive branches. In this case, this isn't a comparison
4748 that can be merged. Avoid doing this if the RHS is a floating-point
4749 comparison since those can trap. */
4751 if (BRANCH_COST >= 2
4752 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
4753 && simple_operand_p (rl_arg)
4754 && simple_operand_p (rr_arg))
4756 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
4757 if (code == TRUTH_OR_EXPR
4758 && lcode == NE_EXPR && integer_zerop (lr_arg)
4759 && rcode == NE_EXPR && integer_zerop (rr_arg)
4760 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
4761 return build2 (NE_EXPR, truth_type,
4762 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
4764 fold_convert (TREE_TYPE (ll_arg), integer_zero_node));
4766 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
4767 if (code == TRUTH_AND_EXPR
4768 && lcode == EQ_EXPR && integer_zerop (lr_arg)
4769 && rcode == EQ_EXPR && integer_zerop (rr_arg)
4770 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
4771 return build2 (EQ_EXPR, truth_type,
4772 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
4774 fold_convert (TREE_TYPE (ll_arg), integer_zero_node));
4776 if (LOGICAL_OP_NON_SHORT_CIRCUIT)
4777 return build2 (code, truth_type, lhs, rhs);
4780 /* See if the comparisons can be merged. Then get all the parameters for
4783 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
4784 || (rcode != EQ_EXPR && rcode != NE_EXPR))
4788 ll_inner = decode_field_reference (ll_arg,
4789 &ll_bitsize, &ll_bitpos, &ll_mode,
4790 &ll_unsignedp, &volatilep, &ll_mask,
4792 lr_inner = decode_field_reference (lr_arg,
4793 &lr_bitsize, &lr_bitpos, &lr_mode,
4794 &lr_unsignedp, &volatilep, &lr_mask,
4796 rl_inner = decode_field_reference (rl_arg,
4797 &rl_bitsize, &rl_bitpos, &rl_mode,
4798 &rl_unsignedp, &volatilep, &rl_mask,
4800 rr_inner = decode_field_reference (rr_arg,
4801 &rr_bitsize, &rr_bitpos, &rr_mode,
4802 &rr_unsignedp, &volatilep, &rr_mask,
4805 /* It must be true that the inner operation on the lhs of each
4806 comparison must be the same if we are to be able to do anything.
4807 Then see if we have constants. If not, the same must be true for
4809 if (volatilep || ll_inner == 0 || rl_inner == 0
4810 || ! operand_equal_p (ll_inner, rl_inner, 0))
4813 if (TREE_CODE (lr_arg) == INTEGER_CST
4814 && TREE_CODE (rr_arg) == INTEGER_CST)
4815 l_const = lr_arg, r_const = rr_arg;
4816 else if (lr_inner == 0 || rr_inner == 0
4817 || ! operand_equal_p (lr_inner, rr_inner, 0))
4820 l_const = r_const = 0;
4822 /* If either comparison code is not correct for our logical operation,
4823 fail. However, we can convert a one-bit comparison against zero into
4824 the opposite comparison against that bit being set in the field. */
4826 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
4827 if (lcode != wanted_code)
4829 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
4831 /* Make the left operand unsigned, since we are only interested
4832 in the value of one bit. Otherwise we are doing the wrong
4841 /* This is analogous to the code for l_const above. */
4842 if (rcode != wanted_code)
4844 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
4853 /* After this point all optimizations will generate bit-field
4854 references, which we might not want. */
4855 if (! lang_hooks.can_use_bit_fields_p ())
4858 /* See if we can find a mode that contains both fields being compared on
4859 the left. If we can't, fail. Otherwise, update all constants and masks
4860 to be relative to a field of that size. */
4861 first_bit = MIN (ll_bitpos, rl_bitpos);
4862 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
4863 lnmode = get_best_mode (end_bit - first_bit, first_bit,
4864 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
4866 if (lnmode == VOIDmode)
4869 lnbitsize = GET_MODE_BITSIZE (lnmode);
4870 lnbitpos = first_bit & ~ (lnbitsize - 1);
4871 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
4872 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
4874 if (BYTES_BIG_ENDIAN)
4876 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
4877 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
4880 ll_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, ll_mask),
4881 size_int (xll_bitpos), 0);
4882 rl_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, rl_mask),
4883 size_int (xrl_bitpos), 0);
4887 l_const = fold_convert (lntype, l_const);
4888 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
4889 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
4890 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
4891 fold_build1 (BIT_NOT_EXPR,
4895 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
4897 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
4902 r_const = fold_convert (lntype, r_const);
4903 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
4904 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
4905 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
4906 fold_build1 (BIT_NOT_EXPR,
4910 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
4912 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
4916 /* If the right sides are not constant, do the same for it. Also,
4917 disallow this optimization if a size or signedness mismatch occurs
4918 between the left and right sides. */
4921 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
4922 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
4923 /* Make sure the two fields on the right
4924 correspond to the left without being swapped. */
4925 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
4928 first_bit = MIN (lr_bitpos, rr_bitpos);
4929 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
4930 rnmode = get_best_mode (end_bit - first_bit, first_bit,
4931 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
4933 if (rnmode == VOIDmode)
4936 rnbitsize = GET_MODE_BITSIZE (rnmode);
4937 rnbitpos = first_bit & ~ (rnbitsize - 1);
4938 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
4939 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
4941 if (BYTES_BIG_ENDIAN)
4943 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
4944 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
4947 lr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, lr_mask),
4948 size_int (xlr_bitpos), 0);
4949 rr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, rr_mask),
4950 size_int (xrr_bitpos), 0);
4952 /* Make a mask that corresponds to both fields being compared.
4953 Do this for both items being compared. If the operands are the
4954 same size and the bits being compared are in the same position
4955 then we can do this by masking both and comparing the masked
4957 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
4958 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
4959 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
4961 lhs = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
4962 ll_unsignedp || rl_unsignedp);
4963 if (! all_ones_mask_p (ll_mask, lnbitsize))
4964 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
4966 rhs = make_bit_field_ref (lr_inner, rntype, rnbitsize, rnbitpos,
4967 lr_unsignedp || rr_unsignedp);
4968 if (! all_ones_mask_p (lr_mask, rnbitsize))
4969 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
4971 return build2 (wanted_code, truth_type, lhs, rhs);
4974 /* There is still another way we can do something: If both pairs of
4975 fields being compared are adjacent, we may be able to make a wider
4976 field containing them both.
4978 Note that we still must mask the lhs/rhs expressions. Furthermore,
4979 the mask must be shifted to account for the shift done by
4980 make_bit_field_ref. */
4981 if ((ll_bitsize + ll_bitpos == rl_bitpos
4982 && lr_bitsize + lr_bitpos == rr_bitpos)
4983 || (ll_bitpos == rl_bitpos + rl_bitsize
4984 && lr_bitpos == rr_bitpos + rr_bitsize))
4988 lhs = make_bit_field_ref (ll_inner, lntype, ll_bitsize + rl_bitsize,
4989 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
4990 rhs = make_bit_field_ref (lr_inner, rntype, lr_bitsize + rr_bitsize,
4991 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
4993 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
4994 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
4995 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
4996 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);
4998 /* Convert to the smaller type before masking out unwanted bits. */
5000 if (lntype != rntype)
5002 if (lnbitsize > rnbitsize)
5004 lhs = fold_convert (rntype, lhs);
5005 ll_mask = fold_convert (rntype, ll_mask);
5008 else if (lnbitsize < rnbitsize)
5010 rhs = fold_convert (lntype, rhs);
5011 lr_mask = fold_convert (lntype, lr_mask);
5016 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5017 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5019 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5020 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5022 return build2 (wanted_code, truth_type, lhs, rhs);
5028 /* Handle the case of comparisons with constants. If there is something in
5029 common between the masks, those bits of the constants must be the same.
5030 If not, the condition is always false. Test for this to avoid generating
5031 incorrect code below. */
5032 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
5033 if (! integer_zerop (result)
5034 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
5035 const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
5037 if (wanted_code == NE_EXPR)
5039 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5040 return constant_boolean_node (true, truth_type);
5044 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5045 return constant_boolean_node (false, truth_type);
5049 /* Construct the expression we will return. First get the component
5050 reference we will make. Unless the mask is all ones the width of
5051 that field, perform the mask operation. Then compare with the
5053 result = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
5054 ll_unsignedp || rl_unsignedp);
5056 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
5057 if (! all_ones_mask_p (ll_mask, lnbitsize))
5058 result = build2 (BIT_AND_EXPR, lntype, result, ll_mask);
5060 return build2 (wanted_code, truth_type, result,
5061 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
5064 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5068 optimize_minmax_comparison (enum tree_code code, tree type, tree op0, tree op1)
5071 enum tree_code op_code;
5072 tree comp_const = op1;
5074 int consts_equal, consts_lt;
5077 STRIP_SIGN_NOPS (arg0);
5079 op_code = TREE_CODE (arg0);
5080 minmax_const = TREE_OPERAND (arg0, 1);
5081 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5082 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5083 inner = TREE_OPERAND (arg0, 0);
5085 /* If something does not permit us to optimize, return the original tree. */
5086 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5087 || TREE_CODE (comp_const) != INTEGER_CST
5088 || TREE_CONSTANT_OVERFLOW (comp_const)
5089 || TREE_CODE (minmax_const) != INTEGER_CST
5090 || TREE_CONSTANT_OVERFLOW (minmax_const))
5093 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5094 and GT_EXPR, doing the rest with recursive calls using logical
5098 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5100 /* FIXME: We should be able to invert code without building a
5101 scratch tree node, but doing so would require us to
5102 duplicate a part of invert_truthvalue here. */
5103 tree tem = invert_truthvalue (build2 (code, type, op0, op1));
5104 tem = optimize_minmax_comparison (TREE_CODE (tem),
5106 TREE_OPERAND (tem, 0),
5107 TREE_OPERAND (tem, 1));
5108 return invert_truthvalue (tem);
5113 fold_build2 (TRUTH_ORIF_EXPR, type,
5114 optimize_minmax_comparison
5115 (EQ_EXPR, type, arg0, comp_const),
5116 optimize_minmax_comparison
5117 (GT_EXPR, type, arg0, comp_const));
5120 if (op_code == MAX_EXPR && consts_equal)
5121 /* MAX (X, 0) == 0 -> X <= 0 */
5122 return fold_build2 (LE_EXPR, type, inner, comp_const);
5124 else if (op_code == MAX_EXPR && consts_lt)
5125 /* MAX (X, 0) == 5 -> X == 5 */
5126 return fold_build2 (EQ_EXPR, type, inner, comp_const);
5128 else if (op_code == MAX_EXPR)
5129 /* MAX (X, 0) == -1 -> false */
5130 return omit_one_operand (type, integer_zero_node, inner);
5132 else if (consts_equal)
5133 /* MIN (X, 0) == 0 -> X >= 0 */
5134 return fold_build2 (GE_EXPR, type, inner, comp_const);
5137 /* MIN (X, 0) == 5 -> false */
5138 return omit_one_operand (type, integer_zero_node, inner);
5141 /* MIN (X, 0) == -1 -> X == -1 */
5142 return fold_build2 (EQ_EXPR, type, inner, comp_const);
5145 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5146 /* MAX (X, 0) > 0 -> X > 0
5147 MAX (X, 0) > 5 -> X > 5 */
5148 return fold_build2 (GT_EXPR, type, inner, comp_const);
5150 else if (op_code == MAX_EXPR)
5151 /* MAX (X, 0) > -1 -> true */
5152 return omit_one_operand (type, integer_one_node, inner);
5154 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5155 /* MIN (X, 0) > 0 -> false
5156 MIN (X, 0) > 5 -> false */
5157 return omit_one_operand (type, integer_zero_node, inner);
5160 /* MIN (X, 0) > -1 -> X > -1 */
5161 return fold_build2 (GT_EXPR, type, inner, comp_const);
5168 /* T is an integer expression that is being multiplied, divided, or taken a
5169 modulus (CODE says which and what kind of divide or modulus) by a
5170 constant C. See if we can eliminate that operation by folding it with
5171 other operations already in T. WIDE_TYPE, if non-null, is a type that
5172 should be used for the computation if wider than our type.
5174 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5175 (X * 2) + (Y * 4). We must, however, be assured that either the original
5176 expression would not overflow or that overflow is undefined for the type
5177 in the language in question.
5179 We also canonicalize (X + 7) * 4 into X * 4 + 28 in the hope that either
5180 the machine has a multiply-accumulate insn or that this is part of an
5181 addressing calculation.
5183 If we return a non-null expression, it is an equivalent form of the
5184 original computation, but need not be in the original type. */
5187 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type)
5189 /* To avoid exponential search depth, refuse to allow recursion past
5190 three levels. Beyond that (1) it's highly unlikely that we'll find
5191 something interesting and (2) we've probably processed it before
5192 when we built the inner expression. */
5201 ret = extract_muldiv_1 (t, c, code, wide_type);
5208 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type)
5210 tree type = TREE_TYPE (t);
5211 enum tree_code tcode = TREE_CODE (t);
5212 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5213 > GET_MODE_SIZE (TYPE_MODE (type)))
5214 ? wide_type : type);
5216 int same_p = tcode == code;
5217 tree op0 = NULL_TREE, op1 = NULL_TREE;
5219 /* Don't deal with constants of zero here; they confuse the code below. */
5220 if (integer_zerop (c))
5223 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5224 op0 = TREE_OPERAND (t, 0);
5226 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5227 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5229 /* Note that we need not handle conditional operations here since fold
5230 already handles those cases. So just do arithmetic here. */
5234 /* For a constant, we can always simplify if we are a multiply
5235 or (for divide and modulus) if it is a multiple of our constant. */
5236 if (code == MULT_EXPR
5237 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
5238 return const_binop (code, fold_convert (ctype, t),
5239 fold_convert (ctype, c), 0);
5242 case CONVERT_EXPR: case NON_LVALUE_EXPR: case NOP_EXPR:
5243 /* If op0 is an expression ... */
5244 if ((COMPARISON_CLASS_P (op0)
5245 || UNARY_CLASS_P (op0)
5246 || BINARY_CLASS_P (op0)
5247 || EXPRESSION_CLASS_P (op0))
5248 /* ... and is unsigned, and its type is smaller than ctype,
5249 then we cannot pass through as widening. */
5250 && ((TYPE_UNSIGNED (TREE_TYPE (op0))
5251 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
5252 && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
5253 && (GET_MODE_SIZE (TYPE_MODE (ctype))
5254 > GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0)))))
5255 /* ... or this is a truncation (t is narrower than op0),
5256 then we cannot pass through this narrowing. */
5257 || (GET_MODE_SIZE (TYPE_MODE (type))
5258 < GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0))))
5259 /* ... or signedness changes for division or modulus,
5260 then we cannot pass through this conversion. */
5261 || (code != MULT_EXPR
5262 && (TYPE_UNSIGNED (ctype)
5263 != TYPE_UNSIGNED (TREE_TYPE (op0))))))
5266 /* Pass the constant down and see if we can make a simplification. If
5267 we can, replace this expression with the inner simplification for
5268 possible later conversion to our or some other type. */
5269 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5270 && TREE_CODE (t2) == INTEGER_CST
5271 && ! TREE_CONSTANT_OVERFLOW (t2)
5272 && (0 != (t1 = extract_muldiv (op0, t2, code,
5274 ? ctype : NULL_TREE))))
5279 /* If widening the type changes it from signed to unsigned, then we
5280 must avoid building ABS_EXPR itself as unsigned. */
5281 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
5283 tree cstype = (*lang_hooks.types.signed_type) (ctype);
5284 if ((t1 = extract_muldiv (op0, c, code, cstype)) != 0)
5286 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
5287 return fold_convert (ctype, t1);
5293 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
5294 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
5297 case MIN_EXPR: case MAX_EXPR:
5298 /* If widening the type changes the signedness, then we can't perform
5299 this optimization as that changes the result. */
5300 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5303 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5304 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0
5305 && (t2 = extract_muldiv (op1, c, code, wide_type)) != 0)
5307 if (tree_int_cst_sgn (c) < 0)
5308 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5310 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5311 fold_convert (ctype, t2));
5315 case LSHIFT_EXPR: case RSHIFT_EXPR:
5316 /* If the second operand is constant, this is a multiplication
5317 or floor division, by a power of two, so we can treat it that
5318 way unless the multiplier or divisor overflows. Signed
5319 left-shift overflow is implementation-defined rather than
5320 undefined in C90, so do not convert signed left shift into
5322 if (TREE_CODE (op1) == INTEGER_CST
5323 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
5324 /* const_binop may not detect overflow correctly,
5325 so check for it explicitly here. */
5326 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
5327 && TREE_INT_CST_HIGH (op1) == 0
5328 && 0 != (t1 = fold_convert (ctype,
5329 const_binop (LSHIFT_EXPR,
5332 && ! TREE_OVERFLOW (t1))
5333 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5334 ? MULT_EXPR : FLOOR_DIV_EXPR,
5335 ctype, fold_convert (ctype, op0), t1),
5336 c, code, wide_type);
5339 case PLUS_EXPR: case MINUS_EXPR:
5340 /* See if we can eliminate the operation on both sides. If we can, we
5341 can return a new PLUS or MINUS. If we can't, the only remaining
5342 cases where we can do anything are if the second operand is a
5344 t1 = extract_muldiv (op0, c, code, wide_type);
5345 t2 = extract_muldiv (op1, c, code, wide_type);
5346 if (t1 != 0 && t2 != 0
5347 && (code == MULT_EXPR
5348 /* If not multiplication, we can only do this if both operands
5349 are divisible by c. */
5350 || (multiple_of_p (ctype, op0, c)
5351 && multiple_of_p (ctype, op1, c))))
5352 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5353 fold_convert (ctype, t2));
5355 /* If this was a subtraction, negate OP1 and set it to be an addition.
5356 This simplifies the logic below. */
5357 if (tcode == MINUS_EXPR)
5358 tcode = PLUS_EXPR, op1 = negate_expr (op1);
5360 if (TREE_CODE (op1) != INTEGER_CST)
5363 /* If either OP1 or C are negative, this optimization is not safe for
5364 some of the division and remainder types while for others we need
5365 to change the code. */
5366 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5368 if (code == CEIL_DIV_EXPR)
5369 code = FLOOR_DIV_EXPR;
5370 else if (code == FLOOR_DIV_EXPR)
5371 code = CEIL_DIV_EXPR;
5372 else if (code != MULT_EXPR
5373 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5377 /* If it's a multiply or a division/modulus operation of a multiple
5378 of our constant, do the operation and verify it doesn't overflow. */
5379 if (code == MULT_EXPR
5380 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5382 op1 = const_binop (code, fold_convert (ctype, op1),
5383 fold_convert (ctype, c), 0);
5384 /* We allow the constant to overflow with wrapping semantics. */
5386 || (TREE_OVERFLOW (op1) && ! flag_wrapv))
5392 /* If we have an unsigned type is not a sizetype, we cannot widen
5393 the operation since it will change the result if the original
5394 computation overflowed. */
5395 if (TYPE_UNSIGNED (ctype)
5396 && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
5400 /* If we were able to eliminate our operation from the first side,
5401 apply our operation to the second side and reform the PLUS. */
5402 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5403 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
5405 /* The last case is if we are a multiply. In that case, we can
5406 apply the distributive law to commute the multiply and addition
5407 if the multiplication of the constants doesn't overflow. */
5408 if (code == MULT_EXPR)
5409 return fold_build2 (tcode, ctype,
5410 fold_build2 (code, ctype,
5411 fold_convert (ctype, op0),
5412 fold_convert (ctype, c)),
5418 /* We have a special case here if we are doing something like
5419 (C * 8) % 4 since we know that's zero. */
5420 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
5421 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
5422 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
5423 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5424 return omit_one_operand (type, integer_zero_node, op0);
5426 /* ... fall through ... */
5428 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
5429 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
5430 /* If we can extract our operation from the LHS, do so and return a
5431 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
5432 do something only if the second operand is a constant. */
5434 && (t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
5435 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5436 fold_convert (ctype, op1));
5437 else if (tcode == MULT_EXPR && code == MULT_EXPR
5438 && (t1 = extract_muldiv (op1, c, code, wide_type)) != 0)
5439 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5440 fold_convert (ctype, t1));
5441 else if (TREE_CODE (op1) != INTEGER_CST)
5444 /* If these are the same operation types, we can associate them
5445 assuming no overflow. */
5447 && 0 != (t1 = const_binop (MULT_EXPR, fold_convert (ctype, op1),
5448 fold_convert (ctype, c), 0))
5449 && ! TREE_OVERFLOW (t1))
5450 return fold_build2 (tcode, ctype, fold_convert (ctype, op0), t1);
5452 /* If these operations "cancel" each other, we have the main
5453 optimizations of this pass, which occur when either constant is a
5454 multiple of the other, in which case we replace this with either an
5455 operation or CODE or TCODE.
5457 If we have an unsigned type that is not a sizetype, we cannot do
5458 this since it will change the result if the original computation
5460 if ((! TYPE_UNSIGNED (ctype)
5461 || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
5463 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
5464 || (tcode == MULT_EXPR
5465 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
5466 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR)))
5468 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5469 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5470 fold_convert (ctype,
5471 const_binop (TRUNC_DIV_EXPR,
5473 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
5474 return fold_build2 (code, ctype, fold_convert (ctype, op0),
5475 fold_convert (ctype,
5476 const_binop (TRUNC_DIV_EXPR,
5488 /* Return a node which has the indicated constant VALUE (either 0 or
5489 1), and is of the indicated TYPE. */
5492 constant_boolean_node (int value, tree type)
5494 if (type == integer_type_node)
5495 return value ? integer_one_node : integer_zero_node;
5496 else if (type == boolean_type_node)
5497 return value ? boolean_true_node : boolean_false_node;
5499 return build_int_cst (type, value);
5503 /* Return true if expr looks like an ARRAY_REF and set base and
5504 offset to the appropriate trees. If there is no offset,
5505 offset is set to NULL_TREE. Base will be canonicalized to
5506 something you can get the element type from using
5507 TREE_TYPE (TREE_TYPE (base)). */
5510 extract_array_ref (tree expr, tree *base, tree *offset)
5512 /* One canonical form is a PLUS_EXPR with the first
5513 argument being an ADDR_EXPR with a possible NOP_EXPR
5515 if (TREE_CODE (expr) == PLUS_EXPR)
5517 tree op0 = TREE_OPERAND (expr, 0);
5518 tree inner_base, dummy1;
5519 /* Strip NOP_EXPRs here because the C frontends and/or
5520 folders present us (int *)&x.a + 4B possibly. */
5522 if (extract_array_ref (op0, &inner_base, &dummy1))
5525 if (dummy1 == NULL_TREE)
5526 *offset = TREE_OPERAND (expr, 1);
5528 *offset = fold_build2 (PLUS_EXPR, TREE_TYPE (expr),
5529 dummy1, TREE_OPERAND (expr, 1));
5533 /* Other canonical form is an ADDR_EXPR of an ARRAY_REF,
5534 which we transform into an ADDR_EXPR with appropriate
5535 offset. For other arguments to the ADDR_EXPR we assume
5536 zero offset and as such do not care about the ADDR_EXPR
5537 type and strip possible nops from it. */
5538 else if (TREE_CODE (expr) == ADDR_EXPR)
5540 tree op0 = TREE_OPERAND (expr, 0);
5541 if (TREE_CODE (op0) == ARRAY_REF)
5543 *base = TREE_OPERAND (op0, 0);
5544 *offset = TREE_OPERAND (op0, 1);
5548 /* Handle array-to-pointer decay as &a. */
5549 if (TREE_CODE (TREE_TYPE (op0)) == ARRAY_TYPE)
5550 *base = TREE_OPERAND (expr, 0);
5553 *offset = NULL_TREE;
5557 /* The next canonical form is a VAR_DECL with POINTER_TYPE. */
5558 else if (SSA_VAR_P (expr)
5559 && TREE_CODE (TREE_TYPE (expr)) == POINTER_TYPE)
5562 *offset = NULL_TREE;
5570 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
5571 Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
5572 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
5573 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
5574 COND is the first argument to CODE; otherwise (as in the example
5575 given here), it is the second argument. TYPE is the type of the
5576 original expression. Return NULL_TREE if no simplification is
5580 fold_binary_op_with_conditional_arg (enum tree_code code,
5581 tree type, tree op0, tree op1,
5582 tree cond, tree arg, int cond_first_p)
5584 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
5585 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
5586 tree test, true_value, false_value;
5587 tree lhs = NULL_TREE;
5588 tree rhs = NULL_TREE;
5590 /* This transformation is only worthwhile if we don't have to wrap
5591 arg in a SAVE_EXPR, and the operation can be simplified on at least
5592 one of the branches once its pushed inside the COND_EXPR. */
5593 if (!TREE_CONSTANT (arg))
5596 if (TREE_CODE (cond) == COND_EXPR)
5598 test = TREE_OPERAND (cond, 0);
5599 true_value = TREE_OPERAND (cond, 1);
5600 false_value = TREE_OPERAND (cond, 2);
5601 /* If this operand throws an expression, then it does not make
5602 sense to try to perform a logical or arithmetic operation
5604 if (VOID_TYPE_P (TREE_TYPE (true_value)))
5606 if (VOID_TYPE_P (TREE_TYPE (false_value)))
5611 tree testtype = TREE_TYPE (cond);
5613 true_value = constant_boolean_node (true, testtype);
5614 false_value = constant_boolean_node (false, testtype);
5617 arg = fold_convert (arg_type, arg);
5620 true_value = fold_convert (cond_type, true_value);
5622 lhs = fold_build2 (code, type, true_value, arg);
5624 lhs = fold_build2 (code, type, arg, true_value);
5628 false_value = fold_convert (cond_type, false_value);
5630 rhs = fold_build2 (code, type, false_value, arg);
5632 rhs = fold_build2 (code, type, arg, false_value);
5635 test = fold_build3 (COND_EXPR, type, test, lhs, rhs);
5636 return fold_convert (type, test);
5640 /* Subroutine of fold() that checks for the addition of +/- 0.0.
5642 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
5643 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
5644 ADDEND is the same as X.
5646 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
5647 and finite. The problematic cases are when X is zero, and its mode
5648 has signed zeros. In the case of rounding towards -infinity,
5649 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
5650 modes, X + 0 is not the same as X because -0 + 0 is 0. */
5653 fold_real_zero_addition_p (tree type, tree addend, int negate)
5655 if (!real_zerop (addend))
5658 /* Don't allow the fold with -fsignaling-nans. */
5659 if (HONOR_SNANS (TYPE_MODE (type)))
5662 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
5663 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
5666 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
5667 if (TREE_CODE (addend) == REAL_CST
5668 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
5671 /* The mode has signed zeros, and we have to honor their sign.
5672 In this situation, there is only one case we can return true for.
5673 X - 0 is the same as X unless rounding towards -infinity is
5675 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
5678 /* Subroutine of fold() that checks comparisons of built-in math
5679 functions against real constants.
5681 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
5682 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
5683 is the type of the result and ARG0 and ARG1 are the operands of the
5684 comparison. ARG1 must be a TREE_REAL_CST.
5686 The function returns the constant folded tree if a simplification
5687 can be made, and NULL_TREE otherwise. */
5690 fold_mathfn_compare (enum built_in_function fcode, enum tree_code code,
5691 tree type, tree arg0, tree arg1)
5695 if (BUILTIN_SQRT_P (fcode))
5697 tree arg = TREE_VALUE (TREE_OPERAND (arg0, 1));
5698 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
5700 c = TREE_REAL_CST (arg1);
5701 if (REAL_VALUE_NEGATIVE (c))
5703 /* sqrt(x) < y is always false, if y is negative. */
5704 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
5705 return omit_one_operand (type, integer_zero_node, arg);
5707 /* sqrt(x) > y is always true, if y is negative and we
5708 don't care about NaNs, i.e. negative values of x. */
5709 if (code == NE_EXPR || !HONOR_NANS (mode))
5710 return omit_one_operand (type, integer_one_node, arg);
5712 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
5713 return fold_build2 (GE_EXPR, type, arg,
5714 build_real (TREE_TYPE (arg), dconst0));
5716 else if (code == GT_EXPR || code == GE_EXPR)
5720 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
5721 real_convert (&c2, mode, &c2);
5723 if (REAL_VALUE_ISINF (c2))
5725 /* sqrt(x) > y is x == +Inf, when y is very large. */
5726 if (HONOR_INFINITIES (mode))
5727 return fold_build2 (EQ_EXPR, type, arg,
5728 build_real (TREE_TYPE (arg), c2));
5730 /* sqrt(x) > y is always false, when y is very large
5731 and we don't care about infinities. */
5732 return omit_one_operand (type, integer_zero_node, arg);
5735 /* sqrt(x) > c is the same as x > c*c. */
5736 return fold_build2 (code, type, arg,
5737 build_real (TREE_TYPE (arg), c2));
5739 else if (code == LT_EXPR || code == LE_EXPR)
5743 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
5744 real_convert (&c2, mode, &c2);
5746 if (REAL_VALUE_ISINF (c2))
5748 /* sqrt(x) < y is always true, when y is a very large
5749 value and we don't care about NaNs or Infinities. */
5750 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
5751 return omit_one_operand (type, integer_one_node, arg);
5753 /* sqrt(x) < y is x != +Inf when y is very large and we
5754 don't care about NaNs. */
5755 if (! HONOR_NANS (mode))
5756 return fold_build2 (NE_EXPR, type, arg,
5757 build_real (TREE_TYPE (arg), c2));
5759 /* sqrt(x) < y is x >= 0 when y is very large and we
5760 don't care about Infinities. */
5761 if (! HONOR_INFINITIES (mode))
5762 return fold_build2 (GE_EXPR, type, arg,
5763 build_real (TREE_TYPE (arg), dconst0));
5765 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
5766 if (lang_hooks.decls.global_bindings_p () != 0
5767 || CONTAINS_PLACEHOLDER_P (arg))
5770 arg = save_expr (arg);
5771 return fold_build2 (TRUTH_ANDIF_EXPR, type,
5772 fold_build2 (GE_EXPR, type, arg,
5773 build_real (TREE_TYPE (arg),
5775 fold_build2 (NE_EXPR, type, arg,
5776 build_real (TREE_TYPE (arg),
5780 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
5781 if (! HONOR_NANS (mode))
5782 return fold_build2 (code, type, arg,
5783 build_real (TREE_TYPE (arg), c2));
5785 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
5786 if (lang_hooks.decls.global_bindings_p () == 0
5787 && ! CONTAINS_PLACEHOLDER_P (arg))
5789 arg = save_expr (arg);
5790 return fold_build2 (TRUTH_ANDIF_EXPR, type,
5791 fold_build2 (GE_EXPR, type, arg,
5792 build_real (TREE_TYPE (arg),
5794 fold_build2 (code, type, arg,
5795 build_real (TREE_TYPE (arg),
5804 /* Subroutine of fold() that optimizes comparisons against Infinities,
5805 either +Inf or -Inf.
5807 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
5808 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
5809 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
5811 The function returns the constant folded tree if a simplification
5812 can be made, and NULL_TREE otherwise. */
5815 fold_inf_compare (enum tree_code code, tree type, tree arg0, tree arg1)
5817 enum machine_mode mode;
5818 REAL_VALUE_TYPE max;
5822 mode = TYPE_MODE (TREE_TYPE (arg0));
5824 /* For negative infinity swap the sense of the comparison. */
5825 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
5827 code = swap_tree_comparison (code);
5832 /* x > +Inf is always false, if with ignore sNANs. */
5833 if (HONOR_SNANS (mode))
5835 return omit_one_operand (type, integer_zero_node, arg0);
5838 /* x <= +Inf is always true, if we don't case about NaNs. */
5839 if (! HONOR_NANS (mode))
5840 return omit_one_operand (type, integer_one_node, arg0);
5842 /* x <= +Inf is the same as x == x, i.e. isfinite(x). */
5843 if (lang_hooks.decls.global_bindings_p () == 0
5844 && ! CONTAINS_PLACEHOLDER_P (arg0))
5846 arg0 = save_expr (arg0);
5847 return fold_build2 (EQ_EXPR, type, arg0, arg0);
5853 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
5854 real_maxval (&max, neg, mode);
5855 return fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
5856 arg0, build_real (TREE_TYPE (arg0), max));
5859 /* x < +Inf is always equal to x <= DBL_MAX. */
5860 real_maxval (&max, neg, mode);
5861 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
5862 arg0, build_real (TREE_TYPE (arg0), max));
5865 /* x != +Inf is always equal to !(x > DBL_MAX). */
5866 real_maxval (&max, neg, mode);
5867 if (! HONOR_NANS (mode))
5868 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
5869 arg0, build_real (TREE_TYPE (arg0), max));
5871 /* The transformation below creates non-gimple code and thus is
5872 not appropriate if we are in gimple form. */
5876 temp = fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
5877 arg0, build_real (TREE_TYPE (arg0), max));
5878 return fold_build1 (TRUTH_NOT_EXPR, type, temp);
5887 /* Subroutine of fold() that optimizes comparisons of a division by
5888 a nonzero integer constant against an integer constant, i.e.
5891 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
5892 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
5893 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
5895 The function returns the constant folded tree if a simplification
5896 can be made, and NULL_TREE otherwise. */
5899 fold_div_compare (enum tree_code code, tree type, tree arg0, tree arg1)
5901 tree prod, tmp, hi, lo;
5902 tree arg00 = TREE_OPERAND (arg0, 0);
5903 tree arg01 = TREE_OPERAND (arg0, 1);
5904 unsigned HOST_WIDE_INT lpart;
5905 HOST_WIDE_INT hpart;
5908 /* We have to do this the hard way to detect unsigned overflow.
5909 prod = int_const_binop (MULT_EXPR, arg01, arg1, 0); */
5910 overflow = mul_double (TREE_INT_CST_LOW (arg01),
5911 TREE_INT_CST_HIGH (arg01),
5912 TREE_INT_CST_LOW (arg1),
5913 TREE_INT_CST_HIGH (arg1), &lpart, &hpart);
5914 prod = build_int_cst_wide (TREE_TYPE (arg00), lpart, hpart);
5915 prod = force_fit_type (prod, -1, overflow, false);
5917 if (TYPE_UNSIGNED (TREE_TYPE (arg0)))
5919 tmp = int_const_binop (MINUS_EXPR, arg01, integer_one_node, 0);
5922 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0). */
5923 overflow = add_double (TREE_INT_CST_LOW (prod),
5924 TREE_INT_CST_HIGH (prod),
5925 TREE_INT_CST_LOW (tmp),
5926 TREE_INT_CST_HIGH (tmp),
5928 hi = build_int_cst_wide (TREE_TYPE (arg00), lpart, hpart);
5929 hi = force_fit_type (hi, -1, overflow | TREE_OVERFLOW (prod),
5930 TREE_CONSTANT_OVERFLOW (prod));
5932 else if (tree_int_cst_sgn (arg01) >= 0)
5934 tmp = int_const_binop (MINUS_EXPR, arg01, integer_one_node, 0);
5935 switch (tree_int_cst_sgn (arg1))
5938 lo = int_const_binop (MINUS_EXPR, prod, tmp, 0);
5943 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
5948 hi = int_const_binop (PLUS_EXPR, prod, tmp, 0);
5958 /* A negative divisor reverses the relational operators. */
5959 code = swap_tree_comparison (code);
5961 tmp = int_const_binop (PLUS_EXPR, arg01, integer_one_node, 0);
5962 switch (tree_int_cst_sgn (arg1))
5965 hi = int_const_binop (MINUS_EXPR, prod, tmp, 0);
5970 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
5975 lo = int_const_binop (PLUS_EXPR, prod, tmp, 0);
5987 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
5988 return omit_one_operand (type, integer_zero_node, arg00);
5989 if (TREE_OVERFLOW (hi))
5990 return fold_build2 (GE_EXPR, type, arg00, lo);
5991 if (TREE_OVERFLOW (lo))
5992 return fold_build2 (LE_EXPR, type, arg00, hi);
5993 return build_range_check (type, arg00, 1, lo, hi);
5996 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
5997 return omit_one_operand (type, integer_one_node, arg00);
5998 if (TREE_OVERFLOW (hi))
5999 return fold_build2 (LT_EXPR, type, arg00, lo);
6000 if (TREE_OVERFLOW (lo))
6001 return fold_build2 (GT_EXPR, type, arg00, hi);
6002 return build_range_check (type, arg00, 0, lo, hi);
6005 if (TREE_OVERFLOW (lo))
6006 return omit_one_operand (type, integer_zero_node, arg00);
6007 return fold_build2 (LT_EXPR, type, arg00, lo);
6010 if (TREE_OVERFLOW (hi))
6011 return omit_one_operand (type, integer_one_node, arg00);
6012 return fold_build2 (LE_EXPR, type, arg00, hi);
6015 if (TREE_OVERFLOW (hi))
6016 return omit_one_operand (type, integer_zero_node, arg00);
6017 return fold_build2 (GT_EXPR, type, arg00, hi);
6020 if (TREE_OVERFLOW (lo))
6021 return omit_one_operand (type, integer_one_node, arg00);
6022 return fold_build2 (GE_EXPR, type, arg00, lo);
6032 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6033 equality/inequality test, then return a simplified form of the test
6034 using a sign testing. Otherwise return NULL. TYPE is the desired
6038 fold_single_bit_test_into_sign_test (enum tree_code code, tree arg0, tree arg1,
6041 /* If this is testing a single bit, we can optimize the test. */
6042 if ((code == NE_EXPR || code == EQ_EXPR)
6043 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6044 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6046 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6047 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6048 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6050 if (arg00 != NULL_TREE
6051 /* This is only a win if casting to a signed type is cheap,
6052 i.e. when arg00's type is not a partial mode. */
6053 && TYPE_PRECISION (TREE_TYPE (arg00))
6054 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
6056 tree stype = lang_hooks.types.signed_type (TREE_TYPE (arg00));
6057 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6058 result_type, fold_convert (stype, arg00),
6059 fold_convert (stype, integer_zero_node));
6066 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6067 equality/inequality test, then return a simplified form of
6068 the test using shifts and logical operations. Otherwise return
6069 NULL. TYPE is the desired result type. */
6072 fold_single_bit_test (enum tree_code code, tree arg0, tree arg1,
6075 /* If this is testing a single bit, we can optimize the test. */
6076 if ((code == NE_EXPR || code == EQ_EXPR)
6077 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6078 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6080 tree inner = TREE_OPERAND (arg0, 0);
6081 tree type = TREE_TYPE (arg0);
6082 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6083 enum machine_mode operand_mode = TYPE_MODE (type);
6085 tree signed_type, unsigned_type, intermediate_type;
6088 /* First, see if we can fold the single bit test into a sign-bit
6090 tem = fold_single_bit_test_into_sign_test (code, arg0, arg1,
6095 /* Otherwise we have (A & C) != 0 where C is a single bit,
6096 convert that into ((A >> C2) & 1). Where C2 = log2(C).
6097 Similarly for (A & C) == 0. */
6099 /* If INNER is a right shift of a constant and it plus BITNUM does
6100 not overflow, adjust BITNUM and INNER. */
6101 if (TREE_CODE (inner) == RSHIFT_EXPR
6102 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6103 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
6104 && bitnum < TYPE_PRECISION (type)
6105 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
6106 bitnum - TYPE_PRECISION (type)))
6108 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
6109 inner = TREE_OPERAND (inner, 0);
6112 /* If we are going to be able to omit the AND below, we must do our
6113 operations as unsigned. If we must use the AND, we have a choice.
6114 Normally unsigned is faster, but for some machines signed is. */
6115 #ifdef LOAD_EXTEND_OP
6116 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6117 && !flag_syntax_only) ? 0 : 1;
6122 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6123 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6124 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6125 inner = fold_convert (intermediate_type, inner);
6128 inner = build2 (RSHIFT_EXPR, intermediate_type,
6129 inner, size_int (bitnum));
6131 if (code == EQ_EXPR)
6132 inner = fold_build2 (BIT_XOR_EXPR, intermediate_type,
6133 inner, integer_one_node);
6135 /* Put the AND last so it can combine with more things. */
6136 inner = build2 (BIT_AND_EXPR, intermediate_type,
6137 inner, integer_one_node);
6139 /* Make sure to return the proper type. */
6140 inner = fold_convert (result_type, inner);
6147 /* Check whether we are allowed to reorder operands arg0 and arg1,
6148 such that the evaluation of arg1 occurs before arg0. */
6151 reorder_operands_p (tree arg0, tree arg1)
6153 if (! flag_evaluation_order)
6155 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6157 return ! TREE_SIDE_EFFECTS (arg0)
6158 && ! TREE_SIDE_EFFECTS (arg1);
6161 /* Test whether it is preferable two swap two operands, ARG0 and
6162 ARG1, for example because ARG0 is an integer constant and ARG1
6163 isn't. If REORDER is true, only recommend swapping if we can
6164 evaluate the operands in reverse order. */
6167 tree_swap_operands_p (tree arg0, tree arg1, bool reorder)
6169 STRIP_SIGN_NOPS (arg0);
6170 STRIP_SIGN_NOPS (arg1);
6172 if (TREE_CODE (arg1) == INTEGER_CST)
6174 if (TREE_CODE (arg0) == INTEGER_CST)
6177 if (TREE_CODE (arg1) == REAL_CST)
6179 if (TREE_CODE (arg0) == REAL_CST)
6182 if (TREE_CODE (arg1) == COMPLEX_CST)
6184 if (TREE_CODE (arg0) == COMPLEX_CST)
6187 if (TREE_CONSTANT (arg1))
6189 if (TREE_CONSTANT (arg0))
6195 if (reorder && flag_evaluation_order
6196 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6204 /* It is preferable to swap two SSA_NAME to ensure a canonical form
6205 for commutative and comparison operators. Ensuring a canonical
6206 form allows the optimizers to find additional redundancies without
6207 having to explicitly check for both orderings. */
6208 if (TREE_CODE (arg0) == SSA_NAME
6209 && TREE_CODE (arg1) == SSA_NAME
6210 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6216 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6217 ARG0 is extended to a wider type. */
6220 fold_widened_comparison (enum tree_code code, tree type, tree arg0, tree arg1)
6222 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6224 tree shorter_type, outer_type;
6228 if (arg0_unw == arg0)
6230 shorter_type = TREE_TYPE (arg0_unw);
6232 #ifdef HAVE_canonicalize_funcptr_for_compare
6233 /* Disable this optimization if we're casting a function pointer
6234 type on targets that require function pointer canonicalization. */
6235 if (HAVE_canonicalize_funcptr_for_compare
6236 && TREE_CODE (shorter_type) == POINTER_TYPE
6237 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6241 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6244 arg1_unw = get_unwidened (arg1, shorter_type);
6248 /* If possible, express the comparison in the shorter mode. */
6249 if ((code == EQ_EXPR || code == NE_EXPR
6250 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6251 && (TREE_TYPE (arg1_unw) == shorter_type
6252 || (TREE_CODE (arg1_unw) == INTEGER_CST
6253 && (TREE_CODE (shorter_type) == INTEGER_TYPE
6254 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
6255 && int_fits_type_p (arg1_unw, shorter_type))))
6256 return fold_build2 (code, type, arg0_unw,
6257 fold_convert (shorter_type, arg1_unw));
6259 if (TREE_CODE (arg1_unw) != INTEGER_CST)
6262 /* If we are comparing with the integer that does not fit into the range
6263 of the shorter type, the result is known. */
6264 outer_type = TREE_TYPE (arg1_unw);
6265 min = lower_bound_in_type (outer_type, shorter_type);
6266 max = upper_bound_in_type (outer_type, shorter_type);
6268 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6270 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6277 return omit_one_operand (type, integer_zero_node, arg0);
6282 return omit_one_operand (type, integer_one_node, arg0);
6288 return omit_one_operand (type, integer_one_node, arg0);
6290 return omit_one_operand (type, integer_zero_node, arg0);
6295 return omit_one_operand (type, integer_zero_node, arg0);
6297 return omit_one_operand (type, integer_one_node, arg0);
6306 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
6307 ARG0 just the signedness is changed. */
6310 fold_sign_changed_comparison (enum tree_code code, tree type,
6311 tree arg0, tree arg1)
6313 tree arg0_inner, tmp;
6314 tree inner_type, outer_type;
6316 if (TREE_CODE (arg0) != NOP_EXPR
6317 && TREE_CODE (arg0) != CONVERT_EXPR)
6320 outer_type = TREE_TYPE (arg0);
6321 arg0_inner = TREE_OPERAND (arg0, 0);
6322 inner_type = TREE_TYPE (arg0_inner);
6324 #ifdef HAVE_canonicalize_funcptr_for_compare
6325 /* Disable this optimization if we're casting a function pointer
6326 type on targets that require function pointer canonicalization. */
6327 if (HAVE_canonicalize_funcptr_for_compare
6328 && TREE_CODE (inner_type) == POINTER_TYPE
6329 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
6333 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
6336 if (TREE_CODE (arg1) != INTEGER_CST
6337 && !((TREE_CODE (arg1) == NOP_EXPR
6338 || TREE_CODE (arg1) == CONVERT_EXPR)
6339 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
6342 if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
6347 if (TREE_CODE (arg1) == INTEGER_CST)
6349 tmp = build_int_cst_wide (inner_type,
6350 TREE_INT_CST_LOW (arg1),
6351 TREE_INT_CST_HIGH (arg1));
6352 arg1 = force_fit_type (tmp, 0,
6353 TREE_OVERFLOW (arg1),
6354 TREE_CONSTANT_OVERFLOW (arg1));
6357 arg1 = fold_convert (inner_type, arg1);
6359 return fold_build2 (code, type, arg0_inner, arg1);
6362 /* Tries to replace &a[idx] CODE s * delta with &a[idx CODE delta], if s is
6363 step of the array. Reconstructs s and delta in the case of s * delta
6364 being an integer constant (and thus already folded).
6365 ADDR is the address. MULT is the multiplicative expression.
6366 If the function succeeds, the new address expression is returned. Otherwise
6367 NULL_TREE is returned. */
6370 try_move_mult_to_index (enum tree_code code, tree addr, tree op1)
6372 tree s, delta, step;
6373 tree ref = TREE_OPERAND (addr, 0), pref;
6377 /* Canonicalize op1 into a possibly non-constant delta
6378 and an INTEGER_CST s. */
6379 if (TREE_CODE (op1) == MULT_EXPR)
6381 tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);
6386 if (TREE_CODE (arg0) == INTEGER_CST)
6391 else if (TREE_CODE (arg1) == INTEGER_CST)
6399 else if (TREE_CODE (op1) == INTEGER_CST)
6406 /* Simulate we are delta * 1. */
6408 s = integer_one_node;
6411 for (;; ref = TREE_OPERAND (ref, 0))
6413 if (TREE_CODE (ref) == ARRAY_REF)
6415 itype = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
6419 step = array_ref_element_size (ref);
6420 if (TREE_CODE (step) != INTEGER_CST)
6425 if (! tree_int_cst_equal (step, s))
6430 /* Try if delta is a multiple of step. */
6431 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, delta, step);
6440 if (!handled_component_p (ref))
6444 /* We found the suitable array reference. So copy everything up to it,
6445 and replace the index. */
6447 pref = TREE_OPERAND (addr, 0);
6448 ret = copy_node (pref);
6453 pref = TREE_OPERAND (pref, 0);
6454 TREE_OPERAND (pos, 0) = copy_node (pref);
6455 pos = TREE_OPERAND (pos, 0);
6458 TREE_OPERAND (pos, 1) = fold_build2 (code, itype,
6459 fold_convert (itype,
6460 TREE_OPERAND (pos, 1)),
6461 fold_convert (itype, delta));
6463 return build1 (ADDR_EXPR, TREE_TYPE (addr), ret);
6467 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6468 means A >= Y && A != MAX, but in this case we know that
6469 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
6472 fold_to_nonsharp_ineq_using_bound (tree ineq, tree bound)
6474 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
6476 if (TREE_CODE (bound) == LT_EXPR)
6477 a = TREE_OPERAND (bound, 0);
6478 else if (TREE_CODE (bound) == GT_EXPR)
6479 a = TREE_OPERAND (bound, 1);
6483 typea = TREE_TYPE (a);
6484 if (!INTEGRAL_TYPE_P (typea)
6485 && !POINTER_TYPE_P (typea))
6488 if (TREE_CODE (ineq) == LT_EXPR)
6490 a1 = TREE_OPERAND (ineq, 1);
6491 y = TREE_OPERAND (ineq, 0);
6493 else if (TREE_CODE (ineq) == GT_EXPR)
6495 a1 = TREE_OPERAND (ineq, 0);
6496 y = TREE_OPERAND (ineq, 1);
6501 if (TREE_TYPE (a1) != typea)
6504 diff = fold_build2 (MINUS_EXPR, typea, a1, a);
6505 if (!integer_onep (diff))
6508 return fold_build2 (GE_EXPR, type, a, y);
6511 /* Fold a unary expression of code CODE and type TYPE with operand
6512 OP0. Return the folded expression if folding is successful.
6513 Otherwise, return NULL_TREE. */
6516 fold_unary (enum tree_code code, tree type, tree op0)
6520 enum tree_code_class kind = TREE_CODE_CLASS (code);
6522 gcc_assert (IS_EXPR_CODE_CLASS (kind)
6523 && TREE_CODE_LENGTH (code) == 1);
6528 if (code == NOP_EXPR || code == FLOAT_EXPR || code == CONVERT_EXPR)
6530 /* Don't use STRIP_NOPS, because signedness of argument type matters. */
6531 STRIP_SIGN_NOPS (arg0);
6535 /* Strip any conversions that don't change the mode. This
6536 is safe for every expression, except for a comparison
6537 expression because its signedness is derived from its
6540 Note that this is done as an internal manipulation within
6541 the constant folder, in order to find the simplest
6542 representation of the arguments so that their form can be
6543 studied. In any cases, the appropriate type conversions
6544 should be put back in the tree that will get out of the
6550 if (TREE_CODE_CLASS (code) == tcc_unary)
6552 if (TREE_CODE (arg0) == COMPOUND_EXPR)
6553 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
6554 fold_build1 (code, type, TREE_OPERAND (arg0, 1)));
6555 else if (TREE_CODE (arg0) == COND_EXPR)
6557 tree arg01 = TREE_OPERAND (arg0, 1);
6558 tree arg02 = TREE_OPERAND (arg0, 2);
6559 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
6560 arg01 = fold_build1 (code, type, arg01);
6561 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
6562 arg02 = fold_build1 (code, type, arg02);
6563 tem = fold_build3 (COND_EXPR, type, TREE_OPERAND (arg0, 0),
6566 /* If this was a conversion, and all we did was to move into
6567 inside the COND_EXPR, bring it back out. But leave it if
6568 it is a conversion from integer to integer and the
6569 result precision is no wider than a word since such a
6570 conversion is cheap and may be optimized away by combine,
6571 while it couldn't if it were outside the COND_EXPR. Then return
6572 so we don't get into an infinite recursion loop taking the
6573 conversion out and then back in. */
6575 if ((code == NOP_EXPR || code == CONVERT_EXPR
6576 || code == NON_LVALUE_EXPR)
6577 && TREE_CODE (tem) == COND_EXPR
6578 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
6579 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
6580 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
6581 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
6582 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
6583 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
6584 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
6586 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
6587 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
6588 || flag_syntax_only))
6589 tem = build1 (code, type,
6591 TREE_TYPE (TREE_OPERAND
6592 (TREE_OPERAND (tem, 1), 0)),
6593 TREE_OPERAND (tem, 0),
6594 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
6595 TREE_OPERAND (TREE_OPERAND (tem, 2), 0)));
6598 else if (COMPARISON_CLASS_P (arg0))
6600 if (TREE_CODE (type) == BOOLEAN_TYPE)
6602 arg0 = copy_node (arg0);
6603 TREE_TYPE (arg0) = type;
6606 else if (TREE_CODE (type) != INTEGER_TYPE)
6607 return fold_build3 (COND_EXPR, type, arg0,
6608 fold_build1 (code, type,
6610 fold_build1 (code, type,
6611 integer_zero_node));
6620 case FIX_TRUNC_EXPR:
6622 case FIX_FLOOR_EXPR:
6623 case FIX_ROUND_EXPR:
6624 if (TREE_TYPE (op0) == type)
6627 /* Handle cases of two conversions in a row. */
6628 if (TREE_CODE (op0) == NOP_EXPR
6629 || TREE_CODE (op0) == CONVERT_EXPR)
6631 tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
6632 tree inter_type = TREE_TYPE (op0);
6633 int inside_int = INTEGRAL_TYPE_P (inside_type);
6634 int inside_ptr = POINTER_TYPE_P (inside_type);
6635 int inside_float = FLOAT_TYPE_P (inside_type);
6636 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
6637 unsigned int inside_prec = TYPE_PRECISION (inside_type);
6638 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
6639 int inter_int = INTEGRAL_TYPE_P (inter_type);
6640 int inter_ptr = POINTER_TYPE_P (inter_type);
6641 int inter_float = FLOAT_TYPE_P (inter_type);
6642 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
6643 unsigned int inter_prec = TYPE_PRECISION (inter_type);
6644 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
6645 int final_int = INTEGRAL_TYPE_P (type);
6646 int final_ptr = POINTER_TYPE_P (type);
6647 int final_float = FLOAT_TYPE_P (type);
6648 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
6649 unsigned int final_prec = TYPE_PRECISION (type);
6650 int final_unsignedp = TYPE_UNSIGNED (type);
6652 /* In addition to the cases of two conversions in a row
6653 handled below, if we are converting something to its own
6654 type via an object of identical or wider precision, neither
6655 conversion is needed. */
6656 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
6657 && ((inter_int && final_int) || (inter_float && final_float))
6658 && inter_prec >= final_prec)
6659 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
6661 /* Likewise, if the intermediate and final types are either both
6662 float or both integer, we don't need the middle conversion if
6663 it is wider than the final type and doesn't change the signedness
6664 (for integers). Avoid this if the final type is a pointer
6665 since then we sometimes need the inner conversion. Likewise if
6666 the outer has a precision not equal to the size of its mode. */
6667 if ((((inter_int || inter_ptr) && (inside_int || inside_ptr))
6668 || (inter_float && inside_float)
6669 || (inter_vec && inside_vec))
6670 && inter_prec >= inside_prec
6671 && (inter_float || inter_vec
6672 || inter_unsignedp == inside_unsignedp)
6673 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
6674 && TYPE_MODE (type) == TYPE_MODE (inter_type))
6676 && (! final_vec || inter_prec == inside_prec))
6677 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
6679 /* If we have a sign-extension of a zero-extended value, we can
6680 replace that by a single zero-extension. */
6681 if (inside_int && inter_int && final_int
6682 && inside_prec < inter_prec && inter_prec < final_prec
6683 && inside_unsignedp && !inter_unsignedp)
6684 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
6686 /* Two conversions in a row are not needed unless:
6687 - some conversion is floating-point (overstrict for now), or
6688 - some conversion is a vector (overstrict for now), or
6689 - the intermediate type is narrower than both initial and
6691 - the intermediate type and innermost type differ in signedness,
6692 and the outermost type is wider than the intermediate, or
6693 - the initial type is a pointer type and the precisions of the
6694 intermediate and final types differ, or
6695 - the final type is a pointer type and the precisions of the
6696 initial and intermediate types differ. */
6697 if (! inside_float && ! inter_float && ! final_float
6698 && ! inside_vec && ! inter_vec && ! final_vec
6699 && (inter_prec > inside_prec || inter_prec > final_prec)
6700 && ! (inside_int && inter_int
6701 && inter_unsignedp != inside_unsignedp
6702 && inter_prec < final_prec)
6703 && ((inter_unsignedp && inter_prec > inside_prec)
6704 == (final_unsignedp && final_prec > inter_prec))
6705 && ! (inside_ptr && inter_prec != final_prec)
6706 && ! (final_ptr && inside_prec != inter_prec)
6707 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
6708 && TYPE_MODE (type) == TYPE_MODE (inter_type))
6710 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
6713 if (TREE_CODE (op0) == MODIFY_EXPR
6714 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
6715 /* Detect assigning a bitfield. */
6716 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
6717 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
6719 /* Don't leave an assignment inside a conversion
6720 unless assigning a bitfield. */
6721 tem = fold_build1 (code, type, TREE_OPERAND (op0, 1));
6722 /* First do the assignment, then return converted constant. */
6723 tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
6724 TREE_NO_WARNING (tem) = 1;
6725 TREE_USED (tem) = 1;
6729 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
6730 constants (if x has signed type, the sign bit cannot be set
6731 in c). This folds extension into the BIT_AND_EXPR. */
6732 if (INTEGRAL_TYPE_P (type)
6733 && TREE_CODE (type) != BOOLEAN_TYPE
6734 && TREE_CODE (op0) == BIT_AND_EXPR
6735 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
6738 tree and0 = TREE_OPERAND (and, 0), and1 = TREE_OPERAND (and, 1);
6741 if (TYPE_UNSIGNED (TREE_TYPE (and))
6742 || (TYPE_PRECISION (type)
6743 <= TYPE_PRECISION (TREE_TYPE (and))))
6745 else if (TYPE_PRECISION (TREE_TYPE (and1))
6746 <= HOST_BITS_PER_WIDE_INT
6747 && host_integerp (and1, 1))
6749 unsigned HOST_WIDE_INT cst;
6751 cst = tree_low_cst (and1, 1);
6752 cst &= (HOST_WIDE_INT) -1
6753 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
6754 change = (cst == 0);
6755 #ifdef LOAD_EXTEND_OP
6757 && !flag_syntax_only
6758 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
6761 tree uns = lang_hooks.types.unsigned_type (TREE_TYPE (and0));
6762 and0 = fold_convert (uns, and0);
6763 and1 = fold_convert (uns, and1);
6769 tem = build_int_cst_wide (type, TREE_INT_CST_LOW (and1),
6770 TREE_INT_CST_HIGH (and1));
6771 tem = force_fit_type (tem, 0, TREE_OVERFLOW (and1),
6772 TREE_CONSTANT_OVERFLOW (and1));
6773 return fold_build2 (BIT_AND_EXPR, type,
6774 fold_convert (type, and0), tem);
6778 /* Convert (T1)((T2)X op Y) into (T1)X op Y, for pointer types T1 and
6779 T2 being pointers to types of the same size. */
6780 if (POINTER_TYPE_P (type)
6781 && BINARY_CLASS_P (arg0)
6782 && TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
6783 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 0))))
6785 tree arg00 = TREE_OPERAND (arg0, 0);
6787 tree t1 = TREE_TYPE (arg00);
6788 tree tt0 = TREE_TYPE (t0);
6789 tree tt1 = TREE_TYPE (t1);
6790 tree s0 = TYPE_SIZE (tt0);
6791 tree s1 = TYPE_SIZE (tt1);
6793 if (s0 && s1 && operand_equal_p (s0, s1, OEP_ONLY_CONST))
6794 return build2 (TREE_CODE (arg0), t0, fold_convert (t0, arg00),
6795 TREE_OPERAND (arg0, 1));
6798 tem = fold_convert_const (code, type, arg0);
6799 return tem ? tem : NULL_TREE;
6801 case VIEW_CONVERT_EXPR:
6802 if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
6803 return build1 (VIEW_CONVERT_EXPR, type, TREE_OPERAND (op0, 0));
6807 if (negate_expr_p (arg0))
6808 return fold_convert (type, negate_expr (arg0));
6809 /* Convert - (~A) to A + 1. */
6810 if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == BIT_NOT_EXPR)
6811 return fold_build2 (PLUS_EXPR, type, TREE_OPERAND (arg0, 0),
6812 build_int_cst (type, 1));
6816 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
6817 return fold_abs_const (arg0, type);
6818 else if (TREE_CODE (arg0) == NEGATE_EXPR)
6819 return fold_build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0));
6820 /* Convert fabs((double)float) into (double)fabsf(float). */
6821 else if (TREE_CODE (arg0) == NOP_EXPR
6822 && TREE_CODE (type) == REAL_TYPE)
6824 tree targ0 = strip_float_extensions (arg0);
6826 return fold_convert (type, fold_build1 (ABS_EXPR,
6830 else if (tree_expr_nonnegative_p (arg0))
6833 /* Strip sign ops from argument. */
6834 if (TREE_CODE (type) == REAL_TYPE)
6836 tem = fold_strip_sign_ops (arg0);
6838 return fold_build1 (ABS_EXPR, type, fold_convert (type, tem));
6843 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
6844 return fold_convert (type, arg0);
6845 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
6846 return build2 (COMPLEX_EXPR, type,
6847 TREE_OPERAND (arg0, 0),
6848 negate_expr (TREE_OPERAND (arg0, 1)));
6849 else if (TREE_CODE (arg0) == COMPLEX_CST)
6850 return build_complex (type, TREE_REALPART (arg0),
6851 negate_expr (TREE_IMAGPART (arg0)));
6852 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
6853 return fold_build2 (TREE_CODE (arg0), type,
6854 fold_build1 (CONJ_EXPR, type,
6855 TREE_OPERAND (arg0, 0)),
6856 fold_build1 (CONJ_EXPR, type,
6857 TREE_OPERAND (arg0, 1)));
6858 else if (TREE_CODE (arg0) == CONJ_EXPR)
6859 return TREE_OPERAND (arg0, 0);
6863 if (TREE_CODE (arg0) == INTEGER_CST)
6864 return fold_not_const (arg0, type);
6865 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
6866 return TREE_OPERAND (arg0, 0);
6867 /* Convert ~ (-A) to A - 1. */
6868 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
6869 return fold_build2 (MINUS_EXPR, type, TREE_OPERAND (arg0, 0),
6870 build_int_cst (type, 1));
6871 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
6872 else if (INTEGRAL_TYPE_P (type)
6873 && ((TREE_CODE (arg0) == MINUS_EXPR
6874 && integer_onep (TREE_OPERAND (arg0, 1)))
6875 || (TREE_CODE (arg0) == PLUS_EXPR
6876 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
6877 return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));
6878 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
6879 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
6880 && (tem = fold_unary (BIT_NOT_EXPR, type,
6882 TREE_OPERAND (arg0, 0)))))
6883 return fold_build2 (BIT_XOR_EXPR, type, tem,
6884 fold_convert (type, TREE_OPERAND (arg0, 1)));
6885 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
6886 && (tem = fold_unary (BIT_NOT_EXPR, type,
6888 TREE_OPERAND (arg0, 1)))))
6889 return fold_build2 (BIT_XOR_EXPR, type,
6890 fold_convert (type, TREE_OPERAND (arg0, 0)), tem);
6894 case TRUTH_NOT_EXPR:
6895 /* The argument to invert_truthvalue must have Boolean type. */
6896 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
6897 arg0 = fold_convert (boolean_type_node, arg0);
6899 /* Note that the operand of this must be an int
6900 and its values must be 0 or 1.
6901 ("true" is a fixed value perhaps depending on the language,
6902 but we don't handle values other than 1 correctly yet.) */
6903 tem = invert_truthvalue (arg0);
6904 /* Avoid infinite recursion. */
6905 if (TREE_CODE (tem) == TRUTH_NOT_EXPR)
6907 return fold_convert (type, tem);
6910 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
6912 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
6913 return omit_one_operand (type, TREE_OPERAND (arg0, 0),
6914 TREE_OPERAND (arg0, 1));
6915 else if (TREE_CODE (arg0) == COMPLEX_CST)
6916 return TREE_REALPART (arg0);
6917 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
6918 return fold_build2 (TREE_CODE (arg0), type,
6919 fold_build1 (REALPART_EXPR, type,
6920 TREE_OPERAND (arg0, 0)),
6921 fold_build1 (REALPART_EXPR, type,
6922 TREE_OPERAND (arg0, 1)));
6926 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
6927 return fold_convert (type, integer_zero_node);
6928 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
6929 return omit_one_operand (type, TREE_OPERAND (arg0, 1),
6930 TREE_OPERAND (arg0, 0));
6931 else if (TREE_CODE (arg0) == COMPLEX_CST)
6932 return TREE_IMAGPART (arg0);
6933 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
6934 return fold_build2 (TREE_CODE (arg0), type,
6935 fold_build1 (IMAGPART_EXPR, type,
6936 TREE_OPERAND (arg0, 0)),
6937 fold_build1 (IMAGPART_EXPR, type,
6938 TREE_OPERAND (arg0, 1)));
6943 } /* switch (code) */
6946 /* Fold a binary expression of code CODE and type TYPE with operands
6947 OP0 and OP1. Return the folded expression if folding is
6948 successful. Otherwise, return NULL_TREE. */
6951 fold_binary (enum tree_code code, tree type, tree op0, tree op1)
6953 tree t1 = NULL_TREE;
6955 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
6956 enum tree_code_class kind = TREE_CODE_CLASS (code);
6958 /* WINS will be nonzero when the switch is done
6959 if all operands are constant. */
6962 gcc_assert (IS_EXPR_CODE_CLASS (kind)
6963 && TREE_CODE_LENGTH (code) == 2);
6972 /* Strip any conversions that don't change the mode. This is
6973 safe for every expression, except for a comparison expression
6974 because its signedness is derived from its operands. So, in
6975 the latter case, only strip conversions that don't change the
6978 Note that this is done as an internal manipulation within the
6979 constant folder, in order to find the simplest representation
6980 of the arguments so that their form can be studied. In any
6981 cases, the appropriate type conversions should be put back in
6982 the tree that will get out of the constant folder. */
6983 if (kind == tcc_comparison)
6984 STRIP_SIGN_NOPS (arg0);
6988 if (TREE_CODE (arg0) == COMPLEX_CST)
6989 subop = TREE_REALPART (arg0);
6993 if (TREE_CODE (subop) != INTEGER_CST
6994 && TREE_CODE (subop) != REAL_CST)
6995 /* Note that TREE_CONSTANT isn't enough:
6996 static var addresses are constant but we can't
6997 do arithmetic on them. */
7005 /* Strip any conversions that don't change the mode. This is
7006 safe for every expression, except for a comparison expression
7007 because its signedness is derived from its operands. So, in
7008 the latter case, only strip conversions that don't change the
7011 Note that this is done as an internal manipulation within the
7012 constant folder, in order to find the simplest representation
7013 of the arguments so that their form can be studied. In any
7014 cases, the appropriate type conversions should be put back in
7015 the tree that will get out of the constant folder. */
7016 if (kind == tcc_comparison)
7017 STRIP_SIGN_NOPS (arg1);
7021 if (TREE_CODE (arg1) == COMPLEX_CST)
7022 subop = TREE_REALPART (arg1);
7026 if (TREE_CODE (subop) != INTEGER_CST
7027 && TREE_CODE (subop) != REAL_CST)
7028 /* Note that TREE_CONSTANT isn't enough:
7029 static var addresses are constant but we can't
7030 do arithmetic on them. */
7034 /* If this is a commutative operation, and ARG0 is a constant, move it
7035 to ARG1 to reduce the number of tests below. */
7036 if (commutative_tree_code (code)
7037 && tree_swap_operands_p (arg0, arg1, true))
7038 return fold_build2 (code, type, op1, op0);
7040 /* Now WINS is set as described above,
7041 ARG0 is the first operand of EXPR,
7042 and ARG1 is the second operand (if it has more than one operand).
7044 First check for cases where an arithmetic operation is applied to a
7045 compound, conditional, or comparison operation. Push the arithmetic
7046 operation inside the compound or conditional to see if any folding
7047 can then be done. Convert comparison to conditional for this purpose.
7048 The also optimizes non-constant cases that used to be done in
7051 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
7052 one of the operands is a comparison and the other is a comparison, a
7053 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
7054 code below would make the expression more complex. Change it to a
7055 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
7056 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
7058 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
7059 || code == EQ_EXPR || code == NE_EXPR)
7060 && ((truth_value_p (TREE_CODE (arg0))
7061 && (truth_value_p (TREE_CODE (arg1))
7062 || (TREE_CODE (arg1) == BIT_AND_EXPR
7063 && integer_onep (TREE_OPERAND (arg1, 1)))))
7064 || (truth_value_p (TREE_CODE (arg1))
7065 && (truth_value_p (TREE_CODE (arg0))
7066 || (TREE_CODE (arg0) == BIT_AND_EXPR
7067 && integer_onep (TREE_OPERAND (arg0, 1)))))))
7069 tem = fold_build2 (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
7070 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
7073 fold_convert (boolean_type_node, arg0),
7074 fold_convert (boolean_type_node, arg1));
7076 if (code == EQ_EXPR)
7077 tem = invert_truthvalue (tem);
7079 return fold_convert (type, tem);
7082 if (TREE_CODE_CLASS (code) == tcc_comparison
7083 && TREE_CODE (arg0) == COMPOUND_EXPR)
7084 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7085 fold_build2 (code, type, TREE_OPERAND (arg0, 1), arg1));
7086 else if (TREE_CODE_CLASS (code) == tcc_comparison
7087 && TREE_CODE (arg1) == COMPOUND_EXPR)
7088 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
7089 fold_build2 (code, type, arg0, TREE_OPERAND (arg1, 1)));
7090 else if (TREE_CODE_CLASS (code) == tcc_binary
7091 || TREE_CODE_CLASS (code) == tcc_comparison)
7093 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7094 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7095 fold_build2 (code, type, TREE_OPERAND (arg0, 1),
7097 if (TREE_CODE (arg1) == COMPOUND_EXPR
7098 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
7099 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
7100 fold_build2 (code, type,
7101 arg0, TREE_OPERAND (arg1, 1)));
7103 if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
7105 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
7107 /*cond_first_p=*/1);
7108 if (tem != NULL_TREE)
7112 if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1))
7114 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
7116 /*cond_first_p=*/0);
7117 if (tem != NULL_TREE)
7125 /* A + (-B) -> A - B */
7126 if (TREE_CODE (arg1) == NEGATE_EXPR)
7127 return fold_build2 (MINUS_EXPR, type,
7128 fold_convert (type, arg0),
7129 fold_convert (type, TREE_OPERAND (arg1, 0)));
7130 /* (-A) + B -> B - A */
7131 if (TREE_CODE (arg0) == NEGATE_EXPR
7132 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
7133 return fold_build2 (MINUS_EXPR, type,
7134 fold_convert (type, arg1),
7135 fold_convert (type, TREE_OPERAND (arg0, 0)));
7136 /* Convert ~A + 1 to -A. */
7137 if (INTEGRAL_TYPE_P (type)
7138 && TREE_CODE (arg0) == BIT_NOT_EXPR
7139 && integer_onep (arg1))
7140 return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));
7142 if (! FLOAT_TYPE_P (type))
7144 if (integer_zerop (arg1))
7145 return non_lvalue (fold_convert (type, arg0));
7147 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
7148 with a constant, and the two constants have no bits in common,
7149 we should treat this as a BIT_IOR_EXPR since this may produce more
7151 if (TREE_CODE (arg0) == BIT_AND_EXPR
7152 && TREE_CODE (arg1) == BIT_AND_EXPR
7153 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7154 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
7155 && integer_zerop (const_binop (BIT_AND_EXPR,
7156 TREE_OPERAND (arg0, 1),
7157 TREE_OPERAND (arg1, 1), 0)))
7159 code = BIT_IOR_EXPR;
7163 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
7164 (plus (plus (mult) (mult)) (foo)) so that we can
7165 take advantage of the factoring cases below. */
7166 if (((TREE_CODE (arg0) == PLUS_EXPR
7167 || TREE_CODE (arg0) == MINUS_EXPR)
7168 && TREE_CODE (arg1) == MULT_EXPR)
7169 || ((TREE_CODE (arg1) == PLUS_EXPR
7170 || TREE_CODE (arg1) == MINUS_EXPR)
7171 && TREE_CODE (arg0) == MULT_EXPR))
7173 tree parg0, parg1, parg, marg;
7174 enum tree_code pcode;
7176 if (TREE_CODE (arg1) == MULT_EXPR)
7177 parg = arg0, marg = arg1;
7179 parg = arg1, marg = arg0;
7180 pcode = TREE_CODE (parg);
7181 parg0 = TREE_OPERAND (parg, 0);
7182 parg1 = TREE_OPERAND (parg, 1);
7186 if (TREE_CODE (parg0) == MULT_EXPR
7187 && TREE_CODE (parg1) != MULT_EXPR)
7188 return fold_build2 (pcode, type,
7189 fold_build2 (PLUS_EXPR, type,
7190 fold_convert (type, parg0),
7191 fold_convert (type, marg)),
7192 fold_convert (type, parg1));
7193 if (TREE_CODE (parg0) != MULT_EXPR
7194 && TREE_CODE (parg1) == MULT_EXPR)
7195 return fold_build2 (PLUS_EXPR, type,
7196 fold_convert (type, parg0),
7197 fold_build2 (pcode, type,
7198 fold_convert (type, marg),
7203 if (TREE_CODE (arg0) == MULT_EXPR && TREE_CODE (arg1) == MULT_EXPR)
7205 tree arg00, arg01, arg10, arg11;
7206 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
7208 /* (A * C) + (B * C) -> (A+B) * C.
7209 We are most concerned about the case where C is a constant,
7210 but other combinations show up during loop reduction. Since
7211 it is not difficult, try all four possibilities. */
7213 arg00 = TREE_OPERAND (arg0, 0);
7214 arg01 = TREE_OPERAND (arg0, 1);
7215 arg10 = TREE_OPERAND (arg1, 0);
7216 arg11 = TREE_OPERAND (arg1, 1);
7219 if (operand_equal_p (arg01, arg11, 0))
7220 same = arg01, alt0 = arg00, alt1 = arg10;
7221 else if (operand_equal_p (arg00, arg10, 0))
7222 same = arg00, alt0 = arg01, alt1 = arg11;
7223 else if (operand_equal_p (arg00, arg11, 0))
7224 same = arg00, alt0 = arg01, alt1 = arg10;
7225 else if (operand_equal_p (arg01, arg10, 0))
7226 same = arg01, alt0 = arg00, alt1 = arg11;
7228 /* No identical multiplicands; see if we can find a common
7229 power-of-two factor in non-power-of-two multiplies. This
7230 can help in multi-dimensional array access. */
7231 else if (TREE_CODE (arg01) == INTEGER_CST
7232 && TREE_CODE (arg11) == INTEGER_CST
7233 && TREE_INT_CST_HIGH (arg01) == 0
7234 && TREE_INT_CST_HIGH (arg11) == 0)
7236 HOST_WIDE_INT int01, int11, tmp;
7237 int01 = TREE_INT_CST_LOW (arg01);
7238 int11 = TREE_INT_CST_LOW (arg11);
7240 /* Move min of absolute values to int11. */
7241 if ((int01 >= 0 ? int01 : -int01)
7242 < (int11 >= 0 ? int11 : -int11))
7244 tmp = int01, int01 = int11, int11 = tmp;
7245 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7246 alt0 = arg01, arg01 = arg11, arg11 = alt0;
7249 if (exact_log2 (int11) > 0 && int01 % int11 == 0)
7251 alt0 = fold_build2 (MULT_EXPR, type, arg00,
7252 build_int_cst (NULL_TREE,
7260 return fold_build2 (MULT_EXPR, type,
7261 fold_build2 (PLUS_EXPR, type,
7262 fold_convert (type, alt0),
7263 fold_convert (type, alt1)),
7264 fold_convert (type, same));
7267 /* Try replacing &a[i1] + c * i2 with &a[i1 + i2], if c is step
7268 of the array. Loop optimizer sometimes produce this type of
7270 if (TREE_CODE (arg0) == ADDR_EXPR)
7272 tem = try_move_mult_to_index (PLUS_EXPR, arg0, arg1);
7274 return fold_convert (type, fold (tem));
7276 else if (TREE_CODE (arg1) == ADDR_EXPR)
7278 tem = try_move_mult_to_index (PLUS_EXPR, arg1, arg0);
7280 return fold_convert (type, fold (tem));
7285 /* See if ARG1 is zero and X + ARG1 reduces to X. */
7286 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
7287 return non_lvalue (fold_convert (type, arg0));
7289 /* Likewise if the operands are reversed. */
7290 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
7291 return non_lvalue (fold_convert (type, arg1));
7293 /* Convert X + -C into X - C. */
7294 if (TREE_CODE (arg1) == REAL_CST
7295 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
7297 tem = fold_negate_const (arg1, type);
7298 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
7299 return fold_build2 (MINUS_EXPR, type,
7300 fold_convert (type, arg0),
7301 fold_convert (type, tem));
7304 if (flag_unsafe_math_optimizations
7305 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
7306 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
7307 && (tem = distribute_real_division (code, type, arg0, arg1)))
7310 /* Convert x+x into x*2.0. */
7311 if (operand_equal_p (arg0, arg1, 0)
7312 && SCALAR_FLOAT_TYPE_P (type))
7313 return fold_build2 (MULT_EXPR, type, arg0,
7314 build_real (type, dconst2));
7316 /* Convert x*c+x into x*(c+1). */
7317 if (flag_unsafe_math_optimizations
7318 && TREE_CODE (arg0) == MULT_EXPR
7319 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
7320 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg0, 1))
7321 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
7325 c = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
7326 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
7327 return fold_build2 (MULT_EXPR, type, arg1,
7328 build_real (type, c));
7331 /* Convert x+x*c into x*(c+1). */
7332 if (flag_unsafe_math_optimizations
7333 && TREE_CODE (arg1) == MULT_EXPR
7334 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST
7335 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg1, 1))
7336 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
7340 c = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
7341 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
7342 return fold_build2 (MULT_EXPR, type, arg0,
7343 build_real (type, c));
7346 /* Convert x*c1+x*c2 into x*(c1+c2). */
7347 if (flag_unsafe_math_optimizations
7348 && TREE_CODE (arg0) == MULT_EXPR
7349 && TREE_CODE (arg1) == MULT_EXPR
7350 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
7351 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg0, 1))
7352 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST
7353 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg1, 1))
7354 && operand_equal_p (TREE_OPERAND (arg0, 0),
7355 TREE_OPERAND (arg1, 0), 0))
7357 REAL_VALUE_TYPE c1, c2;
7359 c1 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
7360 c2 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
7361 real_arithmetic (&c1, PLUS_EXPR, &c1, &c2);
7362 return fold_build2 (MULT_EXPR, type,
7363 TREE_OPERAND (arg0, 0),
7364 build_real (type, c1));
7366 /* Convert a + (b*c + d*e) into (a + b*c) + d*e. */
7367 if (flag_unsafe_math_optimizations
7368 && TREE_CODE (arg1) == PLUS_EXPR
7369 && TREE_CODE (arg0) != MULT_EXPR)
7371 tree tree10 = TREE_OPERAND (arg1, 0);
7372 tree tree11 = TREE_OPERAND (arg1, 1);
7373 if (TREE_CODE (tree11) == MULT_EXPR
7374 && TREE_CODE (tree10) == MULT_EXPR)
7377 tree0 = fold_build2 (PLUS_EXPR, type, arg0, tree10);
7378 return fold_build2 (PLUS_EXPR, type, tree0, tree11);
7381 /* Convert (b*c + d*e) + a into b*c + (d*e +a). */
7382 if (flag_unsafe_math_optimizations
7383 && TREE_CODE (arg0) == PLUS_EXPR
7384 && TREE_CODE (arg1) != MULT_EXPR)
7386 tree tree00 = TREE_OPERAND (arg0, 0);
7387 tree tree01 = TREE_OPERAND (arg0, 1);
7388 if (TREE_CODE (tree01) == MULT_EXPR
7389 && TREE_CODE (tree00) == MULT_EXPR)
7392 tree0 = fold_build2 (PLUS_EXPR, type, tree01, arg1);
7393 return fold_build2 (PLUS_EXPR, type, tree00, tree0);
7399 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
7400 is a rotate of A by C1 bits. */
7401 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
7402 is a rotate of A by B bits. */
7404 enum tree_code code0, code1;
7405 code0 = TREE_CODE (arg0);
7406 code1 = TREE_CODE (arg1);
7407 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
7408 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
7409 && operand_equal_p (TREE_OPERAND (arg0, 0),
7410 TREE_OPERAND (arg1, 0), 0)
7411 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
7413 tree tree01, tree11;
7414 enum tree_code code01, code11;
7416 tree01 = TREE_OPERAND (arg0, 1);
7417 tree11 = TREE_OPERAND (arg1, 1);
7418 STRIP_NOPS (tree01);
7419 STRIP_NOPS (tree11);
7420 code01 = TREE_CODE (tree01);
7421 code11 = TREE_CODE (tree11);
7422 if (code01 == INTEGER_CST
7423 && code11 == INTEGER_CST
7424 && TREE_INT_CST_HIGH (tree01) == 0
7425 && TREE_INT_CST_HIGH (tree11) == 0
7426 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
7427 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
7428 return build2 (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
7429 code0 == LSHIFT_EXPR ? tree01 : tree11);
7430 else if (code11 == MINUS_EXPR)
7432 tree tree110, tree111;
7433 tree110 = TREE_OPERAND (tree11, 0);
7434 tree111 = TREE_OPERAND (tree11, 1);
7435 STRIP_NOPS (tree110);
7436 STRIP_NOPS (tree111);
7437 if (TREE_CODE (tree110) == INTEGER_CST
7438 && 0 == compare_tree_int (tree110,
7440 (TREE_TYPE (TREE_OPERAND
7442 && operand_equal_p (tree01, tree111, 0))
7443 return build2 ((code0 == LSHIFT_EXPR
7446 type, TREE_OPERAND (arg0, 0), tree01);
7448 else if (code01 == MINUS_EXPR)
7450 tree tree010, tree011;
7451 tree010 = TREE_OPERAND (tree01, 0);
7452 tree011 = TREE_OPERAND (tree01, 1);
7453 STRIP_NOPS (tree010);
7454 STRIP_NOPS (tree011);
7455 if (TREE_CODE (tree010) == INTEGER_CST
7456 && 0 == compare_tree_int (tree010,
7458 (TREE_TYPE (TREE_OPERAND
7460 && operand_equal_p (tree11, tree011, 0))
7461 return build2 ((code0 != LSHIFT_EXPR
7464 type, TREE_OPERAND (arg0, 0), tree11);
7470 /* In most languages, can't associate operations on floats through
7471 parentheses. Rather than remember where the parentheses were, we
7472 don't associate floats at all, unless the user has specified
7473 -funsafe-math-optimizations. */
7476 && (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
7478 tree var0, con0, lit0, minus_lit0;
7479 tree var1, con1, lit1, minus_lit1;
7481 /* Split both trees into variables, constants, and literals. Then
7482 associate each group together, the constants with literals,
7483 then the result with variables. This increases the chances of
7484 literals being recombined later and of generating relocatable
7485 expressions for the sum of a constant and literal. */
7486 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
7487 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
7488 code == MINUS_EXPR);
7490 /* Only do something if we found more than two objects. Otherwise,
7491 nothing has changed and we risk infinite recursion. */
7492 if (2 < ((var0 != 0) + (var1 != 0)
7493 + (con0 != 0) + (con1 != 0)
7494 + (lit0 != 0) + (lit1 != 0)
7495 + (minus_lit0 != 0) + (minus_lit1 != 0)))
7497 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
7498 if (code == MINUS_EXPR)
7501 var0 = associate_trees (var0, var1, code, type);
7502 con0 = associate_trees (con0, con1, code, type);
7503 lit0 = associate_trees (lit0, lit1, code, type);
7504 minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type);
7506 /* Preserve the MINUS_EXPR if the negative part of the literal is
7507 greater than the positive part. Otherwise, the multiplicative
7508 folding code (i.e extract_muldiv) may be fooled in case
7509 unsigned constants are subtracted, like in the following
7510 example: ((X*2 + 4) - 8U)/2. */
7511 if (minus_lit0 && lit0)
7513 if (TREE_CODE (lit0) == INTEGER_CST
7514 && TREE_CODE (minus_lit0) == INTEGER_CST
7515 && tree_int_cst_lt (lit0, minus_lit0))
7517 minus_lit0 = associate_trees (minus_lit0, lit0,
7523 lit0 = associate_trees (lit0, minus_lit0,
7531 return fold_convert (type,
7532 associate_trees (var0, minus_lit0,
7536 con0 = associate_trees (con0, minus_lit0,
7538 return fold_convert (type,
7539 associate_trees (var0, con0,
7544 con0 = associate_trees (con0, lit0, code, type);
7545 return fold_convert (type, associate_trees (var0, con0,
7552 t1 = const_binop (code, arg0, arg1, 0);
7553 if (t1 != NULL_TREE)
7555 /* The return value should always have
7556 the same type as the original expression. */
7557 if (TREE_TYPE (t1) != type)
7558 t1 = fold_convert (type, t1);
7565 /* A - (-B) -> A + B */
7566 if (TREE_CODE (arg1) == NEGATE_EXPR)
7567 return fold_build2 (PLUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0));
7568 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
7569 if (TREE_CODE (arg0) == NEGATE_EXPR
7570 && (FLOAT_TYPE_P (type)
7571 || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv))
7572 && negate_expr_p (arg1)
7573 && reorder_operands_p (arg0, arg1))
7574 return fold_build2 (MINUS_EXPR, type, negate_expr (arg1),
7575 TREE_OPERAND (arg0, 0));
7576 /* Convert -A - 1 to ~A. */
7577 if (INTEGRAL_TYPE_P (type)
7578 && TREE_CODE (arg0) == NEGATE_EXPR
7579 && integer_onep (arg1))
7580 return fold_build1 (BIT_NOT_EXPR, type, TREE_OPERAND (arg0, 0));
7582 /* Convert -1 - A to ~A. */
7583 if (INTEGRAL_TYPE_P (type)
7584 && integer_all_onesp (arg0))
7585 return fold_build1 (BIT_NOT_EXPR, type, arg1);
7587 if (! FLOAT_TYPE_P (type))
7589 if (! wins && integer_zerop (arg0))
7590 return negate_expr (fold_convert (type, arg1));
7591 if (integer_zerop (arg1))
7592 return non_lvalue (fold_convert (type, arg0));
7594 /* Fold A - (A & B) into ~B & A. */
7595 if (!TREE_SIDE_EFFECTS (arg0)
7596 && TREE_CODE (arg1) == BIT_AND_EXPR)
7598 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
7599 return fold_build2 (BIT_AND_EXPR, type,
7600 fold_build1 (BIT_NOT_EXPR, type,
7601 TREE_OPERAND (arg1, 0)),
7603 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
7604 return fold_build2 (BIT_AND_EXPR, type,
7605 fold_build1 (BIT_NOT_EXPR, type,
7606 TREE_OPERAND (arg1, 1)),
7610 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
7611 any power of 2 minus 1. */
7612 if (TREE_CODE (arg0) == BIT_AND_EXPR
7613 && TREE_CODE (arg1) == BIT_AND_EXPR
7614 && operand_equal_p (TREE_OPERAND (arg0, 0),
7615 TREE_OPERAND (arg1, 0), 0))
7617 tree mask0 = TREE_OPERAND (arg0, 1);
7618 tree mask1 = TREE_OPERAND (arg1, 1);
7619 tree tem = fold_build1 (BIT_NOT_EXPR, type, mask0);
7621 if (operand_equal_p (tem, mask1, 0))
7623 tem = fold_build2 (BIT_XOR_EXPR, type,
7624 TREE_OPERAND (arg0, 0), mask1);
7625 return fold_build2 (MINUS_EXPR, type, tem, mask1);
7630 /* See if ARG1 is zero and X - ARG1 reduces to X. */
7631 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
7632 return non_lvalue (fold_convert (type, arg0));
7634 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
7635 ARG0 is zero and X + ARG0 reduces to X, since that would mean
7636 (-ARG1 + ARG0) reduces to -ARG1. */
7637 else if (!wins && fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
7638 return negate_expr (fold_convert (type, arg1));
7640 /* Fold &x - &x. This can happen from &x.foo - &x.
7641 This is unsafe for certain floats even in non-IEEE formats.
7642 In IEEE, it is unsafe because it does wrong for NaNs.
7643 Also note that operand_equal_p is always false if an operand
7646 if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
7647 && operand_equal_p (arg0, arg1, 0))
7648 return fold_convert (type, integer_zero_node);
7650 /* A - B -> A + (-B) if B is easily negatable. */
7651 if (!wins && negate_expr_p (arg1)
7652 && ((FLOAT_TYPE_P (type)
7653 /* Avoid this transformation if B is a positive REAL_CST. */
7654 && (TREE_CODE (arg1) != REAL_CST
7655 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
7656 || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv)))
7657 return fold_build2 (PLUS_EXPR, type,
7658 fold_convert (type, arg0),
7659 fold_convert (type, negate_expr (arg1)));
7661 /* Try folding difference of addresses. */
7665 if ((TREE_CODE (arg0) == ADDR_EXPR
7666 || TREE_CODE (arg1) == ADDR_EXPR)
7667 && ptr_difference_const (arg0, arg1, &diff))
7668 return build_int_cst_type (type, diff);
7671 /* Fold &a[i] - &a[j] to i-j. */
7672 if (TREE_CODE (arg0) == ADDR_EXPR
7673 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
7674 && TREE_CODE (arg1) == ADDR_EXPR
7675 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
7677 tree aref0 = TREE_OPERAND (arg0, 0);
7678 tree aref1 = TREE_OPERAND (arg1, 0);
7679 if (operand_equal_p (TREE_OPERAND (aref0, 0),
7680 TREE_OPERAND (aref1, 0), 0))
7682 tree op0 = fold_convert (type, TREE_OPERAND (aref0, 1));
7683 tree op1 = fold_convert (type, TREE_OPERAND (aref1, 1));
7684 tree esz = array_ref_element_size (aref0);
7685 tree diff = build2 (MINUS_EXPR, type, op0, op1);
7686 return fold_build2 (MULT_EXPR, type, diff,
7687 fold_convert (type, esz));
7692 /* Try replacing &a[i1] - c * i2 with &a[i1 - i2], if c is step
7693 of the array. Loop optimizer sometimes produce this type of
7695 if (TREE_CODE (arg0) == ADDR_EXPR)
7697 tem = try_move_mult_to_index (MINUS_EXPR, arg0, arg1);
7699 return fold_convert (type, fold (tem));
7702 if (flag_unsafe_math_optimizations
7703 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
7704 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
7705 && (tem = distribute_real_division (code, type, arg0, arg1)))
7708 if (TREE_CODE (arg0) == MULT_EXPR
7709 && TREE_CODE (arg1) == MULT_EXPR
7710 && (!FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
7712 /* (A * C) - (B * C) -> (A-B) * C. */
7713 if (operand_equal_p (TREE_OPERAND (arg0, 1),
7714 TREE_OPERAND (arg1, 1), 0))
7715 return fold_build2 (MULT_EXPR, type,
7716 fold_build2 (MINUS_EXPR, type,
7717 TREE_OPERAND (arg0, 0),
7718 TREE_OPERAND (arg1, 0)),
7719 TREE_OPERAND (arg0, 1));
7720 /* (A * C1) - (A * C2) -> A * (C1-C2). */
7721 if (operand_equal_p (TREE_OPERAND (arg0, 0),
7722 TREE_OPERAND (arg1, 0), 0))
7723 return fold_build2 (MULT_EXPR, type,
7724 TREE_OPERAND (arg0, 0),
7725 fold_build2 (MINUS_EXPR, type,
7726 TREE_OPERAND (arg0, 1),
7727 TREE_OPERAND (arg1, 1)));
7733 /* (-A) * (-B) -> A * B */
7734 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
7735 return fold_build2 (MULT_EXPR, type,
7736 TREE_OPERAND (arg0, 0),
7737 negate_expr (arg1));
7738 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
7739 return fold_build2 (MULT_EXPR, type,
7741 TREE_OPERAND (arg1, 0));
7743 if (! FLOAT_TYPE_P (type))
7745 if (integer_zerop (arg1))
7746 return omit_one_operand (type, arg1, arg0);
7747 if (integer_onep (arg1))
7748 return non_lvalue (fold_convert (type, arg0));
7749 /* Transform x * -1 into -x. */
7750 if (integer_all_onesp (arg1))
7751 return fold_convert (type, negate_expr (arg0));
7753 /* (a * (1 << b)) is (a << b) */
7754 if (TREE_CODE (arg1) == LSHIFT_EXPR
7755 && integer_onep (TREE_OPERAND (arg1, 0)))
7756 return fold_build2 (LSHIFT_EXPR, type, arg0,
7757 TREE_OPERAND (arg1, 1));
7758 if (TREE_CODE (arg0) == LSHIFT_EXPR
7759 && integer_onep (TREE_OPERAND (arg0, 0)))
7760 return fold_build2 (LSHIFT_EXPR, type, arg1,
7761 TREE_OPERAND (arg0, 1));
7763 if (TREE_CODE (arg1) == INTEGER_CST
7764 && 0 != (tem = extract_muldiv (op0,
7765 fold_convert (type, arg1),
7767 return fold_convert (type, tem);
7772 /* Maybe fold x * 0 to 0. The expressions aren't the same
7773 when x is NaN, since x * 0 is also NaN. Nor are they the
7774 same in modes with signed zeros, since multiplying a
7775 negative value by 0 gives -0, not +0. */
7776 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
7777 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
7778 && real_zerop (arg1))
7779 return omit_one_operand (type, arg1, arg0);
7780 /* In IEEE floating point, x*1 is not equivalent to x for snans. */
7781 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
7782 && real_onep (arg1))
7783 return non_lvalue (fold_convert (type, arg0));
7785 /* Transform x * -1.0 into -x. */
7786 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
7787 && real_minus_onep (arg1))
7788 return fold_convert (type, negate_expr (arg0));
7790 /* Convert (C1/X)*C2 into (C1*C2)/X. */
7791 if (flag_unsafe_math_optimizations
7792 && TREE_CODE (arg0) == RDIV_EXPR
7793 && TREE_CODE (arg1) == REAL_CST
7794 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
7796 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
7799 return fold_build2 (RDIV_EXPR, type, tem,
7800 TREE_OPERAND (arg0, 1));
7803 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
7804 if (operand_equal_p (arg0, arg1, 0))
7806 tree tem = fold_strip_sign_ops (arg0);
7807 if (tem != NULL_TREE)
7809 tem = fold_convert (type, tem);
7810 return fold_build2 (MULT_EXPR, type, tem, tem);
7814 if (flag_unsafe_math_optimizations)
7816 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
7817 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
7819 /* Optimizations of root(...)*root(...). */
7820 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
7822 tree rootfn, arg, arglist;
7823 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
7824 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
7826 /* Optimize sqrt(x)*sqrt(x) as x. */
7827 if (BUILTIN_SQRT_P (fcode0)
7828 && operand_equal_p (arg00, arg10, 0)
7829 && ! HONOR_SNANS (TYPE_MODE (type)))
7832 /* Optimize root(x)*root(y) as root(x*y). */
7833 rootfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7834 arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
7835 arglist = build_tree_list (NULL_TREE, arg);
7836 return build_function_call_expr (rootfn, arglist);
7839 /* Optimize expN(x)*expN(y) as expN(x+y). */
7840 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
7842 tree expfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7843 tree arg = fold_build2 (PLUS_EXPR, type,
7844 TREE_VALUE (TREE_OPERAND (arg0, 1)),
7845 TREE_VALUE (TREE_OPERAND (arg1, 1)));
7846 tree arglist = build_tree_list (NULL_TREE, arg);
7847 return build_function_call_expr (expfn, arglist);
7850 /* Optimizations of pow(...)*pow(...). */
7851 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
7852 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
7853 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
7855 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
7856 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
7858 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
7859 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
7862 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
7863 if (operand_equal_p (arg01, arg11, 0))
7865 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7866 tree arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
7867 tree arglist = tree_cons (NULL_TREE, arg,
7868 build_tree_list (NULL_TREE,
7870 return build_function_call_expr (powfn, arglist);
7873 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
7874 if (operand_equal_p (arg00, arg10, 0))
7876 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7877 tree arg = fold_build2 (PLUS_EXPR, type, arg01, arg11);
7878 tree arglist = tree_cons (NULL_TREE, arg00,
7879 build_tree_list (NULL_TREE,
7881 return build_function_call_expr (powfn, arglist);
7885 /* Optimize tan(x)*cos(x) as sin(x). */
7886 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
7887 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
7888 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
7889 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
7890 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
7891 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
7892 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
7893 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
7895 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
7897 if (sinfn != NULL_TREE)
7898 return build_function_call_expr (sinfn,
7899 TREE_OPERAND (arg0, 1));
7902 /* Optimize x*pow(x,c) as pow(x,c+1). */
7903 if (fcode1 == BUILT_IN_POW
7904 || fcode1 == BUILT_IN_POWF
7905 || fcode1 == BUILT_IN_POWL)
7907 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
7908 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
7910 if (TREE_CODE (arg11) == REAL_CST
7911 && ! TREE_CONSTANT_OVERFLOW (arg11)
7912 && operand_equal_p (arg0, arg10, 0))
7914 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
7918 c = TREE_REAL_CST (arg11);
7919 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
7920 arg = build_real (type, c);
7921 arglist = build_tree_list (NULL_TREE, arg);
7922 arglist = tree_cons (NULL_TREE, arg0, arglist);
7923 return build_function_call_expr (powfn, arglist);
7927 /* Optimize pow(x,c)*x as pow(x,c+1). */
7928 if (fcode0 == BUILT_IN_POW
7929 || fcode0 == BUILT_IN_POWF
7930 || fcode0 == BUILT_IN_POWL)
7932 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
7933 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
7935 if (TREE_CODE (arg01) == REAL_CST
7936 && ! TREE_CONSTANT_OVERFLOW (arg01)
7937 && operand_equal_p (arg1, arg00, 0))
7939 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7943 c = TREE_REAL_CST (arg01);
7944 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
7945 arg = build_real (type, c);
7946 arglist = build_tree_list (NULL_TREE, arg);
7947 arglist = tree_cons (NULL_TREE, arg1, arglist);
7948 return build_function_call_expr (powfn, arglist);
7952 /* Optimize x*x as pow(x,2.0), which is expanded as x*x. */
7954 && operand_equal_p (arg0, arg1, 0))
7956 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7960 tree arg = build_real (type, dconst2);
7961 tree arglist = build_tree_list (NULL_TREE, arg);
7962 arglist = tree_cons (NULL_TREE, arg0, arglist);
7963 return build_function_call_expr (powfn, arglist);
7972 if (integer_all_onesp (arg1))
7973 return omit_one_operand (type, arg1, arg0);
7974 if (integer_zerop (arg1))
7975 return non_lvalue (fold_convert (type, arg0));
7976 if (operand_equal_p (arg0, arg1, 0))
7977 return non_lvalue (fold_convert (type, arg0));
7980 if (TREE_CODE (arg0) == BIT_NOT_EXPR
7981 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
7983 t1 = build_int_cst (type, -1);
7984 t1 = force_fit_type (t1, 0, false, false);
7985 return omit_one_operand (type, t1, arg1);
7989 if (TREE_CODE (arg1) == BIT_NOT_EXPR
7990 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
7992 t1 = build_int_cst (type, -1);
7993 t1 = force_fit_type (t1, 0, false, false);
7994 return omit_one_operand (type, t1, arg0);
7997 t1 = distribute_bit_expr (code, type, arg0, arg1);
7998 if (t1 != NULL_TREE)
8001 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
8003 This results in more efficient code for machines without a NAND
8004 instruction. Combine will canonicalize to the first form
8005 which will allow use of NAND instructions provided by the
8006 backend if they exist. */
8007 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8008 && TREE_CODE (arg1) == BIT_NOT_EXPR)
8010 return fold_build1 (BIT_NOT_EXPR, type,
8011 build2 (BIT_AND_EXPR, type,
8012 TREE_OPERAND (arg0, 0),
8013 TREE_OPERAND (arg1, 0)));
8016 /* See if this can be simplified into a rotate first. If that
8017 is unsuccessful continue in the association code. */
8021 if (integer_zerop (arg1))
8022 return non_lvalue (fold_convert (type, arg0));
8023 if (integer_all_onesp (arg1))
8024 return fold_build1 (BIT_NOT_EXPR, type, arg0);
8025 if (operand_equal_p (arg0, arg1, 0))
8026 return omit_one_operand (type, integer_zero_node, arg0);
8029 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8030 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8032 t1 = build_int_cst (type, -1);
8033 t1 = force_fit_type (t1, 0, false, false);
8034 return omit_one_operand (type, t1, arg1);
8038 if (TREE_CODE (arg1) == BIT_NOT_EXPR
8039 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8041 t1 = build_int_cst (type, -1);
8042 t1 = force_fit_type (t1, 0, false, false);
8043 return omit_one_operand (type, t1, arg0);
8046 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
8047 with a constant, and the two constants have no bits in common,
8048 we should treat this as a BIT_IOR_EXPR since this may produce more
8050 if (TREE_CODE (arg0) == BIT_AND_EXPR
8051 && TREE_CODE (arg1) == BIT_AND_EXPR
8052 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8053 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
8054 && integer_zerop (const_binop (BIT_AND_EXPR,
8055 TREE_OPERAND (arg0, 1),
8056 TREE_OPERAND (arg1, 1), 0)))
8058 code = BIT_IOR_EXPR;
8062 /* Convert ~X ^ ~Y to X ^ Y. */
8063 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8064 && TREE_CODE (arg1) == BIT_NOT_EXPR)
8065 return fold_build2 (code, type,
8066 fold_convert (type, TREE_OPERAND (arg0, 0)),
8067 fold_convert (type, TREE_OPERAND (arg1, 0)));
8069 /* See if this can be simplified into a rotate first. If that
8070 is unsuccessful continue in the association code. */
8074 if (integer_all_onesp (arg1))
8075 return non_lvalue (fold_convert (type, arg0));
8076 if (integer_zerop (arg1))
8077 return omit_one_operand (type, arg1, arg0);
8078 if (operand_equal_p (arg0, arg1, 0))
8079 return non_lvalue (fold_convert (type, arg0));
8081 /* ~X & X is always zero. */
8082 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8083 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8084 return omit_one_operand (type, integer_zero_node, arg1);
8086 /* X & ~X is always zero. */
8087 if (TREE_CODE (arg1) == BIT_NOT_EXPR
8088 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8089 return omit_one_operand (type, integer_zero_node, arg0);
8091 t1 = distribute_bit_expr (code, type, arg0, arg1);
8092 if (t1 != NULL_TREE)
8094 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
8095 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
8096 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
8099 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
8101 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
8102 && (~TREE_INT_CST_LOW (arg1)
8103 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
8104 return fold_convert (type, TREE_OPERAND (arg0, 0));
8107 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
8109 This results in more efficient code for machines without a NOR
8110 instruction. Combine will canonicalize to the first form
8111 which will allow use of NOR instructions provided by the
8112 backend if they exist. */
8113 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8114 && TREE_CODE (arg1) == BIT_NOT_EXPR)
8116 return fold_build1 (BIT_NOT_EXPR, type,
8117 build2 (BIT_IOR_EXPR, type,
8118 TREE_OPERAND (arg0, 0),
8119 TREE_OPERAND (arg1, 0)));
8125 /* Don't touch a floating-point divide by zero unless the mode
8126 of the constant can represent infinity. */
8127 if (TREE_CODE (arg1) == REAL_CST
8128 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
8129 && real_zerop (arg1))
8132 /* (-A) / (-B) -> A / B */
8133 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
8134 return fold_build2 (RDIV_EXPR, type,
8135 TREE_OPERAND (arg0, 0),
8136 negate_expr (arg1));
8137 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
8138 return fold_build2 (RDIV_EXPR, type,
8140 TREE_OPERAND (arg1, 0));
8142 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
8143 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
8144 && real_onep (arg1))
8145 return non_lvalue (fold_convert (type, arg0));
8147 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
8148 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
8149 && real_minus_onep (arg1))
8150 return non_lvalue (fold_convert (type, negate_expr (arg0)));
8152 /* If ARG1 is a constant, we can convert this to a multiply by the
8153 reciprocal. This does not have the same rounding properties,
8154 so only do this if -funsafe-math-optimizations. We can actually
8155 always safely do it if ARG1 is a power of two, but it's hard to
8156 tell if it is or not in a portable manner. */
8157 if (TREE_CODE (arg1) == REAL_CST)
8159 if (flag_unsafe_math_optimizations
8160 && 0 != (tem = const_binop (code, build_real (type, dconst1),
8162 return fold_build2 (MULT_EXPR, type, arg0, tem);
8163 /* Find the reciprocal if optimizing and the result is exact. */
8167 r = TREE_REAL_CST (arg1);
8168 if (exact_real_inverse (TYPE_MODE(TREE_TYPE(arg0)), &r))
8170 tem = build_real (type, r);
8171 return fold_build2 (MULT_EXPR, type,
8172 fold_convert (type, arg0), tem);
8176 /* Convert A/B/C to A/(B*C). */
8177 if (flag_unsafe_math_optimizations
8178 && TREE_CODE (arg0) == RDIV_EXPR)
8179 return fold_build2 (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
8180 fold_build2 (MULT_EXPR, type,
8181 TREE_OPERAND (arg0, 1), arg1));
8183 /* Convert A/(B/C) to (A/B)*C. */
8184 if (flag_unsafe_math_optimizations
8185 && TREE_CODE (arg1) == RDIV_EXPR)
8186 return fold_build2 (MULT_EXPR, type,
8187 fold_build2 (RDIV_EXPR, type, arg0,
8188 TREE_OPERAND (arg1, 0)),
8189 TREE_OPERAND (arg1, 1));
8191 /* Convert C1/(X*C2) into (C1/C2)/X. */
8192 if (flag_unsafe_math_optimizations
8193 && TREE_CODE (arg1) == MULT_EXPR
8194 && TREE_CODE (arg0) == REAL_CST
8195 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
8197 tree tem = const_binop (RDIV_EXPR, arg0,
8198 TREE_OPERAND (arg1, 1), 0);
8200 return fold_build2 (RDIV_EXPR, type, tem,
8201 TREE_OPERAND (arg1, 0));
8204 if (flag_unsafe_math_optimizations)
8206 enum built_in_function fcode = builtin_mathfn_code (arg1);
8207 /* Optimize x/expN(y) into x*expN(-y). */
8208 if (BUILTIN_EXPONENT_P (fcode))
8210 tree expfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
8211 tree arg = negate_expr (TREE_VALUE (TREE_OPERAND (arg1, 1)));
8212 tree arglist = build_tree_list (NULL_TREE,
8213 fold_convert (type, arg));
8214 arg1 = build_function_call_expr (expfn, arglist);
8215 return fold_build2 (MULT_EXPR, type, arg0, arg1);
8218 /* Optimize x/pow(y,z) into x*pow(y,-z). */
8219 if (fcode == BUILT_IN_POW
8220 || fcode == BUILT_IN_POWF
8221 || fcode == BUILT_IN_POWL)
8223 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
8224 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
8225 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1, 1)));
8226 tree neg11 = fold_convert (type, negate_expr (arg11));
8227 tree arglist = tree_cons(NULL_TREE, arg10,
8228 build_tree_list (NULL_TREE, neg11));
8229 arg1 = build_function_call_expr (powfn, arglist);
8230 return fold_build2 (MULT_EXPR, type, arg0, arg1);
8234 if (flag_unsafe_math_optimizations)
8236 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
8237 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
8239 /* Optimize sin(x)/cos(x) as tan(x). */
8240 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
8241 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
8242 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
8243 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
8244 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
8246 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
8248 if (tanfn != NULL_TREE)
8249 return build_function_call_expr (tanfn,
8250 TREE_OPERAND (arg0, 1));
8253 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
8254 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
8255 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
8256 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
8257 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
8258 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
8260 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
8262 if (tanfn != NULL_TREE)
8264 tree tmp = TREE_OPERAND (arg0, 1);
8265 tmp = build_function_call_expr (tanfn, tmp);
8266 return fold_build2 (RDIV_EXPR, type,
8267 build_real (type, dconst1), tmp);
8271 /* Optimize pow(x,c)/x as pow(x,c-1). */
8272 if (fcode0 == BUILT_IN_POW
8273 || fcode0 == BUILT_IN_POWF
8274 || fcode0 == BUILT_IN_POWL)
8276 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
8277 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0, 1)));
8278 if (TREE_CODE (arg01) == REAL_CST
8279 && ! TREE_CONSTANT_OVERFLOW (arg01)
8280 && operand_equal_p (arg1, arg00, 0))
8282 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
8286 c = TREE_REAL_CST (arg01);
8287 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
8288 arg = build_real (type, c);
8289 arglist = build_tree_list (NULL_TREE, arg);
8290 arglist = tree_cons (NULL_TREE, arg1, arglist);
8291 return build_function_call_expr (powfn, arglist);
8297 case TRUNC_DIV_EXPR:
8298 case ROUND_DIV_EXPR:
8299 case FLOOR_DIV_EXPR:
8301 case EXACT_DIV_EXPR:
8302 if (integer_onep (arg1))
8303 return non_lvalue (fold_convert (type, arg0));
8304 if (integer_zerop (arg1))
8307 if (!TYPE_UNSIGNED (type)
8308 && TREE_CODE (arg1) == INTEGER_CST
8309 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
8310 && TREE_INT_CST_HIGH (arg1) == -1)
8311 return fold_convert (type, negate_expr (arg0));
8313 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
8314 operation, EXACT_DIV_EXPR.
8316 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
8317 At one time others generated faster code, it's not clear if they do
8318 after the last round to changes to the DIV code in expmed.c. */
8319 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
8320 && multiple_of_p (type, arg0, arg1))
8321 return fold_build2 (EXACT_DIV_EXPR, type, arg0, arg1);
8323 if (TREE_CODE (arg1) == INTEGER_CST
8324 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE)))
8325 return fold_convert (type, tem);
8330 case FLOOR_MOD_EXPR:
8331 case ROUND_MOD_EXPR:
8332 case TRUNC_MOD_EXPR:
8333 /* X % 1 is always zero, but be sure to preserve any side
8335 if (integer_onep (arg1))
8336 return omit_one_operand (type, integer_zero_node, arg0);
8338 /* X % 0, return X % 0 unchanged so that we can get the
8339 proper warnings and errors. */
8340 if (integer_zerop (arg1))
8343 /* 0 % X is always zero, but be sure to preserve any side
8344 effects in X. Place this after checking for X == 0. */
8345 if (integer_zerop (arg0))
8346 return omit_one_operand (type, integer_zero_node, arg1);
8348 /* X % -1 is zero. */
8349 if (!TYPE_UNSIGNED (type)
8350 && TREE_CODE (arg1) == INTEGER_CST
8351 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
8352 && TREE_INT_CST_HIGH (arg1) == -1)
8353 return omit_one_operand (type, integer_zero_node, arg0);
8355 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
8356 i.e. "X % C" into "X & C2", if X and C are positive. */
8357 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
8358 && (TYPE_UNSIGNED (type) || tree_expr_nonnegative_p (arg0))
8359 && integer_pow2p (arg1) && tree_int_cst_sgn (arg1) >= 0)
8361 unsigned HOST_WIDE_INT high, low;
8365 l = tree_log2 (arg1);
8366 if (l >= HOST_BITS_PER_WIDE_INT)
8368 high = ((unsigned HOST_WIDE_INT) 1
8369 << (l - HOST_BITS_PER_WIDE_INT)) - 1;
8375 low = ((unsigned HOST_WIDE_INT) 1 << l) - 1;
8378 mask = build_int_cst_wide (type, low, high);
8379 return fold_build2 (BIT_AND_EXPR, type,
8380 fold_convert (type, arg0), mask);
8383 /* X % -C is the same as X % C. */
8384 if (code == TRUNC_MOD_EXPR
8385 && !TYPE_UNSIGNED (type)
8386 && TREE_CODE (arg1) == INTEGER_CST
8387 && !TREE_CONSTANT_OVERFLOW (arg1)
8388 && TREE_INT_CST_HIGH (arg1) < 0
8390 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
8391 && !sign_bit_p (arg1, arg1))
8392 return fold_build2 (code, type, fold_convert (type, arg0),
8393 fold_convert (type, negate_expr (arg1)));
8395 /* X % -Y is the same as X % Y. */
8396 if (code == TRUNC_MOD_EXPR
8397 && !TYPE_UNSIGNED (type)
8398 && TREE_CODE (arg1) == NEGATE_EXPR
8400 return fold_build2 (code, type, fold_convert (type, arg0),
8401 fold_convert (type, TREE_OPERAND (arg1, 0)));
8403 if (TREE_CODE (arg1) == INTEGER_CST
8404 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE)))
8405 return fold_convert (type, tem);
8411 if (integer_all_onesp (arg0))
8412 return omit_one_operand (type, arg0, arg1);
8416 /* Optimize -1 >> x for arithmetic right shifts. */
8417 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type))
8418 return omit_one_operand (type, arg0, arg1);
8419 /* ... fall through ... */
8423 if (integer_zerop (arg1))
8424 return non_lvalue (fold_convert (type, arg0));
8425 if (integer_zerop (arg0))
8426 return omit_one_operand (type, arg0, arg1);
8428 /* Since negative shift count is not well-defined,
8429 don't try to compute it in the compiler. */
8430 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
8433 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
8434 if (TREE_CODE (arg0) == code && host_integerp (arg1, false)
8435 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
8436 && host_integerp (TREE_OPERAND (arg0, 1), false)
8437 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
8439 HOST_WIDE_INT low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
8440 + TREE_INT_CST_LOW (arg1));
8442 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
8443 being well defined. */
8444 if (low >= TYPE_PRECISION (type))
8446 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
8447 low = low % TYPE_PRECISION (type);
8448 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
8449 return build_int_cst (type, 0);
8451 low = TYPE_PRECISION (type) - 1;
8454 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
8455 build_int_cst (type, low));
8458 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
8459 into x & ((unsigned)-1 >> c) for unsigned types. */
8460 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
8461 || (TYPE_UNSIGNED (type)
8462 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
8463 && host_integerp (arg1, false)
8464 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
8465 && host_integerp (TREE_OPERAND (arg0, 1), false)
8466 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
8468 HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
8469 HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
8475 arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));
8477 lshift = build_int_cst (type, -1);
8478 lshift = int_const_binop (code, lshift, arg1, 0);
8480 return fold_build2 (BIT_AND_EXPR, type, arg00, lshift);
8484 /* Rewrite an LROTATE_EXPR by a constant into an
8485 RROTATE_EXPR by a new constant. */
8486 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
8488 tree tem = build_int_cst (NULL_TREE,
8489 GET_MODE_BITSIZE (TYPE_MODE (type)));
8490 tem = fold_convert (TREE_TYPE (arg1), tem);
8491 tem = const_binop (MINUS_EXPR, tem, arg1, 0);
8492 return fold_build2 (RROTATE_EXPR, type, arg0, tem);
8495 /* If we have a rotate of a bit operation with the rotate count and
8496 the second operand of the bit operation both constant,
8497 permute the two operations. */
8498 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
8499 && (TREE_CODE (arg0) == BIT_AND_EXPR
8500 || TREE_CODE (arg0) == BIT_IOR_EXPR
8501 || TREE_CODE (arg0) == BIT_XOR_EXPR)
8502 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8503 return fold_build2 (TREE_CODE (arg0), type,
8504 fold_build2 (code, type,
8505 TREE_OPERAND (arg0, 0), arg1),
8506 fold_build2 (code, type,
8507 TREE_OPERAND (arg0, 1), arg1));
8509 /* Two consecutive rotates adding up to the width of the mode can
8511 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
8512 && TREE_CODE (arg0) == RROTATE_EXPR
8513 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8514 && TREE_INT_CST_HIGH (arg1) == 0
8515 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
8516 && ((TREE_INT_CST_LOW (arg1)
8517 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
8518 == (unsigned int) GET_MODE_BITSIZE (TYPE_MODE (type))))
8519 return TREE_OPERAND (arg0, 0);
8524 if (operand_equal_p (arg0, arg1, 0))
8525 return omit_one_operand (type, arg0, arg1);
8526 if (INTEGRAL_TYPE_P (type)
8527 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
8528 return omit_one_operand (type, arg1, arg0);
8532 if (operand_equal_p (arg0, arg1, 0))
8533 return omit_one_operand (type, arg0, arg1);
8534 if (INTEGRAL_TYPE_P (type)
8535 && TYPE_MAX_VALUE (type)
8536 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
8537 return omit_one_operand (type, arg1, arg0);
8540 case TRUTH_ANDIF_EXPR:
8541 /* Note that the operands of this must be ints
8542 and their values must be 0 or 1.
8543 ("true" is a fixed value perhaps depending on the language.) */
8544 /* If first arg is constant zero, return it. */
8545 if (integer_zerop (arg0))
8546 return fold_convert (type, arg0);
8547 case TRUTH_AND_EXPR:
8548 /* If either arg is constant true, drop it. */
8549 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
8550 return non_lvalue (fold_convert (type, arg1));
8551 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
8552 /* Preserve sequence points. */
8553 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
8554 return non_lvalue (fold_convert (type, arg0));
8555 /* If second arg is constant zero, result is zero, but first arg
8556 must be evaluated. */
8557 if (integer_zerop (arg1))
8558 return omit_one_operand (type, arg1, arg0);
8559 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
8560 case will be handled here. */
8561 if (integer_zerop (arg0))
8562 return omit_one_operand (type, arg0, arg1);
8564 /* !X && X is always false. */
8565 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
8566 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8567 return omit_one_operand (type, integer_zero_node, arg1);
8568 /* X && !X is always false. */
8569 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
8570 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8571 return omit_one_operand (type, integer_zero_node, arg0);
8573 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
8574 means A >= Y && A != MAX, but in this case we know that
8577 if (!TREE_SIDE_EFFECTS (arg0)
8578 && !TREE_SIDE_EFFECTS (arg1))
8580 tem = fold_to_nonsharp_ineq_using_bound (arg0, arg1);
8581 if (tem && !operand_equal_p (tem, arg0, 0))
8582 return fold_build2 (code, type, tem, arg1);
8584 tem = fold_to_nonsharp_ineq_using_bound (arg1, arg0);
8585 if (tem && !operand_equal_p (tem, arg1, 0))
8586 return fold_build2 (code, type, arg0, tem);
8590 /* We only do these simplifications if we are optimizing. */
8594 /* Check for things like (A || B) && (A || C). We can convert this
8595 to A || (B && C). Note that either operator can be any of the four
8596 truth and/or operations and the transformation will still be
8597 valid. Also note that we only care about order for the
8598 ANDIF and ORIF operators. If B contains side effects, this
8599 might change the truth-value of A. */
8600 if (TREE_CODE (arg0) == TREE_CODE (arg1)
8601 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
8602 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
8603 || TREE_CODE (arg0) == TRUTH_AND_EXPR
8604 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
8605 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
8607 tree a00 = TREE_OPERAND (arg0, 0);
8608 tree a01 = TREE_OPERAND (arg0, 1);
8609 tree a10 = TREE_OPERAND (arg1, 0);
8610 tree a11 = TREE_OPERAND (arg1, 1);
8611 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
8612 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
8613 && (code == TRUTH_AND_EXPR
8614 || code == TRUTH_OR_EXPR));
8616 if (operand_equal_p (a00, a10, 0))
8617 return fold_build2 (TREE_CODE (arg0), type, a00,
8618 fold_build2 (code, type, a01, a11));
8619 else if (commutative && operand_equal_p (a00, a11, 0))
8620 return fold_build2 (TREE_CODE (arg0), type, a00,
8621 fold_build2 (code, type, a01, a10));
8622 else if (commutative && operand_equal_p (a01, a10, 0))
8623 return fold_build2 (TREE_CODE (arg0), type, a01,
8624 fold_build2 (code, type, a00, a11));
8626 /* This case if tricky because we must either have commutative
8627 operators or else A10 must not have side-effects. */
8629 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
8630 && operand_equal_p (a01, a11, 0))
8631 return fold_build2 (TREE_CODE (arg0), type,
8632 fold_build2 (code, type, a00, a10),
8636 /* See if we can build a range comparison. */
8637 if (0 != (tem = fold_range_test (code, type, op0, op1)))
8640 /* Check for the possibility of merging component references. If our
8641 lhs is another similar operation, try to merge its rhs with our
8642 rhs. Then try to merge our lhs and rhs. */
8643 if (TREE_CODE (arg0) == code
8644 && 0 != (tem = fold_truthop (code, type,
8645 TREE_OPERAND (arg0, 1), arg1)))
8646 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
8648 if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
8653 case TRUTH_ORIF_EXPR:
8654 /* Note that the operands of this must be ints
8655 and their values must be 0 or true.
8656 ("true" is a fixed value perhaps depending on the language.) */
8657 /* If first arg is constant true, return it. */
8658 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
8659 return fold_convert (type, arg0);
8661 /* If either arg is constant zero, drop it. */
8662 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
8663 return non_lvalue (fold_convert (type, arg1));
8664 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
8665 /* Preserve sequence points. */
8666 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
8667 return non_lvalue (fold_convert (type, arg0));
8668 /* If second arg is constant true, result is true, but we must
8669 evaluate first arg. */
8670 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
8671 return omit_one_operand (type, arg1, arg0);
8672 /* Likewise for first arg, but note this only occurs here for
8674 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
8675 return omit_one_operand (type, arg0, arg1);
8677 /* !X || X is always true. */
8678 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
8679 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8680 return omit_one_operand (type, integer_one_node, arg1);
8681 /* X || !X is always true. */
8682 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
8683 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8684 return omit_one_operand (type, integer_one_node, arg0);
8688 case TRUTH_XOR_EXPR:
8689 /* If the second arg is constant zero, drop it. */
8690 if (integer_zerop (arg1))
8691 return non_lvalue (fold_convert (type, arg0));
8692 /* If the second arg is constant true, this is a logical inversion. */
8693 if (integer_onep (arg1))
8695 /* Only call invert_truthvalue if operand is a truth value. */
8696 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
8697 tem = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg0), arg0);
8699 tem = invert_truthvalue (arg0);
8700 return non_lvalue (fold_convert (type, tem));
8702 /* Identical arguments cancel to zero. */
8703 if (operand_equal_p (arg0, arg1, 0))
8704 return omit_one_operand (type, integer_zero_node, arg0);
8706 /* !X ^ X is always true. */
8707 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
8708 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8709 return omit_one_operand (type, integer_one_node, arg1);
8711 /* X ^ !X is always true. */
8712 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
8713 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8714 return omit_one_operand (type, integer_one_node, arg0);
8724 /* If one arg is a real or integer constant, put it last. */
8725 if (tree_swap_operands_p (arg0, arg1, true))
8726 return fold_build2 (swap_tree_comparison (code), type, op1, op0);
8728 /* bool_var != 0 becomes bool_var. */
8729 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
8731 return non_lvalue (fold_convert (type, arg0));
8733 /* bool_var == 1 becomes bool_var. */
8734 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
8736 return non_lvalue (fold_convert (type, arg0));
8738 /* If this is an equality comparison of the address of a non-weak
8739 object against zero, then we know the result. */
8740 if ((code == EQ_EXPR || code == NE_EXPR)
8741 && TREE_CODE (arg0) == ADDR_EXPR
8742 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
8743 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
8744 && integer_zerop (arg1))
8745 return constant_boolean_node (code != EQ_EXPR, type);
8747 /* If this is an equality comparison of the address of two non-weak,
8748 unaliased symbols neither of which are extern (since we do not
8749 have access to attributes for externs), then we know the result. */
8750 if ((code == EQ_EXPR || code == NE_EXPR)
8751 && TREE_CODE (arg0) == ADDR_EXPR
8752 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
8753 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
8754 && ! lookup_attribute ("alias",
8755 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
8756 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
8757 && TREE_CODE (arg1) == ADDR_EXPR
8758 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
8759 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
8760 && ! lookup_attribute ("alias",
8761 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
8762 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
8764 /* We know that we're looking at the address of two
8765 non-weak, unaliased, static _DECL nodes.
8767 It is both wasteful and incorrect to call operand_equal_p
8768 to compare the two ADDR_EXPR nodes. It is wasteful in that
8769 all we need to do is test pointer equality for the arguments
8770 to the two ADDR_EXPR nodes. It is incorrect to use
8771 operand_equal_p as that function is NOT equivalent to a
8772 C equality test. It can in fact return false for two
8773 objects which would test as equal using the C equality
8775 bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
8776 return constant_boolean_node (equal
8777 ? code == EQ_EXPR : code != EQ_EXPR,
8781 /* If this is a comparison of two exprs that look like an
8782 ARRAY_REF of the same object, then we can fold this to a
8783 comparison of the two offsets. */
8784 if (TREE_CODE_CLASS (code) == tcc_comparison)
8786 tree base0, offset0, base1, offset1;
8788 if (extract_array_ref (arg0, &base0, &offset0)
8789 && extract_array_ref (arg1, &base1, &offset1)
8790 && operand_equal_p (base0, base1, 0))
8792 if (TYPE_SIZE (TREE_TYPE (TREE_TYPE (base0)))
8793 && integer_zerop (TYPE_SIZE (TREE_TYPE (TREE_TYPE (base0)))))
8794 offset0 = NULL_TREE;
8795 if (TYPE_SIZE (TREE_TYPE (TREE_TYPE (base1)))
8796 && integer_zerop (TYPE_SIZE (TREE_TYPE (TREE_TYPE (base1)))))
8797 offset1 = NULL_TREE;
8798 if (offset0 == NULL_TREE
8799 && offset1 == NULL_TREE)
8801 offset0 = integer_zero_node;
8802 offset1 = integer_zero_node;
8804 else if (offset0 == NULL_TREE)
8805 offset0 = build_int_cst (TREE_TYPE (offset1), 0);
8806 else if (offset1 == NULL_TREE)
8807 offset1 = build_int_cst (TREE_TYPE (offset0), 0);
8809 if (TREE_TYPE (offset0) == TREE_TYPE (offset1))
8810 return fold_build2 (code, type, offset0, offset1);
8814 /* Transform comparisons of the form X +- C CMP X. */
8815 if ((code != EQ_EXPR && code != NE_EXPR)
8816 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8817 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
8818 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
8819 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
8820 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8821 && !TYPE_UNSIGNED (TREE_TYPE (arg1))
8822 && !(flag_wrapv || flag_trapv))))
8824 tree arg01 = TREE_OPERAND (arg0, 1);
8825 enum tree_code code0 = TREE_CODE (arg0);
8828 if (TREE_CODE (arg01) == REAL_CST)
8829 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
8831 is_positive = tree_int_cst_sgn (arg01);
8833 /* (X - c) > X becomes false. */
8835 && ((code0 == MINUS_EXPR && is_positive >= 0)
8836 || (code0 == PLUS_EXPR && is_positive <= 0)))
8837 return constant_boolean_node (0, type);
8839 /* Likewise (X + c) < X becomes false. */
8841 && ((code0 == PLUS_EXPR && is_positive >= 0)
8842 || (code0 == MINUS_EXPR && is_positive <= 0)))
8843 return constant_boolean_node (0, type);
8845 /* Convert (X - c) <= X to true. */
8846 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
8848 && ((code0 == MINUS_EXPR && is_positive >= 0)
8849 || (code0 == PLUS_EXPR && is_positive <= 0)))
8850 return constant_boolean_node (1, type);
8852 /* Convert (X + c) >= X to true. */
8853 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
8855 && ((code0 == PLUS_EXPR && is_positive >= 0)
8856 || (code0 == MINUS_EXPR && is_positive <= 0)))
8857 return constant_boolean_node (1, type);
8859 if (TREE_CODE (arg01) == INTEGER_CST)
8861 /* Convert X + c > X and X - c < X to true for integers. */
8863 && ((code0 == PLUS_EXPR && is_positive > 0)
8864 || (code0 == MINUS_EXPR && is_positive < 0)))
8865 return constant_boolean_node (1, type);
8868 && ((code0 == MINUS_EXPR && is_positive > 0)
8869 || (code0 == PLUS_EXPR && is_positive < 0)))
8870 return constant_boolean_node (1, type);
8872 /* Convert X + c <= X and X - c >= X to false for integers. */
8874 && ((code0 == PLUS_EXPR && is_positive > 0)
8875 || (code0 == MINUS_EXPR && is_positive < 0)))
8876 return constant_boolean_node (0, type);
8879 && ((code0 == MINUS_EXPR && is_positive > 0)
8880 || (code0 == PLUS_EXPR && is_positive < 0)))
8881 return constant_boolean_node (0, type);
8885 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1. */
8886 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8887 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8888 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8889 && !TYPE_UNSIGNED (TREE_TYPE (arg1))
8890 && !(flag_wrapv || flag_trapv))
8891 && (TREE_CODE (arg1) == INTEGER_CST
8892 && !TREE_OVERFLOW (arg1)))
8894 tree const1 = TREE_OPERAND (arg0, 1);
8896 tree variable = TREE_OPERAND (arg0, 0);
8899 lhs_add = TREE_CODE (arg0) != PLUS_EXPR;
8901 lhs = fold_build2 (lhs_add ? PLUS_EXPR : MINUS_EXPR,
8902 TREE_TYPE (arg1), const2, const1);
8903 if (TREE_CODE (lhs) == TREE_CODE (arg1)
8904 && (TREE_CODE (lhs) != INTEGER_CST
8905 || !TREE_OVERFLOW (lhs)))
8906 return fold_build2 (code, type, variable, lhs);
8909 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
8911 tree targ0 = strip_float_extensions (arg0);
8912 tree targ1 = strip_float_extensions (arg1);
8913 tree newtype = TREE_TYPE (targ0);
8915 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
8916 newtype = TREE_TYPE (targ1);
8918 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
8919 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
8920 return fold_build2 (code, type, fold_convert (newtype, targ0),
8921 fold_convert (newtype, targ1));
8923 /* (-a) CMP (-b) -> b CMP a */
8924 if (TREE_CODE (arg0) == NEGATE_EXPR
8925 && TREE_CODE (arg1) == NEGATE_EXPR)
8926 return fold_build2 (code, type, TREE_OPERAND (arg1, 0),
8927 TREE_OPERAND (arg0, 0));
8929 if (TREE_CODE (arg1) == REAL_CST)
8931 REAL_VALUE_TYPE cst;
8932 cst = TREE_REAL_CST (arg1);
8934 /* (-a) CMP CST -> a swap(CMP) (-CST) */
8935 if (TREE_CODE (arg0) == NEGATE_EXPR)
8937 fold_build2 (swap_tree_comparison (code), type,
8938 TREE_OPERAND (arg0, 0),
8939 build_real (TREE_TYPE (arg1),
8940 REAL_VALUE_NEGATE (cst)));
8942 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
8943 /* a CMP (-0) -> a CMP 0 */
8944 if (REAL_VALUE_MINUS_ZERO (cst))
8945 return fold_build2 (code, type, arg0,
8946 build_real (TREE_TYPE (arg1), dconst0));
8948 /* x != NaN is always true, other ops are always false. */
8949 if (REAL_VALUE_ISNAN (cst)
8950 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
8952 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
8953 return omit_one_operand (type, tem, arg0);
8956 /* Fold comparisons against infinity. */
8957 if (REAL_VALUE_ISINF (cst))
8959 tem = fold_inf_compare (code, type, arg0, arg1);
8960 if (tem != NULL_TREE)
8965 /* If this is a comparison of a real constant with a PLUS_EXPR
8966 or a MINUS_EXPR of a real constant, we can convert it into a
8967 comparison with a revised real constant as long as no overflow
8968 occurs when unsafe_math_optimizations are enabled. */
8969 if (flag_unsafe_math_optimizations
8970 && TREE_CODE (arg1) == REAL_CST
8971 && (TREE_CODE (arg0) == PLUS_EXPR
8972 || TREE_CODE (arg0) == MINUS_EXPR)
8973 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
8974 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
8975 ? MINUS_EXPR : PLUS_EXPR,
8976 arg1, TREE_OPERAND (arg0, 1), 0))
8977 && ! TREE_CONSTANT_OVERFLOW (tem))
8978 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
8980 /* Likewise, we can simplify a comparison of a real constant with
8981 a MINUS_EXPR whose first operand is also a real constant, i.e.
8982 (c1 - x) < c2 becomes x > c1-c2. */
8983 if (flag_unsafe_math_optimizations
8984 && TREE_CODE (arg1) == REAL_CST
8985 && TREE_CODE (arg0) == MINUS_EXPR
8986 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
8987 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
8989 && ! TREE_CONSTANT_OVERFLOW (tem))
8990 return fold_build2 (swap_tree_comparison (code), type,
8991 TREE_OPERAND (arg0, 1), tem);
8993 /* Fold comparisons against built-in math functions. */
8994 if (TREE_CODE (arg1) == REAL_CST
8995 && flag_unsafe_math_optimizations
8996 && ! flag_errno_math)
8998 enum built_in_function fcode = builtin_mathfn_code (arg0);
9000 if (fcode != END_BUILTINS)
9002 tem = fold_mathfn_compare (fcode, code, type, arg0, arg1);
9003 if (tem != NULL_TREE)
9009 /* Convert foo++ == CONST into ++foo == CONST + INCR. */
9010 if (TREE_CONSTANT (arg1)
9011 && (TREE_CODE (arg0) == POSTINCREMENT_EXPR
9012 || TREE_CODE (arg0) == POSTDECREMENT_EXPR)
9013 /* This optimization is invalid for ordered comparisons
9014 if CONST+INCR overflows or if foo+incr might overflow.
9015 This optimization is invalid for floating point due to rounding.
9016 For pointer types we assume overflow doesn't happen. */
9017 && (POINTER_TYPE_P (TREE_TYPE (arg0))
9018 || (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
9019 && (code == EQ_EXPR || code == NE_EXPR))))
9021 tree varop, newconst;
9023 if (TREE_CODE (arg0) == POSTINCREMENT_EXPR)
9025 newconst = fold_build2 (PLUS_EXPR, TREE_TYPE (arg0),
9026 arg1, TREE_OPERAND (arg0, 1));
9027 varop = build2 (PREINCREMENT_EXPR, TREE_TYPE (arg0),
9028 TREE_OPERAND (arg0, 0),
9029 TREE_OPERAND (arg0, 1));
9033 newconst = fold_build2 (MINUS_EXPR, TREE_TYPE (arg0),
9034 arg1, TREE_OPERAND (arg0, 1));
9035 varop = build2 (PREDECREMENT_EXPR, TREE_TYPE (arg0),
9036 TREE_OPERAND (arg0, 0),
9037 TREE_OPERAND (arg0, 1));
9041 /* If VAROP is a reference to a bitfield, we must mask
9042 the constant by the width of the field. */
9043 if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
9044 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (varop, 0), 1))
9045 && host_integerp (DECL_SIZE (TREE_OPERAND
9046 (TREE_OPERAND (varop, 0), 1)), 1))
9048 tree fielddecl = TREE_OPERAND (TREE_OPERAND (varop, 0), 1);
9049 HOST_WIDE_INT size = tree_low_cst (DECL_SIZE (fielddecl), 1);
9050 tree folded_compare, shift;
9052 /* First check whether the comparison would come out
9053 always the same. If we don't do that we would
9054 change the meaning with the masking. */
9055 folded_compare = fold_build2 (code, type,
9056 TREE_OPERAND (varop, 0), arg1);
9057 if (integer_zerop (folded_compare)
9058 || integer_onep (folded_compare))
9059 return omit_one_operand (type, folded_compare, varop);
9061 shift = build_int_cst (NULL_TREE,
9062 TYPE_PRECISION (TREE_TYPE (varop)) - size);
9063 shift = fold_convert (TREE_TYPE (varop), shift);
9064 newconst = fold_build2 (LSHIFT_EXPR, TREE_TYPE (varop),
9066 newconst = fold_build2 (RSHIFT_EXPR, TREE_TYPE (varop),
9070 return fold_build2 (code, type, varop, newconst);
9073 /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
9074 This transformation affects the cases which are handled in later
9075 optimizations involving comparisons with non-negative constants. */
9076 if (TREE_CODE (arg1) == INTEGER_CST
9077 && TREE_CODE (arg0) != INTEGER_CST
9078 && tree_int_cst_sgn (arg1) > 0)
9083 arg1 = const_binop (MINUS_EXPR, arg1,
9084 build_int_cst (TREE_TYPE (arg1), 1), 0);
9085 return fold_build2 (GT_EXPR, type, arg0,
9086 fold_convert (TREE_TYPE (arg0), arg1));
9089 arg1 = const_binop (MINUS_EXPR, arg1,
9090 build_int_cst (TREE_TYPE (arg1), 1), 0);
9091 return fold_build2 (LE_EXPR, type, arg0,
9092 fold_convert (TREE_TYPE (arg0), arg1));
9099 /* Comparisons with the highest or lowest possible integer of
9100 the specified size will have known values. */
9102 int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg1)));
9104 if (TREE_CODE (arg1) == INTEGER_CST
9105 && ! TREE_CONSTANT_OVERFLOW (arg1)
9106 && width <= 2 * HOST_BITS_PER_WIDE_INT
9107 && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9108 || POINTER_TYPE_P (TREE_TYPE (arg1))))
9110 HOST_WIDE_INT signed_max_hi;
9111 unsigned HOST_WIDE_INT signed_max_lo;
9112 unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;
9114 if (width <= HOST_BITS_PER_WIDE_INT)
9116 signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
9121 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
9123 max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
9129 max_lo = signed_max_lo;
9130 min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
9136 width -= HOST_BITS_PER_WIDE_INT;
9138 signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
9143 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
9145 max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
9150 max_hi = signed_max_hi;
9151 min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
9155 if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
9156 && TREE_INT_CST_LOW (arg1) == max_lo)
9160 return omit_one_operand (type, integer_zero_node, arg0);
9163 return fold_build2 (EQ_EXPR, type, arg0, arg1);
9166 return omit_one_operand (type, integer_one_node, arg0);
9169 return fold_build2 (NE_EXPR, type, arg0, arg1);
9171 /* The GE_EXPR and LT_EXPR cases above are not normally
9172 reached because of previous transformations. */
9177 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
9179 && TREE_INT_CST_LOW (arg1) == max_lo - 1)
9183 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
9184 return fold_build2 (EQ_EXPR, type, arg0, arg1);
9186 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
9187 return fold_build2 (NE_EXPR, type, arg0, arg1);
9191 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
9193 && TREE_INT_CST_LOW (arg1) == min_lo)
9197 return omit_one_operand (type, integer_zero_node, arg0);
9200 return fold_build2 (EQ_EXPR, type, arg0, arg1);
9203 return omit_one_operand (type, integer_one_node, arg0);
9206 return fold_build2 (NE_EXPR, type, arg0, arg1);
9211 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
9213 && TREE_INT_CST_LOW (arg1) == min_lo + 1)
9217 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
9218 return fold_build2 (NE_EXPR, type, arg0, arg1);
9220 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
9221 return fold_build2 (EQ_EXPR, type, arg0, arg1);
9226 else if (!in_gimple_form
9227 && TREE_INT_CST_HIGH (arg1) == signed_max_hi
9228 && TREE_INT_CST_LOW (arg1) == signed_max_lo
9229 && TYPE_UNSIGNED (TREE_TYPE (arg1))
9230 /* signed_type does not work on pointer types. */
9231 && INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
9233 /* The following case also applies to X < signed_max+1
9234 and X >= signed_max+1 because previous transformations. */
9235 if (code == LE_EXPR || code == GT_EXPR)
9238 st0 = lang_hooks.types.signed_type (TREE_TYPE (arg0));
9239 st1 = lang_hooks.types.signed_type (TREE_TYPE (arg1));
9241 (build2 (code == LE_EXPR ? GE_EXPR: LT_EXPR,
9242 type, fold_convert (st0, arg0),
9243 fold_convert (st1, integer_zero_node)));
9249 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
9250 a MINUS_EXPR of a constant, we can convert it into a comparison with
9251 a revised constant as long as no overflow occurs. */
9252 if ((code == EQ_EXPR || code == NE_EXPR)
9253 && TREE_CODE (arg1) == INTEGER_CST
9254 && (TREE_CODE (arg0) == PLUS_EXPR
9255 || TREE_CODE (arg0) == MINUS_EXPR)
9256 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9257 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
9258 ? MINUS_EXPR : PLUS_EXPR,
9259 arg1, TREE_OPERAND (arg0, 1), 0))
9260 && ! TREE_CONSTANT_OVERFLOW (tem))
9261 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
9263 /* Similarly for a NEGATE_EXPR. */
9264 else if ((code == EQ_EXPR || code == NE_EXPR)
9265 && TREE_CODE (arg0) == NEGATE_EXPR
9266 && TREE_CODE (arg1) == INTEGER_CST
9267 && 0 != (tem = negate_expr (arg1))
9268 && TREE_CODE (tem) == INTEGER_CST
9269 && ! TREE_CONSTANT_OVERFLOW (tem))
9270 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
9272 /* If we have X - Y == 0, we can convert that to X == Y and similarly
9273 for !=. Don't do this for ordered comparisons due to overflow. */
9274 else if ((code == NE_EXPR || code == EQ_EXPR)
9275 && integer_zerop (arg1) && TREE_CODE (arg0) == MINUS_EXPR)
9276 return fold_build2 (code, type,
9277 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
9279 else if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
9280 && (TREE_CODE (arg0) == NOP_EXPR
9281 || TREE_CODE (arg0) == CONVERT_EXPR))
9283 /* If we are widening one operand of an integer comparison,
9284 see if the other operand is similarly being widened. Perhaps we
9285 can do the comparison in the narrower type. */
9286 tem = fold_widened_comparison (code, type, arg0, arg1);
9290 /* Or if we are changing signedness. */
9291 tem = fold_sign_changed_comparison (code, type, arg0, arg1);
9296 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
9297 constant, we can simplify it. */
9298 else if (TREE_CODE (arg1) == INTEGER_CST
9299 && (TREE_CODE (arg0) == MIN_EXPR
9300 || TREE_CODE (arg0) == MAX_EXPR)
9301 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9303 tem = optimize_minmax_comparison (code, type, op0, op1);
9310 /* If we are comparing an ABS_EXPR with a constant, we can
9311 convert all the cases into explicit comparisons, but they may
9312 well not be faster than doing the ABS and one comparison.
9313 But ABS (X) <= C is a range comparison, which becomes a subtraction
9314 and a comparison, and is probably faster. */
9315 else if (code == LE_EXPR && TREE_CODE (arg1) == INTEGER_CST
9316 && TREE_CODE (arg0) == ABS_EXPR
9317 && ! TREE_SIDE_EFFECTS (arg0)
9318 && (0 != (tem = negate_expr (arg1)))
9319 && TREE_CODE (tem) == INTEGER_CST
9320 && ! TREE_CONSTANT_OVERFLOW (tem))
9321 return fold_build2 (TRUTH_ANDIF_EXPR, type,
9322 build2 (GE_EXPR, type,
9323 TREE_OPERAND (arg0, 0), tem),
9324 build2 (LE_EXPR, type,
9325 TREE_OPERAND (arg0, 0), arg1));
9327 /* Convert ABS_EXPR<x> >= 0 to true. */
9328 else if (code == GE_EXPR
9329 && tree_expr_nonnegative_p (arg0)
9330 && (integer_zerop (arg1)
9331 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
9332 && real_zerop (arg1))))
9333 return omit_one_operand (type, integer_one_node, arg0);
9335 /* Convert ABS_EXPR<x> < 0 to false. */
9336 else if (code == LT_EXPR
9337 && tree_expr_nonnegative_p (arg0)
9338 && (integer_zerop (arg1) || real_zerop (arg1)))
9339 return omit_one_operand (type, integer_zero_node, arg0);
9341 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
9342 else if ((code == EQ_EXPR || code == NE_EXPR)
9343 && TREE_CODE (arg0) == ABS_EXPR
9344 && (integer_zerop (arg1) || real_zerop (arg1)))
9345 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), arg1);
9347 /* If this is an EQ or NE comparison with zero and ARG0 is
9348 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
9349 two operations, but the latter can be done in one less insn
9350 on machines that have only two-operand insns or on which a
9351 constant cannot be the first operand. */
9352 if (integer_zerop (arg1) && (code == EQ_EXPR || code == NE_EXPR)
9353 && TREE_CODE (arg0) == BIT_AND_EXPR)
9355 tree arg00 = TREE_OPERAND (arg0, 0);
9356 tree arg01 = TREE_OPERAND (arg0, 1);
9357 if (TREE_CODE (arg00) == LSHIFT_EXPR
9358 && integer_onep (TREE_OPERAND (arg00, 0)))
9360 fold_build2 (code, type,
9361 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
9362 build2 (RSHIFT_EXPR, TREE_TYPE (arg00),
9363 arg01, TREE_OPERAND (arg00, 1)),
9364 fold_convert (TREE_TYPE (arg0),
9367 else if (TREE_CODE (TREE_OPERAND (arg0, 1)) == LSHIFT_EXPR
9368 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 1), 0)))
9370 fold_build2 (code, type,
9371 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
9372 build2 (RSHIFT_EXPR, TREE_TYPE (arg01),
9373 arg00, TREE_OPERAND (arg01, 1)),
9374 fold_convert (TREE_TYPE (arg0),
9379 /* If this is an NE or EQ comparison of zero against the result of a
9380 signed MOD operation whose second operand is a power of 2, make
9381 the MOD operation unsigned since it is simpler and equivalent. */
9382 if ((code == NE_EXPR || code == EQ_EXPR)
9383 && integer_zerop (arg1)
9384 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
9385 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
9386 || TREE_CODE (arg0) == CEIL_MOD_EXPR
9387 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
9388 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
9389 && integer_pow2p (TREE_OPERAND (arg0, 1)))
9391 tree newtype = lang_hooks.types.unsigned_type (TREE_TYPE (arg0));
9392 tree newmod = fold_build2 (TREE_CODE (arg0), newtype,
9393 fold_convert (newtype,
9394 TREE_OPERAND (arg0, 0)),
9395 fold_convert (newtype,
9396 TREE_OPERAND (arg0, 1)));
9398 return fold_build2 (code, type, newmod,
9399 fold_convert (newtype, arg1));
9402 /* If this is an NE comparison of zero with an AND of one, remove the
9403 comparison since the AND will give the correct value. */
9404 if (code == NE_EXPR && integer_zerop (arg1)
9405 && TREE_CODE (arg0) == BIT_AND_EXPR
9406 && integer_onep (TREE_OPERAND (arg0, 1)))
9407 return fold_convert (type, arg0);
9409 /* If we have (A & C) == C where C is a power of 2, convert this into
9410 (A & C) != 0. Similarly for NE_EXPR. */
9411 if ((code == EQ_EXPR || code == NE_EXPR)
9412 && TREE_CODE (arg0) == BIT_AND_EXPR
9413 && integer_pow2p (TREE_OPERAND (arg0, 1))
9414 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9415 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
9416 arg0, fold_convert (TREE_TYPE (arg0),
9417 integer_zero_node));
9419 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
9420 bit, then fold the expression into A < 0 or A >= 0. */
9421 tem = fold_single_bit_test_into_sign_test (code, arg0, arg1, type);
9425 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
9426 Similarly for NE_EXPR. */
9427 if ((code == EQ_EXPR || code == NE_EXPR)
9428 && TREE_CODE (arg0) == BIT_AND_EXPR
9429 && TREE_CODE (arg1) == INTEGER_CST
9430 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9432 tree notc = fold_build1 (BIT_NOT_EXPR,
9433 TREE_TYPE (TREE_OPERAND (arg0, 1)),
9434 TREE_OPERAND (arg0, 1));
9435 tree dandnotc = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
9437 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
9438 if (integer_nonzerop (dandnotc))
9439 return omit_one_operand (type, rslt, arg0);
9442 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
9443 Similarly for NE_EXPR. */
9444 if ((code == EQ_EXPR || code == NE_EXPR)
9445 && TREE_CODE (arg0) == BIT_IOR_EXPR
9446 && TREE_CODE (arg1) == INTEGER_CST
9447 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9449 tree notd = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
9450 tree candnotd = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
9451 TREE_OPERAND (arg0, 1), notd);
9452 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
9453 if (integer_nonzerop (candnotd))
9454 return omit_one_operand (type, rslt, arg0);
9457 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
9458 and similarly for >= into !=. */
9459 if ((code == LT_EXPR || code == GE_EXPR)
9460 && TYPE_UNSIGNED (TREE_TYPE (arg0))
9461 && TREE_CODE (arg1) == LSHIFT_EXPR
9462 && integer_onep (TREE_OPERAND (arg1, 0)))
9463 return build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
9464 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
9465 TREE_OPERAND (arg1, 1)),
9466 fold_convert (TREE_TYPE (arg0), integer_zero_node));
9468 else if ((code == LT_EXPR || code == GE_EXPR)
9469 && TYPE_UNSIGNED (TREE_TYPE (arg0))
9470 && (TREE_CODE (arg1) == NOP_EXPR
9471 || TREE_CODE (arg1) == CONVERT_EXPR)
9472 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
9473 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
9475 build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
9476 fold_convert (TREE_TYPE (arg0),
9477 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
9478 TREE_OPERAND (TREE_OPERAND (arg1, 0),
9480 fold_convert (TREE_TYPE (arg0), integer_zero_node));
9482 /* Simplify comparison of something with itself. (For IEEE
9483 floating-point, we can only do some of these simplifications.) */
9484 if (operand_equal_p (arg0, arg1, 0))
9489 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9490 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9491 return constant_boolean_node (1, type);
9496 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9497 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9498 return constant_boolean_node (1, type);
9499 return fold_build2 (EQ_EXPR, type, arg0, arg1);
9502 /* For NE, we can only do this simplification if integer
9503 or we don't honor IEEE floating point NaNs. */
9504 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
9505 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9507 /* ... fall through ... */
9510 return constant_boolean_node (0, type);
9516 /* If we are comparing an expression that just has comparisons
9517 of two integer values, arithmetic expressions of those comparisons,
9518 and constants, we can simplify it. There are only three cases
9519 to check: the two values can either be equal, the first can be
9520 greater, or the second can be greater. Fold the expression for
9521 those three values. Since each value must be 0 or 1, we have
9522 eight possibilities, each of which corresponds to the constant 0
9523 or 1 or one of the six possible comparisons.
9525 This handles common cases like (a > b) == 0 but also handles
9526 expressions like ((x > y) - (y > x)) > 0, which supposedly
9527 occur in macroized code. */
9529 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
9531 tree cval1 = 0, cval2 = 0;
9534 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
9535 /* Don't handle degenerate cases here; they should already
9536 have been handled anyway. */
9537 && cval1 != 0 && cval2 != 0
9538 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
9539 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
9540 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
9541 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
9542 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
9543 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
9544 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
9546 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
9547 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
9549 /* We can't just pass T to eval_subst in case cval1 or cval2
9550 was the same as ARG1. */
9553 = fold_build2 (code, type,
9554 eval_subst (arg0, cval1, maxval,
9558 = fold_build2 (code, type,
9559 eval_subst (arg0, cval1, maxval,
9563 = fold_build2 (code, type,
9564 eval_subst (arg0, cval1, minval,
9568 /* All three of these results should be 0 or 1. Confirm they
9569 are. Then use those values to select the proper code
9572 if ((integer_zerop (high_result)
9573 || integer_onep (high_result))
9574 && (integer_zerop (equal_result)
9575 || integer_onep (equal_result))
9576 && (integer_zerop (low_result)
9577 || integer_onep (low_result)))
9579 /* Make a 3-bit mask with the high-order bit being the
9580 value for `>', the next for '=', and the low for '<'. */
9581 switch ((integer_onep (high_result) * 4)
9582 + (integer_onep (equal_result) * 2)
9583 + integer_onep (low_result))
9587 return omit_one_operand (type, integer_zero_node, arg0);
9608 return omit_one_operand (type, integer_one_node, arg0);
9612 return save_expr (build2 (code, type, cval1, cval2));
9614 return fold_build2 (code, type, cval1, cval2);
9619 /* If this is a comparison of a field, we may be able to simplify it. */
9620 if (((TREE_CODE (arg0) == COMPONENT_REF
9621 && lang_hooks.can_use_bit_fields_p ())
9622 || TREE_CODE (arg0) == BIT_FIELD_REF)
9623 && (code == EQ_EXPR || code == NE_EXPR)
9624 /* Handle the constant case even without -O
9625 to make sure the warnings are given. */
9626 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
9628 t1 = optimize_bit_field_compare (code, type, arg0, arg1);
9633 /* Fold a comparison of the address of COMPONENT_REFs with the same
9634 type and component to a comparison of the address of the base
9635 object. In short, &x->a OP &y->a to x OP y and
9636 &x->a OP &y.a to x OP &y */
9637 if (TREE_CODE (arg0) == ADDR_EXPR
9638 && TREE_CODE (TREE_OPERAND (arg0, 0)) == COMPONENT_REF
9639 && TREE_CODE (arg1) == ADDR_EXPR
9640 && TREE_CODE (TREE_OPERAND (arg1, 0)) == COMPONENT_REF)
9642 tree cref0 = TREE_OPERAND (arg0, 0);
9643 tree cref1 = TREE_OPERAND (arg1, 0);
9644 if (TREE_OPERAND (cref0, 1) == TREE_OPERAND (cref1, 1))
9646 tree op0 = TREE_OPERAND (cref0, 0);
9647 tree op1 = TREE_OPERAND (cref1, 0);
9648 return fold_build2 (code, type,
9649 build_fold_addr_expr (op0),
9650 build_fold_addr_expr (op1));
9654 /* Optimize comparisons of strlen vs zero to a compare of the
9655 first character of the string vs zero. To wit,
9656 strlen(ptr) == 0 => *ptr == 0
9657 strlen(ptr) != 0 => *ptr != 0
9658 Other cases should reduce to one of these two (or a constant)
9659 due to the return value of strlen being unsigned. */
9660 if ((code == EQ_EXPR || code == NE_EXPR)
9661 && integer_zerop (arg1)
9662 && TREE_CODE (arg0) == CALL_EXPR)
9664 tree fndecl = get_callee_fndecl (arg0);
9668 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
9669 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
9670 && (arglist = TREE_OPERAND (arg0, 1))
9671 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE
9672 && ! TREE_CHAIN (arglist))
9674 tree iref = build_fold_indirect_ref (TREE_VALUE (arglist));
9675 return fold_build2 (code, type, iref,
9676 build_int_cst (TREE_TYPE (iref), 0));
9680 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
9681 into a single range test. */
9682 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
9683 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
9684 && TREE_CODE (arg1) == INTEGER_CST
9685 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9686 && !integer_zerop (TREE_OPERAND (arg0, 1))
9687 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9688 && !TREE_OVERFLOW (arg1))
9690 t1 = fold_div_compare (code, type, arg0, arg1);
9691 if (t1 != NULL_TREE)
9695 if ((code == EQ_EXPR || code == NE_EXPR)
9696 && !TREE_SIDE_EFFECTS (arg0)
9697 && integer_zerop (arg1)
9698 && tree_expr_nonzero_p (arg0))
9699 return constant_boolean_node (code==NE_EXPR, type);
9701 t1 = fold_relational_const (code, type, arg0, arg1);
9702 return t1 == NULL_TREE ? NULL_TREE : t1;
9704 case UNORDERED_EXPR:
9712 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
9714 t1 = fold_relational_const (code, type, arg0, arg1);
9715 if (t1 != NULL_TREE)
9719 /* If the first operand is NaN, the result is constant. */
9720 if (TREE_CODE (arg0) == REAL_CST
9721 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
9722 && (code != LTGT_EXPR || ! flag_trapping_math))
9724 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
9727 return omit_one_operand (type, t1, arg1);
9730 /* If the second operand is NaN, the result is constant. */
9731 if (TREE_CODE (arg1) == REAL_CST
9732 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
9733 && (code != LTGT_EXPR || ! flag_trapping_math))
9735 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
9738 return omit_one_operand (type, t1, arg0);
9741 /* Simplify unordered comparison of something with itself. */
9742 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
9743 && operand_equal_p (arg0, arg1, 0))
9744 return constant_boolean_node (1, type);
9746 if (code == LTGT_EXPR
9747 && !flag_trapping_math
9748 && operand_equal_p (arg0, arg1, 0))
9749 return constant_boolean_node (0, type);
9751 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
9753 tree targ0 = strip_float_extensions (arg0);
9754 tree targ1 = strip_float_extensions (arg1);
9755 tree newtype = TREE_TYPE (targ0);
9757 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
9758 newtype = TREE_TYPE (targ1);
9760 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
9761 return fold_build2 (code, type, fold_convert (newtype, targ0),
9762 fold_convert (newtype, targ1));
9768 /* When pedantic, a compound expression can be neither an lvalue
9769 nor an integer constant expression. */
9770 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
9772 /* Don't let (0, 0) be null pointer constant. */
9773 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
9774 : fold_convert (type, arg1);
9775 return pedantic_non_lvalue (tem);
9779 return build_complex (type, arg0, arg1);
9783 /* An ASSERT_EXPR should never be passed to fold_binary. */
9788 } /* switch (code) */
9791 /* Callback for walk_tree, looking for LABEL_EXPR.
9792 Returns tree TP if it is LABEL_EXPR. Otherwise it returns NULL_TREE.
9793 Do not check the sub-tree of GOTO_EXPR. */
9796 contains_label_1 (tree *tp,
9798 void *data ATTRIBUTE_UNUSED)
9800 switch (TREE_CODE (*tp))
9812 /* Checks whether the sub-tree ST contains a label LABEL_EXPR which is
9813 accessible from outside the sub-tree. Returns NULL_TREE if no
9814 addressable label is found. */
9817 contains_label_p (tree st)
9819 return (walk_tree (&st, contains_label_1 , NULL, NULL) != NULL_TREE);
9822 /* Fold a ternary expression of code CODE and type TYPE with operands
9823 OP0, OP1, and OP2. Return the folded expression if folding is
9824 successful. Otherwise, return NULL_TREE. */
9827 fold_ternary (enum tree_code code, tree type, tree op0, tree op1, tree op2)
9830 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
9831 enum tree_code_class kind = TREE_CODE_CLASS (code);
9833 gcc_assert (IS_EXPR_CODE_CLASS (kind)
9834 && TREE_CODE_LENGTH (code) == 3);
9836 /* Strip any conversions that don't change the mode. This is safe
9837 for every expression, except for a comparison expression because
9838 its signedness is derived from its operands. So, in the latter
9839 case, only strip conversions that don't change the signedness.
9841 Note that this is done as an internal manipulation within the
9842 constant folder, in order to find the simplest representation of
9843 the arguments so that their form can be studied. In any cases,
9844 the appropriate type conversions should be put back in the tree
9845 that will get out of the constant folder. */
9861 if (TREE_CODE (arg0) == CONSTRUCTOR
9862 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
9864 unsigned HOST_WIDE_INT idx;
9866 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
9873 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
9874 so all simple results must be passed through pedantic_non_lvalue. */
9875 if (TREE_CODE (arg0) == INTEGER_CST)
9877 tree unused_op = integer_zerop (arg0) ? op1 : op2;
9878 tem = integer_zerop (arg0) ? op2 : op1;
9879 /* Only optimize constant conditions when the selected branch
9880 has the same type as the COND_EXPR. This avoids optimizing
9881 away "c ? x : throw", where the throw has a void type.
9882 Avoid throwing away that operand which contains label. */
9883 if ((!TREE_SIDE_EFFECTS (unused_op)
9884 || !contains_label_p (unused_op))
9885 && (! VOID_TYPE_P (TREE_TYPE (tem))
9886 || VOID_TYPE_P (type)))
9887 return pedantic_non_lvalue (tem);
9890 if (operand_equal_p (arg1, op2, 0))
9891 return pedantic_omit_one_operand (type, arg1, arg0);
9893 /* If we have A op B ? A : C, we may be able to convert this to a
9894 simpler expression, depending on the operation and the values
9895 of B and C. Signed zeros prevent all of these transformations,
9896 for reasons given above each one.
9898 Also try swapping the arguments and inverting the conditional. */
9899 if (COMPARISON_CLASS_P (arg0)
9900 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
9901 arg1, TREE_OPERAND (arg0, 1))
9902 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
9904 tem = fold_cond_expr_with_comparison (type, arg0, op1, op2);
9909 if (COMPARISON_CLASS_P (arg0)
9910 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
9912 TREE_OPERAND (arg0, 1))
9913 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
9915 tem = invert_truthvalue (arg0);
9916 if (COMPARISON_CLASS_P (tem))
9918 tem = fold_cond_expr_with_comparison (type, tem, op2, op1);
9924 /* If the second operand is simpler than the third, swap them
9925 since that produces better jump optimization results. */
9926 if (tree_swap_operands_p (op1, op2, false))
9928 /* See if this can be inverted. If it can't, possibly because
9929 it was a floating-point inequality comparison, don't do
9931 tem = invert_truthvalue (arg0);
9933 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
9934 return fold_build3 (code, type, tem, op2, op1);
9937 /* Convert A ? 1 : 0 to simply A. */
9938 if (integer_onep (op1)
9939 && integer_zerop (op2)
9940 /* If we try to convert OP0 to our type, the
9941 call to fold will try to move the conversion inside
9942 a COND, which will recurse. In that case, the COND_EXPR
9943 is probably the best choice, so leave it alone. */
9944 && type == TREE_TYPE (arg0))
9945 return pedantic_non_lvalue (arg0);
9947 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
9948 over COND_EXPR in cases such as floating point comparisons. */
9949 if (integer_zerop (op1)
9950 && integer_onep (op2)
9951 && truth_value_p (TREE_CODE (arg0)))
9952 return pedantic_non_lvalue (fold_convert (type,
9953 invert_truthvalue (arg0)));
9955 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
9956 if (TREE_CODE (arg0) == LT_EXPR
9957 && integer_zerop (TREE_OPERAND (arg0, 1))
9958 && integer_zerop (op2)
9959 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
9960 return fold_convert (type, fold_build2 (BIT_AND_EXPR,
9961 TREE_TYPE (tem), tem, arg1));
9963 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
9964 already handled above. */
9965 if (TREE_CODE (arg0) == BIT_AND_EXPR
9966 && integer_onep (TREE_OPERAND (arg0, 1))
9967 && integer_zerop (op2)
9968 && integer_pow2p (arg1))
9970 tree tem = TREE_OPERAND (arg0, 0);
9972 if (TREE_CODE (tem) == RSHIFT_EXPR
9973 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
9974 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
9975 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
9976 return fold_build2 (BIT_AND_EXPR, type,
9977 TREE_OPERAND (tem, 0), arg1);
9980 /* A & N ? N : 0 is simply A & N if N is a power of two. This
9981 is probably obsolete because the first operand should be a
9982 truth value (that's why we have the two cases above), but let's
9983 leave it in until we can confirm this for all front-ends. */
9984 if (integer_zerop (op2)
9985 && TREE_CODE (arg0) == NE_EXPR
9986 && integer_zerop (TREE_OPERAND (arg0, 1))
9987 && integer_pow2p (arg1)
9988 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
9989 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
9990 arg1, OEP_ONLY_CONST))
9991 return pedantic_non_lvalue (fold_convert (type,
9992 TREE_OPERAND (arg0, 0)));
9994 /* Convert A ? B : 0 into A && B if A and B are truth values. */
9995 if (integer_zerop (op2)
9996 && truth_value_p (TREE_CODE (arg0))
9997 && truth_value_p (TREE_CODE (arg1)))
9998 return fold_build2 (TRUTH_ANDIF_EXPR, type, arg0, arg1);
10000 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
10001 if (integer_onep (op2)
10002 && truth_value_p (TREE_CODE (arg0))
10003 && truth_value_p (TREE_CODE (arg1)))
10005 /* Only perform transformation if ARG0 is easily inverted. */
10006 tem = invert_truthvalue (arg0);
10007 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
10008 return fold_build2 (TRUTH_ORIF_EXPR, type, tem, arg1);
10011 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
10012 if (integer_zerop (arg1)
10013 && truth_value_p (TREE_CODE (arg0))
10014 && truth_value_p (TREE_CODE (op2)))
10016 /* Only perform transformation if ARG0 is easily inverted. */
10017 tem = invert_truthvalue (arg0);
10018 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
10019 return fold_build2 (TRUTH_ANDIF_EXPR, type, tem, op2);
10022 /* Convert A ? 1 : B into A || B if A and B are truth values. */
10023 if (integer_onep (arg1)
10024 && truth_value_p (TREE_CODE (arg0))
10025 && truth_value_p (TREE_CODE (op2)))
10026 return fold_build2 (TRUTH_ORIF_EXPR, type, arg0, op2);
10031 /* Check for a built-in function. */
10032 if (TREE_CODE (op0) == ADDR_EXPR
10033 && TREE_CODE (TREE_OPERAND (op0, 0)) == FUNCTION_DECL
10034 && DECL_BUILT_IN (TREE_OPERAND (op0, 0)))
10036 tree fndecl = TREE_OPERAND (op0, 0);
10037 tree arglist = op1;
10038 tree tmp = fold_builtin (fndecl, arglist, false);
10044 case BIT_FIELD_REF:
10045 if (TREE_CODE (arg0) == VECTOR_CST
10046 && type == TREE_TYPE (TREE_TYPE (arg0))
10047 && host_integerp (arg1, 1)
10048 && host_integerp (op2, 1))
10050 unsigned HOST_WIDE_INT width = tree_low_cst (arg1, 1);
10051 unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);
10054 && simple_cst_equal (arg1, TYPE_SIZE (type)) == 1
10055 && (idx % width) == 0
10056 && (idx = idx / width)
10057 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
10059 tree elements = TREE_VECTOR_CST_ELTS (arg0);
10060 while (idx-- > 0 && elements)
10061 elements = TREE_CHAIN (elements);
10063 return TREE_VALUE (elements);
10065 return fold_convert (type, integer_zero_node);
10072 } /* switch (code) */
10075 /* Perform constant folding and related simplification of EXPR.
10076 The related simplifications include x*1 => x, x*0 => 0, etc.,
10077 and application of the associative law.
10078 NOP_EXPR conversions may be removed freely (as long as we
10079 are careful not to change the type of the overall expression).
10080 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
10081 but we can constant-fold them if they have constant operands. */
10083 #ifdef ENABLE_FOLD_CHECKING
10084 # define fold(x) fold_1 (x)
10085 static tree fold_1 (tree);
10091 const tree t = expr;
10092 enum tree_code code = TREE_CODE (t);
10093 enum tree_code_class kind = TREE_CODE_CLASS (code);
10096 /* Return right away if a constant. */
10097 if (kind == tcc_constant)
10100 if (IS_EXPR_CODE_CLASS (kind))
10102 tree type = TREE_TYPE (t);
10103 tree op0, op1, op2;
10105 switch (TREE_CODE_LENGTH (code))
10108 op0 = TREE_OPERAND (t, 0);
10109 tem = fold_unary (code, type, op0);
10110 return tem ? tem : expr;
10112 op0 = TREE_OPERAND (t, 0);
10113 op1 = TREE_OPERAND (t, 1);
10114 tem = fold_binary (code, type, op0, op1);
10115 return tem ? tem : expr;
10117 op0 = TREE_OPERAND (t, 0);
10118 op1 = TREE_OPERAND (t, 1);
10119 op2 = TREE_OPERAND (t, 2);
10120 tem = fold_ternary (code, type, op0, op1, op2);
10121 return tem ? tem : expr;
10130 return fold (DECL_INITIAL (t));
10134 } /* switch (code) */
10137 #ifdef ENABLE_FOLD_CHECKING
10140 static void fold_checksum_tree (tree, struct md5_ctx *, htab_t);
10141 static void fold_check_failed (tree, tree);
10142 void print_fold_checksum (tree);
10144 /* When --enable-checking=fold, compute a digest of expr before
10145 and after actual fold call to see if fold did not accidentally
10146 change original expr. */
10152 struct md5_ctx ctx;
10153 unsigned char checksum_before[16], checksum_after[16];
10156 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
10157 md5_init_ctx (&ctx);
10158 fold_checksum_tree (expr, &ctx, ht);
10159 md5_finish_ctx (&ctx, checksum_before);
10162 ret = fold_1 (expr);
10164 md5_init_ctx (&ctx);
10165 fold_checksum_tree (expr, &ctx, ht);
10166 md5_finish_ctx (&ctx, checksum_after);
10169 if (memcmp (checksum_before, checksum_after, 16))
10170 fold_check_failed (expr, ret);
10176 print_fold_checksum (tree expr)
10178 struct md5_ctx ctx;
10179 unsigned char checksum[16], cnt;
10182 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
10183 md5_init_ctx (&ctx);
10184 fold_checksum_tree (expr, &ctx, ht);
10185 md5_finish_ctx (&ctx, checksum);
10187 for (cnt = 0; cnt < 16; ++cnt)
10188 fprintf (stderr, "%02x", checksum[cnt]);
10189 putc ('\n', stderr);
10193 fold_check_failed (tree expr ATTRIBUTE_UNUSED, tree ret ATTRIBUTE_UNUSED)
10195 internal_error ("fold check: original tree changed by fold");
10199 fold_checksum_tree (tree expr, struct md5_ctx *ctx, htab_t ht)
10202 enum tree_code code;
10203 char buf[sizeof (struct tree_decl_non_common)];
10208 gcc_assert ((sizeof (struct tree_exp) + 5 * sizeof (tree)
10209 <= sizeof (struct tree_decl_non_common))
10210 && sizeof (struct tree_type) <= sizeof (struct tree_decl_non_common));
10213 slot = htab_find_slot (ht, expr, INSERT);
10217 code = TREE_CODE (expr);
10218 if (TREE_CODE_CLASS (code) == tcc_declaration
10219 && DECL_ASSEMBLER_NAME_SET_P (expr))
10221 /* Allow DECL_ASSEMBLER_NAME to be modified. */
10222 memcpy (buf, expr, tree_size (expr));
10224 SET_DECL_ASSEMBLER_NAME (expr, NULL);
10226 else if (TREE_CODE_CLASS (code) == tcc_type
10227 && (TYPE_POINTER_TO (expr) || TYPE_REFERENCE_TO (expr)
10228 || TYPE_CACHED_VALUES_P (expr)
10229 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)))
10231 /* Allow these fields to be modified. */
10232 memcpy (buf, expr, tree_size (expr));
10234 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr) = 0;
10235 TYPE_POINTER_TO (expr) = NULL;
10236 TYPE_REFERENCE_TO (expr) = NULL;
10237 if (TYPE_CACHED_VALUES_P (expr))
10239 TYPE_CACHED_VALUES_P (expr) = 0;
10240 TYPE_CACHED_VALUES (expr) = NULL;
10243 md5_process_bytes (expr, tree_size (expr), ctx);
10244 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
10245 if (TREE_CODE_CLASS (code) != tcc_type
10246 && TREE_CODE_CLASS (code) != tcc_declaration
10247 && code != TREE_LIST)
10248 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
10249 switch (TREE_CODE_CLASS (code))
10255 md5_process_bytes (TREE_STRING_POINTER (expr),
10256 TREE_STRING_LENGTH (expr), ctx);
10259 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
10260 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
10263 fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
10269 case tcc_exceptional:
10273 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
10274 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
10275 expr = TREE_CHAIN (expr);
10276 goto recursive_label;
10279 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
10280 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
10286 case tcc_expression:
10287 case tcc_reference:
10288 case tcc_comparison:
10291 case tcc_statement:
10292 len = TREE_CODE_LENGTH (code);
10293 for (i = 0; i < len; ++i)
10294 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
10296 case tcc_declaration:
10297 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
10298 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
10299 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
10300 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
10301 fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
10302 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
10303 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
10304 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
10305 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
10306 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
10307 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
10310 if (TREE_CODE (expr) == ENUMERAL_TYPE)
10311 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
10312 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
10313 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
10314 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
10315 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
10316 if (INTEGRAL_TYPE_P (expr)
10317 || SCALAR_FLOAT_TYPE_P (expr))
10319 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
10320 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
10322 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
10323 if (TREE_CODE (expr) == RECORD_TYPE
10324 || TREE_CODE (expr) == UNION_TYPE
10325 || TREE_CODE (expr) == QUAL_UNION_TYPE)
10326 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
10327 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
10336 /* Fold a unary tree expression with code CODE of type TYPE with an
10337 operand OP0. Return a folded expression if successful. Otherwise,
10338 return a tree expression with code CODE of type TYPE with an
10342 fold_build1 (enum tree_code code, tree type, tree op0)
10344 tree tem = fold_unary (code, type, op0);
10348 return build1 (code, type, op0);
10351 /* Fold a binary tree expression with code CODE of type TYPE with
10352 operands OP0 and OP1. Return a folded expression if successful.
10353 Otherwise, return a tree expression with code CODE of type TYPE
10354 with operands OP0 and OP1. */
10357 fold_build2 (enum tree_code code, tree type, tree op0, tree op1)
10359 tree tem = fold_binary (code, type, op0, op1);
10363 return build2 (code, type, op0, op1);
10366 /* Fold a ternary tree expression with code CODE of type TYPE with
10367 operands OP0, OP1, and OP2. Return a folded expression if
10368 successful. Otherwise, return a tree expression with code CODE of
10369 type TYPE with operands OP0, OP1, and OP2. */
10372 fold_build3 (enum tree_code code, tree type, tree op0, tree op1, tree op2)
10374 tree tem = fold_ternary (code, type, op0, op1, op2);
10378 return build3 (code, type, op0, op1, op2);
10381 /* Perform constant folding and related simplification of initializer
10382 expression EXPR. This behaves identically to "fold" but ignores
10383 potential run-time traps and exceptions that fold must preserve. */
10386 fold_initializer (tree expr)
10388 int saved_signaling_nans = flag_signaling_nans;
10389 int saved_trapping_math = flag_trapping_math;
10390 int saved_rounding_math = flag_rounding_math;
10391 int saved_trapv = flag_trapv;
10394 flag_signaling_nans = 0;
10395 flag_trapping_math = 0;
10396 flag_rounding_math = 0;
10399 result = fold (expr);
10401 flag_signaling_nans = saved_signaling_nans;
10402 flag_trapping_math = saved_trapping_math;
10403 flag_rounding_math = saved_rounding_math;
10404 flag_trapv = saved_trapv;
10409 /* Determine if first argument is a multiple of second argument. Return 0 if
10410 it is not, or we cannot easily determined it to be.
10412 An example of the sort of thing we care about (at this point; this routine
10413 could surely be made more general, and expanded to do what the *_DIV_EXPR's
10414 fold cases do now) is discovering that
10416 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
10422 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
10424 This code also handles discovering that
10426 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
10428 is a multiple of 8 so we don't have to worry about dealing with a
10429 possible remainder.
10431 Note that we *look* inside a SAVE_EXPR only to determine how it was
10432 calculated; it is not safe for fold to do much of anything else with the
10433 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
10434 at run time. For example, the latter example above *cannot* be implemented
10435 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
10436 evaluation time of the original SAVE_EXPR is not necessarily the same at
10437 the time the new expression is evaluated. The only optimization of this
10438 sort that would be valid is changing
10440 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
10444 SAVE_EXPR (I) * SAVE_EXPR (J)
10446 (where the same SAVE_EXPR (J) is used in the original and the
10447 transformed version). */
10450 multiple_of_p (tree type, tree top, tree bottom)
10452 if (operand_equal_p (top, bottom, 0))
10455 if (TREE_CODE (type) != INTEGER_TYPE)
10458 switch (TREE_CODE (top))
10461 /* Bitwise and provides a power of two multiple. If the mask is
10462 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
10463 if (!integer_pow2p (bottom))
10468 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
10469 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
10473 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
10474 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
10477 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
10481 op1 = TREE_OPERAND (top, 1);
10482 /* const_binop may not detect overflow correctly,
10483 so check for it explicitly here. */
10484 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
10485 > TREE_INT_CST_LOW (op1)
10486 && TREE_INT_CST_HIGH (op1) == 0
10487 && 0 != (t1 = fold_convert (type,
10488 const_binop (LSHIFT_EXPR,
10491 && ! TREE_OVERFLOW (t1))
10492 return multiple_of_p (type, t1, bottom);
10497 /* Can't handle conversions from non-integral or wider integral type. */
10498 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
10499 || (TYPE_PRECISION (type)
10500 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
10503 /* .. fall through ... */
10506 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
10509 if (TREE_CODE (bottom) != INTEGER_CST
10510 || (TYPE_UNSIGNED (type)
10511 && (tree_int_cst_sgn (top) < 0
10512 || tree_int_cst_sgn (bottom) < 0)))
10514 return integer_zerop (const_binop (TRUNC_MOD_EXPR,
10522 /* Return true if `t' is known to be non-negative. */
10525 tree_expr_nonnegative_p (tree t)
10527 switch (TREE_CODE (t))
10533 return tree_int_cst_sgn (t) >= 0;
10536 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
10539 if (FLOAT_TYPE_P (TREE_TYPE (t)))
10540 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
10541 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10543 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
10544 both unsigned and at least 2 bits shorter than the result. */
10545 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
10546 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
10547 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
10549 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
10550 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
10551 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
10552 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
10554 unsigned int prec = MAX (TYPE_PRECISION (inner1),
10555 TYPE_PRECISION (inner2)) + 1;
10556 return prec < TYPE_PRECISION (TREE_TYPE (t));
10562 if (FLOAT_TYPE_P (TREE_TYPE (t)))
10564 /* x * x for floating point x is always non-negative. */
10565 if (operand_equal_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1), 0))
10567 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
10568 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10571 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
10572 both unsigned and their total bits is shorter than the result. */
10573 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
10574 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
10575 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
10577 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
10578 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
10579 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
10580 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
10581 return TYPE_PRECISION (inner1) + TYPE_PRECISION (inner2)
10582 < TYPE_PRECISION (TREE_TYPE (t));
10586 case TRUNC_DIV_EXPR:
10587 case CEIL_DIV_EXPR:
10588 case FLOOR_DIV_EXPR:
10589 case ROUND_DIV_EXPR:
10590 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
10591 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10593 case TRUNC_MOD_EXPR:
10594 case CEIL_MOD_EXPR:
10595 case FLOOR_MOD_EXPR:
10596 case ROUND_MOD_EXPR:
10597 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
10600 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
10601 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10604 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1))
10605 || tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
10608 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
10609 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10613 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
10614 tree outer_type = TREE_TYPE (t);
10616 if (TREE_CODE (outer_type) == REAL_TYPE)
10618 if (TREE_CODE (inner_type) == REAL_TYPE)
10619 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
10620 if (TREE_CODE (inner_type) == INTEGER_TYPE)
10622 if (TYPE_UNSIGNED (inner_type))
10624 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
10627 else if (TREE_CODE (outer_type) == INTEGER_TYPE)
10629 if (TREE_CODE (inner_type) == REAL_TYPE)
10630 return tree_expr_nonnegative_p (TREE_OPERAND (t,0));
10631 if (TREE_CODE (inner_type) == INTEGER_TYPE)
10632 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
10633 && TYPE_UNSIGNED (inner_type);
10639 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1))
10640 && tree_expr_nonnegative_p (TREE_OPERAND (t, 2));
10641 case COMPOUND_EXPR:
10642 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10644 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
10645 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10647 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
10648 || tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10650 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10652 return tree_expr_nonnegative_p (expr_last (TREE_OPERAND (t, 1)));
10654 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
10655 case NON_LVALUE_EXPR:
10656 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
10658 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
10662 tree temp = TARGET_EXPR_SLOT (t);
10663 t = TARGET_EXPR_INITIAL (t);
10665 /* If the initializer is non-void, then it's a normal expression
10666 that will be assigned to the slot. */
10667 if (!VOID_TYPE_P (t))
10668 return tree_expr_nonnegative_p (t);
10670 /* Otherwise, the initializer sets the slot in some way. One common
10671 way is an assignment statement at the end of the initializer. */
10674 if (TREE_CODE (t) == BIND_EXPR)
10675 t = expr_last (BIND_EXPR_BODY (t));
10676 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
10677 || TREE_CODE (t) == TRY_CATCH_EXPR)
10678 t = expr_last (TREE_OPERAND (t, 0));
10679 else if (TREE_CODE (t) == STATEMENT_LIST)
10684 if (TREE_CODE (t) == MODIFY_EXPR
10685 && TREE_OPERAND (t, 0) == temp)
10686 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10693 tree fndecl = get_callee_fndecl (t);
10694 tree arglist = TREE_OPERAND (t, 1);
10695 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
10696 switch (DECL_FUNCTION_CODE (fndecl))
10698 #define CASE_BUILTIN_F(BUILT_IN_FN) \
10699 case BUILT_IN_FN: case BUILT_IN_FN##F: case BUILT_IN_FN##L:
10700 #define CASE_BUILTIN_I(BUILT_IN_FN) \
10701 case BUILT_IN_FN: case BUILT_IN_FN##L: case BUILT_IN_FN##LL:
10703 CASE_BUILTIN_F (BUILT_IN_ACOS)
10704 CASE_BUILTIN_F (BUILT_IN_ACOSH)
10705 CASE_BUILTIN_F (BUILT_IN_CABS)
10706 CASE_BUILTIN_F (BUILT_IN_COSH)
10707 CASE_BUILTIN_F (BUILT_IN_ERFC)
10708 CASE_BUILTIN_F (BUILT_IN_EXP)
10709 CASE_BUILTIN_F (BUILT_IN_EXP10)
10710 CASE_BUILTIN_F (BUILT_IN_EXP2)
10711 CASE_BUILTIN_F (BUILT_IN_FABS)
10712 CASE_BUILTIN_F (BUILT_IN_FDIM)
10713 CASE_BUILTIN_F (BUILT_IN_FREXP)
10714 CASE_BUILTIN_F (BUILT_IN_HYPOT)
10715 CASE_BUILTIN_F (BUILT_IN_POW10)
10716 CASE_BUILTIN_I (BUILT_IN_FFS)
10717 CASE_BUILTIN_I (BUILT_IN_PARITY)
10718 CASE_BUILTIN_I (BUILT_IN_POPCOUNT)
10722 CASE_BUILTIN_F (BUILT_IN_SQRT)
10723 /* sqrt(-0.0) is -0.0. */
10724 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (t))))
10726 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
10728 CASE_BUILTIN_F (BUILT_IN_ASINH)
10729 CASE_BUILTIN_F (BUILT_IN_ATAN)
10730 CASE_BUILTIN_F (BUILT_IN_ATANH)
10731 CASE_BUILTIN_F (BUILT_IN_CBRT)
10732 CASE_BUILTIN_F (BUILT_IN_CEIL)
10733 CASE_BUILTIN_F (BUILT_IN_ERF)
10734 CASE_BUILTIN_F (BUILT_IN_EXPM1)
10735 CASE_BUILTIN_F (BUILT_IN_FLOOR)
10736 CASE_BUILTIN_F (BUILT_IN_FMOD)
10737 CASE_BUILTIN_F (BUILT_IN_LCEIL)
10738 CASE_BUILTIN_F (BUILT_IN_LDEXP)
10739 CASE_BUILTIN_F (BUILT_IN_LFLOOR)
10740 CASE_BUILTIN_F (BUILT_IN_LLCEIL)
10741 CASE_BUILTIN_F (BUILT_IN_LLFLOOR)
10742 CASE_BUILTIN_F (BUILT_IN_LLRINT)
10743 CASE_BUILTIN_F (BUILT_IN_LLROUND)
10744 CASE_BUILTIN_F (BUILT_IN_LRINT)
10745 CASE_BUILTIN_F (BUILT_IN_LROUND)
10746 CASE_BUILTIN_F (BUILT_IN_MODF)
10747 CASE_BUILTIN_F (BUILT_IN_NEARBYINT)
10748 CASE_BUILTIN_F (BUILT_IN_POW)
10749 CASE_BUILTIN_F (BUILT_IN_RINT)
10750 CASE_BUILTIN_F (BUILT_IN_ROUND)
10751 CASE_BUILTIN_F (BUILT_IN_SIGNBIT)
10752 CASE_BUILTIN_F (BUILT_IN_SINH)
10753 CASE_BUILTIN_F (BUILT_IN_TANH)
10754 CASE_BUILTIN_F (BUILT_IN_TRUNC)
10755 /* True if the 1st argument is nonnegative. */
10756 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
10758 CASE_BUILTIN_F (BUILT_IN_FMAX)
10759 /* True if the 1st OR 2nd arguments are nonnegative. */
10760 return tree_expr_nonnegative_p (TREE_VALUE (arglist))
10761 || tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
10763 CASE_BUILTIN_F (BUILT_IN_FMIN)
10764 /* True if the 1st AND 2nd arguments are nonnegative. */
10765 return tree_expr_nonnegative_p (TREE_VALUE (arglist))
10766 && tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
10768 CASE_BUILTIN_F (BUILT_IN_COPYSIGN)
10769 /* True if the 2nd argument is nonnegative. */
10770 return tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
10774 #undef CASE_BUILTIN_F
10775 #undef CASE_BUILTIN_I
10779 /* ... fall through ... */
10782 if (truth_value_p (TREE_CODE (t)))
10783 /* Truth values evaluate to 0 or 1, which is nonnegative. */
10787 /* We don't know sign of `t', so be conservative and return false. */
10791 /* Return true when T is an address and is known to be nonzero.
10792 For floating point we further ensure that T is not denormal.
10793 Similar logic is present in nonzero_address in rtlanal.h. */
10796 tree_expr_nonzero_p (tree t)
10798 tree type = TREE_TYPE (t);
10800 /* Doing something useful for floating point would need more work. */
10801 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
10804 switch (TREE_CODE (t))
10807 if (!TYPE_UNSIGNED (type) && !flag_wrapv)
10808 return tree_expr_nonzero_p (TREE_OPERAND (t, 0));
10811 /* We used to test for !integer_zerop here. This does not work correctly
10812 if TREE_CONSTANT_OVERFLOW (t). */
10813 return (TREE_INT_CST_LOW (t) != 0
10814 || TREE_INT_CST_HIGH (t) != 0);
10817 if (!TYPE_UNSIGNED (type) && !flag_wrapv)
10819 /* With the presence of negative values it is hard
10820 to say something. */
10821 if (!tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
10822 || !tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
10824 /* One of operands must be positive and the other non-negative. */
10825 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
10826 || tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
10831 if (!TYPE_UNSIGNED (type) && !flag_wrapv)
10833 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
10834 && tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
10840 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
10841 tree outer_type = TREE_TYPE (t);
10843 return (TYPE_PRECISION (inner_type) >= TYPE_PRECISION (outer_type)
10844 && tree_expr_nonzero_p (TREE_OPERAND (t, 0)));
10850 tree base = get_base_address (TREE_OPERAND (t, 0));
10855 /* Weak declarations may link to NULL. */
10856 if (VAR_OR_FUNCTION_DECL_P (base))
10857 return !DECL_WEAK (base);
10859 /* Constants are never weak. */
10860 if (CONSTANT_CLASS_P (base))
10867 return (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
10868 && tree_expr_nonzero_p (TREE_OPERAND (t, 2)));
10871 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
10872 && tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
10875 if (tree_expr_nonzero_p (TREE_OPERAND (t, 0)))
10877 /* When both operands are nonzero, then MAX must be too. */
10878 if (tree_expr_nonzero_p (TREE_OPERAND (t, 1)))
10881 /* MAX where operand 0 is positive is positive. */
10882 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
10884 /* MAX where operand 1 is positive is positive. */
10885 else if (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
10886 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
10890 case COMPOUND_EXPR:
10893 return tree_expr_nonzero_p (TREE_OPERAND (t, 1));
10896 case NON_LVALUE_EXPR:
10897 return tree_expr_nonzero_p (TREE_OPERAND (t, 0));
10900 return tree_expr_nonzero_p (TREE_OPERAND (t, 1))
10901 || tree_expr_nonzero_p (TREE_OPERAND (t, 0));
10909 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
10910 attempt to fold the expression to a constant without modifying TYPE,
10913 If the expression could be simplified to a constant, then return
10914 the constant. If the expression would not be simplified to a
10915 constant, then return NULL_TREE. */
10918 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
10920 tree tem = fold_binary (code, type, op0, op1);
10921 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
10924 /* Given the components of a unary expression CODE, TYPE and OP0,
10925 attempt to fold the expression to a constant without modifying
10928 If the expression could be simplified to a constant, then return
10929 the constant. If the expression would not be simplified to a
10930 constant, then return NULL_TREE. */
10933 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
10935 tree tem = fold_unary (code, type, op0);
10936 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
10939 /* If EXP represents referencing an element in a constant string
10940 (either via pointer arithmetic or array indexing), return the
10941 tree representing the value accessed, otherwise return NULL. */
10944 fold_read_from_constant_string (tree exp)
10946 if (TREE_CODE (exp) == INDIRECT_REF || TREE_CODE (exp) == ARRAY_REF)
10948 tree exp1 = TREE_OPERAND (exp, 0);
10952 if (TREE_CODE (exp) == INDIRECT_REF)
10953 string = string_constant (exp1, &index);
10956 tree low_bound = array_ref_low_bound (exp);
10957 index = fold_convert (sizetype, TREE_OPERAND (exp, 1));
10959 /* Optimize the special-case of a zero lower bound.
10961 We convert the low_bound to sizetype to avoid some problems
10962 with constant folding. (E.g. suppose the lower bound is 1,
10963 and its mode is QI. Without the conversion,l (ARRAY
10964 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
10965 +INDEX), which becomes (ARRAY+255+INDEX). Opps!) */
10966 if (! integer_zerop (low_bound))
10967 index = size_diffop (index, fold_convert (sizetype, low_bound));
10973 && TREE_TYPE (exp) == TREE_TYPE (TREE_TYPE (string))
10974 && TREE_CODE (string) == STRING_CST
10975 && TREE_CODE (index) == INTEGER_CST
10976 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
10977 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
10979 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
10980 return fold_convert (TREE_TYPE (exp),
10981 build_int_cst (NULL_TREE,
10982 (TREE_STRING_POINTER (string)
10983 [TREE_INT_CST_LOW (index)])));
10988 /* Return the tree for neg (ARG0) when ARG0 is known to be either
10989 an integer constant or real constant.
10991 TYPE is the type of the result. */
10994 fold_negate_const (tree arg0, tree type)
10996 tree t = NULL_TREE;
10998 switch (TREE_CODE (arg0))
11002 unsigned HOST_WIDE_INT low;
11003 HOST_WIDE_INT high;
11004 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
11005 TREE_INT_CST_HIGH (arg0),
11007 t = build_int_cst_wide (type, low, high);
11008 t = force_fit_type (t, 1,
11009 (overflow | TREE_OVERFLOW (arg0))
11010 && !TYPE_UNSIGNED (type),
11011 TREE_CONSTANT_OVERFLOW (arg0));
11016 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
11020 gcc_unreachable ();
11026 /* Return the tree for abs (ARG0) when ARG0 is known to be either
11027 an integer constant or real constant.
11029 TYPE is the type of the result. */
11032 fold_abs_const (tree arg0, tree type)
11034 tree t = NULL_TREE;
11036 switch (TREE_CODE (arg0))
11039 /* If the value is unsigned, then the absolute value is
11040 the same as the ordinary value. */
11041 if (TYPE_UNSIGNED (type))
11043 /* Similarly, if the value is non-negative. */
11044 else if (INT_CST_LT (integer_minus_one_node, arg0))
11046 /* If the value is negative, then the absolute value is
11050 unsigned HOST_WIDE_INT low;
11051 HOST_WIDE_INT high;
11052 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
11053 TREE_INT_CST_HIGH (arg0),
11055 t = build_int_cst_wide (type, low, high);
11056 t = force_fit_type (t, -1, overflow | TREE_OVERFLOW (arg0),
11057 TREE_CONSTANT_OVERFLOW (arg0));
11062 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
11063 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
11069 gcc_unreachable ();
11075 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
11076 constant. TYPE is the type of the result. */
11079 fold_not_const (tree arg0, tree type)
11081 tree t = NULL_TREE;
11083 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
11085 t = build_int_cst_wide (type,
11086 ~ TREE_INT_CST_LOW (arg0),
11087 ~ TREE_INT_CST_HIGH (arg0));
11088 t = force_fit_type (t, 0, TREE_OVERFLOW (arg0),
11089 TREE_CONSTANT_OVERFLOW (arg0));
11094 /* Given CODE, a relational operator, the target type, TYPE and two
11095 constant operands OP0 and OP1, return the result of the
11096 relational operation. If the result is not a compile time
11097 constant, then return NULL_TREE. */
11100 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
11102 int result, invert;
11104 /* From here on, the only cases we handle are when the result is
11105 known to be a constant. */
11107 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
11109 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
11110 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
11112 /* Handle the cases where either operand is a NaN. */
11113 if (real_isnan (c0) || real_isnan (c1))
11123 case UNORDERED_EXPR:
11137 if (flag_trapping_math)
11143 gcc_unreachable ();
11146 return constant_boolean_node (result, type);
11149 return constant_boolean_node (real_compare (code, c0, c1), type);
11152 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
11154 To compute GT, swap the arguments and do LT.
11155 To compute GE, do LT and invert the result.
11156 To compute LE, swap the arguments, do LT and invert the result.
11157 To compute NE, do EQ and invert the result.
11159 Therefore, the code below must handle only EQ and LT. */
11161 if (code == LE_EXPR || code == GT_EXPR)
11166 code = swap_tree_comparison (code);
11169 /* Note that it is safe to invert for real values here because we
11170 have already handled the one case that it matters. */
11173 if (code == NE_EXPR || code == GE_EXPR)
11176 code = invert_tree_comparison (code, false);
11179 /* Compute a result for LT or EQ if args permit;
11180 Otherwise return T. */
11181 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
11183 if (code == EQ_EXPR)
11184 result = tree_int_cst_equal (op0, op1);
11185 else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
11186 result = INT_CST_LT_UNSIGNED (op0, op1);
11188 result = INT_CST_LT (op0, op1);
11195 return constant_boolean_node (result, type);
11198 /* Build an expression for the a clean point containing EXPR with type TYPE.
11199 Don't build a cleanup point expression for EXPR which don't have side
11203 fold_build_cleanup_point_expr (tree type, tree expr)
11205 /* If the expression does not have side effects then we don't have to wrap
11206 it with a cleanup point expression. */
11207 if (!TREE_SIDE_EFFECTS (expr))
11210 /* If the expression is a return, check to see if the expression inside the
11211 return has no side effects or the right hand side of the modify expression
11212 inside the return. If either don't have side effects set we don't need to
11213 wrap the expression in a cleanup point expression. Note we don't check the
11214 left hand side of the modify because it should always be a return decl. */
11215 if (TREE_CODE (expr) == RETURN_EXPR)
11217 tree op = TREE_OPERAND (expr, 0);
11218 if (!op || !TREE_SIDE_EFFECTS (op))
11220 op = TREE_OPERAND (op, 1);
11221 if (!TREE_SIDE_EFFECTS (op))
11225 return build1 (CLEANUP_POINT_EXPR, type, expr);
11228 /* Build an expression for the address of T. Folds away INDIRECT_REF to
11229 avoid confusing the gimplify process. */
11232 build_fold_addr_expr_with_type (tree t, tree ptrtype)
11234 /* The size of the object is not relevant when talking about its address. */
11235 if (TREE_CODE (t) == WITH_SIZE_EXPR)
11236 t = TREE_OPERAND (t, 0);
11238 /* Note: doesn't apply to ALIGN_INDIRECT_REF */
11239 if (TREE_CODE (t) == INDIRECT_REF
11240 || TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
11242 t = TREE_OPERAND (t, 0);
11243 if (TREE_TYPE (t) != ptrtype)
11244 t = build1 (NOP_EXPR, ptrtype, t);
11250 while (handled_component_p (base))
11251 base = TREE_OPERAND (base, 0);
11253 TREE_ADDRESSABLE (base) = 1;
11255 t = build1 (ADDR_EXPR, ptrtype, t);
11262 build_fold_addr_expr (tree t)
11264 return build_fold_addr_expr_with_type (t, build_pointer_type (TREE_TYPE (t)));
11267 /* Given a pointer value OP0 and a type TYPE, return a simplified version
11268 of an indirection through OP0, or NULL_TREE if no simplification is
11272 fold_indirect_ref_1 (tree type, tree op0)
11278 subtype = TREE_TYPE (sub);
11279 if (!POINTER_TYPE_P (subtype))
11282 if (TREE_CODE (sub) == ADDR_EXPR)
11284 tree op = TREE_OPERAND (sub, 0);
11285 tree optype = TREE_TYPE (op);
11287 if (type == optype)
11289 /* *(foo *)&fooarray => fooarray[0] */
11290 else if (TREE_CODE (optype) == ARRAY_TYPE
11291 && type == TREE_TYPE (optype))
11293 tree type_domain = TYPE_DOMAIN (optype);
11294 tree min_val = size_zero_node;
11295 if (type_domain && TYPE_MIN_VALUE (type_domain))
11296 min_val = TYPE_MIN_VALUE (type_domain);
11297 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
11301 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
11302 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
11303 && type == TREE_TYPE (TREE_TYPE (subtype)))
11306 tree min_val = size_zero_node;
11307 sub = build_fold_indirect_ref (sub);
11308 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
11309 if (type_domain && TYPE_MIN_VALUE (type_domain))
11310 min_val = TYPE_MIN_VALUE (type_domain);
11311 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
11317 /* Builds an expression for an indirection through T, simplifying some
11321 build_fold_indirect_ref (tree t)
11323 tree type = TREE_TYPE (TREE_TYPE (t));
11324 tree sub = fold_indirect_ref_1 (type, t);
11329 return build1 (INDIRECT_REF, type, t);
11332 /* Given an INDIRECT_REF T, return either T or a simplified version. */
11335 fold_indirect_ref (tree t)
11337 tree sub = fold_indirect_ref_1 (TREE_TYPE (t), TREE_OPERAND (t, 0));
11345 /* Strip non-trapping, non-side-effecting tree nodes from an expression
11346 whose result is ignored. The type of the returned tree need not be
11347 the same as the original expression. */
11350 fold_ignored_result (tree t)
11352 if (!TREE_SIDE_EFFECTS (t))
11353 return integer_zero_node;
11356 switch (TREE_CODE_CLASS (TREE_CODE (t)))
11359 t = TREE_OPERAND (t, 0);
11363 case tcc_comparison:
11364 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
11365 t = TREE_OPERAND (t, 0);
11366 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
11367 t = TREE_OPERAND (t, 1);
11372 case tcc_expression:
11373 switch (TREE_CODE (t))
11375 case COMPOUND_EXPR:
11376 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
11378 t = TREE_OPERAND (t, 0);
11382 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
11383 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
11385 t = TREE_OPERAND (t, 0);
11398 /* Return the value of VALUE, rounded up to a multiple of DIVISOR.
11399 This can only be applied to objects of a sizetype. */
11402 round_up (tree value, int divisor)
11404 tree div = NULL_TREE;
11406 gcc_assert (divisor > 0);
11410 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
11411 have to do anything. Only do this when we are not given a const,
11412 because in that case, this check is more expensive than just
11414 if (TREE_CODE (value) != INTEGER_CST)
11416 div = build_int_cst (TREE_TYPE (value), divisor);
11418 if (multiple_of_p (TREE_TYPE (value), value, div))
11422 /* If divisor is a power of two, simplify this to bit manipulation. */
11423 if (divisor == (divisor & -divisor))
11427 t = build_int_cst (TREE_TYPE (value), divisor - 1);
11428 value = size_binop (PLUS_EXPR, value, t);
11429 t = build_int_cst (TREE_TYPE (value), -divisor);
11430 value = size_binop (BIT_AND_EXPR, value, t);
11435 div = build_int_cst (TREE_TYPE (value), divisor);
11436 value = size_binop (CEIL_DIV_EXPR, value, div);
11437 value = size_binop (MULT_EXPR, value, div);
11443 /* Likewise, but round down. */
11446 round_down (tree value, int divisor)
11448 tree div = NULL_TREE;
11450 gcc_assert (divisor > 0);
11454 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
11455 have to do anything. Only do this when we are not given a const,
11456 because in that case, this check is more expensive than just
11458 if (TREE_CODE (value) != INTEGER_CST)
11460 div = build_int_cst (TREE_TYPE (value), divisor);
11462 if (multiple_of_p (TREE_TYPE (value), value, div))
11466 /* If divisor is a power of two, simplify this to bit manipulation. */
11467 if (divisor == (divisor & -divisor))
11471 t = build_int_cst (TREE_TYPE (value), -divisor);
11472 value = size_binop (BIT_AND_EXPR, value, t);
11477 div = build_int_cst (TREE_TYPE (value), divisor);
11478 value = size_binop (FLOOR_DIV_EXPR, value, div);
11479 value = size_binop (MULT_EXPR, value, div);
11485 /* Returns the pointer to the base of the object addressed by EXP and
11486 extracts the information about the offset of the access, storing it
11487 to PBITPOS and POFFSET. */
11490 split_address_to_core_and_offset (tree exp,
11491 HOST_WIDE_INT *pbitpos, tree *poffset)
11494 enum machine_mode mode;
11495 int unsignedp, volatilep;
11496 HOST_WIDE_INT bitsize;
11498 if (TREE_CODE (exp) == ADDR_EXPR)
11500 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
11501 poffset, &mode, &unsignedp, &volatilep,
11503 core = build_fold_addr_expr (core);
11509 *poffset = NULL_TREE;
11515 /* Returns true if addresses of E1 and E2 differ by a constant, false
11516 otherwise. If they do, E1 - E2 is stored in *DIFF. */
11519 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
11522 HOST_WIDE_INT bitpos1, bitpos2;
11523 tree toffset1, toffset2, tdiff, type;
11525 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
11526 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
11528 if (bitpos1 % BITS_PER_UNIT != 0
11529 || bitpos2 % BITS_PER_UNIT != 0
11530 || !operand_equal_p (core1, core2, 0))
11533 if (toffset1 && toffset2)
11535 type = TREE_TYPE (toffset1);
11536 if (type != TREE_TYPE (toffset2))
11537 toffset2 = fold_convert (type, toffset2);
11539 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
11540 if (!host_integerp (tdiff, 0))
11543 *diff = tree_low_cst (tdiff, 0);
11545 else if (toffset1 || toffset2)
11547 /* If only one of the offsets is non-constant, the difference cannot
11554 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
11558 /* Simplify the floating point expression EXP when the sign of the
11559 result is not significant. Return NULL_TREE if no simplification
11563 fold_strip_sign_ops (tree exp)
11567 switch (TREE_CODE (exp))
11571 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
11572 return arg0 ? arg0 : TREE_OPERAND (exp, 0);
11576 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
11578 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
11579 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
11580 if (arg0 != NULL_TREE || arg1 != NULL_TREE)
11581 return fold_build2 (TREE_CODE (exp), TREE_TYPE (exp),
11582 arg0 ? arg0 : TREE_OPERAND (exp, 0),
11583 arg1 ? arg1 : TREE_OPERAND (exp, 1));