1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
22 /*@@ This file should be rewritten to use an arbitrary precision
23 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
24 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
25 @@ The routines that translate from the ap rep should
26 @@ warn if precision et. al. is lost.
27 @@ This would also make life easier when this technology is used
28 @@ for cross-compilers. */
30 /* The entry points in this file are fold, size_int_wide, size_binop
33 fold takes a tree as argument and returns a simplified tree.
35 size_binop takes a tree code for an arithmetic operation
36 and two operands that are trees, and produces a tree for the
37 result, assuming the type comes from `sizetype'.
39 size_int takes an integer value, and creates a tree constant
40 with type from `sizetype'.
42 force_fit_type takes a constant, an overflowable flag and prior
43 overflow indicators. It forces the value to fit the type and sets
44 TREE_OVERFLOW and TREE_CONSTANT_OVERFLOW as appropriate. */
48 #include "coretypes.h"
59 #include "langhooks.h"
62 /* The following constants represent a bit based encoding of GCC's
63 comparison operators. This encoding simplifies transformations
64 on relational comparison operators, such as AND and OR. */
65 enum comparison_code {
84 static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
85 static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *);
86 static bool negate_mathfn_p (enum built_in_function);
87 static bool negate_expr_p (tree);
88 static tree negate_expr (tree);
89 static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
90 static tree associate_trees (tree, tree, enum tree_code, tree);
91 static tree const_binop (enum tree_code, tree, tree, int);
92 static enum comparison_code comparison_to_compcode (enum tree_code);
93 static enum tree_code compcode_to_comparison (enum comparison_code);
94 static tree combine_comparisons (enum tree_code, enum tree_code,
95 enum tree_code, tree, tree, tree);
96 static int truth_value_p (enum tree_code);
97 static int operand_equal_for_comparison_p (tree, tree, tree);
98 static int twoval_comparison_p (tree, tree *, tree *, int *);
99 static tree eval_subst (tree, tree, tree, tree, tree);
100 static tree pedantic_omit_one_operand (tree, tree, tree);
101 static tree distribute_bit_expr (enum tree_code, tree, tree, tree);
102 static tree make_bit_field_ref (tree, tree, int, int, int);
103 static tree optimize_bit_field_compare (enum tree_code, tree, tree, tree);
104 static tree decode_field_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
105 enum machine_mode *, int *, int *,
107 static int all_ones_mask_p (tree, int);
108 static tree sign_bit_p (tree, tree);
109 static int simple_operand_p (tree);
110 static tree range_binop (enum tree_code, tree, tree, int, tree, int);
111 static tree make_range (tree, int *, tree *, tree *);
112 static tree build_range_check (tree, tree, int, tree, tree);
113 static int merge_ranges (int *, tree *, tree *, int, tree, tree, int, tree,
115 static tree fold_range_test (enum tree_code, tree, tree, tree);
116 static tree fold_cond_expr_with_comparison (tree, tree, tree, tree);
117 static tree unextend (tree, int, int, tree);
118 static tree fold_truthop (enum tree_code, tree, tree, tree);
119 static tree optimize_minmax_comparison (enum tree_code, tree, tree, tree);
120 static tree extract_muldiv (tree, tree, enum tree_code, tree);
121 static tree extract_muldiv_1 (tree, tree, enum tree_code, tree);
122 static int multiple_of_p (tree, tree, tree);
123 static tree fold_binary_op_with_conditional_arg (enum tree_code, tree,
126 static bool fold_real_zero_addition_p (tree, tree, int);
127 static tree fold_mathfn_compare (enum built_in_function, enum tree_code,
129 static tree fold_inf_compare (enum tree_code, tree, tree, tree);
130 static tree fold_div_compare (enum tree_code, tree, tree, tree);
131 static bool reorder_operands_p (tree, tree);
132 static tree fold_negate_const (tree, tree);
133 static tree fold_not_const (tree, tree);
134 static tree fold_relational_const (enum tree_code, tree, tree, tree);
135 static bool tree_expr_nonzero_p (tree);
137 /* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
138 overflow. Suppose A, B and SUM have the same respective signs as A1, B1,
139 and SUM1. Then this yields nonzero if overflow occurred during the
142 Overflow occurs if A and B have the same sign, but A and SUM differ in
143 sign. Use `^' to test whether signs differ, and `< 0' to isolate the
145 #define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
147 /* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
148 We do that by representing the two-word integer in 4 words, with only
149 HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
150 number. The value of the word is LOWPART + HIGHPART * BASE. */
153 ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
154 #define HIGHPART(x) \
155 ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
156 #define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)
158 /* Unpack a two-word integer into 4 words.
159 LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
160 WORDS points to the array of HOST_WIDE_INTs. */
163 encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
165 words[0] = LOWPART (low);
166 words[1] = HIGHPART (low);
167 words[2] = LOWPART (hi);
168 words[3] = HIGHPART (hi);
171 /* Pack an array of 4 words into a two-word integer.
172 WORDS points to the array of words.
173 The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces. */
176 decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
179 *low = words[0] + words[1] * BASE;
180 *hi = words[2] + words[3] * BASE;
183 /* T is an INT_CST node. OVERFLOWABLE indicates if we are interested
184 in overflow of the value, when >0 we are only interested in signed
185 overflow, for <0 we are interested in any overflow. OVERFLOWED
186 indicates whether overflow has already occurred. CONST_OVERFLOWED
187 indicates whether constant overflow has already occurred. We force
188 T's value to be within range of T's type (by setting to 0 or 1 all
189 the bits outside the type's range). We set TREE_OVERFLOWED if,
190 OVERFLOWED is nonzero,
191 or OVERFLOWABLE is >0 and signed overflow occurs
192 or OVERFLOWABLE is <0 and any overflow occurs
193 We set TREE_CONSTANT_OVERFLOWED if,
194 CONST_OVERFLOWED is nonzero
195 or we set TREE_OVERFLOWED.
196 We return either the original T, or a copy. */
199 force_fit_type (tree t, int overflowable,
200 bool overflowed, bool overflowed_const)
202 unsigned HOST_WIDE_INT low;
205 int sign_extended_type;
207 gcc_assert (TREE_CODE (t) == INTEGER_CST);
209 low = TREE_INT_CST_LOW (t);
210 high = TREE_INT_CST_HIGH (t);
212 if (POINTER_TYPE_P (TREE_TYPE (t))
213 || TREE_CODE (TREE_TYPE (t)) == OFFSET_TYPE)
216 prec = TYPE_PRECISION (TREE_TYPE (t));
217 /* Size types *are* sign extended. */
218 sign_extended_type = (!TYPE_UNSIGNED (TREE_TYPE (t))
219 || (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
220 && TYPE_IS_SIZETYPE (TREE_TYPE (t))));
222 /* First clear all bits that are beyond the type's precision. */
224 if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
226 else if (prec > HOST_BITS_PER_WIDE_INT)
227 high &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
231 if (prec < HOST_BITS_PER_WIDE_INT)
232 low &= ~((HOST_WIDE_INT) (-1) << prec);
235 if (!sign_extended_type)
236 /* No sign extension */;
237 else if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
238 /* Correct width already. */;
239 else if (prec > HOST_BITS_PER_WIDE_INT)
241 /* Sign extend top half? */
242 if (high & ((unsigned HOST_WIDE_INT)1
243 << (prec - HOST_BITS_PER_WIDE_INT - 1)))
244 high |= (HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT);
246 else if (prec == HOST_BITS_PER_WIDE_INT)
248 if ((HOST_WIDE_INT)low < 0)
253 /* Sign extend bottom half? */
254 if (low & ((unsigned HOST_WIDE_INT)1 << (prec - 1)))
257 low |= (HOST_WIDE_INT)(-1) << prec;
261 /* If the value changed, return a new node. */
262 if (overflowed || overflowed_const
263 || low != TREE_INT_CST_LOW (t) || high != TREE_INT_CST_HIGH (t))
265 t = build_int_cst_wide (TREE_TYPE (t), low, high);
269 || (overflowable > 0 && sign_extended_type))
272 TREE_OVERFLOW (t) = 1;
273 TREE_CONSTANT_OVERFLOW (t) = 1;
275 else if (overflowed_const)
278 TREE_CONSTANT_OVERFLOW (t) = 1;
285 /* Add two doubleword integers with doubleword result.
286 Each argument is given as two `HOST_WIDE_INT' pieces.
287 One argument is L1 and H1; the other, L2 and H2.
288 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
291 add_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
292 unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
293 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
295 unsigned HOST_WIDE_INT l;
299 h = h1 + h2 + (l < l1);
303 return OVERFLOW_SUM_SIGN (h1, h2, h);
306 /* Negate a doubleword integer with doubleword result.
307 Return nonzero if the operation overflows, assuming it's signed.
308 The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
309 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
312 neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
313 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
319 return (*hv & h1) < 0;
329 /* Multiply two doubleword integers with doubleword result.
330 Return nonzero if the operation overflows, assuming it's signed.
331 Each argument is given as two `HOST_WIDE_INT' pieces.
332 One argument is L1 and H1; the other, L2 and H2.
333 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
336 mul_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
337 unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
338 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
340 HOST_WIDE_INT arg1[4];
341 HOST_WIDE_INT arg2[4];
342 HOST_WIDE_INT prod[4 * 2];
343 unsigned HOST_WIDE_INT carry;
345 unsigned HOST_WIDE_INT toplow, neglow;
346 HOST_WIDE_INT tophigh, neghigh;
348 encode (arg1, l1, h1);
349 encode (arg2, l2, h2);
351 memset (prod, 0, sizeof prod);
353 for (i = 0; i < 4; i++)
356 for (j = 0; j < 4; j++)
359 /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000. */
360 carry += arg1[i] * arg2[j];
361 /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF. */
363 prod[k] = LOWPART (carry);
364 carry = HIGHPART (carry);
369 decode (prod, lv, hv); /* This ignores prod[4] through prod[4*2-1] */
371 /* Check for overflow by calculating the top half of the answer in full;
372 it should agree with the low half's sign bit. */
373 decode (prod + 4, &toplow, &tophigh);
376 neg_double (l2, h2, &neglow, &neghigh);
377 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
381 neg_double (l1, h1, &neglow, &neghigh);
382 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
384 return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
387 /* Shift the doubleword integer in L1, H1 left by COUNT places
388 keeping only PREC bits of result.
389 Shift right if COUNT is negative.
390 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
391 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
394 lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
395 HOST_WIDE_INT count, unsigned int prec,
396 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
398 unsigned HOST_WIDE_INT signmask;
402 rshift_double (l1, h1, -count, prec, lv, hv, arith);
406 if (SHIFT_COUNT_TRUNCATED)
409 if (count >= 2 * HOST_BITS_PER_WIDE_INT)
411 /* Shifting by the host word size is undefined according to the
412 ANSI standard, so we must handle this as a special case. */
416 else if (count >= HOST_BITS_PER_WIDE_INT)
418 *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
423 *hv = (((unsigned HOST_WIDE_INT) h1 << count)
424 | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
428 /* Sign extend all bits that are beyond the precision. */
430 signmask = -((prec > HOST_BITS_PER_WIDE_INT
431 ? ((unsigned HOST_WIDE_INT) *hv
432 >> (prec - HOST_BITS_PER_WIDE_INT - 1))
433 : (*lv >> (prec - 1))) & 1);
435 if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
437 else if (prec >= HOST_BITS_PER_WIDE_INT)
439 *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
440 *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
445 *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
446 *lv |= signmask << prec;
450 /* Shift the doubleword integer in L1, H1 right by COUNT places
451 keeping only PREC bits of result. COUNT must be positive.
452 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
453 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
456 rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
457 HOST_WIDE_INT count, unsigned int prec,
458 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
461 unsigned HOST_WIDE_INT signmask;
464 ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
467 if (SHIFT_COUNT_TRUNCATED)
470 if (count >= 2 * HOST_BITS_PER_WIDE_INT)
472 /* Shifting by the host word size is undefined according to the
473 ANSI standard, so we must handle this as a special case. */
477 else if (count >= HOST_BITS_PER_WIDE_INT)
480 *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
484 *hv = (unsigned HOST_WIDE_INT) h1 >> count;
486 | ((unsigned HOST_WIDE_INT) h1 << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
489 /* Zero / sign extend all bits that are beyond the precision. */
491 if (count >= (HOST_WIDE_INT)prec)
496 else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
498 else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
500 *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
501 *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
506 *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
507 *lv |= signmask << (prec - count);
511 /* Rotate the doubleword integer in L1, H1 left by COUNT places
512 keeping only PREC bits of result.
513 Rotate right if COUNT is negative.
514 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
517 lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
518 HOST_WIDE_INT count, unsigned int prec,
519 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
521 unsigned HOST_WIDE_INT s1l, s2l;
522 HOST_WIDE_INT s1h, s2h;
528 lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
529 rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
534 /* Rotate the doubleword integer in L1, H1 left by COUNT places
535 keeping only PREC bits of result. COUNT must be positive.
536 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
539 rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
540 HOST_WIDE_INT count, unsigned int prec,
541 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
543 unsigned HOST_WIDE_INT s1l, s2l;
544 HOST_WIDE_INT s1h, s2h;
550 rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
551 lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
556 /* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
557 for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
558 CODE is a tree code for a kind of division, one of
559 TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
561 It controls how the quotient is rounded to an integer.
562 Return nonzero if the operation overflows.
563 UNS nonzero says do unsigned division. */
566 div_and_round_double (enum tree_code code, int uns,
567 unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
568 HOST_WIDE_INT hnum_orig,
569 unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
570 HOST_WIDE_INT hden_orig,
571 unsigned HOST_WIDE_INT *lquo,
572 HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
576 HOST_WIDE_INT num[4 + 1]; /* extra element for scaling. */
577 HOST_WIDE_INT den[4], quo[4];
579 unsigned HOST_WIDE_INT work;
580 unsigned HOST_WIDE_INT carry = 0;
581 unsigned HOST_WIDE_INT lnum = lnum_orig;
582 HOST_WIDE_INT hnum = hnum_orig;
583 unsigned HOST_WIDE_INT lden = lden_orig;
584 HOST_WIDE_INT hden = hden_orig;
587 if (hden == 0 && lden == 0)
588 overflow = 1, lden = 1;
590 /* Calculate quotient sign and convert operands to unsigned. */
596 /* (minimum integer) / (-1) is the only overflow case. */
597 if (neg_double (lnum, hnum, &lnum, &hnum)
598 && ((HOST_WIDE_INT) lden & hden) == -1)
604 neg_double (lden, hden, &lden, &hden);
608 if (hnum == 0 && hden == 0)
609 { /* single precision */
611 /* This unsigned division rounds toward zero. */
617 { /* trivial case: dividend < divisor */
618 /* hden != 0 already checked. */
625 memset (quo, 0, sizeof quo);
627 memset (num, 0, sizeof num); /* to zero 9th element */
628 memset (den, 0, sizeof den);
630 encode (num, lnum, hnum);
631 encode (den, lden, hden);
633 /* Special code for when the divisor < BASE. */
634 if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
636 /* hnum != 0 already checked. */
637 for (i = 4 - 1; i >= 0; i--)
639 work = num[i] + carry * BASE;
640 quo[i] = work / lden;
646 /* Full double precision division,
647 with thanks to Don Knuth's "Seminumerical Algorithms". */
648 int num_hi_sig, den_hi_sig;
649 unsigned HOST_WIDE_INT quo_est, scale;
651 /* Find the highest nonzero divisor digit. */
652 for (i = 4 - 1;; i--)
659 /* Insure that the first digit of the divisor is at least BASE/2.
660 This is required by the quotient digit estimation algorithm. */
662 scale = BASE / (den[den_hi_sig] + 1);
664 { /* scale divisor and dividend */
666 for (i = 0; i <= 4 - 1; i++)
668 work = (num[i] * scale) + carry;
669 num[i] = LOWPART (work);
670 carry = HIGHPART (work);
675 for (i = 0; i <= 4 - 1; i++)
677 work = (den[i] * scale) + carry;
678 den[i] = LOWPART (work);
679 carry = HIGHPART (work);
680 if (den[i] != 0) den_hi_sig = i;
687 for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
689 /* Guess the next quotient digit, quo_est, by dividing the first
690 two remaining dividend digits by the high order quotient digit.
691 quo_est is never low and is at most 2 high. */
692 unsigned HOST_WIDE_INT tmp;
694 num_hi_sig = i + den_hi_sig + 1;
695 work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
696 if (num[num_hi_sig] != den[den_hi_sig])
697 quo_est = work / den[den_hi_sig];
701 /* Refine quo_est so it's usually correct, and at most one high. */
702 tmp = work - quo_est * den[den_hi_sig];
704 && (den[den_hi_sig - 1] * quo_est
705 > (tmp * BASE + num[num_hi_sig - 2])))
708 /* Try QUO_EST as the quotient digit, by multiplying the
709 divisor by QUO_EST and subtracting from the remaining dividend.
710 Keep in mind that QUO_EST is the I - 1st digit. */
713 for (j = 0; j <= den_hi_sig; j++)
715 work = quo_est * den[j] + carry;
716 carry = HIGHPART (work);
717 work = num[i + j] - LOWPART (work);
718 num[i + j] = LOWPART (work);
719 carry += HIGHPART (work) != 0;
722 /* If quo_est was high by one, then num[i] went negative and
723 we need to correct things. */
724 if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
727 carry = 0; /* add divisor back in */
728 for (j = 0; j <= den_hi_sig; j++)
730 work = num[i + j] + den[j] + carry;
731 carry = HIGHPART (work);
732 num[i + j] = LOWPART (work);
735 num [num_hi_sig] += carry;
738 /* Store the quotient digit. */
743 decode (quo, lquo, hquo);
746 /* If result is negative, make it so. */
748 neg_double (*lquo, *hquo, lquo, hquo);
750 /* Compute trial remainder: rem = num - (quo * den) */
751 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
752 neg_double (*lrem, *hrem, lrem, hrem);
753 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
758 case TRUNC_MOD_EXPR: /* round toward zero */
759 case EXACT_DIV_EXPR: /* for this one, it shouldn't matter */
763 case FLOOR_MOD_EXPR: /* round toward negative infinity */
764 if (quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio < 0 && rem != 0 */
767 add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
775 case CEIL_MOD_EXPR: /* round toward positive infinity */
776 if (!quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio > 0 && rem != 0 */
778 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
786 case ROUND_MOD_EXPR: /* round to closest integer */
788 unsigned HOST_WIDE_INT labs_rem = *lrem;
789 HOST_WIDE_INT habs_rem = *hrem;
790 unsigned HOST_WIDE_INT labs_den = lden, ltwice;
791 HOST_WIDE_INT habs_den = hden, htwice;
793 /* Get absolute values. */
795 neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
797 neg_double (lden, hden, &labs_den, &habs_den);
799 /* If (2 * abs (lrem) >= abs (lden)) */
800 mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
801 labs_rem, habs_rem, <wice, &htwice);
803 if (((unsigned HOST_WIDE_INT) habs_den
804 < (unsigned HOST_WIDE_INT) htwice)
805 || (((unsigned HOST_WIDE_INT) habs_den
806 == (unsigned HOST_WIDE_INT) htwice)
807 && (labs_den < ltwice)))
811 add_double (*lquo, *hquo,
812 (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
815 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
827 /* Compute true remainder: rem = num - (quo * den) */
828 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
829 neg_double (*lrem, *hrem, lrem, hrem);
830 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
834 /* If ARG2 divides ARG1 with zero remainder, carries out the division
835 of type CODE and returns the quotient.
836 Otherwise returns NULL_TREE. */
839 div_if_zero_remainder (enum tree_code code, tree arg1, tree arg2)
841 unsigned HOST_WIDE_INT int1l, int2l;
842 HOST_WIDE_INT int1h, int2h;
843 unsigned HOST_WIDE_INT quol, reml;
844 HOST_WIDE_INT quoh, remh;
845 tree type = TREE_TYPE (arg1);
846 int uns = TYPE_UNSIGNED (type);
848 int1l = TREE_INT_CST_LOW (arg1);
849 int1h = TREE_INT_CST_HIGH (arg1);
850 int2l = TREE_INT_CST_LOW (arg2);
851 int2h = TREE_INT_CST_HIGH (arg2);
853 div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
854 &quol, &quoh, &reml, &remh);
855 if (remh != 0 || reml != 0)
858 return build_int_cst_wide (type, quol, quoh);
861 /* Return true if built-in mathematical function specified by CODE
862 preserves the sign of it argument, i.e. -f(x) == f(-x). */
865 negate_mathfn_p (enum built_in_function code)
889 /* Check whether we may negate an integer constant T without causing
893 may_negate_without_overflow_p (tree t)
895 unsigned HOST_WIDE_INT val;
899 gcc_assert (TREE_CODE (t) == INTEGER_CST);
901 type = TREE_TYPE (t);
902 if (TYPE_UNSIGNED (type))
905 prec = TYPE_PRECISION (type);
906 if (prec > HOST_BITS_PER_WIDE_INT)
908 if (TREE_INT_CST_LOW (t) != 0)
910 prec -= HOST_BITS_PER_WIDE_INT;
911 val = TREE_INT_CST_HIGH (t);
914 val = TREE_INT_CST_LOW (t);
915 if (prec < HOST_BITS_PER_WIDE_INT)
916 val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
917 return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
920 /* Determine whether an expression T can be cheaply negated using
921 the function negate_expr. */
924 negate_expr_p (tree t)
931 type = TREE_TYPE (t);
934 switch (TREE_CODE (t))
937 if (TYPE_UNSIGNED (type) || ! flag_trapv)
940 /* Check that -CST will not overflow type. */
941 return may_negate_without_overflow_p (t);
948 return negate_expr_p (TREE_REALPART (t))
949 && negate_expr_p (TREE_IMAGPART (t));
952 if (FLOAT_TYPE_P (type) && !flag_unsafe_math_optimizations)
954 /* -(A + B) -> (-B) - A. */
955 if (negate_expr_p (TREE_OPERAND (t, 1))
956 && reorder_operands_p (TREE_OPERAND (t, 0),
957 TREE_OPERAND (t, 1)))
959 /* -(A + B) -> (-A) - B. */
960 return negate_expr_p (TREE_OPERAND (t, 0));
963 /* We can't turn -(A-B) into B-A when we honor signed zeros. */
964 return (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
965 && reorder_operands_p (TREE_OPERAND (t, 0),
966 TREE_OPERAND (t, 1));
969 if (TYPE_UNSIGNED (TREE_TYPE (t)))
975 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
976 return negate_expr_p (TREE_OPERAND (t, 1))
977 || negate_expr_p (TREE_OPERAND (t, 0));
981 /* Negate -((double)float) as (double)(-float). */
982 if (TREE_CODE (type) == REAL_TYPE)
984 tree tem = strip_float_extensions (t);
986 return negate_expr_p (tem);
991 /* Negate -f(x) as f(-x). */
992 if (negate_mathfn_p (builtin_mathfn_code (t)))
993 return negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1)));
997 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
998 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
1000 tree op1 = TREE_OPERAND (t, 1);
1001 if (TREE_INT_CST_HIGH (op1) == 0
1002 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
1003 == TREE_INT_CST_LOW (op1))
1014 /* Given T, an expression, return the negation of T. Allow for T to be
1015 null, in which case return null. */
1018 negate_expr (tree t)
1026 type = TREE_TYPE (t);
1027 STRIP_SIGN_NOPS (t);
1029 switch (TREE_CODE (t))
1032 tem = fold_negate_const (t, type);
1033 if (! TREE_OVERFLOW (tem)
1034 || TYPE_UNSIGNED (type)
1040 tem = fold_negate_const (t, type);
1041 /* Two's complement FP formats, such as c4x, may overflow. */
1042 if (! TREE_OVERFLOW (tem) || ! flag_trapping_math)
1043 return fold_convert (type, tem);
1048 tree rpart = negate_expr (TREE_REALPART (t));
1049 tree ipart = negate_expr (TREE_IMAGPART (t));
1051 if ((TREE_CODE (rpart) == REAL_CST
1052 && TREE_CODE (ipart) == REAL_CST)
1053 || (TREE_CODE (rpart) == INTEGER_CST
1054 && TREE_CODE (ipart) == INTEGER_CST))
1055 return build_complex (type, rpart, ipart);
1060 return fold_convert (type, TREE_OPERAND (t, 0));
1063 if (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
1065 /* -(A + B) -> (-B) - A. */
1066 if (negate_expr_p (TREE_OPERAND (t, 1))
1067 && reorder_operands_p (TREE_OPERAND (t, 0),
1068 TREE_OPERAND (t, 1)))
1070 tem = negate_expr (TREE_OPERAND (t, 1));
1071 tem = fold_build2 (MINUS_EXPR, TREE_TYPE (t),
1072 tem, TREE_OPERAND (t, 0));
1073 return fold_convert (type, tem);
1076 /* -(A + B) -> (-A) - B. */
1077 if (negate_expr_p (TREE_OPERAND (t, 0)))
1079 tem = negate_expr (TREE_OPERAND (t, 0));
1080 tem = fold_build2 (MINUS_EXPR, TREE_TYPE (t),
1081 tem, TREE_OPERAND (t, 1));
1082 return fold_convert (type, tem);
1088 /* - (A - B) -> B - A */
1089 if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
1090 && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
1091 return fold_convert (type,
1092 fold_build2 (MINUS_EXPR, TREE_TYPE (t),
1093 TREE_OPERAND (t, 1),
1094 TREE_OPERAND (t, 0)));
1098 if (TYPE_UNSIGNED (TREE_TYPE (t)))
1104 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
1106 tem = TREE_OPERAND (t, 1);
1107 if (negate_expr_p (tem))
1108 return fold_convert (type,
1109 fold_build2 (TREE_CODE (t), TREE_TYPE (t),
1110 TREE_OPERAND (t, 0),
1111 negate_expr (tem)));
1112 tem = TREE_OPERAND (t, 0);
1113 if (negate_expr_p (tem))
1114 return fold_convert (type,
1115 fold_build2 (TREE_CODE (t), TREE_TYPE (t),
1117 TREE_OPERAND (t, 1)));
1122 /* Convert -((double)float) into (double)(-float). */
1123 if (TREE_CODE (type) == REAL_TYPE)
1125 tem = strip_float_extensions (t);
1126 if (tem != t && negate_expr_p (tem))
1127 return fold_convert (type, negate_expr (tem));
1132 /* Negate -f(x) as f(-x). */
1133 if (negate_mathfn_p (builtin_mathfn_code (t))
1134 && negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1))))
1136 tree fndecl, arg, arglist;
1138 fndecl = get_callee_fndecl (t);
1139 arg = negate_expr (TREE_VALUE (TREE_OPERAND (t, 1)));
1140 arglist = build_tree_list (NULL_TREE, arg);
1141 return build_function_call_expr (fndecl, arglist);
1146 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
1147 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
1149 tree op1 = TREE_OPERAND (t, 1);
1150 if (TREE_INT_CST_HIGH (op1) == 0
1151 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
1152 == TREE_INT_CST_LOW (op1))
1154 tree ntype = TYPE_UNSIGNED (type)
1155 ? lang_hooks.types.signed_type (type)
1156 : lang_hooks.types.unsigned_type (type);
1157 tree temp = fold_convert (ntype, TREE_OPERAND (t, 0));
1158 temp = fold_build2 (RSHIFT_EXPR, ntype, temp, op1);
1159 return fold_convert (type, temp);
1168 tem = fold_build1 (NEGATE_EXPR, TREE_TYPE (t), t);
1169 return fold_convert (type, tem);
1172 /* Split a tree IN into a constant, literal and variable parts that could be
1173 combined with CODE to make IN. "constant" means an expression with
1174 TREE_CONSTANT but that isn't an actual constant. CODE must be a
1175 commutative arithmetic operation. Store the constant part into *CONP,
1176 the literal in *LITP and return the variable part. If a part isn't
1177 present, set it to null. If the tree does not decompose in this way,
1178 return the entire tree as the variable part and the other parts as null.
1180 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR. In that
1181 case, we negate an operand that was subtracted. Except if it is a
1182 literal for which we use *MINUS_LITP instead.
1184 If NEGATE_P is true, we are negating all of IN, again except a literal
1185 for which we use *MINUS_LITP instead.
1187 If IN is itself a literal or constant, return it as appropriate.
1189 Note that we do not guarantee that any of the three values will be the
1190 same type as IN, but they will have the same signedness and mode. */
1193 split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
1194 tree *minus_litp, int negate_p)
1202 /* Strip any conversions that don't change the machine mode or signedness. */
1203 STRIP_SIGN_NOPS (in);
1205 if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST)
1207 else if (TREE_CODE (in) == code
1208 || (! FLOAT_TYPE_P (TREE_TYPE (in))
1209 /* We can associate addition and subtraction together (even
1210 though the C standard doesn't say so) for integers because
1211 the value is not affected. For reals, the value might be
1212 affected, so we can't. */
1213 && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
1214 || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
1216 tree op0 = TREE_OPERAND (in, 0);
1217 tree op1 = TREE_OPERAND (in, 1);
1218 int neg1_p = TREE_CODE (in) == MINUS_EXPR;
1219 int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;
1221 /* First see if either of the operands is a literal, then a constant. */
1222 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
1223 *litp = op0, op0 = 0;
1224 else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST)
1225 *litp = op1, neg_litp_p = neg1_p, op1 = 0;
1227 if (op0 != 0 && TREE_CONSTANT (op0))
1228 *conp = op0, op0 = 0;
1229 else if (op1 != 0 && TREE_CONSTANT (op1))
1230 *conp = op1, neg_conp_p = neg1_p, op1 = 0;
1232 /* If we haven't dealt with either operand, this is not a case we can
1233 decompose. Otherwise, VAR is either of the ones remaining, if any. */
1234 if (op0 != 0 && op1 != 0)
1239 var = op1, neg_var_p = neg1_p;
1241 /* Now do any needed negations. */
1243 *minus_litp = *litp, *litp = 0;
1245 *conp = negate_expr (*conp);
1247 var = negate_expr (var);
1249 else if (TREE_CONSTANT (in))
1257 *minus_litp = *litp, *litp = 0;
1258 else if (*minus_litp)
1259 *litp = *minus_litp, *minus_litp = 0;
1260 *conp = negate_expr (*conp);
1261 var = negate_expr (var);
1267 /* Re-associate trees split by the above function. T1 and T2 are either
1268 expressions to associate or null. Return the new expression, if any. If
1269 we build an operation, do it in TYPE and with CODE. */
1272 associate_trees (tree t1, tree t2, enum tree_code code, tree type)
1279 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
1280 try to fold this since we will have infinite recursion. But do
1281 deal with any NEGATE_EXPRs. */
1282 if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
1283 || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
1285 if (code == PLUS_EXPR)
1287 if (TREE_CODE (t1) == NEGATE_EXPR)
1288 return build2 (MINUS_EXPR, type, fold_convert (type, t2),
1289 fold_convert (type, TREE_OPERAND (t1, 0)));
1290 else if (TREE_CODE (t2) == NEGATE_EXPR)
1291 return build2 (MINUS_EXPR, type, fold_convert (type, t1),
1292 fold_convert (type, TREE_OPERAND (t2, 0)));
1293 else if (integer_zerop (t2))
1294 return fold_convert (type, t1);
1296 else if (code == MINUS_EXPR)
1298 if (integer_zerop (t2))
1299 return fold_convert (type, t1);
1302 return build2 (code, type, fold_convert (type, t1),
1303 fold_convert (type, t2));
1306 return fold_build2 (code, type, fold_convert (type, t1),
1307 fold_convert (type, t2));
1310 /* Combine two integer constants ARG1 and ARG2 under operation CODE
1311 to produce a new constant.
1313 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
1316 int_const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
1318 unsigned HOST_WIDE_INT int1l, int2l;
1319 HOST_WIDE_INT int1h, int2h;
1320 unsigned HOST_WIDE_INT low;
1322 unsigned HOST_WIDE_INT garbagel;
1323 HOST_WIDE_INT garbageh;
1325 tree type = TREE_TYPE (arg1);
1326 int uns = TYPE_UNSIGNED (type);
1328 = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
1331 int1l = TREE_INT_CST_LOW (arg1);
1332 int1h = TREE_INT_CST_HIGH (arg1);
1333 int2l = TREE_INT_CST_LOW (arg2);
1334 int2h = TREE_INT_CST_HIGH (arg2);
1339 low = int1l | int2l, hi = int1h | int2h;
1343 low = int1l ^ int2l, hi = int1h ^ int2h;
1347 low = int1l & int2l, hi = int1h & int2h;
1353 /* It's unclear from the C standard whether shifts can overflow.
1354 The following code ignores overflow; perhaps a C standard
1355 interpretation ruling is needed. */
1356 lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
1363 lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
1368 overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
1372 neg_double (int2l, int2h, &low, &hi);
1373 add_double (int1l, int1h, low, hi, &low, &hi);
1374 overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
1378 overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
1381 case TRUNC_DIV_EXPR:
1382 case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
1383 case EXACT_DIV_EXPR:
1384 /* This is a shortcut for a common special case. */
1385 if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1386 && ! TREE_CONSTANT_OVERFLOW (arg1)
1387 && ! TREE_CONSTANT_OVERFLOW (arg2)
1388 && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1390 if (code == CEIL_DIV_EXPR)
1393 low = int1l / int2l, hi = 0;
1397 /* ... fall through ... */
1399 case ROUND_DIV_EXPR:
1400 if (int2h == 0 && int2l == 1)
1402 low = int1l, hi = int1h;
1405 if (int1l == int2l && int1h == int2h
1406 && ! (int1l == 0 && int1h == 0))
1411 overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
1412 &low, &hi, &garbagel, &garbageh);
1415 case TRUNC_MOD_EXPR:
1416 case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
1417 /* This is a shortcut for a common special case. */
1418 if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1419 && ! TREE_CONSTANT_OVERFLOW (arg1)
1420 && ! TREE_CONSTANT_OVERFLOW (arg2)
1421 && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1423 if (code == CEIL_MOD_EXPR)
1425 low = int1l % int2l, hi = 0;
1429 /* ... fall through ... */
1431 case ROUND_MOD_EXPR:
1432 overflow = div_and_round_double (code, uns,
1433 int1l, int1h, int2l, int2h,
1434 &garbagel, &garbageh, &low, &hi);
1440 low = (((unsigned HOST_WIDE_INT) int1h
1441 < (unsigned HOST_WIDE_INT) int2h)
1442 || (((unsigned HOST_WIDE_INT) int1h
1443 == (unsigned HOST_WIDE_INT) int2h)
1446 low = (int1h < int2h
1447 || (int1h == int2h && int1l < int2l));
1449 if (low == (code == MIN_EXPR))
1450 low = int1l, hi = int1h;
1452 low = int2l, hi = int2h;
1459 t = build_int_cst_wide (TREE_TYPE (arg1), low, hi);
1463 /* Propagate overflow flags ourselves. */
1464 if (((!uns || is_sizetype) && overflow)
1465 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
1468 TREE_OVERFLOW (t) = 1;
1469 TREE_CONSTANT_OVERFLOW (t) = 1;
1471 else if (TREE_CONSTANT_OVERFLOW (arg1) | TREE_CONSTANT_OVERFLOW (arg2))
1474 TREE_CONSTANT_OVERFLOW (t) = 1;
1478 t = force_fit_type (t, 1,
1479 ((!uns || is_sizetype) && overflow)
1480 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2),
1481 TREE_CONSTANT_OVERFLOW (arg1)
1482 | TREE_CONSTANT_OVERFLOW (arg2));
1487 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1488 constant. We assume ARG1 and ARG2 have the same data type, or at least
1489 are the same kind of constant and the same machine mode.
1491 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
1494 const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
1499 if (TREE_CODE (arg1) == INTEGER_CST)
1500 return int_const_binop (code, arg1, arg2, notrunc);
1502 if (TREE_CODE (arg1) == REAL_CST)
1504 enum machine_mode mode;
1507 REAL_VALUE_TYPE value;
1508 REAL_VALUE_TYPE result;
1512 d1 = TREE_REAL_CST (arg1);
1513 d2 = TREE_REAL_CST (arg2);
1515 type = TREE_TYPE (arg1);
1516 mode = TYPE_MODE (type);
1518 /* Don't perform operation if we honor signaling NaNs and
1519 either operand is a NaN. */
1520 if (HONOR_SNANS (mode)
1521 && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
1524 /* Don't perform operation if it would raise a division
1525 by zero exception. */
1526 if (code == RDIV_EXPR
1527 && REAL_VALUES_EQUAL (d2, dconst0)
1528 && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
1531 /* If either operand is a NaN, just return it. Otherwise, set up
1532 for floating-point trap; we return an overflow. */
1533 if (REAL_VALUE_ISNAN (d1))
1535 else if (REAL_VALUE_ISNAN (d2))
1538 inexact = real_arithmetic (&value, code, &d1, &d2);
1539 real_convert (&result, mode, &value);
1541 /* Don't constant fold this floating point operation if the
1542 result may dependent upon the run-time rounding mode and
1543 flag_rounding_math is set, or if GCC's software emulation
1544 is unable to accurately represent the result. */
1546 if ((flag_rounding_math
1547 || (REAL_MODE_FORMAT_COMPOSITE_P (mode)
1548 && !flag_unsafe_math_optimizations))
1549 && (inexact || !real_identical (&result, &value)))
1552 t = build_real (type, result);
1554 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
1555 TREE_CONSTANT_OVERFLOW (t)
1557 | TREE_CONSTANT_OVERFLOW (arg1)
1558 | TREE_CONSTANT_OVERFLOW (arg2);
1561 if (TREE_CODE (arg1) == COMPLEX_CST)
1563 tree type = TREE_TYPE (arg1);
1564 tree r1 = TREE_REALPART (arg1);
1565 tree i1 = TREE_IMAGPART (arg1);
1566 tree r2 = TREE_REALPART (arg2);
1567 tree i2 = TREE_IMAGPART (arg2);
1573 t = build_complex (type,
1574 const_binop (PLUS_EXPR, r1, r2, notrunc),
1575 const_binop (PLUS_EXPR, i1, i2, notrunc));
1579 t = build_complex (type,
1580 const_binop (MINUS_EXPR, r1, r2, notrunc),
1581 const_binop (MINUS_EXPR, i1, i2, notrunc));
1585 t = build_complex (type,
1586 const_binop (MINUS_EXPR,
1587 const_binop (MULT_EXPR,
1589 const_binop (MULT_EXPR,
1592 const_binop (PLUS_EXPR,
1593 const_binop (MULT_EXPR,
1595 const_binop (MULT_EXPR,
1602 tree t1, t2, real, imag;
1604 = const_binop (PLUS_EXPR,
1605 const_binop (MULT_EXPR, r2, r2, notrunc),
1606 const_binop (MULT_EXPR, i2, i2, notrunc),
1609 t1 = const_binop (PLUS_EXPR,
1610 const_binop (MULT_EXPR, r1, r2, notrunc),
1611 const_binop (MULT_EXPR, i1, i2, notrunc),
1613 t2 = const_binop (MINUS_EXPR,
1614 const_binop (MULT_EXPR, i1, r2, notrunc),
1615 const_binop (MULT_EXPR, r1, i2, notrunc),
1618 if (INTEGRAL_TYPE_P (TREE_TYPE (r1)))
1620 real = const_binop (TRUNC_DIV_EXPR, t1, magsquared, notrunc);
1621 imag = const_binop (TRUNC_DIV_EXPR, t2, magsquared, notrunc);
1625 real = const_binop (RDIV_EXPR, t1, magsquared, notrunc);
1626 imag = const_binop (RDIV_EXPR, t2, magsquared, notrunc);
1631 t = build_complex (type, real, imag);
1643 /* Create a size type INT_CST node with NUMBER sign extended. KIND
1644 indicates which particular sizetype to create. */
1647 size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
1649 return build_int_cst (sizetype_tab[(int) kind], number);
1652 /* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE
1653 is a tree code. The type of the result is taken from the operands.
1654 Both must be the same type integer type and it must be a size type.
1655 If the operands are constant, so is the result. */
1658 size_binop (enum tree_code code, tree arg0, tree arg1)
1660 tree type = TREE_TYPE (arg0);
1662 gcc_assert (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
1663 && type == TREE_TYPE (arg1));
1665 /* Handle the special case of two integer constants faster. */
1666 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
1668 /* And some specific cases even faster than that. */
1669 if (code == PLUS_EXPR && integer_zerop (arg0))
1671 else if ((code == MINUS_EXPR || code == PLUS_EXPR)
1672 && integer_zerop (arg1))
1674 else if (code == MULT_EXPR && integer_onep (arg0))
1677 /* Handle general case of two integer constants. */
1678 return int_const_binop (code, arg0, arg1, 0);
1681 if (arg0 == error_mark_node || arg1 == error_mark_node)
1682 return error_mark_node;
1684 return fold_build2 (code, type, arg0, arg1);
1687 /* Given two values, either both of sizetype or both of bitsizetype,
1688 compute the difference between the two values. Return the value
1689 in signed type corresponding to the type of the operands. */
1692 size_diffop (tree arg0, tree arg1)
1694 tree type = TREE_TYPE (arg0);
1697 gcc_assert (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
1698 && type == TREE_TYPE (arg1));
1700 /* If the type is already signed, just do the simple thing. */
1701 if (!TYPE_UNSIGNED (type))
1702 return size_binop (MINUS_EXPR, arg0, arg1);
1704 ctype = type == bitsizetype ? sbitsizetype : ssizetype;
1706 /* If either operand is not a constant, do the conversions to the signed
1707 type and subtract. The hardware will do the right thing with any
1708 overflow in the subtraction. */
1709 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
1710 return size_binop (MINUS_EXPR, fold_convert (ctype, arg0),
1711 fold_convert (ctype, arg1));
1713 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
1714 Otherwise, subtract the other way, convert to CTYPE (we know that can't
1715 overflow) and negate (which can't either). Special-case a result
1716 of zero while we're here. */
1717 if (tree_int_cst_equal (arg0, arg1))
1718 return fold_convert (ctype, integer_zero_node);
1719 else if (tree_int_cst_lt (arg1, arg0))
1720 return fold_convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
1722 return size_binop (MINUS_EXPR, fold_convert (ctype, integer_zero_node),
1723 fold_convert (ctype, size_binop (MINUS_EXPR,
1727 /* A subroutine of fold_convert_const handling conversions of an
1728 INTEGER_CST to another integer type. */
1731 fold_convert_const_int_from_int (tree type, tree arg1)
1735 /* Given an integer constant, make new constant with new type,
1736 appropriately sign-extended or truncated. */
1737 t = build_int_cst_wide (type, TREE_INT_CST_LOW (arg1),
1738 TREE_INT_CST_HIGH (arg1));
1740 t = force_fit_type (t,
1741 /* Don't set the overflow when
1742 converting a pointer */
1743 !POINTER_TYPE_P (TREE_TYPE (arg1)),
1744 (TREE_INT_CST_HIGH (arg1) < 0
1745 && (TYPE_UNSIGNED (type)
1746 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
1747 | TREE_OVERFLOW (arg1),
1748 TREE_CONSTANT_OVERFLOW (arg1));
1753 /* A subroutine of fold_convert_const handling conversions a REAL_CST
1754 to an integer type. */
1757 fold_convert_const_int_from_real (enum tree_code code, tree type, tree arg1)
1762 /* The following code implements the floating point to integer
1763 conversion rules required by the Java Language Specification,
1764 that IEEE NaNs are mapped to zero and values that overflow
1765 the target precision saturate, i.e. values greater than
1766 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
1767 are mapped to INT_MIN. These semantics are allowed by the
1768 C and C++ standards that simply state that the behavior of
1769 FP-to-integer conversion is unspecified upon overflow. */
1771 HOST_WIDE_INT high, low;
1773 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
1777 case FIX_TRUNC_EXPR:
1778 real_trunc (&r, VOIDmode, &x);
1782 real_ceil (&r, VOIDmode, &x);
1785 case FIX_FLOOR_EXPR:
1786 real_floor (&r, VOIDmode, &x);
1789 case FIX_ROUND_EXPR:
1790 real_round (&r, VOIDmode, &x);
1797 /* If R is NaN, return zero and show we have an overflow. */
1798 if (REAL_VALUE_ISNAN (r))
1805 /* See if R is less than the lower bound or greater than the
1810 tree lt = TYPE_MIN_VALUE (type);
1811 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
1812 if (REAL_VALUES_LESS (r, l))
1815 high = TREE_INT_CST_HIGH (lt);
1816 low = TREE_INT_CST_LOW (lt);
1822 tree ut = TYPE_MAX_VALUE (type);
1825 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
1826 if (REAL_VALUES_LESS (u, r))
1829 high = TREE_INT_CST_HIGH (ut);
1830 low = TREE_INT_CST_LOW (ut);
1836 REAL_VALUE_TO_INT (&low, &high, r);
1838 t = build_int_cst_wide (type, low, high);
1840 t = force_fit_type (t, -1, overflow | TREE_OVERFLOW (arg1),
1841 TREE_CONSTANT_OVERFLOW (arg1));
1845 /* A subroutine of fold_convert_const handling conversions a REAL_CST
1846 to another floating point type. */
1849 fold_convert_const_real_from_real (tree type, tree arg1)
1851 REAL_VALUE_TYPE value;
1854 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
1855 t = build_real (type, value);
1857 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
1858 TREE_CONSTANT_OVERFLOW (t)
1859 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
1863 /* Attempt to fold type conversion operation CODE of expression ARG1 to
1864 type TYPE. If no simplification can be done return NULL_TREE. */
1867 fold_convert_const (enum tree_code code, tree type, tree arg1)
1869 if (TREE_TYPE (arg1) == type)
1872 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
1874 if (TREE_CODE (arg1) == INTEGER_CST)
1875 return fold_convert_const_int_from_int (type, arg1);
1876 else if (TREE_CODE (arg1) == REAL_CST)
1877 return fold_convert_const_int_from_real (code, type, arg1);
1879 else if (TREE_CODE (type) == REAL_TYPE)
1881 if (TREE_CODE (arg1) == INTEGER_CST)
1882 return build_real_from_int_cst (type, arg1);
1883 if (TREE_CODE (arg1) == REAL_CST)
1884 return fold_convert_const_real_from_real (type, arg1);
1889 /* Construct a vector of zero elements of vector type TYPE. */
1892 build_zero_vector (tree type)
1897 elem = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
1898 units = TYPE_VECTOR_SUBPARTS (type);
1901 for (i = 0; i < units; i++)
1902 list = tree_cons (NULL_TREE, elem, list);
1903 return build_vector (type, list);
1906 /* Convert expression ARG to type TYPE. Used by the middle-end for
1907 simple conversions in preference to calling the front-end's convert. */
1910 fold_convert (tree type, tree arg)
1912 tree orig = TREE_TYPE (arg);
1918 if (TREE_CODE (arg) == ERROR_MARK
1919 || TREE_CODE (type) == ERROR_MARK
1920 || TREE_CODE (orig) == ERROR_MARK)
1921 return error_mark_node;
1923 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig)
1924 || lang_hooks.types_compatible_p (TYPE_MAIN_VARIANT (type),
1925 TYPE_MAIN_VARIANT (orig)))
1926 return fold_build1 (NOP_EXPR, type, arg);
1928 switch (TREE_CODE (type))
1930 case INTEGER_TYPE: case CHAR_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1931 case POINTER_TYPE: case REFERENCE_TYPE:
1933 if (TREE_CODE (arg) == INTEGER_CST)
1935 tem = fold_convert_const (NOP_EXPR, type, arg);
1936 if (tem != NULL_TREE)
1939 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
1940 || TREE_CODE (orig) == OFFSET_TYPE)
1941 return fold_build1 (NOP_EXPR, type, arg);
1942 if (TREE_CODE (orig) == COMPLEX_TYPE)
1944 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
1945 return fold_convert (type, tem);
1947 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
1948 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
1949 return fold_build1 (NOP_EXPR, type, arg);
1952 if (TREE_CODE (arg) == INTEGER_CST)
1954 tem = fold_convert_const (FLOAT_EXPR, type, arg);
1955 if (tem != NULL_TREE)
1958 else if (TREE_CODE (arg) == REAL_CST)
1960 tem = fold_convert_const (NOP_EXPR, type, arg);
1961 if (tem != NULL_TREE)
1965 switch (TREE_CODE (orig))
1967 case INTEGER_TYPE: case CHAR_TYPE:
1968 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
1969 case POINTER_TYPE: case REFERENCE_TYPE:
1970 return fold_build1 (FLOAT_EXPR, type, arg);
1973 return fold_build1 (flag_float_store ? CONVERT_EXPR : NOP_EXPR,
1977 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
1978 return fold_convert (type, tem);
1985 switch (TREE_CODE (orig))
1987 case INTEGER_TYPE: case CHAR_TYPE:
1988 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
1989 case POINTER_TYPE: case REFERENCE_TYPE:
1991 return build2 (COMPLEX_EXPR, type,
1992 fold_convert (TREE_TYPE (type), arg),
1993 fold_convert (TREE_TYPE (type), integer_zero_node));
1998 if (TREE_CODE (arg) == COMPLEX_EXPR)
2000 rpart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 0));
2001 ipart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 1));
2002 return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
2005 arg = save_expr (arg);
2006 rpart = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2007 ipart = fold_build1 (IMAGPART_EXPR, TREE_TYPE (orig), arg);
2008 rpart = fold_convert (TREE_TYPE (type), rpart);
2009 ipart = fold_convert (TREE_TYPE (type), ipart);
2010 return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
2018 if (integer_zerop (arg))
2019 return build_zero_vector (type);
2020 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2021 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2022 || TREE_CODE (orig) == VECTOR_TYPE);
2023 return fold_build1 (NOP_EXPR, type, arg);
2026 return fold_build1 (CONVERT_EXPR, type, fold_ignored_result (arg));
2033 /* Return false if expr can be assumed not to be an value, true
2037 maybe_lvalue_p (tree x)
2039 /* We only need to wrap lvalue tree codes. */
2040 switch (TREE_CODE (x))
2051 case ALIGN_INDIRECT_REF:
2052 case MISALIGNED_INDIRECT_REF:
2054 case ARRAY_RANGE_REF:
2060 case PREINCREMENT_EXPR:
2061 case PREDECREMENT_EXPR:
2063 case TRY_CATCH_EXPR:
2064 case WITH_CLEANUP_EXPR:
2075 /* Assume the worst for front-end tree codes. */
2076 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2084 /* Return an expr equal to X but certainly not valid as an lvalue. */
2089 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2094 if (! maybe_lvalue_p (x))
2096 return build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
2099 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2100 Zero means allow extended lvalues. */
2102 int pedantic_lvalues;
2104 /* When pedantic, return an expr equal to X but certainly not valid as a
2105 pedantic lvalue. Otherwise, return X. */
2108 pedantic_non_lvalue (tree x)
2110 if (pedantic_lvalues)
2111 return non_lvalue (x);
2116 /* Given a tree comparison code, return the code that is the logical inverse
2117 of the given code. It is not safe to do this for floating-point
2118 comparisons, except for NE_EXPR and EQ_EXPR, so we receive a machine mode
2119 as well: if reversing the comparison is unsafe, return ERROR_MARK. */
2122 invert_tree_comparison (enum tree_code code, bool honor_nans)
2124 if (honor_nans && flag_trapping_math)
2134 return honor_nans ? UNLE_EXPR : LE_EXPR;
2136 return honor_nans ? UNLT_EXPR : LT_EXPR;
2138 return honor_nans ? UNGE_EXPR : GE_EXPR;
2140 return honor_nans ? UNGT_EXPR : GT_EXPR;
2154 return UNORDERED_EXPR;
2155 case UNORDERED_EXPR:
2156 return ORDERED_EXPR;
2162 /* Similar, but return the comparison that results if the operands are
2163 swapped. This is safe for floating-point. */
2166 swap_tree_comparison (enum tree_code code)
2173 case UNORDERED_EXPR:
2199 /* Convert a comparison tree code from an enum tree_code representation
2200 into a compcode bit-based encoding. This function is the inverse of
2201 compcode_to_comparison. */
2203 static enum comparison_code
2204 comparison_to_compcode (enum tree_code code)
2221 return COMPCODE_ORD;
2222 case UNORDERED_EXPR:
2223 return COMPCODE_UNORD;
2225 return COMPCODE_UNLT;
2227 return COMPCODE_UNEQ;
2229 return COMPCODE_UNLE;
2231 return COMPCODE_UNGT;
2233 return COMPCODE_LTGT;
2235 return COMPCODE_UNGE;
2241 /* Convert a compcode bit-based encoding of a comparison operator back
2242 to GCC's enum tree_code representation. This function is the
2243 inverse of comparison_to_compcode. */
2245 static enum tree_code
2246 compcode_to_comparison (enum comparison_code code)
2263 return ORDERED_EXPR;
2264 case COMPCODE_UNORD:
2265 return UNORDERED_EXPR;
2283 /* Return a tree for the comparison which is the combination of
2284 doing the AND or OR (depending on CODE) of the two operations LCODE
2285 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2286 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2287 if this makes the transformation invalid. */
2290 combine_comparisons (enum tree_code code, enum tree_code lcode,
2291 enum tree_code rcode, tree truth_type,
2292 tree ll_arg, tree lr_arg)
2294 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2295 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2296 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2297 enum comparison_code compcode;
2301 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2302 compcode = lcompcode & rcompcode;
2305 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2306 compcode = lcompcode | rcompcode;
2315 /* Eliminate unordered comparisons, as well as LTGT and ORD
2316 which are not used unless the mode has NaNs. */
2317 compcode &= ~COMPCODE_UNORD;
2318 if (compcode == COMPCODE_LTGT)
2319 compcode = COMPCODE_NE;
2320 else if (compcode == COMPCODE_ORD)
2321 compcode = COMPCODE_TRUE;
2323 else if (flag_trapping_math)
2325 /* Check that the original operation and the optimized ones will trap
2326 under the same condition. */
2327 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2328 && (lcompcode != COMPCODE_EQ)
2329 && (lcompcode != COMPCODE_ORD);
2330 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2331 && (rcompcode != COMPCODE_EQ)
2332 && (rcompcode != COMPCODE_ORD);
2333 bool trap = (compcode & COMPCODE_UNORD) == 0
2334 && (compcode != COMPCODE_EQ)
2335 && (compcode != COMPCODE_ORD);
2337 /* In a short-circuited boolean expression the LHS might be
2338 such that the RHS, if evaluated, will never trap. For
2339 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2340 if neither x nor y is NaN. (This is a mixed blessing: for
2341 example, the expression above will never trap, hence
2342 optimizing it to x < y would be invalid). */
2343 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2344 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2347 /* If the comparison was short-circuited, and only the RHS
2348 trapped, we may now generate a spurious trap. */
2350 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2353 /* If we changed the conditions that cause a trap, we lose. */
2354 if ((ltrap || rtrap) != trap)
2358 if (compcode == COMPCODE_TRUE)
2359 return constant_boolean_node (true, truth_type);
2360 else if (compcode == COMPCODE_FALSE)
2361 return constant_boolean_node (false, truth_type);
2363 return fold_build2 (compcode_to_comparison (compcode),
2364 truth_type, ll_arg, lr_arg);
2367 /* Return nonzero if CODE is a tree code that represents a truth value. */
2370 truth_value_p (enum tree_code code)
2372 return (TREE_CODE_CLASS (code) == tcc_comparison
2373 || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
2374 || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
2375 || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
2378 /* Return nonzero if two operands (typically of the same tree node)
2379 are necessarily equal. If either argument has side-effects this
2380 function returns zero. FLAGS modifies behavior as follows:
2382 If OEP_ONLY_CONST is set, only return nonzero for constants.
2383 This function tests whether the operands are indistinguishable;
2384 it does not test whether they are equal using C's == operation.
2385 The distinction is important for IEEE floating point, because
2386 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2387 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2389 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2390 even though it may hold multiple values during a function.
2391 This is because a GCC tree node guarantees that nothing else is
2392 executed between the evaluation of its "operands" (which may often
2393 be evaluated in arbitrary order). Hence if the operands themselves
2394 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2395 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2396 unset means assuming isochronic (or instantaneous) tree equivalence.
2397 Unless comparing arbitrary expression trees, such as from different
2398 statements, this flag can usually be left unset.
2400 If OEP_PURE_SAME is set, then pure functions with identical arguments
2401 are considered the same. It is used when the caller has other ways
2402 to ensure that global memory is unchanged in between. */
2405 operand_equal_p (tree arg0, tree arg1, unsigned int flags)
2407 /* If either is ERROR_MARK, they aren't equal. */
2408 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK)
2411 /* If both types don't have the same signedness, then we can't consider
2412 them equal. We must check this before the STRIP_NOPS calls
2413 because they may change the signedness of the arguments. */
2414 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2420 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2421 /* This is needed for conversions and for COMPONENT_REF.
2422 Might as well play it safe and always test this. */
2423 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2424 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2425 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2428 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2429 We don't care about side effects in that case because the SAVE_EXPR
2430 takes care of that for us. In all other cases, two expressions are
2431 equal if they have no side effects. If we have two identical
2432 expressions with side effects that should be treated the same due
2433 to the only side effects being identical SAVE_EXPR's, that will
2434 be detected in the recursive calls below. */
2435 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2436 && (TREE_CODE (arg0) == SAVE_EXPR
2437 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2440 /* Next handle constant cases, those for which we can return 1 even
2441 if ONLY_CONST is set. */
2442 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2443 switch (TREE_CODE (arg0))
2446 return (! TREE_CONSTANT_OVERFLOW (arg0)
2447 && ! TREE_CONSTANT_OVERFLOW (arg1)
2448 && tree_int_cst_equal (arg0, arg1));
2451 return (! TREE_CONSTANT_OVERFLOW (arg0)
2452 && ! TREE_CONSTANT_OVERFLOW (arg1)
2453 && REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2454 TREE_REAL_CST (arg1)));
2460 if (TREE_CONSTANT_OVERFLOW (arg0)
2461 || TREE_CONSTANT_OVERFLOW (arg1))
2464 v1 = TREE_VECTOR_CST_ELTS (arg0);
2465 v2 = TREE_VECTOR_CST_ELTS (arg1);
2468 if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
2471 v1 = TREE_CHAIN (v1);
2472 v2 = TREE_CHAIN (v2);
2479 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2481 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2485 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2486 && ! memcmp (TREE_STRING_POINTER (arg0),
2487 TREE_STRING_POINTER (arg1),
2488 TREE_STRING_LENGTH (arg0)));
2491 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2497 if (flags & OEP_ONLY_CONST)
2500 /* Define macros to test an operand from arg0 and arg1 for equality and a
2501 variant that allows null and views null as being different from any
2502 non-null value. In the latter case, if either is null, the both
2503 must be; otherwise, do the normal comparison. */
2504 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2505 TREE_OPERAND (arg1, N), flags)
2507 #define OP_SAME_WITH_NULL(N) \
2508 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2509 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2511 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2514 /* Two conversions are equal only if signedness and modes match. */
2515 switch (TREE_CODE (arg0))
2520 case FIX_TRUNC_EXPR:
2521 case FIX_FLOOR_EXPR:
2522 case FIX_ROUND_EXPR:
2523 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2524 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2534 case tcc_comparison:
2536 if (OP_SAME (0) && OP_SAME (1))
2539 /* For commutative ops, allow the other order. */
2540 return (commutative_tree_code (TREE_CODE (arg0))
2541 && operand_equal_p (TREE_OPERAND (arg0, 0),
2542 TREE_OPERAND (arg1, 1), flags)
2543 && operand_equal_p (TREE_OPERAND (arg0, 1),
2544 TREE_OPERAND (arg1, 0), flags));
2547 /* If either of the pointer (or reference) expressions we are
2548 dereferencing contain a side effect, these cannot be equal. */
2549 if (TREE_SIDE_EFFECTS (arg0)
2550 || TREE_SIDE_EFFECTS (arg1))
2553 switch (TREE_CODE (arg0))
2556 case ALIGN_INDIRECT_REF:
2557 case MISALIGNED_INDIRECT_REF:
2563 case ARRAY_RANGE_REF:
2564 /* Operands 2 and 3 may be null. */
2567 && OP_SAME_WITH_NULL (2)
2568 && OP_SAME_WITH_NULL (3));
2571 /* Handle operand 2 the same as for ARRAY_REF. */
2572 return OP_SAME (0) && OP_SAME (1) && OP_SAME_WITH_NULL (2);
2575 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2581 case tcc_expression:
2582 switch (TREE_CODE (arg0))
2585 case TRUTH_NOT_EXPR:
2588 case TRUTH_ANDIF_EXPR:
2589 case TRUTH_ORIF_EXPR:
2590 return OP_SAME (0) && OP_SAME (1);
2592 case TRUTH_AND_EXPR:
2594 case TRUTH_XOR_EXPR:
2595 if (OP_SAME (0) && OP_SAME (1))
2598 /* Otherwise take into account this is a commutative operation. */
2599 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2600 TREE_OPERAND (arg1, 1), flags)
2601 && operand_equal_p (TREE_OPERAND (arg0, 1),
2602 TREE_OPERAND (arg1, 0), flags));
2605 /* If the CALL_EXPRs call different functions, then they
2606 clearly can not be equal. */
2611 unsigned int cef = call_expr_flags (arg0);
2612 if (flags & OEP_PURE_SAME)
2613 cef &= ECF_CONST | ECF_PURE;
2620 /* Now see if all the arguments are the same. operand_equal_p
2621 does not handle TREE_LIST, so we walk the operands here
2622 feeding them to operand_equal_p. */
2623 arg0 = TREE_OPERAND (arg0, 1);
2624 arg1 = TREE_OPERAND (arg1, 1);
2625 while (arg0 && arg1)
2627 if (! operand_equal_p (TREE_VALUE (arg0), TREE_VALUE (arg1),
2631 arg0 = TREE_CHAIN (arg0);
2632 arg1 = TREE_CHAIN (arg1);
2635 /* If we get here and both argument lists are exhausted
2636 then the CALL_EXPRs are equal. */
2637 return ! (arg0 || arg1);
2643 case tcc_declaration:
2644 /* Consider __builtin_sqrt equal to sqrt. */
2645 return (TREE_CODE (arg0) == FUNCTION_DECL
2646 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2647 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2648 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
2655 #undef OP_SAME_WITH_NULL
2658 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2659 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2661 When in doubt, return 0. */
2664 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2666 int unsignedp1, unsignedpo;
2667 tree primarg0, primarg1, primother;
2668 unsigned int correct_width;
2670 if (operand_equal_p (arg0, arg1, 0))
2673 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2674 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2677 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2678 and see if the inner values are the same. This removes any
2679 signedness comparison, which doesn't matter here. */
2680 primarg0 = arg0, primarg1 = arg1;
2681 STRIP_NOPS (primarg0);
2682 STRIP_NOPS (primarg1);
2683 if (operand_equal_p (primarg0, primarg1, 0))
2686 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2687 actual comparison operand, ARG0.
2689 First throw away any conversions to wider types
2690 already present in the operands. */
2692 primarg1 = get_narrower (arg1, &unsignedp1);
2693 primother = get_narrower (other, &unsignedpo);
2695 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2696 if (unsignedp1 == unsignedpo
2697 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2698 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2700 tree type = TREE_TYPE (arg0);
2702 /* Make sure shorter operand is extended the right way
2703 to match the longer operand. */
2704 primarg1 = fold_convert (lang_hooks.types.signed_or_unsigned_type
2705 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2707 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
2714 /* See if ARG is an expression that is either a comparison or is performing
2715 arithmetic on comparisons. The comparisons must only be comparing
2716 two different values, which will be stored in *CVAL1 and *CVAL2; if
2717 they are nonzero it means that some operands have already been found.
2718 No variables may be used anywhere else in the expression except in the
2719 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2720 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2722 If this is true, return 1. Otherwise, return zero. */
2725 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2727 enum tree_code code = TREE_CODE (arg);
2728 enum tree_code_class class = TREE_CODE_CLASS (code);
2730 /* We can handle some of the tcc_expression cases here. */
2731 if (class == tcc_expression && code == TRUTH_NOT_EXPR)
2733 else if (class == tcc_expression
2734 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2735 || code == COMPOUND_EXPR))
2738 else if (class == tcc_expression && code == SAVE_EXPR
2739 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2741 /* If we've already found a CVAL1 or CVAL2, this expression is
2742 two complex to handle. */
2743 if (*cval1 || *cval2)
2753 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2756 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2757 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2758 cval1, cval2, save_p));
2763 case tcc_expression:
2764 if (code == COND_EXPR)
2765 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2766 cval1, cval2, save_p)
2767 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2768 cval1, cval2, save_p)
2769 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2770 cval1, cval2, save_p));
2773 case tcc_comparison:
2774 /* First see if we can handle the first operand, then the second. For
2775 the second operand, we know *CVAL1 can't be zero. It must be that
2776 one side of the comparison is each of the values; test for the
2777 case where this isn't true by failing if the two operands
2780 if (operand_equal_p (TREE_OPERAND (arg, 0),
2781 TREE_OPERAND (arg, 1), 0))
2785 *cval1 = TREE_OPERAND (arg, 0);
2786 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2788 else if (*cval2 == 0)
2789 *cval2 = TREE_OPERAND (arg, 0);
2790 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2795 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2797 else if (*cval2 == 0)
2798 *cval2 = TREE_OPERAND (arg, 1);
2799 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2811 /* ARG is a tree that is known to contain just arithmetic operations and
2812 comparisons. Evaluate the operations in the tree substituting NEW0 for
2813 any occurrence of OLD0 as an operand of a comparison and likewise for
2817 eval_subst (tree arg, tree old0, tree new0, tree old1, tree new1)
2819 tree type = TREE_TYPE (arg);
2820 enum tree_code code = TREE_CODE (arg);
2821 enum tree_code_class class = TREE_CODE_CLASS (code);
2823 /* We can handle some of the tcc_expression cases here. */
2824 if (class == tcc_expression && code == TRUTH_NOT_EXPR)
2826 else if (class == tcc_expression
2827 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2833 return fold_build1 (code, type,
2834 eval_subst (TREE_OPERAND (arg, 0),
2835 old0, new0, old1, new1));
2838 return fold_build2 (code, type,
2839 eval_subst (TREE_OPERAND (arg, 0),
2840 old0, new0, old1, new1),
2841 eval_subst (TREE_OPERAND (arg, 1),
2842 old0, new0, old1, new1));
2844 case tcc_expression:
2848 return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);
2851 return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);
2854 return fold_build3 (code, type,
2855 eval_subst (TREE_OPERAND (arg, 0),
2856 old0, new0, old1, new1),
2857 eval_subst (TREE_OPERAND (arg, 1),
2858 old0, new0, old1, new1),
2859 eval_subst (TREE_OPERAND (arg, 2),
2860 old0, new0, old1, new1));
2864 /* Fall through - ??? */
2866 case tcc_comparison:
2868 tree arg0 = TREE_OPERAND (arg, 0);
2869 tree arg1 = TREE_OPERAND (arg, 1);
2871 /* We need to check both for exact equality and tree equality. The
2872 former will be true if the operand has a side-effect. In that
2873 case, we know the operand occurred exactly once. */
2875 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
2877 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
2880 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
2882 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
2885 return fold_build2 (code, type, arg0, arg1);
2893 /* Return a tree for the case when the result of an expression is RESULT
2894 converted to TYPE and OMITTED was previously an operand of the expression
2895 but is now not needed (e.g., we folded OMITTED * 0).
2897 If OMITTED has side effects, we must evaluate it. Otherwise, just do
2898 the conversion of RESULT to TYPE. */
2901 omit_one_operand (tree type, tree result, tree omitted)
2903 tree t = fold_convert (type, result);
2905 if (TREE_SIDE_EFFECTS (omitted))
2906 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
2908 return non_lvalue (t);
2911 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
2914 pedantic_omit_one_operand (tree type, tree result, tree omitted)
2916 tree t = fold_convert (type, result);
2918 if (TREE_SIDE_EFFECTS (omitted))
2919 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
2921 return pedantic_non_lvalue (t);
2924 /* Return a tree for the case when the result of an expression is RESULT
2925 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
2926 of the expression but are now not needed.
2928 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
2929 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
2930 evaluated before OMITTED2. Otherwise, if neither has side effects,
2931 just do the conversion of RESULT to TYPE. */
2934 omit_two_operands (tree type, tree result, tree omitted1, tree omitted2)
2936 tree t = fold_convert (type, result);
2938 if (TREE_SIDE_EFFECTS (omitted2))
2939 t = build2 (COMPOUND_EXPR, type, omitted2, t);
2940 if (TREE_SIDE_EFFECTS (omitted1))
2941 t = build2 (COMPOUND_EXPR, type, omitted1, t);
2943 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue (t) : t;
2947 /* Return a simplified tree node for the truth-negation of ARG. This
2948 never alters ARG itself. We assume that ARG is an operation that
2949 returns a truth value (0 or 1).
2951 FIXME: one would think we would fold the result, but it causes
2952 problems with the dominator optimizer. */
2954 invert_truthvalue (tree arg)
2956 tree type = TREE_TYPE (arg);
2957 enum tree_code code = TREE_CODE (arg);
2959 if (code == ERROR_MARK)
2962 /* If this is a comparison, we can simply invert it, except for
2963 floating-point non-equality comparisons, in which case we just
2964 enclose a TRUTH_NOT_EXPR around what we have. */
2966 if (TREE_CODE_CLASS (code) == tcc_comparison)
2968 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
2969 if (FLOAT_TYPE_P (op_type)
2970 && flag_trapping_math
2971 && code != ORDERED_EXPR && code != UNORDERED_EXPR
2972 && code != NE_EXPR && code != EQ_EXPR)
2973 return build1 (TRUTH_NOT_EXPR, type, arg);
2976 code = invert_tree_comparison (code,
2977 HONOR_NANS (TYPE_MODE (op_type)));
2978 if (code == ERROR_MARK)
2979 return build1 (TRUTH_NOT_EXPR, type, arg);
2981 return build2 (code, type,
2982 TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
2989 return constant_boolean_node (integer_zerop (arg), type);
2991 case TRUTH_AND_EXPR:
2992 return build2 (TRUTH_OR_EXPR, type,
2993 invert_truthvalue (TREE_OPERAND (arg, 0)),
2994 invert_truthvalue (TREE_OPERAND (arg, 1)));
2997 return build2 (TRUTH_AND_EXPR, type,
2998 invert_truthvalue (TREE_OPERAND (arg, 0)),
2999 invert_truthvalue (TREE_OPERAND (arg, 1)));
3001 case TRUTH_XOR_EXPR:
3002 /* Here we can invert either operand. We invert the first operand
3003 unless the second operand is a TRUTH_NOT_EXPR in which case our
3004 result is the XOR of the first operand with the inside of the
3005 negation of the second operand. */
3007 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3008 return build2 (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3009 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3011 return build2 (TRUTH_XOR_EXPR, type,
3012 invert_truthvalue (TREE_OPERAND (arg, 0)),
3013 TREE_OPERAND (arg, 1));
3015 case TRUTH_ANDIF_EXPR:
3016 return build2 (TRUTH_ORIF_EXPR, type,
3017 invert_truthvalue (TREE_OPERAND (arg, 0)),
3018 invert_truthvalue (TREE_OPERAND (arg, 1)));
3020 case TRUTH_ORIF_EXPR:
3021 return build2 (TRUTH_ANDIF_EXPR, type,
3022 invert_truthvalue (TREE_OPERAND (arg, 0)),
3023 invert_truthvalue (TREE_OPERAND (arg, 1)));
3025 case TRUTH_NOT_EXPR:
3026 return TREE_OPERAND (arg, 0);
3029 return build3 (COND_EXPR, type, TREE_OPERAND (arg, 0),
3030 invert_truthvalue (TREE_OPERAND (arg, 1)),
3031 invert_truthvalue (TREE_OPERAND (arg, 2)));
3034 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
3035 invert_truthvalue (TREE_OPERAND (arg, 1)));
3037 case NON_LVALUE_EXPR:
3038 return invert_truthvalue (TREE_OPERAND (arg, 0));
3041 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3046 return build1 (TREE_CODE (arg), type,
3047 invert_truthvalue (TREE_OPERAND (arg, 0)));
3050 if (!integer_onep (TREE_OPERAND (arg, 1)))
3052 return build2 (EQ_EXPR, type, arg,
3053 fold_convert (type, integer_zero_node));
3056 return build1 (TRUTH_NOT_EXPR, type, arg);
3058 case CLEANUP_POINT_EXPR:
3059 return build1 (CLEANUP_POINT_EXPR, type,
3060 invert_truthvalue (TREE_OPERAND (arg, 0)));
3065 gcc_assert (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE);
3066 return build1 (TRUTH_NOT_EXPR, type, arg);
3069 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3070 operands are another bit-wise operation with a common input. If so,
3071 distribute the bit operations to save an operation and possibly two if
3072 constants are involved. For example, convert
3073 (A | B) & (A | C) into A | (B & C)
3074 Further simplification will occur if B and C are constants.
3076 If this optimization cannot be done, 0 will be returned. */
3079 distribute_bit_expr (enum tree_code code, tree type, tree arg0, tree arg1)
3084 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3085 || TREE_CODE (arg0) == code
3086 || (TREE_CODE (arg0) != BIT_AND_EXPR
3087 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3090 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3092 common = TREE_OPERAND (arg0, 0);
3093 left = TREE_OPERAND (arg0, 1);
3094 right = TREE_OPERAND (arg1, 1);
3096 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3098 common = TREE_OPERAND (arg0, 0);
3099 left = TREE_OPERAND (arg0, 1);
3100 right = TREE_OPERAND (arg1, 0);
3102 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3104 common = TREE_OPERAND (arg0, 1);
3105 left = TREE_OPERAND (arg0, 0);
3106 right = TREE_OPERAND (arg1, 1);
3108 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3110 common = TREE_OPERAND (arg0, 1);
3111 left = TREE_OPERAND (arg0, 0);
3112 right = TREE_OPERAND (arg1, 0);
3117 return fold_build2 (TREE_CODE (arg0), type, common,
3118 fold_build2 (code, type, left, right));
3121 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3122 with code CODE. This optimization is unsafe. */
3124 distribute_real_division (enum tree_code code, tree type, tree arg0, tree arg1)
3126 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3127 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3129 /* (A / C) +- (B / C) -> (A +- B) / C. */
3131 && operand_equal_p (TREE_OPERAND (arg0, 1),
3132 TREE_OPERAND (arg1, 1), 0))
3133 return fold_build2 (mul0 ? MULT_EXPR : RDIV_EXPR, type,
3134 fold_build2 (code, type,
3135 TREE_OPERAND (arg0, 0),
3136 TREE_OPERAND (arg1, 0)),
3137 TREE_OPERAND (arg0, 1));
3139 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3140 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3141 TREE_OPERAND (arg1, 0), 0)
3142 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3143 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3145 REAL_VALUE_TYPE r0, r1;
3146 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3147 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3149 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3151 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3152 real_arithmetic (&r0, code, &r0, &r1);
3153 return fold_build2 (MULT_EXPR, type,
3154 TREE_OPERAND (arg0, 0),
3155 build_real (type, r0));
3161 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3162 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3165 make_bit_field_ref (tree inner, tree type, int bitsize, int bitpos,
3172 tree size = TYPE_SIZE (TREE_TYPE (inner));
3173 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3174 || POINTER_TYPE_P (TREE_TYPE (inner)))
3175 && host_integerp (size, 0)
3176 && tree_low_cst (size, 0) == bitsize)
3177 return fold_convert (type, inner);
3180 result = build3 (BIT_FIELD_REF, type, inner,
3181 size_int (bitsize), bitsize_int (bitpos));
3183 BIT_FIELD_REF_UNSIGNED (result) = unsignedp;
3188 /* Optimize a bit-field compare.
3190 There are two cases: First is a compare against a constant and the
3191 second is a comparison of two items where the fields are at the same
3192 bit position relative to the start of a chunk (byte, halfword, word)
3193 large enough to contain it. In these cases we can avoid the shift
3194 implicit in bitfield extractions.
3196 For constants, we emit a compare of the shifted constant with the
3197 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3198 compared. For two fields at the same position, we do the ANDs with the
3199 similar mask and compare the result of the ANDs.
3201 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3202 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3203 are the left and right operands of the comparison, respectively.
3205 If the optimization described above can be done, we return the resulting
3206 tree. Otherwise we return zero. */
3209 optimize_bit_field_compare (enum tree_code code, tree compare_type,
3212 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3213 tree type = TREE_TYPE (lhs);
3214 tree signed_type, unsigned_type;
3215 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3216 enum machine_mode lmode, rmode, nmode;
3217 int lunsignedp, runsignedp;
3218 int lvolatilep = 0, rvolatilep = 0;
3219 tree linner, rinner = NULL_TREE;
3223 /* Get all the information about the extractions being done. If the bit size
3224 if the same as the size of the underlying object, we aren't doing an
3225 extraction at all and so can do nothing. We also don't want to
3226 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3227 then will no longer be able to replace it. */
3228 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3229 &lunsignedp, &lvolatilep, false);
3230 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3231 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
3236 /* If this is not a constant, we can only do something if bit positions,
3237 sizes, and signedness are the same. */
3238 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3239 &runsignedp, &rvolatilep, false);
3241 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3242 || lunsignedp != runsignedp || offset != 0
3243 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
3247 /* See if we can find a mode to refer to this field. We should be able to,
3248 but fail if we can't. */
3249 nmode = get_best_mode (lbitsize, lbitpos,
3250 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3251 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3252 TYPE_ALIGN (TREE_TYPE (rinner))),
3253 word_mode, lvolatilep || rvolatilep);
3254 if (nmode == VOIDmode)
3257 /* Set signed and unsigned types of the precision of this mode for the
3259 signed_type = lang_hooks.types.type_for_mode (nmode, 0);
3260 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3262 /* Compute the bit position and size for the new reference and our offset
3263 within it. If the new reference is the same size as the original, we
3264 won't optimize anything, so return zero. */
3265 nbitsize = GET_MODE_BITSIZE (nmode);
3266 nbitpos = lbitpos & ~ (nbitsize - 1);
3268 if (nbitsize == lbitsize)
3271 if (BYTES_BIG_ENDIAN)
3272 lbitpos = nbitsize - lbitsize - lbitpos;
3274 /* Make the mask to be used against the extracted field. */
3275 mask = build_int_cst (unsigned_type, -1);
3276 mask = force_fit_type (mask, 0, false, false);
3277 mask = fold_convert (unsigned_type, mask);
3278 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
3279 mask = const_binop (RSHIFT_EXPR, mask,
3280 size_int (nbitsize - lbitsize - lbitpos), 0);
3283 /* If not comparing with constant, just rework the comparison
3285 return build2 (code, compare_type,
3286 build2 (BIT_AND_EXPR, unsigned_type,
3287 make_bit_field_ref (linner, unsigned_type,
3288 nbitsize, nbitpos, 1),
3290 build2 (BIT_AND_EXPR, unsigned_type,
3291 make_bit_field_ref (rinner, unsigned_type,
3292 nbitsize, nbitpos, 1),
3295 /* Otherwise, we are handling the constant case. See if the constant is too
3296 big for the field. Warn and return a tree of for 0 (false) if so. We do
3297 this not only for its own sake, but to avoid having to test for this
3298 error case below. If we didn't, we might generate wrong code.
3300 For unsigned fields, the constant shifted right by the field length should
3301 be all zero. For signed fields, the high-order bits should agree with
3306 if (! integer_zerop (const_binop (RSHIFT_EXPR,
3307 fold_convert (unsigned_type, rhs),
3308 size_int (lbitsize), 0)))
3310 warning (0, "comparison is always %d due to width of bit-field",
3312 return constant_boolean_node (code == NE_EXPR, compare_type);
3317 tree tem = const_binop (RSHIFT_EXPR, fold_convert (signed_type, rhs),
3318 size_int (lbitsize - 1), 0);
3319 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
3321 warning (0, "comparison is always %d due to width of bit-field",
3323 return constant_boolean_node (code == NE_EXPR, compare_type);
3327 /* Single-bit compares should always be against zero. */
3328 if (lbitsize == 1 && ! integer_zerop (rhs))
3330 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3331 rhs = fold_convert (type, integer_zero_node);
3334 /* Make a new bitfield reference, shift the constant over the
3335 appropriate number of bits and mask it with the computed mask
3336 (in case this was a signed field). If we changed it, make a new one. */
3337 lhs = make_bit_field_ref (linner, unsigned_type, nbitsize, nbitpos, 1);
3340 TREE_SIDE_EFFECTS (lhs) = 1;
3341 TREE_THIS_VOLATILE (lhs) = 1;
3344 rhs = fold (const_binop (BIT_AND_EXPR,
3345 const_binop (LSHIFT_EXPR,
3346 fold_convert (unsigned_type, rhs),
3347 size_int (lbitpos), 0),
3350 return build2 (code, compare_type,
3351 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask),
3355 /* Subroutine for fold_truthop: decode a field reference.
3357 If EXP is a comparison reference, we return the innermost reference.
3359 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3360 set to the starting bit number.
3362 If the innermost field can be completely contained in a mode-sized
3363 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3365 *PVOLATILEP is set to 1 if the any expression encountered is volatile;
3366 otherwise it is not changed.
3368 *PUNSIGNEDP is set to the signedness of the field.
3370 *PMASK is set to the mask used. This is either contained in a
3371 BIT_AND_EXPR or derived from the width of the field.
3373 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3375 Return 0 if this is not a component reference or is one that we can't
3376 do anything with. */
3379 decode_field_reference (tree exp, HOST_WIDE_INT *pbitsize,
3380 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3381 int *punsignedp, int *pvolatilep,
3382 tree *pmask, tree *pand_mask)
3384 tree outer_type = 0;
3386 tree mask, inner, offset;
3388 unsigned int precision;
3390 /* All the optimizations using this function assume integer fields.
3391 There are problems with FP fields since the type_for_size call
3392 below can fail for, e.g., XFmode. */
3393 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3396 /* We are interested in the bare arrangement of bits, so strip everything
3397 that doesn't affect the machine mode. However, record the type of the
3398 outermost expression if it may matter below. */
3399 if (TREE_CODE (exp) == NOP_EXPR
3400 || TREE_CODE (exp) == CONVERT_EXPR
3401 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3402 outer_type = TREE_TYPE (exp);
3405 if (TREE_CODE (exp) == BIT_AND_EXPR)
3407 and_mask = TREE_OPERAND (exp, 1);
3408 exp = TREE_OPERAND (exp, 0);
3409 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3410 if (TREE_CODE (and_mask) != INTEGER_CST)
3414 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3415 punsignedp, pvolatilep, false);
3416 if ((inner == exp && and_mask == 0)
3417 || *pbitsize < 0 || offset != 0
3418 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3421 /* If the number of bits in the reference is the same as the bitsize of
3422 the outer type, then the outer type gives the signedness. Otherwise
3423 (in case of a small bitfield) the signedness is unchanged. */
3424 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3425 *punsignedp = TYPE_UNSIGNED (outer_type);
3427 /* Compute the mask to access the bitfield. */
3428 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3429 precision = TYPE_PRECISION (unsigned_type);
3431 mask = build_int_cst (unsigned_type, -1);
3432 mask = force_fit_type (mask, 0, false, false);
3434 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3435 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3437 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3439 mask = fold_build2 (BIT_AND_EXPR, unsigned_type,
3440 fold_convert (unsigned_type, and_mask), mask);
3443 *pand_mask = and_mask;
3447 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3451 all_ones_mask_p (tree mask, int size)
3453 tree type = TREE_TYPE (mask);
3454 unsigned int precision = TYPE_PRECISION (type);
3457 tmask = build_int_cst (lang_hooks.types.signed_type (type), -1);
3458 tmask = force_fit_type (tmask, 0, false, false);
3461 tree_int_cst_equal (mask,
3462 const_binop (RSHIFT_EXPR,
3463 const_binop (LSHIFT_EXPR, tmask,
3464 size_int (precision - size),
3466 size_int (precision - size), 0));
3469 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
3470 represents the sign bit of EXP's type. If EXP represents a sign
3471 or zero extension, also test VAL against the unextended type.
3472 The return value is the (sub)expression whose sign bit is VAL,
3473 or NULL_TREE otherwise. */
3476 sign_bit_p (tree exp, tree val)
3478 unsigned HOST_WIDE_INT mask_lo, lo;
3479 HOST_WIDE_INT mask_hi, hi;
3483 /* Tree EXP must have an integral type. */
3484 t = TREE_TYPE (exp);
3485 if (! INTEGRAL_TYPE_P (t))
3488 /* Tree VAL must be an integer constant. */
3489 if (TREE_CODE (val) != INTEGER_CST
3490 || TREE_CONSTANT_OVERFLOW (val))
3493 width = TYPE_PRECISION (t);
3494 if (width > HOST_BITS_PER_WIDE_INT)
3496 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3499 mask_hi = ((unsigned HOST_WIDE_INT) -1
3500 >> (2 * HOST_BITS_PER_WIDE_INT - width));
3506 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3509 mask_lo = ((unsigned HOST_WIDE_INT) -1
3510 >> (HOST_BITS_PER_WIDE_INT - width));
3513 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3514 treat VAL as if it were unsigned. */
3515 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
3516 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
3519 /* Handle extension from a narrower type. */
3520 if (TREE_CODE (exp) == NOP_EXPR
3521 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3522 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3527 /* Subroutine for fold_truthop: determine if an operand is simple enough
3528 to be evaluated unconditionally. */
3531 simple_operand_p (tree exp)
3533 /* Strip any conversions that don't change the machine mode. */
3536 return (CONSTANT_CLASS_P (exp)
3537 || TREE_CODE (exp) == SSA_NAME
3539 && ! TREE_ADDRESSABLE (exp)
3540 && ! TREE_THIS_VOLATILE (exp)
3541 && ! DECL_NONLOCAL (exp)
3542 /* Don't regard global variables as simple. They may be
3543 allocated in ways unknown to the compiler (shared memory,
3544 #pragma weak, etc). */
3545 && ! TREE_PUBLIC (exp)
3546 && ! DECL_EXTERNAL (exp)
3547 /* Loading a static variable is unduly expensive, but global
3548 registers aren't expensive. */
3549 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3552 /* The following functions are subroutines to fold_range_test and allow it to
3553 try to change a logical combination of comparisons into a range test.
3556 X == 2 || X == 3 || X == 4 || X == 5
3560 (unsigned) (X - 2) <= 3
3562 We describe each set of comparisons as being either inside or outside
3563 a range, using a variable named like IN_P, and then describe the
3564 range with a lower and upper bound. If one of the bounds is omitted,
3565 it represents either the highest or lowest value of the type.
3567 In the comments below, we represent a range by two numbers in brackets
3568 preceded by a "+" to designate being inside that range, or a "-" to
3569 designate being outside that range, so the condition can be inverted by
3570 flipping the prefix. An omitted bound is represented by a "-". For
3571 example, "- [-, 10]" means being outside the range starting at the lowest
3572 possible value and ending at 10, in other words, being greater than 10.
3573 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3576 We set up things so that the missing bounds are handled in a consistent
3577 manner so neither a missing bound nor "true" and "false" need to be
3578 handled using a special case. */
3580 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3581 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3582 and UPPER1_P are nonzero if the respective argument is an upper bound
3583 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3584 must be specified for a comparison. ARG1 will be converted to ARG0's
3585 type if both are specified. */
3588 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3589 tree arg1, int upper1_p)
3595 /* If neither arg represents infinity, do the normal operation.
3596 Else, if not a comparison, return infinity. Else handle the special
3597 comparison rules. Note that most of the cases below won't occur, but
3598 are handled for consistency. */
3600 if (arg0 != 0 && arg1 != 0)
3602 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3603 arg0, fold_convert (TREE_TYPE (arg0), arg1));
3605 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3608 if (TREE_CODE_CLASS (code) != tcc_comparison)
3611 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3612 for neither. In real maths, we cannot assume open ended ranges are
3613 the same. But, this is computer arithmetic, where numbers are finite.
3614 We can therefore make the transformation of any unbounded range with
3615 the value Z, Z being greater than any representable number. This permits
3616 us to treat unbounded ranges as equal. */
3617 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3618 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3622 result = sgn0 == sgn1;
3625 result = sgn0 != sgn1;
3628 result = sgn0 < sgn1;
3631 result = sgn0 <= sgn1;
3634 result = sgn0 > sgn1;
3637 result = sgn0 >= sgn1;
3643 return constant_boolean_node (result, type);
3646 /* Given EXP, a logical expression, set the range it is testing into
3647 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
3648 actually being tested. *PLOW and *PHIGH will be made of the same type
3649 as the returned expression. If EXP is not a comparison, we will most
3650 likely not be returning a useful value and range. */
3653 make_range (tree exp, int *pin_p, tree *plow, tree *phigh)
3655 enum tree_code code;
3656 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
3657 tree exp_type = NULL_TREE, arg0_type = NULL_TREE;
3659 tree low, high, n_low, n_high;
3661 /* Start with simply saying "EXP != 0" and then look at the code of EXP
3662 and see if we can refine the range. Some of the cases below may not
3663 happen, but it doesn't seem worth worrying about this. We "continue"
3664 the outer loop when we've changed something; otherwise we "break"
3665 the switch, which will "break" the while. */
3668 low = high = fold_convert (TREE_TYPE (exp), integer_zero_node);
3672 code = TREE_CODE (exp);
3673 exp_type = TREE_TYPE (exp);
3675 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
3677 if (TREE_CODE_LENGTH (code) > 0)
3678 arg0 = TREE_OPERAND (exp, 0);
3679 if (TREE_CODE_CLASS (code) == tcc_comparison
3680 || TREE_CODE_CLASS (code) == tcc_unary
3681 || TREE_CODE_CLASS (code) == tcc_binary)
3682 arg0_type = TREE_TYPE (arg0);
3683 if (TREE_CODE_CLASS (code) == tcc_binary
3684 || TREE_CODE_CLASS (code) == tcc_comparison
3685 || (TREE_CODE_CLASS (code) == tcc_expression
3686 && TREE_CODE_LENGTH (code) > 1))
3687 arg1 = TREE_OPERAND (exp, 1);
3692 case TRUTH_NOT_EXPR:
3693 in_p = ! in_p, exp = arg0;
3696 case EQ_EXPR: case NE_EXPR:
3697 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3698 /* We can only do something if the range is testing for zero
3699 and if the second operand is an integer constant. Note that
3700 saying something is "in" the range we make is done by
3701 complementing IN_P since it will set in the initial case of
3702 being not equal to zero; "out" is leaving it alone. */
3703 if (low == 0 || high == 0
3704 || ! integer_zerop (low) || ! integer_zerop (high)
3705 || TREE_CODE (arg1) != INTEGER_CST)
3710 case NE_EXPR: /* - [c, c] */
3713 case EQ_EXPR: /* + [c, c] */
3714 in_p = ! in_p, low = high = arg1;
3716 case GT_EXPR: /* - [-, c] */
3717 low = 0, high = arg1;
3719 case GE_EXPR: /* + [c, -] */
3720 in_p = ! in_p, low = arg1, high = 0;
3722 case LT_EXPR: /* - [c, -] */
3723 low = arg1, high = 0;
3725 case LE_EXPR: /* + [-, c] */
3726 in_p = ! in_p, low = 0, high = arg1;
3732 /* If this is an unsigned comparison, we also know that EXP is
3733 greater than or equal to zero. We base the range tests we make
3734 on that fact, so we record it here so we can parse existing
3735 range tests. We test arg0_type since often the return type
3736 of, e.g. EQ_EXPR, is boolean. */
3737 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
3739 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3741 fold_convert (arg0_type, integer_zero_node),
3745 in_p = n_in_p, low = n_low, high = n_high;
3747 /* If the high bound is missing, but we have a nonzero low
3748 bound, reverse the range so it goes from zero to the low bound
3750 if (high == 0 && low && ! integer_zerop (low))
3753 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
3754 integer_one_node, 0);
3755 low = fold_convert (arg0_type, integer_zero_node);
3763 /* (-x) IN [a,b] -> x in [-b, -a] */
3764 n_low = range_binop (MINUS_EXPR, exp_type,
3765 fold_convert (exp_type, integer_zero_node),
3767 n_high = range_binop (MINUS_EXPR, exp_type,
3768 fold_convert (exp_type, integer_zero_node),
3770 low = n_low, high = n_high;
3776 exp = build2 (MINUS_EXPR, exp_type, negate_expr (arg0),
3777 fold_convert (exp_type, integer_one_node));
3780 case PLUS_EXPR: case MINUS_EXPR:
3781 if (TREE_CODE (arg1) != INTEGER_CST)
3784 /* If EXP is signed, any overflow in the computation is undefined,
3785 so we don't worry about it so long as our computations on
3786 the bounds don't overflow. For unsigned, overflow is defined
3787 and this is exactly the right thing. */
3788 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3789 arg0_type, low, 0, arg1, 0);
3790 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3791 arg0_type, high, 1, arg1, 0);
3792 if ((n_low != 0 && TREE_OVERFLOW (n_low))
3793 || (n_high != 0 && TREE_OVERFLOW (n_high)))
3796 /* Check for an unsigned range which has wrapped around the maximum
3797 value thus making n_high < n_low, and normalize it. */
3798 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
3800 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
3801 integer_one_node, 0);
3802 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
3803 integer_one_node, 0);
3805 /* If the range is of the form +/- [ x+1, x ], we won't
3806 be able to normalize it. But then, it represents the
3807 whole range or the empty set, so make it
3809 if (tree_int_cst_equal (n_low, low)
3810 && tree_int_cst_equal (n_high, high))
3816 low = n_low, high = n_high;
3821 case NOP_EXPR: case NON_LVALUE_EXPR: case CONVERT_EXPR:
3822 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
3825 if (! INTEGRAL_TYPE_P (arg0_type)
3826 || (low != 0 && ! int_fits_type_p (low, arg0_type))
3827 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
3830 n_low = low, n_high = high;
3833 n_low = fold_convert (arg0_type, n_low);
3836 n_high = fold_convert (arg0_type, n_high);
3839 /* If we're converting arg0 from an unsigned type, to exp,
3840 a signed type, we will be doing the comparison as unsigned.
3841 The tests above have already verified that LOW and HIGH
3844 So we have to ensure that we will handle large unsigned
3845 values the same way that the current signed bounds treat
3848 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
3851 tree equiv_type = lang_hooks.types.type_for_mode
3852 (TYPE_MODE (arg0_type), 1);
3854 /* A range without an upper bound is, naturally, unbounded.
3855 Since convert would have cropped a very large value, use
3856 the max value for the destination type. */
3858 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
3859 : TYPE_MAX_VALUE (arg0_type);
3861 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
3862 high_positive = fold_build2 (RSHIFT_EXPR, arg0_type,
3863 fold_convert (arg0_type,
3865 fold_convert (arg0_type,
3868 /* If the low bound is specified, "and" the range with the
3869 range for which the original unsigned value will be
3873 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3874 1, n_low, n_high, 1,
3875 fold_convert (arg0_type,
3880 in_p = (n_in_p == in_p);
3884 /* Otherwise, "or" the range with the range of the input
3885 that will be interpreted as negative. */
3886 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3887 0, n_low, n_high, 1,
3888 fold_convert (arg0_type,
3893 in_p = (in_p != n_in_p);
3898 low = n_low, high = n_high;
3908 /* If EXP is a constant, we can evaluate whether this is true or false. */
3909 if (TREE_CODE (exp) == INTEGER_CST)
3911 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
3913 && integer_onep (range_binop (LE_EXPR, integer_type_node,
3919 *pin_p = in_p, *plow = low, *phigh = high;
3923 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
3924 type, TYPE, return an expression to test if EXP is in (or out of, depending
3925 on IN_P) the range. Return 0 if the test couldn't be created. */
3928 build_range_check (tree type, tree exp, int in_p, tree low, tree high)
3930 tree etype = TREE_TYPE (exp);
3935 value = build_range_check (type, exp, 1, low, high);
3937 return invert_truthvalue (value);
3942 if (low == 0 && high == 0)
3943 return fold_convert (type, integer_one_node);
3946 return fold_build2 (LE_EXPR, type, exp, high);
3949 return fold_build2 (GE_EXPR, type, exp, low);
3951 if (operand_equal_p (low, high, 0))
3952 return fold_build2 (EQ_EXPR, type, exp, low);
3954 if (integer_zerop (low))
3956 if (! TYPE_UNSIGNED (etype))
3958 etype = lang_hooks.types.unsigned_type (etype);
3959 high = fold_convert (etype, high);
3960 exp = fold_convert (etype, exp);
3962 return build_range_check (type, exp, 1, 0, high);
3965 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
3966 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
3968 unsigned HOST_WIDE_INT lo;
3972 prec = TYPE_PRECISION (etype);
3973 if (prec <= HOST_BITS_PER_WIDE_INT)
3976 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
3980 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
3981 lo = (unsigned HOST_WIDE_INT) -1;
3984 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
3986 if (TYPE_UNSIGNED (etype))
3988 etype = lang_hooks.types.signed_type (etype);
3989 exp = fold_convert (etype, exp);
3991 return fold_build2 (GT_EXPR, type, exp,
3992 fold_convert (etype, integer_zero_node));
3996 value = const_binop (MINUS_EXPR, high, low, 0);
3997 if (value != 0 && TREE_OVERFLOW (value) && ! TYPE_UNSIGNED (etype))
3999 tree utype, minv, maxv;
4001 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4002 for the type in question, as we rely on this here. */
4003 switch (TREE_CODE (etype))
4008 utype = lang_hooks.types.unsigned_type (etype);
4009 maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
4010 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4011 integer_one_node, 1);
4012 minv = fold_convert (utype, TYPE_MIN_VALUE (etype));
4013 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4017 high = fold_convert (etype, high);
4018 low = fold_convert (etype, low);
4019 exp = fold_convert (etype, exp);
4020 value = const_binop (MINUS_EXPR, high, low, 0);
4028 if (value != 0 && ! TREE_OVERFLOW (value))
4029 return build_range_check (type,
4030 fold_build2 (MINUS_EXPR, etype, exp, low),
4031 1, fold_convert (etype, integer_zero_node),
4037 /* Given two ranges, see if we can merge them into one. Return 1 if we
4038 can, 0 if we can't. Set the output range into the specified parameters. */
4041 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4042 tree high0, int in1_p, tree low1, tree high1)
4050 int lowequal = ((low0 == 0 && low1 == 0)
4051 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4052 low0, 0, low1, 0)));
4053 int highequal = ((high0 == 0 && high1 == 0)
4054 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4055 high0, 1, high1, 1)));
4057 /* Make range 0 be the range that starts first, or ends last if they
4058 start at the same value. Swap them if it isn't. */
4059 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4062 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4063 high1, 1, high0, 1))))
4065 temp = in0_p, in0_p = in1_p, in1_p = temp;
4066 tem = low0, low0 = low1, low1 = tem;
4067 tem = high0, high0 = high1, high1 = tem;
4070 /* Now flag two cases, whether the ranges are disjoint or whether the
4071 second range is totally subsumed in the first. Note that the tests
4072 below are simplified by the ones above. */
4073 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4074 high0, 1, low1, 0));
4075 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4076 high1, 1, high0, 1));
4078 /* We now have four cases, depending on whether we are including or
4079 excluding the two ranges. */
4082 /* If they don't overlap, the result is false. If the second range
4083 is a subset it is the result. Otherwise, the range is from the start
4084 of the second to the end of the first. */
4086 in_p = 0, low = high = 0;
4088 in_p = 1, low = low1, high = high1;
4090 in_p = 1, low = low1, high = high0;
4093 else if (in0_p && ! in1_p)
4095 /* If they don't overlap, the result is the first range. If they are
4096 equal, the result is false. If the second range is a subset of the
4097 first, and the ranges begin at the same place, we go from just after
4098 the end of the first range to the end of the second. If the second
4099 range is not a subset of the first, or if it is a subset and both
4100 ranges end at the same place, the range starts at the start of the
4101 first range and ends just before the second range.
4102 Otherwise, we can't describe this as a single range. */
4104 in_p = 1, low = low0, high = high0;
4105 else if (lowequal && highequal)
4106 in_p = 0, low = high = 0;
4107 else if (subset && lowequal)
4109 in_p = 1, high = high0;
4110 low = range_binop (PLUS_EXPR, NULL_TREE, high1, 0,
4111 integer_one_node, 0);
4113 else if (! subset || highequal)
4115 in_p = 1, low = low0;
4116 high = range_binop (MINUS_EXPR, NULL_TREE, low1, 0,
4117 integer_one_node, 0);
4123 else if (! in0_p && in1_p)
4125 /* If they don't overlap, the result is the second range. If the second
4126 is a subset of the first, the result is false. Otherwise,
4127 the range starts just after the first range and ends at the
4128 end of the second. */
4130 in_p = 1, low = low1, high = high1;
4131 else if (subset || highequal)
4132 in_p = 0, low = high = 0;
4135 in_p = 1, high = high1;
4136 low = range_binop (PLUS_EXPR, NULL_TREE, high0, 1,
4137 integer_one_node, 0);
4143 /* The case where we are excluding both ranges. Here the complex case
4144 is if they don't overlap. In that case, the only time we have a
4145 range is if they are adjacent. If the second is a subset of the
4146 first, the result is the first. Otherwise, the range to exclude
4147 starts at the beginning of the first range and ends at the end of the
4151 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4152 range_binop (PLUS_EXPR, NULL_TREE,
4154 integer_one_node, 1),
4156 in_p = 0, low = low0, high = high1;
4159 /* Canonicalize - [min, x] into - [-, x]. */
4160 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4161 switch (TREE_CODE (TREE_TYPE (low0)))
4164 if (TYPE_PRECISION (TREE_TYPE (low0))
4165 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4170 if (tree_int_cst_equal (low0,
4171 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4175 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4176 && integer_zerop (low0))
4183 /* Canonicalize - [x, max] into - [x, -]. */
4184 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4185 switch (TREE_CODE (TREE_TYPE (high1)))
4188 if (TYPE_PRECISION (TREE_TYPE (high1))
4189 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4194 if (tree_int_cst_equal (high1,
4195 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4199 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4200 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4202 integer_one_node, 1)))
4209 /* The ranges might be also adjacent between the maximum and
4210 minimum values of the given type. For
4211 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4212 return + [x + 1, y - 1]. */
4213 if (low0 == 0 && high1 == 0)
4215 low = range_binop (PLUS_EXPR, NULL_TREE, high0, 1,
4216 integer_one_node, 1);
4217 high = range_binop (MINUS_EXPR, NULL_TREE, low1, 0,
4218 integer_one_node, 0);
4219 if (low == 0 || high == 0)
4229 in_p = 0, low = low0, high = high0;
4231 in_p = 0, low = low0, high = high1;
4234 *pin_p = in_p, *plow = low, *phigh = high;
4239 /* Subroutine of fold, looking inside expressions of the form
4240 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4241 of the COND_EXPR. This function is being used also to optimize
4242 A op B ? C : A, by reversing the comparison first.
4244 Return a folded expression whose code is not a COND_EXPR
4245 anymore, or NULL_TREE if no folding opportunity is found. */
4248 fold_cond_expr_with_comparison (tree type, tree arg0, tree arg1, tree arg2)
4250 enum tree_code comp_code = TREE_CODE (arg0);
4251 tree arg00 = TREE_OPERAND (arg0, 0);
4252 tree arg01 = TREE_OPERAND (arg0, 1);
4253 tree arg1_type = TREE_TYPE (arg1);
4259 /* If we have A op 0 ? A : -A, consider applying the following
4262 A == 0? A : -A same as -A
4263 A != 0? A : -A same as A
4264 A >= 0? A : -A same as abs (A)
4265 A > 0? A : -A same as abs (A)
4266 A <= 0? A : -A same as -abs (A)
4267 A < 0? A : -A same as -abs (A)
4269 None of these transformations work for modes with signed
4270 zeros. If A is +/-0, the first two transformations will
4271 change the sign of the result (from +0 to -0, or vice
4272 versa). The last four will fix the sign of the result,
4273 even though the original expressions could be positive or
4274 negative, depending on the sign of A.
4276 Note that all these transformations are correct if A is
4277 NaN, since the two alternatives (A and -A) are also NaNs. */
4278 if ((FLOAT_TYPE_P (TREE_TYPE (arg01))
4279 ? real_zerop (arg01)
4280 : integer_zerop (arg01))
4281 && ((TREE_CODE (arg2) == NEGATE_EXPR
4282 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4283 /* In the case that A is of the form X-Y, '-A' (arg2) may
4284 have already been folded to Y-X, check for that. */
4285 || (TREE_CODE (arg1) == MINUS_EXPR
4286 && TREE_CODE (arg2) == MINUS_EXPR
4287 && operand_equal_p (TREE_OPERAND (arg1, 0),
4288 TREE_OPERAND (arg2, 1), 0)
4289 && operand_equal_p (TREE_OPERAND (arg1, 1),
4290 TREE_OPERAND (arg2, 0), 0))))
4295 tem = fold_convert (arg1_type, arg1);
4296 return pedantic_non_lvalue (fold_convert (type, negate_expr (tem)));
4299 return pedantic_non_lvalue (fold_convert (type, arg1));
4302 if (flag_trapping_math)
4307 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4308 arg1 = fold_convert (lang_hooks.types.signed_type
4309 (TREE_TYPE (arg1)), arg1);
4310 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
4311 return pedantic_non_lvalue (fold_convert (type, tem));
4314 if (flag_trapping_math)
4318 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4319 arg1 = fold_convert (lang_hooks.types.signed_type
4320 (TREE_TYPE (arg1)), arg1);
4321 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
4322 return negate_expr (fold_convert (type, tem));
4324 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4328 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4329 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4330 both transformations are correct when A is NaN: A != 0
4331 is then true, and A == 0 is false. */
4333 if (integer_zerop (arg01) && integer_zerop (arg2))
4335 if (comp_code == NE_EXPR)
4336 return pedantic_non_lvalue (fold_convert (type, arg1));
4337 else if (comp_code == EQ_EXPR)
4338 return fold_convert (type, integer_zero_node);
4341 /* Try some transformations of A op B ? A : B.
4343 A == B? A : B same as B
4344 A != B? A : B same as A
4345 A >= B? A : B same as max (A, B)
4346 A > B? A : B same as max (B, A)
4347 A <= B? A : B same as min (A, B)
4348 A < B? A : B same as min (B, A)
4350 As above, these transformations don't work in the presence
4351 of signed zeros. For example, if A and B are zeros of
4352 opposite sign, the first two transformations will change
4353 the sign of the result. In the last four, the original
4354 expressions give different results for (A=+0, B=-0) and
4355 (A=-0, B=+0), but the transformed expressions do not.
4357 The first two transformations are correct if either A or B
4358 is a NaN. In the first transformation, the condition will
4359 be false, and B will indeed be chosen. In the case of the
4360 second transformation, the condition A != B will be true,
4361 and A will be chosen.
4363 The conversions to max() and min() are not correct if B is
4364 a number and A is not. The conditions in the original
4365 expressions will be false, so all four give B. The min()
4366 and max() versions would give a NaN instead. */
4367 if (operand_equal_for_comparison_p (arg01, arg2, arg00)
4368 /* Avoid these transformations if the COND_EXPR may be used
4369 as an lvalue in the C++ front-end. PR c++/19199. */
4371 || strcmp (lang_hooks.name, "GNU C++") != 0
4372 || ! maybe_lvalue_p (arg1)
4373 || ! maybe_lvalue_p (arg2)))
4375 tree comp_op0 = arg00;
4376 tree comp_op1 = arg01;
4377 tree comp_type = TREE_TYPE (comp_op0);
4379 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4380 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4390 return pedantic_non_lvalue (fold_convert (type, arg2));
4392 return pedantic_non_lvalue (fold_convert (type, arg1));
4397 /* In C++ a ?: expression can be an lvalue, so put the
4398 operand which will be used if they are equal first
4399 so that we can convert this back to the
4400 corresponding COND_EXPR. */
4401 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4403 comp_op0 = fold_convert (comp_type, comp_op0);
4404 comp_op1 = fold_convert (comp_type, comp_op1);
4405 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
4406 ? fold_build2 (MIN_EXPR, comp_type, comp_op0, comp_op1)
4407 : fold_build2 (MIN_EXPR, comp_type, comp_op1, comp_op0);
4408 return pedantic_non_lvalue (fold_convert (type, tem));
4415 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4417 comp_op0 = fold_convert (comp_type, comp_op0);
4418 comp_op1 = fold_convert (comp_type, comp_op1);
4419 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
4420 ? fold_build2 (MAX_EXPR, comp_type, comp_op0, comp_op1)
4421 : fold_build2 (MAX_EXPR, comp_type, comp_op1, comp_op0);
4422 return pedantic_non_lvalue (fold_convert (type, tem));
4426 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4427 return pedantic_non_lvalue (fold_convert (type, arg2));
4430 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4431 return pedantic_non_lvalue (fold_convert (type, arg1));
4434 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4439 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4440 we might still be able to simplify this. For example,
4441 if C1 is one less or one more than C2, this might have started
4442 out as a MIN or MAX and been transformed by this function.
4443 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
4445 if (INTEGRAL_TYPE_P (type)
4446 && TREE_CODE (arg01) == INTEGER_CST
4447 && TREE_CODE (arg2) == INTEGER_CST)
4451 /* We can replace A with C1 in this case. */
4452 arg1 = fold_convert (type, arg01);
4453 return fold_build3 (COND_EXPR, type, arg0, arg1, arg2);
4456 /* If C1 is C2 + 1, this is min(A, C2). */
4457 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4459 && operand_equal_p (arg01,
4460 const_binop (PLUS_EXPR, arg2,
4461 integer_one_node, 0),
4463 return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
4468 /* If C1 is C2 - 1, this is min(A, C2). */
4469 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4471 && operand_equal_p (arg01,
4472 const_binop (MINUS_EXPR, arg2,
4473 integer_one_node, 0),
4475 return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
4480 /* If C1 is C2 - 1, this is max(A, C2). */
4481 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4483 && operand_equal_p (arg01,
4484 const_binop (MINUS_EXPR, arg2,
4485 integer_one_node, 0),
4487 return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
4492 /* If C1 is C2 + 1, this is max(A, C2). */
4493 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4495 && operand_equal_p (arg01,
4496 const_binop (PLUS_EXPR, arg2,
4497 integer_one_node, 0),
4499 return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
4513 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
4514 #define LOGICAL_OP_NON_SHORT_CIRCUIT (BRANCH_COST >= 2)
4517 /* EXP is some logical combination of boolean tests. See if we can
4518 merge it into some range test. Return the new tree if so. */
4521 fold_range_test (enum tree_code code, tree type, tree op0, tree op1)
4523 int or_op = (code == TRUTH_ORIF_EXPR
4524 || code == TRUTH_OR_EXPR);
4525 int in0_p, in1_p, in_p;
4526 tree low0, low1, low, high0, high1, high;
4527 tree lhs = make_range (op0, &in0_p, &low0, &high0);
4528 tree rhs = make_range (op1, &in1_p, &low1, &high1);
4531 /* If this is an OR operation, invert both sides; we will invert
4532 again at the end. */
4534 in0_p = ! in0_p, in1_p = ! in1_p;
4536 /* If both expressions are the same, if we can merge the ranges, and we
4537 can build the range test, return it or it inverted. If one of the
4538 ranges is always true or always false, consider it to be the same
4539 expression as the other. */
4540 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
4541 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
4543 && 0 != (tem = (build_range_check (type,
4545 : rhs != 0 ? rhs : integer_zero_node,
4547 return or_op ? invert_truthvalue (tem) : tem;
4549 /* On machines where the branch cost is expensive, if this is a
4550 short-circuited branch and the underlying object on both sides
4551 is the same, make a non-short-circuit operation. */
4552 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
4553 && lhs != 0 && rhs != 0
4554 && (code == TRUTH_ANDIF_EXPR
4555 || code == TRUTH_ORIF_EXPR)
4556 && operand_equal_p (lhs, rhs, 0))
4558 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
4559 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
4560 which cases we can't do this. */
4561 if (simple_operand_p (lhs))
4562 return build2 (code == TRUTH_ANDIF_EXPR
4563 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4566 else if (lang_hooks.decls.global_bindings_p () == 0
4567 && ! CONTAINS_PLACEHOLDER_P (lhs))
4569 tree common = save_expr (lhs);
4571 if (0 != (lhs = build_range_check (type, common,
4572 or_op ? ! in0_p : in0_p,
4574 && (0 != (rhs = build_range_check (type, common,
4575 or_op ? ! in1_p : in1_p,
4577 return build2 (code == TRUTH_ANDIF_EXPR
4578 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4586 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
4587 bit value. Arrange things so the extra bits will be set to zero if and
4588 only if C is signed-extended to its full width. If MASK is nonzero,
4589 it is an INTEGER_CST that should be AND'ed with the extra bits. */
4592 unextend (tree c, int p, int unsignedp, tree mask)
4594 tree type = TREE_TYPE (c);
4595 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
4598 if (p == modesize || unsignedp)
4601 /* We work by getting just the sign bit into the low-order bit, then
4602 into the high-order bit, then sign-extend. We then XOR that value
4604 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
4605 temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
4607 /* We must use a signed type in order to get an arithmetic right shift.
4608 However, we must also avoid introducing accidental overflows, so that
4609 a subsequent call to integer_zerop will work. Hence we must
4610 do the type conversion here. At this point, the constant is either
4611 zero or one, and the conversion to a signed type can never overflow.
4612 We could get an overflow if this conversion is done anywhere else. */
4613 if (TYPE_UNSIGNED (type))
4614 temp = fold_convert (lang_hooks.types.signed_type (type), temp);
4616 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
4617 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
4619 temp = const_binop (BIT_AND_EXPR, temp,
4620 fold_convert (TREE_TYPE (c), mask), 0);
4621 /* If necessary, convert the type back to match the type of C. */
4622 if (TYPE_UNSIGNED (type))
4623 temp = fold_convert (type, temp);
4625 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
4628 /* Find ways of folding logical expressions of LHS and RHS:
4629 Try to merge two comparisons to the same innermost item.
4630 Look for range tests like "ch >= '0' && ch <= '9'".
4631 Look for combinations of simple terms on machines with expensive branches
4632 and evaluate the RHS unconditionally.
4634 For example, if we have p->a == 2 && p->b == 4 and we can make an
4635 object large enough to span both A and B, we can do this with a comparison
4636 against the object ANDed with the a mask.
4638 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
4639 operations to do this with one comparison.
4641 We check for both normal comparisons and the BIT_AND_EXPRs made this by
4642 function and the one above.
4644 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
4645 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
4647 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
4650 We return the simplified tree or 0 if no optimization is possible. */
4653 fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs)
4655 /* If this is the "or" of two comparisons, we can do something if
4656 the comparisons are NE_EXPR. If this is the "and", we can do something
4657 if the comparisons are EQ_EXPR. I.e.,
4658 (a->b == 2 && a->c == 4) can become (a->new == NEW).
4660 WANTED_CODE is this operation code. For single bit fields, we can
4661 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
4662 comparison for one-bit fields. */
4664 enum tree_code wanted_code;
4665 enum tree_code lcode, rcode;
4666 tree ll_arg, lr_arg, rl_arg, rr_arg;
4667 tree ll_inner, lr_inner, rl_inner, rr_inner;
4668 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
4669 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
4670 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
4671 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
4672 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
4673 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
4674 enum machine_mode lnmode, rnmode;
4675 tree ll_mask, lr_mask, rl_mask, rr_mask;
4676 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
4677 tree l_const, r_const;
4678 tree lntype, rntype, result;
4679 int first_bit, end_bit;
4682 /* Start by getting the comparison codes. Fail if anything is volatile.
4683 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
4684 it were surrounded with a NE_EXPR. */
4686 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
4689 lcode = TREE_CODE (lhs);
4690 rcode = TREE_CODE (rhs);
4692 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
4694 lhs = build2 (NE_EXPR, truth_type, lhs,
4695 fold_convert (TREE_TYPE (lhs), integer_zero_node));
4699 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
4701 rhs = build2 (NE_EXPR, truth_type, rhs,
4702 fold_convert (TREE_TYPE (rhs), integer_zero_node));
4706 if (TREE_CODE_CLASS (lcode) != tcc_comparison
4707 || TREE_CODE_CLASS (rcode) != tcc_comparison)
4710 ll_arg = TREE_OPERAND (lhs, 0);
4711 lr_arg = TREE_OPERAND (lhs, 1);
4712 rl_arg = TREE_OPERAND (rhs, 0);
4713 rr_arg = TREE_OPERAND (rhs, 1);
4715 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
4716 if (simple_operand_p (ll_arg)
4717 && simple_operand_p (lr_arg))
4720 if (operand_equal_p (ll_arg, rl_arg, 0)
4721 && operand_equal_p (lr_arg, rr_arg, 0))
4723 result = combine_comparisons (code, lcode, rcode,
4724 truth_type, ll_arg, lr_arg);
4728 else if (operand_equal_p (ll_arg, rr_arg, 0)
4729 && operand_equal_p (lr_arg, rl_arg, 0))
4731 result = combine_comparisons (code, lcode,
4732 swap_tree_comparison (rcode),
4733 truth_type, ll_arg, lr_arg);
4739 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
4740 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
4742 /* If the RHS can be evaluated unconditionally and its operands are
4743 simple, it wins to evaluate the RHS unconditionally on machines
4744 with expensive branches. In this case, this isn't a comparison
4745 that can be merged. Avoid doing this if the RHS is a floating-point
4746 comparison since those can trap. */
4748 if (BRANCH_COST >= 2
4749 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
4750 && simple_operand_p (rl_arg)
4751 && simple_operand_p (rr_arg))
4753 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
4754 if (code == TRUTH_OR_EXPR
4755 && lcode == NE_EXPR && integer_zerop (lr_arg)
4756 && rcode == NE_EXPR && integer_zerop (rr_arg)
4757 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
4758 return build2 (NE_EXPR, truth_type,
4759 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
4761 fold_convert (TREE_TYPE (ll_arg), integer_zero_node));
4763 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
4764 if (code == TRUTH_AND_EXPR
4765 && lcode == EQ_EXPR && integer_zerop (lr_arg)
4766 && rcode == EQ_EXPR && integer_zerop (rr_arg)
4767 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
4768 return build2 (EQ_EXPR, truth_type,
4769 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
4771 fold_convert (TREE_TYPE (ll_arg), integer_zero_node));
4773 if (LOGICAL_OP_NON_SHORT_CIRCUIT)
4774 return build2 (code, truth_type, lhs, rhs);
4777 /* See if the comparisons can be merged. Then get all the parameters for
4780 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
4781 || (rcode != EQ_EXPR && rcode != NE_EXPR))
4785 ll_inner = decode_field_reference (ll_arg,
4786 &ll_bitsize, &ll_bitpos, &ll_mode,
4787 &ll_unsignedp, &volatilep, &ll_mask,
4789 lr_inner = decode_field_reference (lr_arg,
4790 &lr_bitsize, &lr_bitpos, &lr_mode,
4791 &lr_unsignedp, &volatilep, &lr_mask,
4793 rl_inner = decode_field_reference (rl_arg,
4794 &rl_bitsize, &rl_bitpos, &rl_mode,
4795 &rl_unsignedp, &volatilep, &rl_mask,
4797 rr_inner = decode_field_reference (rr_arg,
4798 &rr_bitsize, &rr_bitpos, &rr_mode,
4799 &rr_unsignedp, &volatilep, &rr_mask,
4802 /* It must be true that the inner operation on the lhs of each
4803 comparison must be the same if we are to be able to do anything.
4804 Then see if we have constants. If not, the same must be true for
4806 if (volatilep || ll_inner == 0 || rl_inner == 0
4807 || ! operand_equal_p (ll_inner, rl_inner, 0))
4810 if (TREE_CODE (lr_arg) == INTEGER_CST
4811 && TREE_CODE (rr_arg) == INTEGER_CST)
4812 l_const = lr_arg, r_const = rr_arg;
4813 else if (lr_inner == 0 || rr_inner == 0
4814 || ! operand_equal_p (lr_inner, rr_inner, 0))
4817 l_const = r_const = 0;
4819 /* If either comparison code is not correct for our logical operation,
4820 fail. However, we can convert a one-bit comparison against zero into
4821 the opposite comparison against that bit being set in the field. */
4823 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
4824 if (lcode != wanted_code)
4826 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
4828 /* Make the left operand unsigned, since we are only interested
4829 in the value of one bit. Otherwise we are doing the wrong
4838 /* This is analogous to the code for l_const above. */
4839 if (rcode != wanted_code)
4841 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
4850 /* After this point all optimizations will generate bit-field
4851 references, which we might not want. */
4852 if (! lang_hooks.can_use_bit_fields_p ())
4855 /* See if we can find a mode that contains both fields being compared on
4856 the left. If we can't, fail. Otherwise, update all constants and masks
4857 to be relative to a field of that size. */
4858 first_bit = MIN (ll_bitpos, rl_bitpos);
4859 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
4860 lnmode = get_best_mode (end_bit - first_bit, first_bit,
4861 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
4863 if (lnmode == VOIDmode)
4866 lnbitsize = GET_MODE_BITSIZE (lnmode);
4867 lnbitpos = first_bit & ~ (lnbitsize - 1);
4868 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
4869 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
4871 if (BYTES_BIG_ENDIAN)
4873 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
4874 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
4877 ll_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, ll_mask),
4878 size_int (xll_bitpos), 0);
4879 rl_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, rl_mask),
4880 size_int (xrl_bitpos), 0);
4884 l_const = fold_convert (lntype, l_const);
4885 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
4886 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
4887 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
4888 fold_build1 (BIT_NOT_EXPR,
4892 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
4894 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
4899 r_const = fold_convert (lntype, r_const);
4900 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
4901 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
4902 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
4903 fold_build1 (BIT_NOT_EXPR,
4907 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
4909 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
4913 /* If the right sides are not constant, do the same for it. Also,
4914 disallow this optimization if a size or signedness mismatch occurs
4915 between the left and right sides. */
4918 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
4919 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
4920 /* Make sure the two fields on the right
4921 correspond to the left without being swapped. */
4922 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
4925 first_bit = MIN (lr_bitpos, rr_bitpos);
4926 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
4927 rnmode = get_best_mode (end_bit - first_bit, first_bit,
4928 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
4930 if (rnmode == VOIDmode)
4933 rnbitsize = GET_MODE_BITSIZE (rnmode);
4934 rnbitpos = first_bit & ~ (rnbitsize - 1);
4935 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
4936 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
4938 if (BYTES_BIG_ENDIAN)
4940 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
4941 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
4944 lr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, lr_mask),
4945 size_int (xlr_bitpos), 0);
4946 rr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, rr_mask),
4947 size_int (xrr_bitpos), 0);
4949 /* Make a mask that corresponds to both fields being compared.
4950 Do this for both items being compared. If the operands are the
4951 same size and the bits being compared are in the same position
4952 then we can do this by masking both and comparing the masked
4954 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
4955 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
4956 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
4958 lhs = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
4959 ll_unsignedp || rl_unsignedp);
4960 if (! all_ones_mask_p (ll_mask, lnbitsize))
4961 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
4963 rhs = make_bit_field_ref (lr_inner, rntype, rnbitsize, rnbitpos,
4964 lr_unsignedp || rr_unsignedp);
4965 if (! all_ones_mask_p (lr_mask, rnbitsize))
4966 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
4968 return build2 (wanted_code, truth_type, lhs, rhs);
4971 /* There is still another way we can do something: If both pairs of
4972 fields being compared are adjacent, we may be able to make a wider
4973 field containing them both.
4975 Note that we still must mask the lhs/rhs expressions. Furthermore,
4976 the mask must be shifted to account for the shift done by
4977 make_bit_field_ref. */
4978 if ((ll_bitsize + ll_bitpos == rl_bitpos
4979 && lr_bitsize + lr_bitpos == rr_bitpos)
4980 || (ll_bitpos == rl_bitpos + rl_bitsize
4981 && lr_bitpos == rr_bitpos + rr_bitsize))
4985 lhs = make_bit_field_ref (ll_inner, lntype, ll_bitsize + rl_bitsize,
4986 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
4987 rhs = make_bit_field_ref (lr_inner, rntype, lr_bitsize + rr_bitsize,
4988 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
4990 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
4991 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
4992 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
4993 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);
4995 /* Convert to the smaller type before masking out unwanted bits. */
4997 if (lntype != rntype)
4999 if (lnbitsize > rnbitsize)
5001 lhs = fold_convert (rntype, lhs);
5002 ll_mask = fold_convert (rntype, ll_mask);
5005 else if (lnbitsize < rnbitsize)
5007 rhs = fold_convert (lntype, rhs);
5008 lr_mask = fold_convert (lntype, lr_mask);
5013 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5014 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5016 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5017 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5019 return build2 (wanted_code, truth_type, lhs, rhs);
5025 /* Handle the case of comparisons with constants. If there is something in
5026 common between the masks, those bits of the constants must be the same.
5027 If not, the condition is always false. Test for this to avoid generating
5028 incorrect code below. */
5029 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
5030 if (! integer_zerop (result)
5031 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
5032 const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
5034 if (wanted_code == NE_EXPR)
5036 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5037 return constant_boolean_node (true, truth_type);
5041 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5042 return constant_boolean_node (false, truth_type);
5046 /* Construct the expression we will return. First get the component
5047 reference we will make. Unless the mask is all ones the width of
5048 that field, perform the mask operation. Then compare with the
5050 result = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
5051 ll_unsignedp || rl_unsignedp);
5053 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
5054 if (! all_ones_mask_p (ll_mask, lnbitsize))
5055 result = build2 (BIT_AND_EXPR, lntype, result, ll_mask);
5057 return build2 (wanted_code, truth_type, result,
5058 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
5061 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5065 optimize_minmax_comparison (enum tree_code code, tree type, tree op0, tree op1)
5068 enum tree_code op_code;
5069 tree comp_const = op1;
5071 int consts_equal, consts_lt;
5074 STRIP_SIGN_NOPS (arg0);
5076 op_code = TREE_CODE (arg0);
5077 minmax_const = TREE_OPERAND (arg0, 1);
5078 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5079 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5080 inner = TREE_OPERAND (arg0, 0);
5082 /* If something does not permit us to optimize, return the original tree. */
5083 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5084 || TREE_CODE (comp_const) != INTEGER_CST
5085 || TREE_CONSTANT_OVERFLOW (comp_const)
5086 || TREE_CODE (minmax_const) != INTEGER_CST
5087 || TREE_CONSTANT_OVERFLOW (minmax_const))
5090 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5091 and GT_EXPR, doing the rest with recursive calls using logical
5095 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5097 /* FIXME: We should be able to invert code without building a
5098 scratch tree node, but doing so would require us to
5099 duplicate a part of invert_truthvalue here. */
5100 tree tem = invert_truthvalue (build2 (code, type, op0, op1));
5101 tem = optimize_minmax_comparison (TREE_CODE (tem),
5103 TREE_OPERAND (tem, 0),
5104 TREE_OPERAND (tem, 1));
5105 return invert_truthvalue (tem);
5110 fold_build2 (TRUTH_ORIF_EXPR, type,
5111 optimize_minmax_comparison
5112 (EQ_EXPR, type, arg0, comp_const),
5113 optimize_minmax_comparison
5114 (GT_EXPR, type, arg0, comp_const));
5117 if (op_code == MAX_EXPR && consts_equal)
5118 /* MAX (X, 0) == 0 -> X <= 0 */
5119 return fold_build2 (LE_EXPR, type, inner, comp_const);
5121 else if (op_code == MAX_EXPR && consts_lt)
5122 /* MAX (X, 0) == 5 -> X == 5 */
5123 return fold_build2 (EQ_EXPR, type, inner, comp_const);
5125 else if (op_code == MAX_EXPR)
5126 /* MAX (X, 0) == -1 -> false */
5127 return omit_one_operand (type, integer_zero_node, inner);
5129 else if (consts_equal)
5130 /* MIN (X, 0) == 0 -> X >= 0 */
5131 return fold_build2 (GE_EXPR, type, inner, comp_const);
5134 /* MIN (X, 0) == 5 -> false */
5135 return omit_one_operand (type, integer_zero_node, inner);
5138 /* MIN (X, 0) == -1 -> X == -1 */
5139 return fold_build2 (EQ_EXPR, type, inner, comp_const);
5142 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5143 /* MAX (X, 0) > 0 -> X > 0
5144 MAX (X, 0) > 5 -> X > 5 */
5145 return fold_build2 (GT_EXPR, type, inner, comp_const);
5147 else if (op_code == MAX_EXPR)
5148 /* MAX (X, 0) > -1 -> true */
5149 return omit_one_operand (type, integer_one_node, inner);
5151 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5152 /* MIN (X, 0) > 0 -> false
5153 MIN (X, 0) > 5 -> false */
5154 return omit_one_operand (type, integer_zero_node, inner);
5157 /* MIN (X, 0) > -1 -> X > -1 */
5158 return fold_build2 (GT_EXPR, type, inner, comp_const);
5165 /* T is an integer expression that is being multiplied, divided, or taken a
5166 modulus (CODE says which and what kind of divide or modulus) by a
5167 constant C. See if we can eliminate that operation by folding it with
5168 other operations already in T. WIDE_TYPE, if non-null, is a type that
5169 should be used for the computation if wider than our type.
5171 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5172 (X * 2) + (Y * 4). We must, however, be assured that either the original
5173 expression would not overflow or that overflow is undefined for the type
5174 in the language in question.
5176 We also canonicalize (X + 7) * 4 into X * 4 + 28 in the hope that either
5177 the machine has a multiply-accumulate insn or that this is part of an
5178 addressing calculation.
5180 If we return a non-null expression, it is an equivalent form of the
5181 original computation, but need not be in the original type. */
5184 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type)
5186 /* To avoid exponential search depth, refuse to allow recursion past
5187 three levels. Beyond that (1) it's highly unlikely that we'll find
5188 something interesting and (2) we've probably processed it before
5189 when we built the inner expression. */
5198 ret = extract_muldiv_1 (t, c, code, wide_type);
5205 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type)
5207 tree type = TREE_TYPE (t);
5208 enum tree_code tcode = TREE_CODE (t);
5209 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5210 > GET_MODE_SIZE (TYPE_MODE (type)))
5211 ? wide_type : type);
5213 int same_p = tcode == code;
5214 tree op0 = NULL_TREE, op1 = NULL_TREE;
5216 /* Don't deal with constants of zero here; they confuse the code below. */
5217 if (integer_zerop (c))
5220 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5221 op0 = TREE_OPERAND (t, 0);
5223 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5224 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5226 /* Note that we need not handle conditional operations here since fold
5227 already handles those cases. So just do arithmetic here. */
5231 /* For a constant, we can always simplify if we are a multiply
5232 or (for divide and modulus) if it is a multiple of our constant. */
5233 if (code == MULT_EXPR
5234 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
5235 return const_binop (code, fold_convert (ctype, t),
5236 fold_convert (ctype, c), 0);
5239 case CONVERT_EXPR: case NON_LVALUE_EXPR: case NOP_EXPR:
5240 /* If op0 is an expression ... */
5241 if ((COMPARISON_CLASS_P (op0)
5242 || UNARY_CLASS_P (op0)
5243 || BINARY_CLASS_P (op0)
5244 || EXPRESSION_CLASS_P (op0))
5245 /* ... and is unsigned, and its type is smaller than ctype,
5246 then we cannot pass through as widening. */
5247 && ((TYPE_UNSIGNED (TREE_TYPE (op0))
5248 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
5249 && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
5250 && (GET_MODE_SIZE (TYPE_MODE (ctype))
5251 > GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0)))))
5252 /* ... or this is a truncation (t is narrower than op0),
5253 then we cannot pass through this narrowing. */
5254 || (GET_MODE_SIZE (TYPE_MODE (type))
5255 < GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0))))
5256 /* ... or signedness changes for division or modulus,
5257 then we cannot pass through this conversion. */
5258 || (code != MULT_EXPR
5259 && (TYPE_UNSIGNED (ctype)
5260 != TYPE_UNSIGNED (TREE_TYPE (op0))))))
5263 /* Pass the constant down and see if we can make a simplification. If
5264 we can, replace this expression with the inner simplification for
5265 possible later conversion to our or some other type. */
5266 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5267 && TREE_CODE (t2) == INTEGER_CST
5268 && ! TREE_CONSTANT_OVERFLOW (t2)
5269 && (0 != (t1 = extract_muldiv (op0, t2, code,
5271 ? ctype : NULL_TREE))))
5276 /* If widening the type changes it from signed to unsigned, then we
5277 must avoid building ABS_EXPR itself as unsigned. */
5278 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
5280 tree cstype = (*lang_hooks.types.signed_type) (ctype);
5281 if ((t1 = extract_muldiv (op0, c, code, cstype)) != 0)
5283 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
5284 return fold_convert (ctype, t1);
5290 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
5291 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
5294 case MIN_EXPR: case MAX_EXPR:
5295 /* If widening the type changes the signedness, then we can't perform
5296 this optimization as that changes the result. */
5297 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5300 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5301 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0
5302 && (t2 = extract_muldiv (op1, c, code, wide_type)) != 0)
5304 if (tree_int_cst_sgn (c) < 0)
5305 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5307 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5308 fold_convert (ctype, t2));
5312 case LSHIFT_EXPR: case RSHIFT_EXPR:
5313 /* If the second operand is constant, this is a multiplication
5314 or floor division, by a power of two, so we can treat it that
5315 way unless the multiplier or divisor overflows. Signed
5316 left-shift overflow is implementation-defined rather than
5317 undefined in C90, so do not convert signed left shift into
5319 if (TREE_CODE (op1) == INTEGER_CST
5320 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
5321 /* const_binop may not detect overflow correctly,
5322 so check for it explicitly here. */
5323 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
5324 && TREE_INT_CST_HIGH (op1) == 0
5325 && 0 != (t1 = fold_convert (ctype,
5326 const_binop (LSHIFT_EXPR,
5329 && ! TREE_OVERFLOW (t1))
5330 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5331 ? MULT_EXPR : FLOOR_DIV_EXPR,
5332 ctype, fold_convert (ctype, op0), t1),
5333 c, code, wide_type);
5336 case PLUS_EXPR: case MINUS_EXPR:
5337 /* See if we can eliminate the operation on both sides. If we can, we
5338 can return a new PLUS or MINUS. If we can't, the only remaining
5339 cases where we can do anything are if the second operand is a
5341 t1 = extract_muldiv (op0, c, code, wide_type);
5342 t2 = extract_muldiv (op1, c, code, wide_type);
5343 if (t1 != 0 && t2 != 0
5344 && (code == MULT_EXPR
5345 /* If not multiplication, we can only do this if both operands
5346 are divisible by c. */
5347 || (multiple_of_p (ctype, op0, c)
5348 && multiple_of_p (ctype, op1, c))))
5349 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5350 fold_convert (ctype, t2));
5352 /* If this was a subtraction, negate OP1 and set it to be an addition.
5353 This simplifies the logic below. */
5354 if (tcode == MINUS_EXPR)
5355 tcode = PLUS_EXPR, op1 = negate_expr (op1);
5357 if (TREE_CODE (op1) != INTEGER_CST)
5360 /* If either OP1 or C are negative, this optimization is not safe for
5361 some of the division and remainder types while for others we need
5362 to change the code. */
5363 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5365 if (code == CEIL_DIV_EXPR)
5366 code = FLOOR_DIV_EXPR;
5367 else if (code == FLOOR_DIV_EXPR)
5368 code = CEIL_DIV_EXPR;
5369 else if (code != MULT_EXPR
5370 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5374 /* If it's a multiply or a division/modulus operation of a multiple
5375 of our constant, do the operation and verify it doesn't overflow. */
5376 if (code == MULT_EXPR
5377 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5379 op1 = const_binop (code, fold_convert (ctype, op1),
5380 fold_convert (ctype, c), 0);
5381 /* We allow the constant to overflow with wrapping semantics. */
5383 || (TREE_OVERFLOW (op1) && ! flag_wrapv))
5389 /* If we have an unsigned type is not a sizetype, we cannot widen
5390 the operation since it will change the result if the original
5391 computation overflowed. */
5392 if (TYPE_UNSIGNED (ctype)
5393 && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
5397 /* If we were able to eliminate our operation from the first side,
5398 apply our operation to the second side and reform the PLUS. */
5399 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5400 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
5402 /* The last case is if we are a multiply. In that case, we can
5403 apply the distributive law to commute the multiply and addition
5404 if the multiplication of the constants doesn't overflow. */
5405 if (code == MULT_EXPR)
5406 return fold_build2 (tcode, ctype,
5407 fold_build2 (code, ctype,
5408 fold_convert (ctype, op0),
5409 fold_convert (ctype, c)),
5415 /* We have a special case here if we are doing something like
5416 (C * 8) % 4 since we know that's zero. */
5417 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
5418 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
5419 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
5420 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5421 return omit_one_operand (type, integer_zero_node, op0);
5423 /* ... fall through ... */
5425 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
5426 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
5427 /* If we can extract our operation from the LHS, do so and return a
5428 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
5429 do something only if the second operand is a constant. */
5431 && (t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
5432 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5433 fold_convert (ctype, op1));
5434 else if (tcode == MULT_EXPR && code == MULT_EXPR
5435 && (t1 = extract_muldiv (op1, c, code, wide_type)) != 0)
5436 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5437 fold_convert (ctype, t1));
5438 else if (TREE_CODE (op1) != INTEGER_CST)
5441 /* If these are the same operation types, we can associate them
5442 assuming no overflow. */
5444 && 0 != (t1 = const_binop (MULT_EXPR, fold_convert (ctype, op1),
5445 fold_convert (ctype, c), 0))
5446 && ! TREE_OVERFLOW (t1))
5447 return fold_build2 (tcode, ctype, fold_convert (ctype, op0), t1);
5449 /* If these operations "cancel" each other, we have the main
5450 optimizations of this pass, which occur when either constant is a
5451 multiple of the other, in which case we replace this with either an
5452 operation or CODE or TCODE.
5454 If we have an unsigned type that is not a sizetype, we cannot do
5455 this since it will change the result if the original computation
5457 if ((! TYPE_UNSIGNED (ctype)
5458 || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
5460 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
5461 || (tcode == MULT_EXPR
5462 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
5463 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR)))
5465 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5466 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5467 fold_convert (ctype,
5468 const_binop (TRUNC_DIV_EXPR,
5470 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
5471 return fold_build2 (code, ctype, fold_convert (ctype, op0),
5472 fold_convert (ctype,
5473 const_binop (TRUNC_DIV_EXPR,
5485 /* Return a node which has the indicated constant VALUE (either 0 or
5486 1), and is of the indicated TYPE. */
5489 constant_boolean_node (int value, tree type)
5491 if (type == integer_type_node)
5492 return value ? integer_one_node : integer_zero_node;
5493 else if (type == boolean_type_node)
5494 return value ? boolean_true_node : boolean_false_node;
5496 return build_int_cst (type, value);
5500 /* Return true if expr looks like an ARRAY_REF and set base and
5501 offset to the appropriate trees. If there is no offset,
5502 offset is set to NULL_TREE. Base will be canonicalized to
5503 something you can get the element type from using
5504 TREE_TYPE (TREE_TYPE (base)). */
5507 extract_array_ref (tree expr, tree *base, tree *offset)
5509 /* One canonical form is a PLUS_EXPR with the first
5510 argument being an ADDR_EXPR with a possible NOP_EXPR
5512 if (TREE_CODE (expr) == PLUS_EXPR)
5514 tree op0 = TREE_OPERAND (expr, 0);
5515 tree inner_base, dummy1;
5516 /* Strip NOP_EXPRs here because the C frontends and/or
5517 folders present us (int *)&x.a + 4B possibly. */
5519 if (extract_array_ref (op0, &inner_base, &dummy1))
5522 if (dummy1 == NULL_TREE)
5523 *offset = TREE_OPERAND (expr, 1);
5525 *offset = fold_build2 (PLUS_EXPR, TREE_TYPE (expr),
5526 dummy1, TREE_OPERAND (expr, 1));
5530 /* Other canonical form is an ADDR_EXPR of an ARRAY_REF,
5531 which we transform into an ADDR_EXPR with appropriate
5532 offset. For other arguments to the ADDR_EXPR we assume
5533 zero offset and as such do not care about the ADDR_EXPR
5534 type and strip possible nops from it. */
5535 else if (TREE_CODE (expr) == ADDR_EXPR)
5537 tree op0 = TREE_OPERAND (expr, 0);
5538 if (TREE_CODE (op0) == ARRAY_REF)
5540 *base = TREE_OPERAND (op0, 0);
5541 *offset = TREE_OPERAND (op0, 1);
5545 /* Handle array-to-pointer decay as &a. */
5546 if (TREE_CODE (TREE_TYPE (op0)) == ARRAY_TYPE)
5547 *base = TREE_OPERAND (expr, 0);
5550 *offset = NULL_TREE;
5554 /* The next canonical form is a VAR_DECL with POINTER_TYPE. */
5555 else if (SSA_VAR_P (expr)
5556 && TREE_CODE (TREE_TYPE (expr)) == POINTER_TYPE)
5559 *offset = NULL_TREE;
5567 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
5568 Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
5569 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
5570 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
5571 COND is the first argument to CODE; otherwise (as in the example
5572 given here), it is the second argument. TYPE is the type of the
5573 original expression. Return NULL_TREE if no simplification is
5577 fold_binary_op_with_conditional_arg (enum tree_code code,
5578 tree type, tree op0, tree op1,
5579 tree cond, tree arg, int cond_first_p)
5581 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
5582 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
5583 tree test, true_value, false_value;
5584 tree lhs = NULL_TREE;
5585 tree rhs = NULL_TREE;
5587 /* This transformation is only worthwhile if we don't have to wrap
5588 arg in a SAVE_EXPR, and the operation can be simplified on at least
5589 one of the branches once its pushed inside the COND_EXPR. */
5590 if (!TREE_CONSTANT (arg))
5593 if (TREE_CODE (cond) == COND_EXPR)
5595 test = TREE_OPERAND (cond, 0);
5596 true_value = TREE_OPERAND (cond, 1);
5597 false_value = TREE_OPERAND (cond, 2);
5598 /* If this operand throws an expression, then it does not make
5599 sense to try to perform a logical or arithmetic operation
5601 if (VOID_TYPE_P (TREE_TYPE (true_value)))
5603 if (VOID_TYPE_P (TREE_TYPE (false_value)))
5608 tree testtype = TREE_TYPE (cond);
5610 true_value = constant_boolean_node (true, testtype);
5611 false_value = constant_boolean_node (false, testtype);
5614 arg = fold_convert (arg_type, arg);
5617 true_value = fold_convert (cond_type, true_value);
5619 lhs = fold_build2 (code, type, true_value, arg);
5621 lhs = fold_build2 (code, type, arg, true_value);
5625 false_value = fold_convert (cond_type, false_value);
5627 rhs = fold_build2 (code, type, false_value, arg);
5629 rhs = fold_build2 (code, type, arg, false_value);
5632 test = fold_build3 (COND_EXPR, type, test, lhs, rhs);
5633 return fold_convert (type, test);
5637 /* Subroutine of fold() that checks for the addition of +/- 0.0.
5639 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
5640 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
5641 ADDEND is the same as X.
5643 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
5644 and finite. The problematic cases are when X is zero, and its mode
5645 has signed zeros. In the case of rounding towards -infinity,
5646 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
5647 modes, X + 0 is not the same as X because -0 + 0 is 0. */
5650 fold_real_zero_addition_p (tree type, tree addend, int negate)
5652 if (!real_zerop (addend))
5655 /* Don't allow the fold with -fsignaling-nans. */
5656 if (HONOR_SNANS (TYPE_MODE (type)))
5659 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
5660 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
5663 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
5664 if (TREE_CODE (addend) == REAL_CST
5665 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
5668 /* The mode has signed zeros, and we have to honor their sign.
5669 In this situation, there is only one case we can return true for.
5670 X - 0 is the same as X unless rounding towards -infinity is
5672 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
5675 /* Subroutine of fold() that checks comparisons of built-in math
5676 functions against real constants.
5678 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
5679 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
5680 is the type of the result and ARG0 and ARG1 are the operands of the
5681 comparison. ARG1 must be a TREE_REAL_CST.
5683 The function returns the constant folded tree if a simplification
5684 can be made, and NULL_TREE otherwise. */
5687 fold_mathfn_compare (enum built_in_function fcode, enum tree_code code,
5688 tree type, tree arg0, tree arg1)
5692 if (BUILTIN_SQRT_P (fcode))
5694 tree arg = TREE_VALUE (TREE_OPERAND (arg0, 1));
5695 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
5697 c = TREE_REAL_CST (arg1);
5698 if (REAL_VALUE_NEGATIVE (c))
5700 /* sqrt(x) < y is always false, if y is negative. */
5701 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
5702 return omit_one_operand (type, integer_zero_node, arg);
5704 /* sqrt(x) > y is always true, if y is negative and we
5705 don't care about NaNs, i.e. negative values of x. */
5706 if (code == NE_EXPR || !HONOR_NANS (mode))
5707 return omit_one_operand (type, integer_one_node, arg);
5709 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
5710 return fold_build2 (GE_EXPR, type, arg,
5711 build_real (TREE_TYPE (arg), dconst0));
5713 else if (code == GT_EXPR || code == GE_EXPR)
5717 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
5718 real_convert (&c2, mode, &c2);
5720 if (REAL_VALUE_ISINF (c2))
5722 /* sqrt(x) > y is x == +Inf, when y is very large. */
5723 if (HONOR_INFINITIES (mode))
5724 return fold_build2 (EQ_EXPR, type, arg,
5725 build_real (TREE_TYPE (arg), c2));
5727 /* sqrt(x) > y is always false, when y is very large
5728 and we don't care about infinities. */
5729 return omit_one_operand (type, integer_zero_node, arg);
5732 /* sqrt(x) > c is the same as x > c*c. */
5733 return fold_build2 (code, type, arg,
5734 build_real (TREE_TYPE (arg), c2));
5736 else if (code == LT_EXPR || code == LE_EXPR)
5740 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
5741 real_convert (&c2, mode, &c2);
5743 if (REAL_VALUE_ISINF (c2))
5745 /* sqrt(x) < y is always true, when y is a very large
5746 value and we don't care about NaNs or Infinities. */
5747 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
5748 return omit_one_operand (type, integer_one_node, arg);
5750 /* sqrt(x) < y is x != +Inf when y is very large and we
5751 don't care about NaNs. */
5752 if (! HONOR_NANS (mode))
5753 return fold_build2 (NE_EXPR, type, arg,
5754 build_real (TREE_TYPE (arg), c2));
5756 /* sqrt(x) < y is x >= 0 when y is very large and we
5757 don't care about Infinities. */
5758 if (! HONOR_INFINITIES (mode))
5759 return fold_build2 (GE_EXPR, type, arg,
5760 build_real (TREE_TYPE (arg), dconst0));
5762 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
5763 if (lang_hooks.decls.global_bindings_p () != 0
5764 || CONTAINS_PLACEHOLDER_P (arg))
5767 arg = save_expr (arg);
5768 return fold_build2 (TRUTH_ANDIF_EXPR, type,
5769 fold_build2 (GE_EXPR, type, arg,
5770 build_real (TREE_TYPE (arg),
5772 fold_build2 (NE_EXPR, type, arg,
5773 build_real (TREE_TYPE (arg),
5777 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
5778 if (! HONOR_NANS (mode))
5779 return fold_build2 (code, type, arg,
5780 build_real (TREE_TYPE (arg), c2));
5782 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
5783 if (lang_hooks.decls.global_bindings_p () == 0
5784 && ! CONTAINS_PLACEHOLDER_P (arg))
5786 arg = save_expr (arg);
5787 return fold_build2 (TRUTH_ANDIF_EXPR, type,
5788 fold_build2 (GE_EXPR, type, arg,
5789 build_real (TREE_TYPE (arg),
5791 fold_build2 (code, type, arg,
5792 build_real (TREE_TYPE (arg),
5801 /* Subroutine of fold() that optimizes comparisons against Infinities,
5802 either +Inf or -Inf.
5804 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
5805 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
5806 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
5808 The function returns the constant folded tree if a simplification
5809 can be made, and NULL_TREE otherwise. */
5812 fold_inf_compare (enum tree_code code, tree type, tree arg0, tree arg1)
5814 enum machine_mode mode;
5815 REAL_VALUE_TYPE max;
5819 mode = TYPE_MODE (TREE_TYPE (arg0));
5821 /* For negative infinity swap the sense of the comparison. */
5822 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
5824 code = swap_tree_comparison (code);
5829 /* x > +Inf is always false, if with ignore sNANs. */
5830 if (HONOR_SNANS (mode))
5832 return omit_one_operand (type, integer_zero_node, arg0);
5835 /* x <= +Inf is always true, if we don't case about NaNs. */
5836 if (! HONOR_NANS (mode))
5837 return omit_one_operand (type, integer_one_node, arg0);
5839 /* x <= +Inf is the same as x == x, i.e. isfinite(x). */
5840 if (lang_hooks.decls.global_bindings_p () == 0
5841 && ! CONTAINS_PLACEHOLDER_P (arg0))
5843 arg0 = save_expr (arg0);
5844 return fold_build2 (EQ_EXPR, type, arg0, arg0);
5850 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
5851 real_maxval (&max, neg, mode);
5852 return fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
5853 arg0, build_real (TREE_TYPE (arg0), max));
5856 /* x < +Inf is always equal to x <= DBL_MAX. */
5857 real_maxval (&max, neg, mode);
5858 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
5859 arg0, build_real (TREE_TYPE (arg0), max));
5862 /* x != +Inf is always equal to !(x > DBL_MAX). */
5863 real_maxval (&max, neg, mode);
5864 if (! HONOR_NANS (mode))
5865 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
5866 arg0, build_real (TREE_TYPE (arg0), max));
5868 /* The transformation below creates non-gimple code and thus is
5869 not appropriate if we are in gimple form. */
5873 temp = fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
5874 arg0, build_real (TREE_TYPE (arg0), max));
5875 return fold_build1 (TRUTH_NOT_EXPR, type, temp);
5884 /* Subroutine of fold() that optimizes comparisons of a division by
5885 a nonzero integer constant against an integer constant, i.e.
5888 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
5889 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
5890 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
5892 The function returns the constant folded tree if a simplification
5893 can be made, and NULL_TREE otherwise. */
5896 fold_div_compare (enum tree_code code, tree type, tree arg0, tree arg1)
5898 tree prod, tmp, hi, lo;
5899 tree arg00 = TREE_OPERAND (arg0, 0);
5900 tree arg01 = TREE_OPERAND (arg0, 1);
5901 unsigned HOST_WIDE_INT lpart;
5902 HOST_WIDE_INT hpart;
5905 /* We have to do this the hard way to detect unsigned overflow.
5906 prod = int_const_binop (MULT_EXPR, arg01, arg1, 0); */
5907 overflow = mul_double (TREE_INT_CST_LOW (arg01),
5908 TREE_INT_CST_HIGH (arg01),
5909 TREE_INT_CST_LOW (arg1),
5910 TREE_INT_CST_HIGH (arg1), &lpart, &hpart);
5911 prod = build_int_cst_wide (TREE_TYPE (arg00), lpart, hpart);
5912 prod = force_fit_type (prod, -1, overflow, false);
5914 if (TYPE_UNSIGNED (TREE_TYPE (arg0)))
5916 tmp = int_const_binop (MINUS_EXPR, arg01, integer_one_node, 0);
5919 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0). */
5920 overflow = add_double (TREE_INT_CST_LOW (prod),
5921 TREE_INT_CST_HIGH (prod),
5922 TREE_INT_CST_LOW (tmp),
5923 TREE_INT_CST_HIGH (tmp),
5925 hi = build_int_cst_wide (TREE_TYPE (arg00), lpart, hpart);
5926 hi = force_fit_type (hi, -1, overflow | TREE_OVERFLOW (prod),
5927 TREE_CONSTANT_OVERFLOW (prod));
5929 else if (tree_int_cst_sgn (arg01) >= 0)
5931 tmp = int_const_binop (MINUS_EXPR, arg01, integer_one_node, 0);
5932 switch (tree_int_cst_sgn (arg1))
5935 lo = int_const_binop (MINUS_EXPR, prod, tmp, 0);
5940 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
5945 hi = int_const_binop (PLUS_EXPR, prod, tmp, 0);
5955 /* A negative divisor reverses the relational operators. */
5956 code = swap_tree_comparison (code);
5958 tmp = int_const_binop (PLUS_EXPR, arg01, integer_one_node, 0);
5959 switch (tree_int_cst_sgn (arg1))
5962 hi = int_const_binop (MINUS_EXPR, prod, tmp, 0);
5967 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
5972 lo = int_const_binop (PLUS_EXPR, prod, tmp, 0);
5984 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
5985 return omit_one_operand (type, integer_zero_node, arg00);
5986 if (TREE_OVERFLOW (hi))
5987 return fold_build2 (GE_EXPR, type, arg00, lo);
5988 if (TREE_OVERFLOW (lo))
5989 return fold_build2 (LE_EXPR, type, arg00, hi);
5990 return build_range_check (type, arg00, 1, lo, hi);
5993 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
5994 return omit_one_operand (type, integer_one_node, arg00);
5995 if (TREE_OVERFLOW (hi))
5996 return fold_build2 (LT_EXPR, type, arg00, lo);
5997 if (TREE_OVERFLOW (lo))
5998 return fold_build2 (GT_EXPR, type, arg00, hi);
5999 return build_range_check (type, arg00, 0, lo, hi);
6002 if (TREE_OVERFLOW (lo))
6003 return omit_one_operand (type, integer_zero_node, arg00);
6004 return fold_build2 (LT_EXPR, type, arg00, lo);
6007 if (TREE_OVERFLOW (hi))
6008 return omit_one_operand (type, integer_one_node, arg00);
6009 return fold_build2 (LE_EXPR, type, arg00, hi);
6012 if (TREE_OVERFLOW (hi))
6013 return omit_one_operand (type, integer_zero_node, arg00);
6014 return fold_build2 (GT_EXPR, type, arg00, hi);
6017 if (TREE_OVERFLOW (lo))
6018 return omit_one_operand (type, integer_one_node, arg00);
6019 return fold_build2 (GE_EXPR, type, arg00, lo);
6029 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6030 equality/inequality test, then return a simplified form of the test
6031 using a sign testing. Otherwise return NULL. TYPE is the desired
6035 fold_single_bit_test_into_sign_test (enum tree_code code, tree arg0, tree arg1,
6038 /* If this is testing a single bit, we can optimize the test. */
6039 if ((code == NE_EXPR || code == EQ_EXPR)
6040 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6041 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6043 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6044 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6045 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6047 if (arg00 != NULL_TREE
6048 /* This is only a win if casting to a signed type is cheap,
6049 i.e. when arg00's type is not a partial mode. */
6050 && TYPE_PRECISION (TREE_TYPE (arg00))
6051 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
6053 tree stype = lang_hooks.types.signed_type (TREE_TYPE (arg00));
6054 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6055 result_type, fold_convert (stype, arg00),
6056 fold_convert (stype, integer_zero_node));
6063 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6064 equality/inequality test, then return a simplified form of
6065 the test using shifts and logical operations. Otherwise return
6066 NULL. TYPE is the desired result type. */
6069 fold_single_bit_test (enum tree_code code, tree arg0, tree arg1,
6072 /* If this is testing a single bit, we can optimize the test. */
6073 if ((code == NE_EXPR || code == EQ_EXPR)
6074 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6075 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6077 tree inner = TREE_OPERAND (arg0, 0);
6078 tree type = TREE_TYPE (arg0);
6079 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6080 enum machine_mode operand_mode = TYPE_MODE (type);
6082 tree signed_type, unsigned_type, intermediate_type;
6085 /* First, see if we can fold the single bit test into a sign-bit
6087 tem = fold_single_bit_test_into_sign_test (code, arg0, arg1,
6092 /* Otherwise we have (A & C) != 0 where C is a single bit,
6093 convert that into ((A >> C2) & 1). Where C2 = log2(C).
6094 Similarly for (A & C) == 0. */
6096 /* If INNER is a right shift of a constant and it plus BITNUM does
6097 not overflow, adjust BITNUM and INNER. */
6098 if (TREE_CODE (inner) == RSHIFT_EXPR
6099 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6100 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
6101 && bitnum < TYPE_PRECISION (type)
6102 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
6103 bitnum - TYPE_PRECISION (type)))
6105 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
6106 inner = TREE_OPERAND (inner, 0);
6109 /* If we are going to be able to omit the AND below, we must do our
6110 operations as unsigned. If we must use the AND, we have a choice.
6111 Normally unsigned is faster, but for some machines signed is. */
6112 #ifdef LOAD_EXTEND_OP
6113 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6114 && !flag_syntax_only) ? 0 : 1;
6119 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6120 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6121 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6122 inner = fold_convert (intermediate_type, inner);
6125 inner = build2 (RSHIFT_EXPR, intermediate_type,
6126 inner, size_int (bitnum));
6128 if (code == EQ_EXPR)
6129 inner = fold_build2 (BIT_XOR_EXPR, intermediate_type,
6130 inner, integer_one_node);
6132 /* Put the AND last so it can combine with more things. */
6133 inner = build2 (BIT_AND_EXPR, intermediate_type,
6134 inner, integer_one_node);
6136 /* Make sure to return the proper type. */
6137 inner = fold_convert (result_type, inner);
6144 /* Check whether we are allowed to reorder operands arg0 and arg1,
6145 such that the evaluation of arg1 occurs before arg0. */
6148 reorder_operands_p (tree arg0, tree arg1)
6150 if (! flag_evaluation_order)
6152 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6154 return ! TREE_SIDE_EFFECTS (arg0)
6155 && ! TREE_SIDE_EFFECTS (arg1);
6158 /* Test whether it is preferable two swap two operands, ARG0 and
6159 ARG1, for example because ARG0 is an integer constant and ARG1
6160 isn't. If REORDER is true, only recommend swapping if we can
6161 evaluate the operands in reverse order. */
6164 tree_swap_operands_p (tree arg0, tree arg1, bool reorder)
6166 STRIP_SIGN_NOPS (arg0);
6167 STRIP_SIGN_NOPS (arg1);
6169 if (TREE_CODE (arg1) == INTEGER_CST)
6171 if (TREE_CODE (arg0) == INTEGER_CST)
6174 if (TREE_CODE (arg1) == REAL_CST)
6176 if (TREE_CODE (arg0) == REAL_CST)
6179 if (TREE_CODE (arg1) == COMPLEX_CST)
6181 if (TREE_CODE (arg0) == COMPLEX_CST)
6184 if (TREE_CONSTANT (arg1))
6186 if (TREE_CONSTANT (arg0))
6192 if (reorder && flag_evaluation_order
6193 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6201 /* It is preferable to swap two SSA_NAME to ensure a canonical form
6202 for commutative and comparison operators. Ensuring a canonical
6203 form allows the optimizers to find additional redundancies without
6204 having to explicitly check for both orderings. */
6205 if (TREE_CODE (arg0) == SSA_NAME
6206 && TREE_CODE (arg1) == SSA_NAME
6207 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6213 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6214 ARG0 is extended to a wider type. */
6217 fold_widened_comparison (enum tree_code code, tree type, tree arg0, tree arg1)
6219 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6221 tree shorter_type, outer_type;
6225 if (arg0_unw == arg0)
6227 shorter_type = TREE_TYPE (arg0_unw);
6229 #ifdef HAVE_canonicalize_funcptr_for_compare
6230 /* Disable this optimization if we're casting a function pointer
6231 type on targets that require function pointer canonicalization. */
6232 if (HAVE_canonicalize_funcptr_for_compare
6233 && TREE_CODE (shorter_type) == POINTER_TYPE
6234 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6238 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6241 arg1_unw = get_unwidened (arg1, shorter_type);
6245 /* If possible, express the comparison in the shorter mode. */
6246 if ((code == EQ_EXPR || code == NE_EXPR
6247 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6248 && (TREE_TYPE (arg1_unw) == shorter_type
6249 || (TREE_CODE (arg1_unw) == INTEGER_CST
6250 && (TREE_CODE (shorter_type) == INTEGER_TYPE
6251 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
6252 && int_fits_type_p (arg1_unw, shorter_type))))
6253 return fold_build2 (code, type, arg0_unw,
6254 fold_convert (shorter_type, arg1_unw));
6256 if (TREE_CODE (arg1_unw) != INTEGER_CST)
6259 /* If we are comparing with the integer that does not fit into the range
6260 of the shorter type, the result is known. */
6261 outer_type = TREE_TYPE (arg1_unw);
6262 min = lower_bound_in_type (outer_type, shorter_type);
6263 max = upper_bound_in_type (outer_type, shorter_type);
6265 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6267 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6274 return omit_one_operand (type, integer_zero_node, arg0);
6279 return omit_one_operand (type, integer_one_node, arg0);
6285 return omit_one_operand (type, integer_one_node, arg0);
6287 return omit_one_operand (type, integer_zero_node, arg0);
6292 return omit_one_operand (type, integer_zero_node, arg0);
6294 return omit_one_operand (type, integer_one_node, arg0);
6303 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
6304 ARG0 just the signedness is changed. */
6307 fold_sign_changed_comparison (enum tree_code code, tree type,
6308 tree arg0, tree arg1)
6310 tree arg0_inner, tmp;
6311 tree inner_type, outer_type;
6313 if (TREE_CODE (arg0) != NOP_EXPR
6314 && TREE_CODE (arg0) != CONVERT_EXPR)
6317 outer_type = TREE_TYPE (arg0);
6318 arg0_inner = TREE_OPERAND (arg0, 0);
6319 inner_type = TREE_TYPE (arg0_inner);
6321 #ifdef HAVE_canonicalize_funcptr_for_compare
6322 /* Disable this optimization if we're casting a function pointer
6323 type on targets that require function pointer canonicalization. */
6324 if (HAVE_canonicalize_funcptr_for_compare
6325 && TREE_CODE (inner_type) == POINTER_TYPE
6326 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
6330 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
6333 if (TREE_CODE (arg1) != INTEGER_CST
6334 && !((TREE_CODE (arg1) == NOP_EXPR
6335 || TREE_CODE (arg1) == CONVERT_EXPR)
6336 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
6339 if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
6344 if (TREE_CODE (arg1) == INTEGER_CST)
6346 tmp = build_int_cst_wide (inner_type,
6347 TREE_INT_CST_LOW (arg1),
6348 TREE_INT_CST_HIGH (arg1));
6349 arg1 = force_fit_type (tmp, 0,
6350 TREE_OVERFLOW (arg1),
6351 TREE_CONSTANT_OVERFLOW (arg1));
6354 arg1 = fold_convert (inner_type, arg1);
6356 return fold_build2 (code, type, arg0_inner, arg1);
6359 /* Tries to replace &a[idx] CODE s * delta with &a[idx CODE delta], if s is
6360 step of the array. Reconstructs s and delta in the case of s * delta
6361 being an integer constant (and thus already folded).
6362 ADDR is the address. MULT is the multiplicative expression.
6363 If the function succeeds, the new address expression is returned. Otherwise
6364 NULL_TREE is returned. */
6367 try_move_mult_to_index (enum tree_code code, tree addr, tree op1)
6369 tree s, delta, step;
6370 tree ref = TREE_OPERAND (addr, 0), pref;
6374 /* Canonicalize op1 into a possibly non-constant delta
6375 and an INTEGER_CST s. */
6376 if (TREE_CODE (op1) == MULT_EXPR)
6378 tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);
6383 if (TREE_CODE (arg0) == INTEGER_CST)
6388 else if (TREE_CODE (arg1) == INTEGER_CST)
6396 else if (TREE_CODE (op1) == INTEGER_CST)
6403 /* Simulate we are delta * 1. */
6405 s = integer_one_node;
6408 for (;; ref = TREE_OPERAND (ref, 0))
6410 if (TREE_CODE (ref) == ARRAY_REF)
6412 itype = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
6416 step = array_ref_element_size (ref);
6417 if (TREE_CODE (step) != INTEGER_CST)
6422 if (! tree_int_cst_equal (step, s))
6427 /* Try if delta is a multiple of step. */
6428 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, delta, step);
6437 if (!handled_component_p (ref))
6441 /* We found the suitable array reference. So copy everything up to it,
6442 and replace the index. */
6444 pref = TREE_OPERAND (addr, 0);
6445 ret = copy_node (pref);
6450 pref = TREE_OPERAND (pref, 0);
6451 TREE_OPERAND (pos, 0) = copy_node (pref);
6452 pos = TREE_OPERAND (pos, 0);
6455 TREE_OPERAND (pos, 1) = fold_build2 (code, itype,
6456 fold_convert (itype,
6457 TREE_OPERAND (pos, 1)),
6458 fold_convert (itype, delta));
6460 return build1 (ADDR_EXPR, TREE_TYPE (addr), ret);
6464 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6465 means A >= Y && A != MAX, but in this case we know that
6466 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
6469 fold_to_nonsharp_ineq_using_bound (tree ineq, tree bound)
6471 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
6473 if (TREE_CODE (bound) == LT_EXPR)
6474 a = TREE_OPERAND (bound, 0);
6475 else if (TREE_CODE (bound) == GT_EXPR)
6476 a = TREE_OPERAND (bound, 1);
6480 typea = TREE_TYPE (a);
6481 if (!INTEGRAL_TYPE_P (typea)
6482 && !POINTER_TYPE_P (typea))
6485 if (TREE_CODE (ineq) == LT_EXPR)
6487 a1 = TREE_OPERAND (ineq, 1);
6488 y = TREE_OPERAND (ineq, 0);
6490 else if (TREE_CODE (ineq) == GT_EXPR)
6492 a1 = TREE_OPERAND (ineq, 0);
6493 y = TREE_OPERAND (ineq, 1);
6498 if (TREE_TYPE (a1) != typea)
6501 diff = fold_build2 (MINUS_EXPR, typea, a1, a);
6502 if (!integer_onep (diff))
6505 return fold_build2 (GE_EXPR, type, a, y);
6508 /* Fold a unary expression of code CODE and type TYPE with operand
6509 OP0. Return the folded expression if folding is successful.
6510 Otherwise, return NULL_TREE. */
6513 fold_unary (enum tree_code code, tree type, tree op0)
6517 enum tree_code_class kind = TREE_CODE_CLASS (code);
6519 gcc_assert (IS_EXPR_CODE_CLASS (kind)
6520 && TREE_CODE_LENGTH (code) == 1);
6525 if (code == NOP_EXPR || code == FLOAT_EXPR || code == CONVERT_EXPR)
6527 /* Don't use STRIP_NOPS, because signedness of argument type matters. */
6528 STRIP_SIGN_NOPS (arg0);
6532 /* Strip any conversions that don't change the mode. This
6533 is safe for every expression, except for a comparison
6534 expression because its signedness is derived from its
6537 Note that this is done as an internal manipulation within
6538 the constant folder, in order to find the simplest
6539 representation of the arguments so that their form can be
6540 studied. In any cases, the appropriate type conversions
6541 should be put back in the tree that will get out of the
6547 if (TREE_CODE_CLASS (code) == tcc_unary)
6549 if (TREE_CODE (arg0) == COMPOUND_EXPR)
6550 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
6551 fold_build1 (code, type, TREE_OPERAND (arg0, 1)));
6552 else if (TREE_CODE (arg0) == COND_EXPR)
6554 tree arg01 = TREE_OPERAND (arg0, 1);
6555 tree arg02 = TREE_OPERAND (arg0, 2);
6556 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
6557 arg01 = fold_build1 (code, type, arg01);
6558 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
6559 arg02 = fold_build1 (code, type, arg02);
6560 tem = fold_build3 (COND_EXPR, type, TREE_OPERAND (arg0, 0),
6563 /* If this was a conversion, and all we did was to move into
6564 inside the COND_EXPR, bring it back out. But leave it if
6565 it is a conversion from integer to integer and the
6566 result precision is no wider than a word since such a
6567 conversion is cheap and may be optimized away by combine,
6568 while it couldn't if it were outside the COND_EXPR. Then return
6569 so we don't get into an infinite recursion loop taking the
6570 conversion out and then back in. */
6572 if ((code == NOP_EXPR || code == CONVERT_EXPR
6573 || code == NON_LVALUE_EXPR)
6574 && TREE_CODE (tem) == COND_EXPR
6575 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
6576 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
6577 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
6578 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
6579 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
6580 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
6581 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
6583 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
6584 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
6585 || flag_syntax_only))
6586 tem = build1 (code, type,
6588 TREE_TYPE (TREE_OPERAND
6589 (TREE_OPERAND (tem, 1), 0)),
6590 TREE_OPERAND (tem, 0),
6591 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
6592 TREE_OPERAND (TREE_OPERAND (tem, 2), 0)));
6595 else if (COMPARISON_CLASS_P (arg0))
6597 if (TREE_CODE (type) == BOOLEAN_TYPE)
6599 arg0 = copy_node (arg0);
6600 TREE_TYPE (arg0) = type;
6603 else if (TREE_CODE (type) != INTEGER_TYPE)
6604 return fold_build3 (COND_EXPR, type, arg0,
6605 fold_build1 (code, type,
6607 fold_build1 (code, type,
6608 integer_zero_node));
6617 case FIX_TRUNC_EXPR:
6619 case FIX_FLOOR_EXPR:
6620 case FIX_ROUND_EXPR:
6621 if (TREE_TYPE (op0) == type)
6624 /* Handle cases of two conversions in a row. */
6625 if (TREE_CODE (op0) == NOP_EXPR
6626 || TREE_CODE (op0) == CONVERT_EXPR)
6628 tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
6629 tree inter_type = TREE_TYPE (op0);
6630 int inside_int = INTEGRAL_TYPE_P (inside_type);
6631 int inside_ptr = POINTER_TYPE_P (inside_type);
6632 int inside_float = FLOAT_TYPE_P (inside_type);
6633 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
6634 unsigned int inside_prec = TYPE_PRECISION (inside_type);
6635 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
6636 int inter_int = INTEGRAL_TYPE_P (inter_type);
6637 int inter_ptr = POINTER_TYPE_P (inter_type);
6638 int inter_float = FLOAT_TYPE_P (inter_type);
6639 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
6640 unsigned int inter_prec = TYPE_PRECISION (inter_type);
6641 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
6642 int final_int = INTEGRAL_TYPE_P (type);
6643 int final_ptr = POINTER_TYPE_P (type);
6644 int final_float = FLOAT_TYPE_P (type);
6645 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
6646 unsigned int final_prec = TYPE_PRECISION (type);
6647 int final_unsignedp = TYPE_UNSIGNED (type);
6649 /* In addition to the cases of two conversions in a row
6650 handled below, if we are converting something to its own
6651 type via an object of identical or wider precision, neither
6652 conversion is needed. */
6653 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
6654 && ((inter_int && final_int) || (inter_float && final_float))
6655 && inter_prec >= final_prec)
6656 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
6658 /* Likewise, if the intermediate and final types are either both
6659 float or both integer, we don't need the middle conversion if
6660 it is wider than the final type and doesn't change the signedness
6661 (for integers). Avoid this if the final type is a pointer
6662 since then we sometimes need the inner conversion. Likewise if
6663 the outer has a precision not equal to the size of its mode. */
6664 if ((((inter_int || inter_ptr) && (inside_int || inside_ptr))
6665 || (inter_float && inside_float)
6666 || (inter_vec && inside_vec))
6667 && inter_prec >= inside_prec
6668 && (inter_float || inter_vec
6669 || inter_unsignedp == inside_unsignedp)
6670 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
6671 && TYPE_MODE (type) == TYPE_MODE (inter_type))
6673 && (! final_vec || inter_prec == inside_prec))
6674 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
6676 /* If we have a sign-extension of a zero-extended value, we can
6677 replace that by a single zero-extension. */
6678 if (inside_int && inter_int && final_int
6679 && inside_prec < inter_prec && inter_prec < final_prec
6680 && inside_unsignedp && !inter_unsignedp)
6681 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
6683 /* Two conversions in a row are not needed unless:
6684 - some conversion is floating-point (overstrict for now), or
6685 - some conversion is a vector (overstrict for now), or
6686 - the intermediate type is narrower than both initial and
6688 - the intermediate type and innermost type differ in signedness,
6689 and the outermost type is wider than the intermediate, or
6690 - the initial type is a pointer type and the precisions of the
6691 intermediate and final types differ, or
6692 - the final type is a pointer type and the precisions of the
6693 initial and intermediate types differ. */
6694 if (! inside_float && ! inter_float && ! final_float
6695 && ! inside_vec && ! inter_vec && ! final_vec
6696 && (inter_prec > inside_prec || inter_prec > final_prec)
6697 && ! (inside_int && inter_int
6698 && inter_unsignedp != inside_unsignedp
6699 && inter_prec < final_prec)
6700 && ((inter_unsignedp && inter_prec > inside_prec)
6701 == (final_unsignedp && final_prec > inter_prec))
6702 && ! (inside_ptr && inter_prec != final_prec)
6703 && ! (final_ptr && inside_prec != inter_prec)
6704 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
6705 && TYPE_MODE (type) == TYPE_MODE (inter_type))
6707 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
6710 if (TREE_CODE (op0) == MODIFY_EXPR
6711 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
6712 /* Detect assigning a bitfield. */
6713 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
6714 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
6716 /* Don't leave an assignment inside a conversion
6717 unless assigning a bitfield. */
6718 tem = fold_build1 (code, type, TREE_OPERAND (op0, 1));
6719 /* First do the assignment, then return converted constant. */
6720 tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
6721 TREE_NO_WARNING (tem) = 1;
6722 TREE_USED (tem) = 1;
6726 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
6727 constants (if x has signed type, the sign bit cannot be set
6728 in c). This folds extension into the BIT_AND_EXPR. */
6729 if (INTEGRAL_TYPE_P (type)
6730 && TREE_CODE (type) != BOOLEAN_TYPE
6731 && TREE_CODE (op0) == BIT_AND_EXPR
6732 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
6735 tree and0 = TREE_OPERAND (and, 0), and1 = TREE_OPERAND (and, 1);
6738 if (TYPE_UNSIGNED (TREE_TYPE (and))
6739 || (TYPE_PRECISION (type)
6740 <= TYPE_PRECISION (TREE_TYPE (and))))
6742 else if (TYPE_PRECISION (TREE_TYPE (and1))
6743 <= HOST_BITS_PER_WIDE_INT
6744 && host_integerp (and1, 1))
6746 unsigned HOST_WIDE_INT cst;
6748 cst = tree_low_cst (and1, 1);
6749 cst &= (HOST_WIDE_INT) -1
6750 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
6751 change = (cst == 0);
6752 #ifdef LOAD_EXTEND_OP
6754 && !flag_syntax_only
6755 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
6758 tree uns = lang_hooks.types.unsigned_type (TREE_TYPE (and0));
6759 and0 = fold_convert (uns, and0);
6760 and1 = fold_convert (uns, and1);
6766 tem = build_int_cst_wide (type, TREE_INT_CST_LOW (and1),
6767 TREE_INT_CST_HIGH (and1));
6768 tem = force_fit_type (tem, 0, TREE_OVERFLOW (and1),
6769 TREE_CONSTANT_OVERFLOW (and1));
6770 return fold_build2 (BIT_AND_EXPR, type,
6771 fold_convert (type, and0), tem);
6775 /* Convert (T1)((T2)X op Y) into (T1)X op Y, for pointer types T1 and
6776 T2 being pointers to types of the same size. */
6777 if (POINTER_TYPE_P (type)
6778 && BINARY_CLASS_P (arg0)
6779 && TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
6780 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 0))))
6782 tree arg00 = TREE_OPERAND (arg0, 0);
6784 tree t1 = TREE_TYPE (arg00);
6785 tree tt0 = TREE_TYPE (t0);
6786 tree tt1 = TREE_TYPE (t1);
6787 tree s0 = TYPE_SIZE (tt0);
6788 tree s1 = TYPE_SIZE (tt1);
6790 if (s0 && s1 && operand_equal_p (s0, s1, OEP_ONLY_CONST))
6791 return build2 (TREE_CODE (arg0), t0, fold_convert (t0, arg00),
6792 TREE_OPERAND (arg0, 1));
6795 tem = fold_convert_const (code, type, arg0);
6796 return tem ? tem : NULL_TREE;
6798 case VIEW_CONVERT_EXPR:
6799 if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
6800 return build1 (VIEW_CONVERT_EXPR, type, TREE_OPERAND (op0, 0));
6804 if (negate_expr_p (arg0))
6805 return fold_convert (type, negate_expr (arg0));
6806 /* Convert - (~A) to A + 1. */
6807 if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == BIT_NOT_EXPR)
6808 return fold_build2 (PLUS_EXPR, type, TREE_OPERAND (arg0, 0),
6809 build_int_cst (type, 1));
6813 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
6814 return fold_abs_const (arg0, type);
6815 else if (TREE_CODE (arg0) == NEGATE_EXPR)
6816 return fold_build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0));
6817 /* Convert fabs((double)float) into (double)fabsf(float). */
6818 else if (TREE_CODE (arg0) == NOP_EXPR
6819 && TREE_CODE (type) == REAL_TYPE)
6821 tree targ0 = strip_float_extensions (arg0);
6823 return fold_convert (type, fold_build1 (ABS_EXPR,
6827 else if (tree_expr_nonnegative_p (arg0))
6830 /* Strip sign ops from argument. */
6831 if (TREE_CODE (type) == REAL_TYPE)
6833 tem = fold_strip_sign_ops (arg0);
6835 return fold_build1 (ABS_EXPR, type, fold_convert (type, tem));
6840 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
6841 return fold_convert (type, arg0);
6842 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
6843 return build2 (COMPLEX_EXPR, type,
6844 TREE_OPERAND (arg0, 0),
6845 negate_expr (TREE_OPERAND (arg0, 1)));
6846 else if (TREE_CODE (arg0) == COMPLEX_CST)
6847 return build_complex (type, TREE_REALPART (arg0),
6848 negate_expr (TREE_IMAGPART (arg0)));
6849 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
6850 return fold_build2 (TREE_CODE (arg0), type,
6851 fold_build1 (CONJ_EXPR, type,
6852 TREE_OPERAND (arg0, 0)),
6853 fold_build1 (CONJ_EXPR, type,
6854 TREE_OPERAND (arg0, 1)));
6855 else if (TREE_CODE (arg0) == CONJ_EXPR)
6856 return TREE_OPERAND (arg0, 0);
6860 if (TREE_CODE (arg0) == INTEGER_CST)
6861 return fold_not_const (arg0, type);
6862 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
6863 return TREE_OPERAND (arg0, 0);
6864 /* Convert ~ (-A) to A - 1. */
6865 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
6866 return fold_build2 (MINUS_EXPR, type, TREE_OPERAND (arg0, 0),
6867 build_int_cst (type, 1));
6868 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
6869 else if (INTEGRAL_TYPE_P (type)
6870 && ((TREE_CODE (arg0) == MINUS_EXPR
6871 && integer_onep (TREE_OPERAND (arg0, 1)))
6872 || (TREE_CODE (arg0) == PLUS_EXPR
6873 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
6874 return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));
6875 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
6876 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
6877 && (tem = fold_unary (BIT_NOT_EXPR, type,
6879 TREE_OPERAND (arg0, 0)))))
6880 return fold_build2 (BIT_XOR_EXPR, type, tem,
6881 fold_convert (type, TREE_OPERAND (arg0, 1)));
6882 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
6883 && (tem = fold_unary (BIT_NOT_EXPR, type,
6885 TREE_OPERAND (arg0, 1)))))
6886 return fold_build2 (BIT_XOR_EXPR, type,
6887 fold_convert (type, TREE_OPERAND (arg0, 0)), tem);
6891 case TRUTH_NOT_EXPR:
6892 /* The argument to invert_truthvalue must have Boolean type. */
6893 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
6894 arg0 = fold_convert (boolean_type_node, arg0);
6896 /* Note that the operand of this must be an int
6897 and its values must be 0 or 1.
6898 ("true" is a fixed value perhaps depending on the language,
6899 but we don't handle values other than 1 correctly yet.) */
6900 tem = invert_truthvalue (arg0);
6901 /* Avoid infinite recursion. */
6902 if (TREE_CODE (tem) == TRUTH_NOT_EXPR)
6904 return fold_convert (type, tem);
6907 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
6909 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
6910 return omit_one_operand (type, TREE_OPERAND (arg0, 0),
6911 TREE_OPERAND (arg0, 1));
6912 else if (TREE_CODE (arg0) == COMPLEX_CST)
6913 return TREE_REALPART (arg0);
6914 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
6915 return fold_build2 (TREE_CODE (arg0), type,
6916 fold_build1 (REALPART_EXPR, type,
6917 TREE_OPERAND (arg0, 0)),
6918 fold_build1 (REALPART_EXPR, type,
6919 TREE_OPERAND (arg0, 1)));
6923 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
6924 return fold_convert (type, integer_zero_node);
6925 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
6926 return omit_one_operand (type, TREE_OPERAND (arg0, 1),
6927 TREE_OPERAND (arg0, 0));
6928 else if (TREE_CODE (arg0) == COMPLEX_CST)
6929 return TREE_IMAGPART (arg0);
6930 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
6931 return fold_build2 (TREE_CODE (arg0), type,
6932 fold_build1 (IMAGPART_EXPR, type,
6933 TREE_OPERAND (arg0, 0)),
6934 fold_build1 (IMAGPART_EXPR, type,
6935 TREE_OPERAND (arg0, 1)));
6940 } /* switch (code) */
6943 /* Fold a binary expression of code CODE and type TYPE with operands
6944 OP0 and OP1. Return the folded expression if folding is
6945 successful. Otherwise, return NULL_TREE. */
6948 fold_binary (enum tree_code code, tree type, tree op0, tree op1)
6950 tree t1 = NULL_TREE;
6952 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
6953 enum tree_code_class kind = TREE_CODE_CLASS (code);
6955 /* WINS will be nonzero when the switch is done
6956 if all operands are constant. */
6959 gcc_assert (IS_EXPR_CODE_CLASS (kind)
6960 && TREE_CODE_LENGTH (code) == 2);
6969 /* Strip any conversions that don't change the mode. This is
6970 safe for every expression, except for a comparison expression
6971 because its signedness is derived from its operands. So, in
6972 the latter case, only strip conversions that don't change the
6975 Note that this is done as an internal manipulation within the
6976 constant folder, in order to find the simplest representation
6977 of the arguments so that their form can be studied. In any
6978 cases, the appropriate type conversions should be put back in
6979 the tree that will get out of the constant folder. */
6980 if (kind == tcc_comparison)
6981 STRIP_SIGN_NOPS (arg0);
6985 if (TREE_CODE (arg0) == COMPLEX_CST)
6986 subop = TREE_REALPART (arg0);
6990 if (TREE_CODE (subop) != INTEGER_CST
6991 && TREE_CODE (subop) != REAL_CST)
6992 /* Note that TREE_CONSTANT isn't enough:
6993 static var addresses are constant but we can't
6994 do arithmetic on them. */
7002 /* Strip any conversions that don't change the mode. This is
7003 safe for every expression, except for a comparison expression
7004 because its signedness is derived from its operands. So, in
7005 the latter case, only strip conversions that don't change the
7008 Note that this is done as an internal manipulation within the
7009 constant folder, in order to find the simplest representation
7010 of the arguments so that their form can be studied. In any
7011 cases, the appropriate type conversions should be put back in
7012 the tree that will get out of the constant folder. */
7013 if (kind == tcc_comparison)
7014 STRIP_SIGN_NOPS (arg1);
7018 if (TREE_CODE (arg1) == COMPLEX_CST)
7019 subop = TREE_REALPART (arg1);
7023 if (TREE_CODE (subop) != INTEGER_CST
7024 && TREE_CODE (subop) != REAL_CST)
7025 /* Note that TREE_CONSTANT isn't enough:
7026 static var addresses are constant but we can't
7027 do arithmetic on them. */
7031 /* If this is a commutative operation, and ARG0 is a constant, move it
7032 to ARG1 to reduce the number of tests below. */
7033 if (commutative_tree_code (code)
7034 && tree_swap_operands_p (arg0, arg1, true))
7035 return fold_build2 (code, type, op1, op0);
7037 /* Now WINS is set as described above,
7038 ARG0 is the first operand of EXPR,
7039 and ARG1 is the second operand (if it has more than one operand).
7041 First check for cases where an arithmetic operation is applied to a
7042 compound, conditional, or comparison operation. Push the arithmetic
7043 operation inside the compound or conditional to see if any folding
7044 can then be done. Convert comparison to conditional for this purpose.
7045 The also optimizes non-constant cases that used to be done in
7048 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
7049 one of the operands is a comparison and the other is a comparison, a
7050 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
7051 code below would make the expression more complex. Change it to a
7052 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
7053 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
7055 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
7056 || code == EQ_EXPR || code == NE_EXPR)
7057 && ((truth_value_p (TREE_CODE (arg0))
7058 && (truth_value_p (TREE_CODE (arg1))
7059 || (TREE_CODE (arg1) == BIT_AND_EXPR
7060 && integer_onep (TREE_OPERAND (arg1, 1)))))
7061 || (truth_value_p (TREE_CODE (arg1))
7062 && (truth_value_p (TREE_CODE (arg0))
7063 || (TREE_CODE (arg0) == BIT_AND_EXPR
7064 && integer_onep (TREE_OPERAND (arg0, 1)))))))
7066 tem = fold_build2 (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
7067 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
7070 fold_convert (boolean_type_node, arg0),
7071 fold_convert (boolean_type_node, arg1));
7073 if (code == EQ_EXPR)
7074 tem = invert_truthvalue (tem);
7076 return fold_convert (type, tem);
7079 if (TREE_CODE_CLASS (code) == tcc_comparison
7080 && TREE_CODE (arg0) == COMPOUND_EXPR)
7081 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7082 fold_build2 (code, type, TREE_OPERAND (arg0, 1), arg1));
7083 else if (TREE_CODE_CLASS (code) == tcc_comparison
7084 && TREE_CODE (arg1) == COMPOUND_EXPR)
7085 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
7086 fold_build2 (code, type, arg0, TREE_OPERAND (arg1, 1)));
7087 else if (TREE_CODE_CLASS (code) == tcc_binary
7088 || TREE_CODE_CLASS (code) == tcc_comparison)
7090 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7091 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7092 fold_build2 (code, type, TREE_OPERAND (arg0, 1),
7094 if (TREE_CODE (arg1) == COMPOUND_EXPR
7095 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
7096 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
7097 fold_build2 (code, type,
7098 arg0, TREE_OPERAND (arg1, 1)));
7100 if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
7102 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
7104 /*cond_first_p=*/1);
7105 if (tem != NULL_TREE)
7109 if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1))
7111 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
7113 /*cond_first_p=*/0);
7114 if (tem != NULL_TREE)
7122 /* A + (-B) -> A - B */
7123 if (TREE_CODE (arg1) == NEGATE_EXPR)
7124 return fold_build2 (MINUS_EXPR, type,
7125 fold_convert (type, arg0),
7126 fold_convert (type, TREE_OPERAND (arg1, 0)));
7127 /* (-A) + B -> B - A */
7128 if (TREE_CODE (arg0) == NEGATE_EXPR
7129 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
7130 return fold_build2 (MINUS_EXPR, type,
7131 fold_convert (type, arg1),
7132 fold_convert (type, TREE_OPERAND (arg0, 0)));
7133 /* Convert ~A + 1 to -A. */
7134 if (INTEGRAL_TYPE_P (type)
7135 && TREE_CODE (arg0) == BIT_NOT_EXPR
7136 && integer_onep (arg1))
7137 return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));
7139 if (! FLOAT_TYPE_P (type))
7141 if (integer_zerop (arg1))
7142 return non_lvalue (fold_convert (type, arg0));
7144 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
7145 with a constant, and the two constants have no bits in common,
7146 we should treat this as a BIT_IOR_EXPR since this may produce more
7148 if (TREE_CODE (arg0) == BIT_AND_EXPR
7149 && TREE_CODE (arg1) == BIT_AND_EXPR
7150 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7151 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
7152 && integer_zerop (const_binop (BIT_AND_EXPR,
7153 TREE_OPERAND (arg0, 1),
7154 TREE_OPERAND (arg1, 1), 0)))
7156 code = BIT_IOR_EXPR;
7160 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
7161 (plus (plus (mult) (mult)) (foo)) so that we can
7162 take advantage of the factoring cases below. */
7163 if (((TREE_CODE (arg0) == PLUS_EXPR
7164 || TREE_CODE (arg0) == MINUS_EXPR)
7165 && TREE_CODE (arg1) == MULT_EXPR)
7166 || ((TREE_CODE (arg1) == PLUS_EXPR
7167 || TREE_CODE (arg1) == MINUS_EXPR)
7168 && TREE_CODE (arg0) == MULT_EXPR))
7170 tree parg0, parg1, parg, marg;
7171 enum tree_code pcode;
7173 if (TREE_CODE (arg1) == MULT_EXPR)
7174 parg = arg0, marg = arg1;
7176 parg = arg1, marg = arg0;
7177 pcode = TREE_CODE (parg);
7178 parg0 = TREE_OPERAND (parg, 0);
7179 parg1 = TREE_OPERAND (parg, 1);
7183 if (TREE_CODE (parg0) == MULT_EXPR
7184 && TREE_CODE (parg1) != MULT_EXPR)
7185 return fold_build2 (pcode, type,
7186 fold_build2 (PLUS_EXPR, type,
7187 fold_convert (type, parg0),
7188 fold_convert (type, marg)),
7189 fold_convert (type, parg1));
7190 if (TREE_CODE (parg0) != MULT_EXPR
7191 && TREE_CODE (parg1) == MULT_EXPR)
7192 return fold_build2 (PLUS_EXPR, type,
7193 fold_convert (type, parg0),
7194 fold_build2 (pcode, type,
7195 fold_convert (type, marg),
7200 if (TREE_CODE (arg0) == MULT_EXPR && TREE_CODE (arg1) == MULT_EXPR)
7202 tree arg00, arg01, arg10, arg11;
7203 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
7205 /* (A * C) + (B * C) -> (A+B) * C.
7206 We are most concerned about the case where C is a constant,
7207 but other combinations show up during loop reduction. Since
7208 it is not difficult, try all four possibilities. */
7210 arg00 = TREE_OPERAND (arg0, 0);
7211 arg01 = TREE_OPERAND (arg0, 1);
7212 arg10 = TREE_OPERAND (arg1, 0);
7213 arg11 = TREE_OPERAND (arg1, 1);
7216 if (operand_equal_p (arg01, arg11, 0))
7217 same = arg01, alt0 = arg00, alt1 = arg10;
7218 else if (operand_equal_p (arg00, arg10, 0))
7219 same = arg00, alt0 = arg01, alt1 = arg11;
7220 else if (operand_equal_p (arg00, arg11, 0))
7221 same = arg00, alt0 = arg01, alt1 = arg10;
7222 else if (operand_equal_p (arg01, arg10, 0))
7223 same = arg01, alt0 = arg00, alt1 = arg11;
7225 /* No identical multiplicands; see if we can find a common
7226 power-of-two factor in non-power-of-two multiplies. This
7227 can help in multi-dimensional array access. */
7228 else if (TREE_CODE (arg01) == INTEGER_CST
7229 && TREE_CODE (arg11) == INTEGER_CST
7230 && TREE_INT_CST_HIGH (arg01) == 0
7231 && TREE_INT_CST_HIGH (arg11) == 0)
7233 HOST_WIDE_INT int01, int11, tmp;
7234 int01 = TREE_INT_CST_LOW (arg01);
7235 int11 = TREE_INT_CST_LOW (arg11);
7237 /* Move min of absolute values to int11. */
7238 if ((int01 >= 0 ? int01 : -int01)
7239 < (int11 >= 0 ? int11 : -int11))
7241 tmp = int01, int01 = int11, int11 = tmp;
7242 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7243 alt0 = arg01, arg01 = arg11, arg11 = alt0;
7246 if (exact_log2 (int11) > 0 && int01 % int11 == 0)
7248 alt0 = fold_build2 (MULT_EXPR, type, arg00,
7249 build_int_cst (NULL_TREE,
7257 return fold_build2 (MULT_EXPR, type,
7258 fold_build2 (PLUS_EXPR, type,
7259 fold_convert (type, alt0),
7260 fold_convert (type, alt1)),
7261 fold_convert (type, same));
7264 /* Try replacing &a[i1] + c * i2 with &a[i1 + i2], if c is step
7265 of the array. Loop optimizer sometimes produce this type of
7267 if (TREE_CODE (arg0) == ADDR_EXPR)
7269 tem = try_move_mult_to_index (PLUS_EXPR, arg0, arg1);
7271 return fold_convert (type, fold (tem));
7273 else if (TREE_CODE (arg1) == ADDR_EXPR)
7275 tem = try_move_mult_to_index (PLUS_EXPR, arg1, arg0);
7277 return fold_convert (type, fold (tem));
7282 /* See if ARG1 is zero and X + ARG1 reduces to X. */
7283 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
7284 return non_lvalue (fold_convert (type, arg0));
7286 /* Likewise if the operands are reversed. */
7287 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
7288 return non_lvalue (fold_convert (type, arg1));
7290 /* Convert X + -C into X - C. */
7291 if (TREE_CODE (arg1) == REAL_CST
7292 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
7294 tem = fold_negate_const (arg1, type);
7295 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
7296 return fold_build2 (MINUS_EXPR, type,
7297 fold_convert (type, arg0),
7298 fold_convert (type, tem));
7301 if (flag_unsafe_math_optimizations
7302 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
7303 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
7304 && (tem = distribute_real_division (code, type, arg0, arg1)))
7307 /* Convert x+x into x*2.0. */
7308 if (operand_equal_p (arg0, arg1, 0)
7309 && SCALAR_FLOAT_TYPE_P (type))
7310 return fold_build2 (MULT_EXPR, type, arg0,
7311 build_real (type, dconst2));
7313 /* Convert x*c+x into x*(c+1). */
7314 if (flag_unsafe_math_optimizations
7315 && TREE_CODE (arg0) == MULT_EXPR
7316 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
7317 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg0, 1))
7318 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
7322 c = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
7323 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
7324 return fold_build2 (MULT_EXPR, type, arg1,
7325 build_real (type, c));
7328 /* Convert x+x*c into x*(c+1). */
7329 if (flag_unsafe_math_optimizations
7330 && TREE_CODE (arg1) == MULT_EXPR
7331 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST
7332 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg1, 1))
7333 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
7337 c = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
7338 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
7339 return fold_build2 (MULT_EXPR, type, arg0,
7340 build_real (type, c));
7343 /* Convert x*c1+x*c2 into x*(c1+c2). */
7344 if (flag_unsafe_math_optimizations
7345 && TREE_CODE (arg0) == MULT_EXPR
7346 && TREE_CODE (arg1) == MULT_EXPR
7347 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
7348 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg0, 1))
7349 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST
7350 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg1, 1))
7351 && operand_equal_p (TREE_OPERAND (arg0, 0),
7352 TREE_OPERAND (arg1, 0), 0))
7354 REAL_VALUE_TYPE c1, c2;
7356 c1 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
7357 c2 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
7358 real_arithmetic (&c1, PLUS_EXPR, &c1, &c2);
7359 return fold_build2 (MULT_EXPR, type,
7360 TREE_OPERAND (arg0, 0),
7361 build_real (type, c1));
7363 /* Convert a + (b*c + d*e) into (a + b*c) + d*e. */
7364 if (flag_unsafe_math_optimizations
7365 && TREE_CODE (arg1) == PLUS_EXPR
7366 && TREE_CODE (arg0) != MULT_EXPR)
7368 tree tree10 = TREE_OPERAND (arg1, 0);
7369 tree tree11 = TREE_OPERAND (arg1, 1);
7370 if (TREE_CODE (tree11) == MULT_EXPR
7371 && TREE_CODE (tree10) == MULT_EXPR)
7374 tree0 = fold_build2 (PLUS_EXPR, type, arg0, tree10);
7375 return fold_build2 (PLUS_EXPR, type, tree0, tree11);
7378 /* Convert (b*c + d*e) + a into b*c + (d*e +a). */
7379 if (flag_unsafe_math_optimizations
7380 && TREE_CODE (arg0) == PLUS_EXPR
7381 && TREE_CODE (arg1) != MULT_EXPR)
7383 tree tree00 = TREE_OPERAND (arg0, 0);
7384 tree tree01 = TREE_OPERAND (arg0, 1);
7385 if (TREE_CODE (tree01) == MULT_EXPR
7386 && TREE_CODE (tree00) == MULT_EXPR)
7389 tree0 = fold_build2 (PLUS_EXPR, type, tree01, arg1);
7390 return fold_build2 (PLUS_EXPR, type, tree00, tree0);
7396 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
7397 is a rotate of A by C1 bits. */
7398 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
7399 is a rotate of A by B bits. */
7401 enum tree_code code0, code1;
7402 code0 = TREE_CODE (arg0);
7403 code1 = TREE_CODE (arg1);
7404 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
7405 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
7406 && operand_equal_p (TREE_OPERAND (arg0, 0),
7407 TREE_OPERAND (arg1, 0), 0)
7408 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
7410 tree tree01, tree11;
7411 enum tree_code code01, code11;
7413 tree01 = TREE_OPERAND (arg0, 1);
7414 tree11 = TREE_OPERAND (arg1, 1);
7415 STRIP_NOPS (tree01);
7416 STRIP_NOPS (tree11);
7417 code01 = TREE_CODE (tree01);
7418 code11 = TREE_CODE (tree11);
7419 if (code01 == INTEGER_CST
7420 && code11 == INTEGER_CST
7421 && TREE_INT_CST_HIGH (tree01) == 0
7422 && TREE_INT_CST_HIGH (tree11) == 0
7423 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
7424 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
7425 return build2 (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
7426 code0 == LSHIFT_EXPR ? tree01 : tree11);
7427 else if (code11 == MINUS_EXPR)
7429 tree tree110, tree111;
7430 tree110 = TREE_OPERAND (tree11, 0);
7431 tree111 = TREE_OPERAND (tree11, 1);
7432 STRIP_NOPS (tree110);
7433 STRIP_NOPS (tree111);
7434 if (TREE_CODE (tree110) == INTEGER_CST
7435 && 0 == compare_tree_int (tree110,
7437 (TREE_TYPE (TREE_OPERAND
7439 && operand_equal_p (tree01, tree111, 0))
7440 return build2 ((code0 == LSHIFT_EXPR
7443 type, TREE_OPERAND (arg0, 0), tree01);
7445 else if (code01 == MINUS_EXPR)
7447 tree tree010, tree011;
7448 tree010 = TREE_OPERAND (tree01, 0);
7449 tree011 = TREE_OPERAND (tree01, 1);
7450 STRIP_NOPS (tree010);
7451 STRIP_NOPS (tree011);
7452 if (TREE_CODE (tree010) == INTEGER_CST
7453 && 0 == compare_tree_int (tree010,
7455 (TREE_TYPE (TREE_OPERAND
7457 && operand_equal_p (tree11, tree011, 0))
7458 return build2 ((code0 != LSHIFT_EXPR
7461 type, TREE_OPERAND (arg0, 0), tree11);
7467 /* In most languages, can't associate operations on floats through
7468 parentheses. Rather than remember where the parentheses were, we
7469 don't associate floats at all, unless the user has specified
7470 -funsafe-math-optimizations. */
7473 && (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
7475 tree var0, con0, lit0, minus_lit0;
7476 tree var1, con1, lit1, minus_lit1;
7478 /* Split both trees into variables, constants, and literals. Then
7479 associate each group together, the constants with literals,
7480 then the result with variables. This increases the chances of
7481 literals being recombined later and of generating relocatable
7482 expressions for the sum of a constant and literal. */
7483 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
7484 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
7485 code == MINUS_EXPR);
7487 /* Only do something if we found more than two objects. Otherwise,
7488 nothing has changed and we risk infinite recursion. */
7489 if (2 < ((var0 != 0) + (var1 != 0)
7490 + (con0 != 0) + (con1 != 0)
7491 + (lit0 != 0) + (lit1 != 0)
7492 + (minus_lit0 != 0) + (minus_lit1 != 0)))
7494 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
7495 if (code == MINUS_EXPR)
7498 var0 = associate_trees (var0, var1, code, type);
7499 con0 = associate_trees (con0, con1, code, type);
7500 lit0 = associate_trees (lit0, lit1, code, type);
7501 minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type);
7503 /* Preserve the MINUS_EXPR if the negative part of the literal is
7504 greater than the positive part. Otherwise, the multiplicative
7505 folding code (i.e extract_muldiv) may be fooled in case
7506 unsigned constants are subtracted, like in the following
7507 example: ((X*2 + 4) - 8U)/2. */
7508 if (minus_lit0 && lit0)
7510 if (TREE_CODE (lit0) == INTEGER_CST
7511 && TREE_CODE (minus_lit0) == INTEGER_CST
7512 && tree_int_cst_lt (lit0, minus_lit0))
7514 minus_lit0 = associate_trees (minus_lit0, lit0,
7520 lit0 = associate_trees (lit0, minus_lit0,
7528 return fold_convert (type,
7529 associate_trees (var0, minus_lit0,
7533 con0 = associate_trees (con0, minus_lit0,
7535 return fold_convert (type,
7536 associate_trees (var0, con0,
7541 con0 = associate_trees (con0, lit0, code, type);
7542 return fold_convert (type, associate_trees (var0, con0,
7549 t1 = const_binop (code, arg0, arg1, 0);
7550 if (t1 != NULL_TREE)
7552 /* The return value should always have
7553 the same type as the original expression. */
7554 if (TREE_TYPE (t1) != type)
7555 t1 = fold_convert (type, t1);
7562 /* A - (-B) -> A + B */
7563 if (TREE_CODE (arg1) == NEGATE_EXPR)
7564 return fold_build2 (PLUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0));
7565 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
7566 if (TREE_CODE (arg0) == NEGATE_EXPR
7567 && (FLOAT_TYPE_P (type)
7568 || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv))
7569 && negate_expr_p (arg1)
7570 && reorder_operands_p (arg0, arg1))
7571 return fold_build2 (MINUS_EXPR, type, negate_expr (arg1),
7572 TREE_OPERAND (arg0, 0));
7573 /* Convert -A - 1 to ~A. */
7574 if (INTEGRAL_TYPE_P (type)
7575 && TREE_CODE (arg0) == NEGATE_EXPR
7576 && integer_onep (arg1))
7577 return fold_build1 (BIT_NOT_EXPR, type, TREE_OPERAND (arg0, 0));
7579 /* Convert -1 - A to ~A. */
7580 if (INTEGRAL_TYPE_P (type)
7581 && integer_all_onesp (arg0))
7582 return fold_build1 (BIT_NOT_EXPR, type, arg1);
7584 if (! FLOAT_TYPE_P (type))
7586 if (! wins && integer_zerop (arg0))
7587 return negate_expr (fold_convert (type, arg1));
7588 if (integer_zerop (arg1))
7589 return non_lvalue (fold_convert (type, arg0));
7591 /* Fold A - (A & B) into ~B & A. */
7592 if (!TREE_SIDE_EFFECTS (arg0)
7593 && TREE_CODE (arg1) == BIT_AND_EXPR)
7595 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
7596 return fold_build2 (BIT_AND_EXPR, type,
7597 fold_build1 (BIT_NOT_EXPR, type,
7598 TREE_OPERAND (arg1, 0)),
7600 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
7601 return fold_build2 (BIT_AND_EXPR, type,
7602 fold_build1 (BIT_NOT_EXPR, type,
7603 TREE_OPERAND (arg1, 1)),
7607 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
7608 any power of 2 minus 1. */
7609 if (TREE_CODE (arg0) == BIT_AND_EXPR
7610 && TREE_CODE (arg1) == BIT_AND_EXPR
7611 && operand_equal_p (TREE_OPERAND (arg0, 0),
7612 TREE_OPERAND (arg1, 0), 0))
7614 tree mask0 = TREE_OPERAND (arg0, 1);
7615 tree mask1 = TREE_OPERAND (arg1, 1);
7616 tree tem = fold_build1 (BIT_NOT_EXPR, type, mask0);
7618 if (operand_equal_p (tem, mask1, 0))
7620 tem = fold_build2 (BIT_XOR_EXPR, type,
7621 TREE_OPERAND (arg0, 0), mask1);
7622 return fold_build2 (MINUS_EXPR, type, tem, mask1);
7627 /* See if ARG1 is zero and X - ARG1 reduces to X. */
7628 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
7629 return non_lvalue (fold_convert (type, arg0));
7631 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
7632 ARG0 is zero and X + ARG0 reduces to X, since that would mean
7633 (-ARG1 + ARG0) reduces to -ARG1. */
7634 else if (!wins && fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
7635 return negate_expr (fold_convert (type, arg1));
7637 /* Fold &x - &x. This can happen from &x.foo - &x.
7638 This is unsafe for certain floats even in non-IEEE formats.
7639 In IEEE, it is unsafe because it does wrong for NaNs.
7640 Also note that operand_equal_p is always false if an operand
7643 if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
7644 && operand_equal_p (arg0, arg1, 0))
7645 return fold_convert (type, integer_zero_node);
7647 /* A - B -> A + (-B) if B is easily negatable. */
7648 if (!wins && negate_expr_p (arg1)
7649 && ((FLOAT_TYPE_P (type)
7650 /* Avoid this transformation if B is a positive REAL_CST. */
7651 && (TREE_CODE (arg1) != REAL_CST
7652 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
7653 || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv)))
7654 return fold_build2 (PLUS_EXPR, type,
7655 fold_convert (type, arg0),
7656 fold_convert (type, negate_expr (arg1)));
7658 /* Try folding difference of addresses. */
7662 if ((TREE_CODE (arg0) == ADDR_EXPR
7663 || TREE_CODE (arg1) == ADDR_EXPR)
7664 && ptr_difference_const (arg0, arg1, &diff))
7665 return build_int_cst_type (type, diff);
7668 /* Fold &a[i] - &a[j] to i-j. */
7669 if (TREE_CODE (arg0) == ADDR_EXPR
7670 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
7671 && TREE_CODE (arg1) == ADDR_EXPR
7672 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
7674 tree aref0 = TREE_OPERAND (arg0, 0);
7675 tree aref1 = TREE_OPERAND (arg1, 0);
7676 if (operand_equal_p (TREE_OPERAND (aref0, 0),
7677 TREE_OPERAND (aref1, 0), 0))
7679 tree op0 = fold_convert (type, TREE_OPERAND (aref0, 1));
7680 tree op1 = fold_convert (type, TREE_OPERAND (aref1, 1));
7681 tree esz = array_ref_element_size (aref0);
7682 tree diff = build2 (MINUS_EXPR, type, op0, op1);
7683 return fold_build2 (MULT_EXPR, type, diff,
7684 fold_convert (type, esz));
7689 /* Try replacing &a[i1] - c * i2 with &a[i1 - i2], if c is step
7690 of the array. Loop optimizer sometimes produce this type of
7692 if (TREE_CODE (arg0) == ADDR_EXPR)
7694 tem = try_move_mult_to_index (MINUS_EXPR, arg0, arg1);
7696 return fold_convert (type, fold (tem));
7699 if (flag_unsafe_math_optimizations
7700 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
7701 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
7702 && (tem = distribute_real_division (code, type, arg0, arg1)))
7705 if (TREE_CODE (arg0) == MULT_EXPR
7706 && TREE_CODE (arg1) == MULT_EXPR
7707 && (!FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
7709 /* (A * C) - (B * C) -> (A-B) * C. */
7710 if (operand_equal_p (TREE_OPERAND (arg0, 1),
7711 TREE_OPERAND (arg1, 1), 0))
7712 return fold_build2 (MULT_EXPR, type,
7713 fold_build2 (MINUS_EXPR, type,
7714 TREE_OPERAND (arg0, 0),
7715 TREE_OPERAND (arg1, 0)),
7716 TREE_OPERAND (arg0, 1));
7717 /* (A * C1) - (A * C2) -> A * (C1-C2). */
7718 if (operand_equal_p (TREE_OPERAND (arg0, 0),
7719 TREE_OPERAND (arg1, 0), 0))
7720 return fold_build2 (MULT_EXPR, type,
7721 TREE_OPERAND (arg0, 0),
7722 fold_build2 (MINUS_EXPR, type,
7723 TREE_OPERAND (arg0, 1),
7724 TREE_OPERAND (arg1, 1)));
7730 /* (-A) * (-B) -> A * B */
7731 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
7732 return fold_build2 (MULT_EXPR, type,
7733 TREE_OPERAND (arg0, 0),
7734 negate_expr (arg1));
7735 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
7736 return fold_build2 (MULT_EXPR, type,
7738 TREE_OPERAND (arg1, 0));
7740 if (! FLOAT_TYPE_P (type))
7742 if (integer_zerop (arg1))
7743 return omit_one_operand (type, arg1, arg0);
7744 if (integer_onep (arg1))
7745 return non_lvalue (fold_convert (type, arg0));
7746 /* Transform x * -1 into -x. */
7747 if (integer_all_onesp (arg1))
7748 return fold_convert (type, negate_expr (arg0));
7750 /* (a * (1 << b)) is (a << b) */
7751 if (TREE_CODE (arg1) == LSHIFT_EXPR
7752 && integer_onep (TREE_OPERAND (arg1, 0)))
7753 return fold_build2 (LSHIFT_EXPR, type, arg0,
7754 TREE_OPERAND (arg1, 1));
7755 if (TREE_CODE (arg0) == LSHIFT_EXPR
7756 && integer_onep (TREE_OPERAND (arg0, 0)))
7757 return fold_build2 (LSHIFT_EXPR, type, arg1,
7758 TREE_OPERAND (arg0, 1));
7760 if (TREE_CODE (arg1) == INTEGER_CST
7761 && 0 != (tem = extract_muldiv (op0,
7762 fold_convert (type, arg1),
7764 return fold_convert (type, tem);
7769 /* Maybe fold x * 0 to 0. The expressions aren't the same
7770 when x is NaN, since x * 0 is also NaN. Nor are they the
7771 same in modes with signed zeros, since multiplying a
7772 negative value by 0 gives -0, not +0. */
7773 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
7774 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
7775 && real_zerop (arg1))
7776 return omit_one_operand (type, arg1, arg0);
7777 /* In IEEE floating point, x*1 is not equivalent to x for snans. */
7778 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
7779 && real_onep (arg1))
7780 return non_lvalue (fold_convert (type, arg0));
7782 /* Transform x * -1.0 into -x. */
7783 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
7784 && real_minus_onep (arg1))
7785 return fold_convert (type, negate_expr (arg0));
7787 /* Convert (C1/X)*C2 into (C1*C2)/X. */
7788 if (flag_unsafe_math_optimizations
7789 && TREE_CODE (arg0) == RDIV_EXPR
7790 && TREE_CODE (arg1) == REAL_CST
7791 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
7793 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
7796 return fold_build2 (RDIV_EXPR, type, tem,
7797 TREE_OPERAND (arg0, 1));
7800 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
7801 if (operand_equal_p (arg0, arg1, 0))
7803 tree tem = fold_strip_sign_ops (arg0);
7804 if (tem != NULL_TREE)
7806 tem = fold_convert (type, tem);
7807 return fold_build2 (MULT_EXPR, type, tem, tem);
7811 if (flag_unsafe_math_optimizations)
7813 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
7814 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
7816 /* Optimizations of root(...)*root(...). */
7817 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
7819 tree rootfn, arg, arglist;
7820 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
7821 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
7823 /* Optimize sqrt(x)*sqrt(x) as x. */
7824 if (BUILTIN_SQRT_P (fcode0)
7825 && operand_equal_p (arg00, arg10, 0)
7826 && ! HONOR_SNANS (TYPE_MODE (type)))
7829 /* Optimize root(x)*root(y) as root(x*y). */
7830 rootfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7831 arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
7832 arglist = build_tree_list (NULL_TREE, arg);
7833 return build_function_call_expr (rootfn, arglist);
7836 /* Optimize expN(x)*expN(y) as expN(x+y). */
7837 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
7839 tree expfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7840 tree arg = fold_build2 (PLUS_EXPR, type,
7841 TREE_VALUE (TREE_OPERAND (arg0, 1)),
7842 TREE_VALUE (TREE_OPERAND (arg1, 1)));
7843 tree arglist = build_tree_list (NULL_TREE, arg);
7844 return build_function_call_expr (expfn, arglist);
7847 /* Optimizations of pow(...)*pow(...). */
7848 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
7849 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
7850 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
7852 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
7853 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
7855 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
7856 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
7859 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
7860 if (operand_equal_p (arg01, arg11, 0))
7862 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7863 tree arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
7864 tree arglist = tree_cons (NULL_TREE, arg,
7865 build_tree_list (NULL_TREE,
7867 return build_function_call_expr (powfn, arglist);
7870 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
7871 if (operand_equal_p (arg00, arg10, 0))
7873 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7874 tree arg = fold_build2 (PLUS_EXPR, type, arg01, arg11);
7875 tree arglist = tree_cons (NULL_TREE, arg00,
7876 build_tree_list (NULL_TREE,
7878 return build_function_call_expr (powfn, arglist);
7882 /* Optimize tan(x)*cos(x) as sin(x). */
7883 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
7884 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
7885 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
7886 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
7887 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
7888 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
7889 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
7890 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
7892 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
7894 if (sinfn != NULL_TREE)
7895 return build_function_call_expr (sinfn,
7896 TREE_OPERAND (arg0, 1));
7899 /* Optimize x*pow(x,c) as pow(x,c+1). */
7900 if (fcode1 == BUILT_IN_POW
7901 || fcode1 == BUILT_IN_POWF
7902 || fcode1 == BUILT_IN_POWL)
7904 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
7905 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
7907 if (TREE_CODE (arg11) == REAL_CST
7908 && ! TREE_CONSTANT_OVERFLOW (arg11)
7909 && operand_equal_p (arg0, arg10, 0))
7911 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
7915 c = TREE_REAL_CST (arg11);
7916 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
7917 arg = build_real (type, c);
7918 arglist = build_tree_list (NULL_TREE, arg);
7919 arglist = tree_cons (NULL_TREE, arg0, arglist);
7920 return build_function_call_expr (powfn, arglist);
7924 /* Optimize pow(x,c)*x as pow(x,c+1). */
7925 if (fcode0 == BUILT_IN_POW
7926 || fcode0 == BUILT_IN_POWF
7927 || fcode0 == BUILT_IN_POWL)
7929 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
7930 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
7932 if (TREE_CODE (arg01) == REAL_CST
7933 && ! TREE_CONSTANT_OVERFLOW (arg01)
7934 && operand_equal_p (arg1, arg00, 0))
7936 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7940 c = TREE_REAL_CST (arg01);
7941 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
7942 arg = build_real (type, c);
7943 arglist = build_tree_list (NULL_TREE, arg);
7944 arglist = tree_cons (NULL_TREE, arg1, arglist);
7945 return build_function_call_expr (powfn, arglist);
7949 /* Optimize x*x as pow(x,2.0), which is expanded as x*x. */
7951 && operand_equal_p (arg0, arg1, 0))
7953 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
7957 tree arg = build_real (type, dconst2);
7958 tree arglist = build_tree_list (NULL_TREE, arg);
7959 arglist = tree_cons (NULL_TREE, arg0, arglist);
7960 return build_function_call_expr (powfn, arglist);
7969 if (integer_all_onesp (arg1))
7970 return omit_one_operand (type, arg1, arg0);
7971 if (integer_zerop (arg1))
7972 return non_lvalue (fold_convert (type, arg0));
7973 if (operand_equal_p (arg0, arg1, 0))
7974 return non_lvalue (fold_convert (type, arg0));
7977 if (TREE_CODE (arg0) == BIT_NOT_EXPR
7978 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
7980 t1 = build_int_cst (type, -1);
7981 t1 = force_fit_type (t1, 0, false, false);
7982 return omit_one_operand (type, t1, arg1);
7986 if (TREE_CODE (arg1) == BIT_NOT_EXPR
7987 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
7989 t1 = build_int_cst (type, -1);
7990 t1 = force_fit_type (t1, 0, false, false);
7991 return omit_one_operand (type, t1, arg0);
7994 t1 = distribute_bit_expr (code, type, arg0, arg1);
7995 if (t1 != NULL_TREE)
7998 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
8000 This results in more efficient code for machines without a NAND
8001 instruction. Combine will canonicalize to the first form
8002 which will allow use of NAND instructions provided by the
8003 backend if they exist. */
8004 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8005 && TREE_CODE (arg1) == BIT_NOT_EXPR)
8007 return fold_build1 (BIT_NOT_EXPR, type,
8008 build2 (BIT_AND_EXPR, type,
8009 TREE_OPERAND (arg0, 0),
8010 TREE_OPERAND (arg1, 0)));
8013 /* See if this can be simplified into a rotate first. If that
8014 is unsuccessful continue in the association code. */
8018 if (integer_zerop (arg1))
8019 return non_lvalue (fold_convert (type, arg0));
8020 if (integer_all_onesp (arg1))
8021 return fold_build1 (BIT_NOT_EXPR, type, arg0);
8022 if (operand_equal_p (arg0, arg1, 0))
8023 return omit_one_operand (type, integer_zero_node, arg0);
8026 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8027 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8029 t1 = build_int_cst (type, -1);
8030 t1 = force_fit_type (t1, 0, false, false);
8031 return omit_one_operand (type, t1, arg1);
8035 if (TREE_CODE (arg1) == BIT_NOT_EXPR
8036 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8038 t1 = build_int_cst (type, -1);
8039 t1 = force_fit_type (t1, 0, false, false);
8040 return omit_one_operand (type, t1, arg0);
8043 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
8044 with a constant, and the two constants have no bits in common,
8045 we should treat this as a BIT_IOR_EXPR since this may produce more
8047 if (TREE_CODE (arg0) == BIT_AND_EXPR
8048 && TREE_CODE (arg1) == BIT_AND_EXPR
8049 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8050 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
8051 && integer_zerop (const_binop (BIT_AND_EXPR,
8052 TREE_OPERAND (arg0, 1),
8053 TREE_OPERAND (arg1, 1), 0)))
8055 code = BIT_IOR_EXPR;
8059 /* Convert ~X ^ ~Y to X ^ Y. */
8060 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8061 && TREE_CODE (arg1) == BIT_NOT_EXPR)
8062 return fold_build2 (code, type,
8063 fold_convert (type, TREE_OPERAND (arg0, 0)),
8064 fold_convert (type, TREE_OPERAND (arg1, 0)));
8066 /* See if this can be simplified into a rotate first. If that
8067 is unsuccessful continue in the association code. */
8071 if (integer_all_onesp (arg1))
8072 return non_lvalue (fold_convert (type, arg0));
8073 if (integer_zerop (arg1))
8074 return omit_one_operand (type, arg1, arg0);
8075 if (operand_equal_p (arg0, arg1, 0))
8076 return non_lvalue (fold_convert (type, arg0));
8078 /* ~X & X is always zero. */
8079 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8080 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8081 return omit_one_operand (type, integer_zero_node, arg1);
8083 /* X & ~X is always zero. */
8084 if (TREE_CODE (arg1) == BIT_NOT_EXPR
8085 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8086 return omit_one_operand (type, integer_zero_node, arg0);
8088 t1 = distribute_bit_expr (code, type, arg0, arg1);
8089 if (t1 != NULL_TREE)
8091 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
8092 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
8093 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
8096 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
8098 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
8099 && (~TREE_INT_CST_LOW (arg1)
8100 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
8101 return fold_convert (type, TREE_OPERAND (arg0, 0));
8104 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
8106 This results in more efficient code for machines without a NOR
8107 instruction. Combine will canonicalize to the first form
8108 which will allow use of NOR instructions provided by the
8109 backend if they exist. */
8110 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8111 && TREE_CODE (arg1) == BIT_NOT_EXPR)
8113 return fold_build1 (BIT_NOT_EXPR, type,
8114 build2 (BIT_IOR_EXPR, type,
8115 TREE_OPERAND (arg0, 0),
8116 TREE_OPERAND (arg1, 0)));
8122 /* Don't touch a floating-point divide by zero unless the mode
8123 of the constant can represent infinity. */
8124 if (TREE_CODE (arg1) == REAL_CST
8125 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
8126 && real_zerop (arg1))
8129 /* (-A) / (-B) -> A / B */
8130 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
8131 return fold_build2 (RDIV_EXPR, type,
8132 TREE_OPERAND (arg0, 0),
8133 negate_expr (arg1));
8134 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
8135 return fold_build2 (RDIV_EXPR, type,
8137 TREE_OPERAND (arg1, 0));
8139 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
8140 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
8141 && real_onep (arg1))
8142 return non_lvalue (fold_convert (type, arg0));
8144 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
8145 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
8146 && real_minus_onep (arg1))
8147 return non_lvalue (fold_convert (type, negate_expr (arg0)));
8149 /* If ARG1 is a constant, we can convert this to a multiply by the
8150 reciprocal. This does not have the same rounding properties,
8151 so only do this if -funsafe-math-optimizations. We can actually
8152 always safely do it if ARG1 is a power of two, but it's hard to
8153 tell if it is or not in a portable manner. */
8154 if (TREE_CODE (arg1) == REAL_CST)
8156 if (flag_unsafe_math_optimizations
8157 && 0 != (tem = const_binop (code, build_real (type, dconst1),
8159 return fold_build2 (MULT_EXPR, type, arg0, tem);
8160 /* Find the reciprocal if optimizing and the result is exact. */
8164 r = TREE_REAL_CST (arg1);
8165 if (exact_real_inverse (TYPE_MODE(TREE_TYPE(arg0)), &r))
8167 tem = build_real (type, r);
8168 return fold_build2 (MULT_EXPR, type,
8169 fold_convert (type, arg0), tem);
8173 /* Convert A/B/C to A/(B*C). */
8174 if (flag_unsafe_math_optimizations
8175 && TREE_CODE (arg0) == RDIV_EXPR)
8176 return fold_build2 (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
8177 fold_build2 (MULT_EXPR, type,
8178 TREE_OPERAND (arg0, 1), arg1));
8180 /* Convert A/(B/C) to (A/B)*C. */
8181 if (flag_unsafe_math_optimizations
8182 && TREE_CODE (arg1) == RDIV_EXPR)
8183 return fold_build2 (MULT_EXPR, type,
8184 fold_build2 (RDIV_EXPR, type, arg0,
8185 TREE_OPERAND (arg1, 0)),
8186 TREE_OPERAND (arg1, 1));
8188 /* Convert C1/(X*C2) into (C1/C2)/X. */
8189 if (flag_unsafe_math_optimizations
8190 && TREE_CODE (arg1) == MULT_EXPR
8191 && TREE_CODE (arg0) == REAL_CST
8192 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
8194 tree tem = const_binop (RDIV_EXPR, arg0,
8195 TREE_OPERAND (arg1, 1), 0);
8197 return fold_build2 (RDIV_EXPR, type, tem,
8198 TREE_OPERAND (arg1, 0));
8201 if (flag_unsafe_math_optimizations)
8203 enum built_in_function fcode = builtin_mathfn_code (arg1);
8204 /* Optimize x/expN(y) into x*expN(-y). */
8205 if (BUILTIN_EXPONENT_P (fcode))
8207 tree expfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
8208 tree arg = negate_expr (TREE_VALUE (TREE_OPERAND (arg1, 1)));
8209 tree arglist = build_tree_list (NULL_TREE,
8210 fold_convert (type, arg));
8211 arg1 = build_function_call_expr (expfn, arglist);
8212 return fold_build2 (MULT_EXPR, type, arg0, arg1);
8215 /* Optimize x/pow(y,z) into x*pow(y,-z). */
8216 if (fcode == BUILT_IN_POW
8217 || fcode == BUILT_IN_POWF
8218 || fcode == BUILT_IN_POWL)
8220 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
8221 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
8222 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1, 1)));
8223 tree neg11 = fold_convert (type, negate_expr (arg11));
8224 tree arglist = tree_cons(NULL_TREE, arg10,
8225 build_tree_list (NULL_TREE, neg11));
8226 arg1 = build_function_call_expr (powfn, arglist);
8227 return fold_build2 (MULT_EXPR, type, arg0, arg1);
8231 if (flag_unsafe_math_optimizations)
8233 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
8234 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
8236 /* Optimize sin(x)/cos(x) as tan(x). */
8237 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
8238 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
8239 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
8240 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
8241 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
8243 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
8245 if (tanfn != NULL_TREE)
8246 return build_function_call_expr (tanfn,
8247 TREE_OPERAND (arg0, 1));
8250 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
8251 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
8252 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
8253 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
8254 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
8255 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
8257 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
8259 if (tanfn != NULL_TREE)
8261 tree tmp = TREE_OPERAND (arg0, 1);
8262 tmp = build_function_call_expr (tanfn, tmp);
8263 return fold_build2 (RDIV_EXPR, type,
8264 build_real (type, dconst1), tmp);
8268 /* Optimize pow(x,c)/x as pow(x,c-1). */
8269 if (fcode0 == BUILT_IN_POW
8270 || fcode0 == BUILT_IN_POWF
8271 || fcode0 == BUILT_IN_POWL)
8273 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
8274 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0, 1)));
8275 if (TREE_CODE (arg01) == REAL_CST
8276 && ! TREE_CONSTANT_OVERFLOW (arg01)
8277 && operand_equal_p (arg1, arg00, 0))
8279 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
8283 c = TREE_REAL_CST (arg01);
8284 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
8285 arg = build_real (type, c);
8286 arglist = build_tree_list (NULL_TREE, arg);
8287 arglist = tree_cons (NULL_TREE, arg1, arglist);
8288 return build_function_call_expr (powfn, arglist);
8294 case TRUNC_DIV_EXPR:
8295 case ROUND_DIV_EXPR:
8296 case FLOOR_DIV_EXPR:
8298 case EXACT_DIV_EXPR:
8299 if (integer_onep (arg1))
8300 return non_lvalue (fold_convert (type, arg0));
8301 if (integer_zerop (arg1))
8304 if (!TYPE_UNSIGNED (type)
8305 && TREE_CODE (arg1) == INTEGER_CST
8306 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
8307 && TREE_INT_CST_HIGH (arg1) == -1)
8308 return fold_convert (type, negate_expr (arg0));
8310 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
8311 operation, EXACT_DIV_EXPR.
8313 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
8314 At one time others generated faster code, it's not clear if they do
8315 after the last round to changes to the DIV code in expmed.c. */
8316 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
8317 && multiple_of_p (type, arg0, arg1))
8318 return fold_build2 (EXACT_DIV_EXPR, type, arg0, arg1);
8320 if (TREE_CODE (arg1) == INTEGER_CST
8321 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE)))
8322 return fold_convert (type, tem);
8327 case FLOOR_MOD_EXPR:
8328 case ROUND_MOD_EXPR:
8329 case TRUNC_MOD_EXPR:
8330 /* X % 1 is always zero, but be sure to preserve any side
8332 if (integer_onep (arg1))
8333 return omit_one_operand (type, integer_zero_node, arg0);
8335 /* X % 0, return X % 0 unchanged so that we can get the
8336 proper warnings and errors. */
8337 if (integer_zerop (arg1))
8340 /* 0 % X is always zero, but be sure to preserve any side
8341 effects in X. Place this after checking for X == 0. */
8342 if (integer_zerop (arg0))
8343 return omit_one_operand (type, integer_zero_node, arg1);
8345 /* X % -1 is zero. */
8346 if (!TYPE_UNSIGNED (type)
8347 && TREE_CODE (arg1) == INTEGER_CST
8348 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
8349 && TREE_INT_CST_HIGH (arg1) == -1)
8350 return omit_one_operand (type, integer_zero_node, arg0);
8352 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
8353 i.e. "X % C" into "X & C2", if X and C are positive. */
8354 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
8355 && (TYPE_UNSIGNED (type) || tree_expr_nonnegative_p (arg0))
8356 && integer_pow2p (arg1) && tree_int_cst_sgn (arg1) >= 0)
8358 unsigned HOST_WIDE_INT high, low;
8362 l = tree_log2 (arg1);
8363 if (l >= HOST_BITS_PER_WIDE_INT)
8365 high = ((unsigned HOST_WIDE_INT) 1
8366 << (l - HOST_BITS_PER_WIDE_INT)) - 1;
8372 low = ((unsigned HOST_WIDE_INT) 1 << l) - 1;
8375 mask = build_int_cst_wide (type, low, high);
8376 return fold_build2 (BIT_AND_EXPR, type,
8377 fold_convert (type, arg0), mask);
8380 /* X % -C is the same as X % C. */
8381 if (code == TRUNC_MOD_EXPR
8382 && !TYPE_UNSIGNED (type)
8383 && TREE_CODE (arg1) == INTEGER_CST
8384 && !TREE_CONSTANT_OVERFLOW (arg1)
8385 && TREE_INT_CST_HIGH (arg1) < 0
8387 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
8388 && !sign_bit_p (arg1, arg1))
8389 return fold_build2 (code, type, fold_convert (type, arg0),
8390 fold_convert (type, negate_expr (arg1)));
8392 /* X % -Y is the same as X % Y. */
8393 if (code == TRUNC_MOD_EXPR
8394 && !TYPE_UNSIGNED (type)
8395 && TREE_CODE (arg1) == NEGATE_EXPR
8397 return fold_build2 (code, type, fold_convert (type, arg0),
8398 fold_convert (type, TREE_OPERAND (arg1, 0)));
8400 if (TREE_CODE (arg1) == INTEGER_CST
8401 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE)))
8402 return fold_convert (type, tem);
8408 if (integer_all_onesp (arg0))
8409 return omit_one_operand (type, arg0, arg1);
8413 /* Optimize -1 >> x for arithmetic right shifts. */
8414 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type))
8415 return omit_one_operand (type, arg0, arg1);
8416 /* ... fall through ... */
8420 if (integer_zerop (arg1))
8421 return non_lvalue (fold_convert (type, arg0));
8422 if (integer_zerop (arg0))
8423 return omit_one_operand (type, arg0, arg1);
8425 /* Since negative shift count is not well-defined,
8426 don't try to compute it in the compiler. */
8427 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
8430 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
8431 if (TREE_CODE (arg0) == code && host_integerp (arg1, false)
8432 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
8433 && host_integerp (TREE_OPERAND (arg0, 1), false)
8434 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
8436 HOST_WIDE_INT low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
8437 + TREE_INT_CST_LOW (arg1));
8439 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
8440 being well defined. */
8441 if (low >= TYPE_PRECISION (type))
8443 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
8444 low = low % TYPE_PRECISION (type);
8445 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
8446 return build_int_cst (type, 0);
8448 low = TYPE_PRECISION (type) - 1;
8451 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
8452 build_int_cst (type, low));
8455 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
8456 into x & ((unsigned)-1 >> c) for unsigned types. */
8457 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
8458 || (TYPE_UNSIGNED (type)
8459 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
8460 && host_integerp (arg1, false)
8461 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
8462 && host_integerp (TREE_OPERAND (arg0, 1), false)
8463 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
8465 HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
8466 HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
8472 arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));
8474 lshift = build_int_cst (type, -1);
8475 lshift = int_const_binop (code, lshift, arg1, 0);
8477 return fold_build2 (BIT_AND_EXPR, type, arg00, lshift);
8481 /* Rewrite an LROTATE_EXPR by a constant into an
8482 RROTATE_EXPR by a new constant. */
8483 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
8485 tree tem = build_int_cst (NULL_TREE,
8486 GET_MODE_BITSIZE (TYPE_MODE (type)));
8487 tem = fold_convert (TREE_TYPE (arg1), tem);
8488 tem = const_binop (MINUS_EXPR, tem, arg1, 0);
8489 return fold_build2 (RROTATE_EXPR, type, arg0, tem);
8492 /* If we have a rotate of a bit operation with the rotate count and
8493 the second operand of the bit operation both constant,
8494 permute the two operations. */
8495 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
8496 && (TREE_CODE (arg0) == BIT_AND_EXPR
8497 || TREE_CODE (arg0) == BIT_IOR_EXPR
8498 || TREE_CODE (arg0) == BIT_XOR_EXPR)
8499 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8500 return fold_build2 (TREE_CODE (arg0), type,
8501 fold_build2 (code, type,
8502 TREE_OPERAND (arg0, 0), arg1),
8503 fold_build2 (code, type,
8504 TREE_OPERAND (arg0, 1), arg1));
8506 /* Two consecutive rotates adding up to the width of the mode can
8508 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
8509 && TREE_CODE (arg0) == RROTATE_EXPR
8510 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8511 && TREE_INT_CST_HIGH (arg1) == 0
8512 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
8513 && ((TREE_INT_CST_LOW (arg1)
8514 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
8515 == (unsigned int) GET_MODE_BITSIZE (TYPE_MODE (type))))
8516 return TREE_OPERAND (arg0, 0);
8521 if (operand_equal_p (arg0, arg1, 0))
8522 return omit_one_operand (type, arg0, arg1);
8523 if (INTEGRAL_TYPE_P (type)
8524 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
8525 return omit_one_operand (type, arg1, arg0);
8529 if (operand_equal_p (arg0, arg1, 0))
8530 return omit_one_operand (type, arg0, arg1);
8531 if (INTEGRAL_TYPE_P (type)
8532 && TYPE_MAX_VALUE (type)
8533 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
8534 return omit_one_operand (type, arg1, arg0);
8537 case TRUTH_ANDIF_EXPR:
8538 /* Note that the operands of this must be ints
8539 and their values must be 0 or 1.
8540 ("true" is a fixed value perhaps depending on the language.) */
8541 /* If first arg is constant zero, return it. */
8542 if (integer_zerop (arg0))
8543 return fold_convert (type, arg0);
8544 case TRUTH_AND_EXPR:
8545 /* If either arg is constant true, drop it. */
8546 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
8547 return non_lvalue (fold_convert (type, arg1));
8548 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
8549 /* Preserve sequence points. */
8550 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
8551 return non_lvalue (fold_convert (type, arg0));
8552 /* If second arg is constant zero, result is zero, but first arg
8553 must be evaluated. */
8554 if (integer_zerop (arg1))
8555 return omit_one_operand (type, arg1, arg0);
8556 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
8557 case will be handled here. */
8558 if (integer_zerop (arg0))
8559 return omit_one_operand (type, arg0, arg1);
8561 /* !X && X is always false. */
8562 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
8563 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8564 return omit_one_operand (type, integer_zero_node, arg1);
8565 /* X && !X is always false. */
8566 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
8567 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8568 return omit_one_operand (type, integer_zero_node, arg0);
8570 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
8571 means A >= Y && A != MAX, but in this case we know that
8574 if (!TREE_SIDE_EFFECTS (arg0)
8575 && !TREE_SIDE_EFFECTS (arg1))
8577 tem = fold_to_nonsharp_ineq_using_bound (arg0, arg1);
8578 if (tem && !operand_equal_p (tem, arg0, 0))
8579 return fold_build2 (code, type, tem, arg1);
8581 tem = fold_to_nonsharp_ineq_using_bound (arg1, arg0);
8582 if (tem && !operand_equal_p (tem, arg1, 0))
8583 return fold_build2 (code, type, arg0, tem);
8587 /* We only do these simplifications if we are optimizing. */
8591 /* Check for things like (A || B) && (A || C). We can convert this
8592 to A || (B && C). Note that either operator can be any of the four
8593 truth and/or operations and the transformation will still be
8594 valid. Also note that we only care about order for the
8595 ANDIF and ORIF operators. If B contains side effects, this
8596 might change the truth-value of A. */
8597 if (TREE_CODE (arg0) == TREE_CODE (arg1)
8598 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
8599 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
8600 || TREE_CODE (arg0) == TRUTH_AND_EXPR
8601 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
8602 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
8604 tree a00 = TREE_OPERAND (arg0, 0);
8605 tree a01 = TREE_OPERAND (arg0, 1);
8606 tree a10 = TREE_OPERAND (arg1, 0);
8607 tree a11 = TREE_OPERAND (arg1, 1);
8608 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
8609 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
8610 && (code == TRUTH_AND_EXPR
8611 || code == TRUTH_OR_EXPR));
8613 if (operand_equal_p (a00, a10, 0))
8614 return fold_build2 (TREE_CODE (arg0), type, a00,
8615 fold_build2 (code, type, a01, a11));
8616 else if (commutative && operand_equal_p (a00, a11, 0))
8617 return fold_build2 (TREE_CODE (arg0), type, a00,
8618 fold_build2 (code, type, a01, a10));
8619 else if (commutative && operand_equal_p (a01, a10, 0))
8620 return fold_build2 (TREE_CODE (arg0), type, a01,
8621 fold_build2 (code, type, a00, a11));
8623 /* This case if tricky because we must either have commutative
8624 operators or else A10 must not have side-effects. */
8626 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
8627 && operand_equal_p (a01, a11, 0))
8628 return fold_build2 (TREE_CODE (arg0), type,
8629 fold_build2 (code, type, a00, a10),
8633 /* See if we can build a range comparison. */
8634 if (0 != (tem = fold_range_test (code, type, op0, op1)))
8637 /* Check for the possibility of merging component references. If our
8638 lhs is another similar operation, try to merge its rhs with our
8639 rhs. Then try to merge our lhs and rhs. */
8640 if (TREE_CODE (arg0) == code
8641 && 0 != (tem = fold_truthop (code, type,
8642 TREE_OPERAND (arg0, 1), arg1)))
8643 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
8645 if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
8650 case TRUTH_ORIF_EXPR:
8651 /* Note that the operands of this must be ints
8652 and their values must be 0 or true.
8653 ("true" is a fixed value perhaps depending on the language.) */
8654 /* If first arg is constant true, return it. */
8655 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
8656 return fold_convert (type, arg0);
8658 /* If either arg is constant zero, drop it. */
8659 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
8660 return non_lvalue (fold_convert (type, arg1));
8661 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
8662 /* Preserve sequence points. */
8663 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
8664 return non_lvalue (fold_convert (type, arg0));
8665 /* If second arg is constant true, result is true, but we must
8666 evaluate first arg. */
8667 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
8668 return omit_one_operand (type, arg1, arg0);
8669 /* Likewise for first arg, but note this only occurs here for
8671 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
8672 return omit_one_operand (type, arg0, arg1);
8674 /* !X || X is always true. */
8675 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
8676 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8677 return omit_one_operand (type, integer_one_node, arg1);
8678 /* X || !X is always true. */
8679 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
8680 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8681 return omit_one_operand (type, integer_one_node, arg0);
8685 case TRUTH_XOR_EXPR:
8686 /* If the second arg is constant zero, drop it. */
8687 if (integer_zerop (arg1))
8688 return non_lvalue (fold_convert (type, arg0));
8689 /* If the second arg is constant true, this is a logical inversion. */
8690 if (integer_onep (arg1))
8692 /* Only call invert_truthvalue if operand is a truth value. */
8693 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
8694 tem = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg0), arg0);
8696 tem = invert_truthvalue (arg0);
8697 return non_lvalue (fold_convert (type, tem));
8699 /* Identical arguments cancel to zero. */
8700 if (operand_equal_p (arg0, arg1, 0))
8701 return omit_one_operand (type, integer_zero_node, arg0);
8703 /* !X ^ X is always true. */
8704 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
8705 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8706 return omit_one_operand (type, integer_one_node, arg1);
8708 /* X ^ !X is always true. */
8709 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
8710 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8711 return omit_one_operand (type, integer_one_node, arg0);
8721 /* If one arg is a real or integer constant, put it last. */
8722 if (tree_swap_operands_p (arg0, arg1, true))
8723 return fold_build2 (swap_tree_comparison (code), type, op1, op0);
8725 /* bool_var != 0 becomes bool_var. */
8726 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
8728 return non_lvalue (fold_convert (type, arg0));
8730 /* bool_var == 1 becomes bool_var. */
8731 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
8733 return non_lvalue (fold_convert (type, arg0));
8735 /* If this is an equality comparison of the address of a non-weak
8736 object against zero, then we know the result. */
8737 if ((code == EQ_EXPR || code == NE_EXPR)
8738 && TREE_CODE (arg0) == ADDR_EXPR
8739 && DECL_P (TREE_OPERAND (arg0, 0))
8740 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
8741 && integer_zerop (arg1))
8742 return constant_boolean_node (code != EQ_EXPR, type);
8744 /* If this is an equality comparison of the address of two non-weak,
8745 unaliased symbols neither of which are extern (since we do not
8746 have access to attributes for externs), then we know the result. */
8747 if ((code == EQ_EXPR || code == NE_EXPR)
8748 && TREE_CODE (arg0) == ADDR_EXPR
8749 && DECL_P (TREE_OPERAND (arg0, 0))
8750 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
8751 && ! lookup_attribute ("alias",
8752 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
8753 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
8754 && TREE_CODE (arg1) == ADDR_EXPR
8755 && DECL_P (TREE_OPERAND (arg1, 0))
8756 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
8757 && ! lookup_attribute ("alias",
8758 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
8759 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
8760 return constant_boolean_node (operand_equal_p (arg0, arg1, 0)
8761 ? code == EQ_EXPR : code != EQ_EXPR,
8764 /* If this is a comparison of two exprs that look like an
8765 ARRAY_REF of the same object, then we can fold this to a
8766 comparison of the two offsets. */
8767 if (TREE_CODE_CLASS (code) == tcc_comparison)
8769 tree base0, offset0, base1, offset1;
8771 if (extract_array_ref (arg0, &base0, &offset0)
8772 && extract_array_ref (arg1, &base1, &offset1)
8773 && operand_equal_p (base0, base1, 0))
8775 if (TYPE_SIZE (TREE_TYPE (TREE_TYPE (base0)))
8776 && integer_zerop (TYPE_SIZE (TREE_TYPE (TREE_TYPE (base0)))))
8777 offset0 = NULL_TREE;
8778 if (TYPE_SIZE (TREE_TYPE (TREE_TYPE (base1)))
8779 && integer_zerop (TYPE_SIZE (TREE_TYPE (TREE_TYPE (base1)))))
8780 offset1 = NULL_TREE;
8781 if (offset0 == NULL_TREE
8782 && offset1 == NULL_TREE)
8784 offset0 = integer_zero_node;
8785 offset1 = integer_zero_node;
8787 else if (offset0 == NULL_TREE)
8788 offset0 = build_int_cst (TREE_TYPE (offset1), 0);
8789 else if (offset1 == NULL_TREE)
8790 offset1 = build_int_cst (TREE_TYPE (offset0), 0);
8792 if (TREE_TYPE (offset0) == TREE_TYPE (offset1))
8793 return fold_build2 (code, type, offset0, offset1);
8797 /* Transform comparisons of the form X +- C CMP X. */
8798 if ((code != EQ_EXPR && code != NE_EXPR)
8799 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8800 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
8801 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
8802 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
8803 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8804 && !TYPE_UNSIGNED (TREE_TYPE (arg1))
8805 && !(flag_wrapv || flag_trapv))))
8807 tree arg01 = TREE_OPERAND (arg0, 1);
8808 enum tree_code code0 = TREE_CODE (arg0);
8811 if (TREE_CODE (arg01) == REAL_CST)
8812 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
8814 is_positive = tree_int_cst_sgn (arg01);
8816 /* (X - c) > X becomes false. */
8818 && ((code0 == MINUS_EXPR && is_positive >= 0)
8819 || (code0 == PLUS_EXPR && is_positive <= 0)))
8820 return constant_boolean_node (0, type);
8822 /* Likewise (X + c) < X becomes false. */
8824 && ((code0 == PLUS_EXPR && is_positive >= 0)
8825 || (code0 == MINUS_EXPR && is_positive <= 0)))
8826 return constant_boolean_node (0, type);
8828 /* Convert (X - c) <= X to true. */
8829 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
8831 && ((code0 == MINUS_EXPR && is_positive >= 0)
8832 || (code0 == PLUS_EXPR && is_positive <= 0)))
8833 return constant_boolean_node (1, type);
8835 /* Convert (X + c) >= X to true. */
8836 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
8838 && ((code0 == PLUS_EXPR && is_positive >= 0)
8839 || (code0 == MINUS_EXPR && is_positive <= 0)))
8840 return constant_boolean_node (1, type);
8842 if (TREE_CODE (arg01) == INTEGER_CST)
8844 /* Convert X + c > X and X - c < X to true for integers. */
8846 && ((code0 == PLUS_EXPR && is_positive > 0)
8847 || (code0 == MINUS_EXPR && is_positive < 0)))
8848 return constant_boolean_node (1, type);
8851 && ((code0 == MINUS_EXPR && is_positive > 0)
8852 || (code0 == PLUS_EXPR && is_positive < 0)))
8853 return constant_boolean_node (1, type);
8855 /* Convert X + c <= X and X - c >= X to false for integers. */
8857 && ((code0 == PLUS_EXPR && is_positive > 0)
8858 || (code0 == MINUS_EXPR && is_positive < 0)))
8859 return constant_boolean_node (0, type);
8862 && ((code0 == MINUS_EXPR && is_positive > 0)
8863 || (code0 == PLUS_EXPR && is_positive < 0)))
8864 return constant_boolean_node (0, type);
8868 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1. */
8869 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8870 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8871 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8872 && !TYPE_UNSIGNED (TREE_TYPE (arg1))
8873 && !(flag_wrapv || flag_trapv))
8874 && (TREE_CODE (arg1) == INTEGER_CST
8875 && !TREE_OVERFLOW (arg1)))
8877 tree const1 = TREE_OPERAND (arg0, 1);
8879 tree variable = TREE_OPERAND (arg0, 0);
8882 lhs_add = TREE_CODE (arg0) != PLUS_EXPR;
8884 lhs = fold_build2 (lhs_add ? PLUS_EXPR : MINUS_EXPR,
8885 TREE_TYPE (arg1), const2, const1);
8886 if (TREE_CODE (lhs) == TREE_CODE (arg1)
8887 && (TREE_CODE (lhs) != INTEGER_CST
8888 || !TREE_OVERFLOW (lhs)))
8889 return fold_build2 (code, type, variable, lhs);
8892 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
8894 tree targ0 = strip_float_extensions (arg0);
8895 tree targ1 = strip_float_extensions (arg1);
8896 tree newtype = TREE_TYPE (targ0);
8898 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
8899 newtype = TREE_TYPE (targ1);
8901 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
8902 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
8903 return fold_build2 (code, type, fold_convert (newtype, targ0),
8904 fold_convert (newtype, targ1));
8906 /* (-a) CMP (-b) -> b CMP a */
8907 if (TREE_CODE (arg0) == NEGATE_EXPR
8908 && TREE_CODE (arg1) == NEGATE_EXPR)
8909 return fold_build2 (code, type, TREE_OPERAND (arg1, 0),
8910 TREE_OPERAND (arg0, 0));
8912 if (TREE_CODE (arg1) == REAL_CST)
8914 REAL_VALUE_TYPE cst;
8915 cst = TREE_REAL_CST (arg1);
8917 /* (-a) CMP CST -> a swap(CMP) (-CST) */
8918 if (TREE_CODE (arg0) == NEGATE_EXPR)
8920 fold_build2 (swap_tree_comparison (code), type,
8921 TREE_OPERAND (arg0, 0),
8922 build_real (TREE_TYPE (arg1),
8923 REAL_VALUE_NEGATE (cst)));
8925 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
8926 /* a CMP (-0) -> a CMP 0 */
8927 if (REAL_VALUE_MINUS_ZERO (cst))
8928 return fold_build2 (code, type, arg0,
8929 build_real (TREE_TYPE (arg1), dconst0));
8931 /* x != NaN is always true, other ops are always false. */
8932 if (REAL_VALUE_ISNAN (cst)
8933 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
8935 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
8936 return omit_one_operand (type, tem, arg0);
8939 /* Fold comparisons against infinity. */
8940 if (REAL_VALUE_ISINF (cst))
8942 tem = fold_inf_compare (code, type, arg0, arg1);
8943 if (tem != NULL_TREE)
8948 /* If this is a comparison of a real constant with a PLUS_EXPR
8949 or a MINUS_EXPR of a real constant, we can convert it into a
8950 comparison with a revised real constant as long as no overflow
8951 occurs when unsafe_math_optimizations are enabled. */
8952 if (flag_unsafe_math_optimizations
8953 && TREE_CODE (arg1) == REAL_CST
8954 && (TREE_CODE (arg0) == PLUS_EXPR
8955 || TREE_CODE (arg0) == MINUS_EXPR)
8956 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
8957 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
8958 ? MINUS_EXPR : PLUS_EXPR,
8959 arg1, TREE_OPERAND (arg0, 1), 0))
8960 && ! TREE_CONSTANT_OVERFLOW (tem))
8961 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
8963 /* Likewise, we can simplify a comparison of a real constant with
8964 a MINUS_EXPR whose first operand is also a real constant, i.e.
8965 (c1 - x) < c2 becomes x > c1-c2. */
8966 if (flag_unsafe_math_optimizations
8967 && TREE_CODE (arg1) == REAL_CST
8968 && TREE_CODE (arg0) == MINUS_EXPR
8969 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
8970 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
8972 && ! TREE_CONSTANT_OVERFLOW (tem))
8973 return fold_build2 (swap_tree_comparison (code), type,
8974 TREE_OPERAND (arg0, 1), tem);
8976 /* Fold comparisons against built-in math functions. */
8977 if (TREE_CODE (arg1) == REAL_CST
8978 && flag_unsafe_math_optimizations
8979 && ! flag_errno_math)
8981 enum built_in_function fcode = builtin_mathfn_code (arg0);
8983 if (fcode != END_BUILTINS)
8985 tem = fold_mathfn_compare (fcode, code, type, arg0, arg1);
8986 if (tem != NULL_TREE)
8992 /* Convert foo++ == CONST into ++foo == CONST + INCR. */
8993 if (TREE_CONSTANT (arg1)
8994 && (TREE_CODE (arg0) == POSTINCREMENT_EXPR
8995 || TREE_CODE (arg0) == POSTDECREMENT_EXPR)
8996 /* This optimization is invalid for ordered comparisons
8997 if CONST+INCR overflows or if foo+incr might overflow.
8998 This optimization is invalid for floating point due to rounding.
8999 For pointer types we assume overflow doesn't happen. */
9000 && (POINTER_TYPE_P (TREE_TYPE (arg0))
9001 || (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
9002 && (code == EQ_EXPR || code == NE_EXPR))))
9004 tree varop, newconst;
9006 if (TREE_CODE (arg0) == POSTINCREMENT_EXPR)
9008 newconst = fold_build2 (PLUS_EXPR, TREE_TYPE (arg0),
9009 arg1, TREE_OPERAND (arg0, 1));
9010 varop = build2 (PREINCREMENT_EXPR, TREE_TYPE (arg0),
9011 TREE_OPERAND (arg0, 0),
9012 TREE_OPERAND (arg0, 1));
9016 newconst = fold_build2 (MINUS_EXPR, TREE_TYPE (arg0),
9017 arg1, TREE_OPERAND (arg0, 1));
9018 varop = build2 (PREDECREMENT_EXPR, TREE_TYPE (arg0),
9019 TREE_OPERAND (arg0, 0),
9020 TREE_OPERAND (arg0, 1));
9024 /* If VAROP is a reference to a bitfield, we must mask
9025 the constant by the width of the field. */
9026 if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
9027 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (varop, 0), 1))
9028 && host_integerp (DECL_SIZE (TREE_OPERAND
9029 (TREE_OPERAND (varop, 0), 1)), 1))
9031 tree fielddecl = TREE_OPERAND (TREE_OPERAND (varop, 0), 1);
9032 HOST_WIDE_INT size = tree_low_cst (DECL_SIZE (fielddecl), 1);
9033 tree folded_compare, shift;
9035 /* First check whether the comparison would come out
9036 always the same. If we don't do that we would
9037 change the meaning with the masking. */
9038 folded_compare = fold_build2 (code, type,
9039 TREE_OPERAND (varop, 0), arg1);
9040 if (integer_zerop (folded_compare)
9041 || integer_onep (folded_compare))
9042 return omit_one_operand (type, folded_compare, varop);
9044 shift = build_int_cst (NULL_TREE,
9045 TYPE_PRECISION (TREE_TYPE (varop)) - size);
9046 shift = fold_convert (TREE_TYPE (varop), shift);
9047 newconst = fold_build2 (LSHIFT_EXPR, TREE_TYPE (varop),
9049 newconst = fold_build2 (RSHIFT_EXPR, TREE_TYPE (varop),
9053 return fold_build2 (code, type, varop, newconst);
9056 /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
9057 This transformation affects the cases which are handled in later
9058 optimizations involving comparisons with non-negative constants. */
9059 if (TREE_CODE (arg1) == INTEGER_CST
9060 && TREE_CODE (arg0) != INTEGER_CST
9061 && tree_int_cst_sgn (arg1) > 0)
9066 arg1 = const_binop (MINUS_EXPR, arg1,
9067 build_int_cst (TREE_TYPE (arg1), 1), 0);
9068 return fold_build2 (GT_EXPR, type, arg0,
9069 fold_convert (TREE_TYPE (arg0), arg1));
9072 arg1 = const_binop (MINUS_EXPR, arg1,
9073 build_int_cst (TREE_TYPE (arg1), 1), 0);
9074 return fold_build2 (LE_EXPR, type, arg0,
9075 fold_convert (TREE_TYPE (arg0), arg1));
9082 /* Comparisons with the highest or lowest possible integer of
9083 the specified size will have known values. */
9085 int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg1)));
9087 if (TREE_CODE (arg1) == INTEGER_CST
9088 && ! TREE_CONSTANT_OVERFLOW (arg1)
9089 && width <= 2 * HOST_BITS_PER_WIDE_INT
9090 && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9091 || POINTER_TYPE_P (TREE_TYPE (arg1))))
9093 HOST_WIDE_INT signed_max_hi;
9094 unsigned HOST_WIDE_INT signed_max_lo;
9095 unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;
9097 if (width <= HOST_BITS_PER_WIDE_INT)
9099 signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
9104 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
9106 max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
9112 max_lo = signed_max_lo;
9113 min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
9119 width -= HOST_BITS_PER_WIDE_INT;
9121 signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
9126 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
9128 max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
9133 max_hi = signed_max_hi;
9134 min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
9138 if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
9139 && TREE_INT_CST_LOW (arg1) == max_lo)
9143 return omit_one_operand (type, integer_zero_node, arg0);
9146 return fold_build2 (EQ_EXPR, type, arg0, arg1);
9149 return omit_one_operand (type, integer_one_node, arg0);
9152 return fold_build2 (NE_EXPR, type, arg0, arg1);
9154 /* The GE_EXPR and LT_EXPR cases above are not normally
9155 reached because of previous transformations. */
9160 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
9162 && TREE_INT_CST_LOW (arg1) == max_lo - 1)
9166 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
9167 return fold_build2 (EQ_EXPR, type, arg0, arg1);
9169 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
9170 return fold_build2 (NE_EXPR, type, arg0, arg1);
9174 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
9176 && TREE_INT_CST_LOW (arg1) == min_lo)
9180 return omit_one_operand (type, integer_zero_node, arg0);
9183 return fold_build2 (EQ_EXPR, type, arg0, arg1);
9186 return omit_one_operand (type, integer_one_node, arg0);
9189 return fold_build2 (NE_EXPR, type, arg0, arg1);
9194 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
9196 && TREE_INT_CST_LOW (arg1) == min_lo + 1)
9200 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
9201 return fold_build2 (NE_EXPR, type, arg0, arg1);
9203 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
9204 return fold_build2 (EQ_EXPR, type, arg0, arg1);
9209 else if (!in_gimple_form
9210 && TREE_INT_CST_HIGH (arg1) == signed_max_hi
9211 && TREE_INT_CST_LOW (arg1) == signed_max_lo
9212 && TYPE_UNSIGNED (TREE_TYPE (arg1))
9213 /* signed_type does not work on pointer types. */
9214 && INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
9216 /* The following case also applies to X < signed_max+1
9217 and X >= signed_max+1 because previous transformations. */
9218 if (code == LE_EXPR || code == GT_EXPR)
9221 st0 = lang_hooks.types.signed_type (TREE_TYPE (arg0));
9222 st1 = lang_hooks.types.signed_type (TREE_TYPE (arg1));
9224 (build2 (code == LE_EXPR ? GE_EXPR: LT_EXPR,
9225 type, fold_convert (st0, arg0),
9226 fold_convert (st1, integer_zero_node)));
9232 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
9233 a MINUS_EXPR of a constant, we can convert it into a comparison with
9234 a revised constant as long as no overflow occurs. */
9235 if ((code == EQ_EXPR || code == NE_EXPR)
9236 && TREE_CODE (arg1) == INTEGER_CST
9237 && (TREE_CODE (arg0) == PLUS_EXPR
9238 || TREE_CODE (arg0) == MINUS_EXPR)
9239 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9240 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
9241 ? MINUS_EXPR : PLUS_EXPR,
9242 arg1, TREE_OPERAND (arg0, 1), 0))
9243 && ! TREE_CONSTANT_OVERFLOW (tem))
9244 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
9246 /* Similarly for a NEGATE_EXPR. */
9247 else if ((code == EQ_EXPR || code == NE_EXPR)
9248 && TREE_CODE (arg0) == NEGATE_EXPR
9249 && TREE_CODE (arg1) == INTEGER_CST
9250 && 0 != (tem = negate_expr (arg1))
9251 && TREE_CODE (tem) == INTEGER_CST
9252 && ! TREE_CONSTANT_OVERFLOW (tem))
9253 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
9255 /* If we have X - Y == 0, we can convert that to X == Y and similarly
9256 for !=. Don't do this for ordered comparisons due to overflow. */
9257 else if ((code == NE_EXPR || code == EQ_EXPR)
9258 && integer_zerop (arg1) && TREE_CODE (arg0) == MINUS_EXPR)
9259 return fold_build2 (code, type,
9260 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
9262 else if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
9263 && (TREE_CODE (arg0) == NOP_EXPR
9264 || TREE_CODE (arg0) == CONVERT_EXPR))
9266 /* If we are widening one operand of an integer comparison,
9267 see if the other operand is similarly being widened. Perhaps we
9268 can do the comparison in the narrower type. */
9269 tem = fold_widened_comparison (code, type, arg0, arg1);
9273 /* Or if we are changing signedness. */
9274 tem = fold_sign_changed_comparison (code, type, arg0, arg1);
9279 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
9280 constant, we can simplify it. */
9281 else if (TREE_CODE (arg1) == INTEGER_CST
9282 && (TREE_CODE (arg0) == MIN_EXPR
9283 || TREE_CODE (arg0) == MAX_EXPR)
9284 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9286 tem = optimize_minmax_comparison (code, type, op0, op1);
9293 /* If we are comparing an ABS_EXPR with a constant, we can
9294 convert all the cases into explicit comparisons, but they may
9295 well not be faster than doing the ABS and one comparison.
9296 But ABS (X) <= C is a range comparison, which becomes a subtraction
9297 and a comparison, and is probably faster. */
9298 else if (code == LE_EXPR && TREE_CODE (arg1) == INTEGER_CST
9299 && TREE_CODE (arg0) == ABS_EXPR
9300 && ! TREE_SIDE_EFFECTS (arg0)
9301 && (0 != (tem = negate_expr (arg1)))
9302 && TREE_CODE (tem) == INTEGER_CST
9303 && ! TREE_CONSTANT_OVERFLOW (tem))
9304 return fold_build2 (TRUTH_ANDIF_EXPR, type,
9305 build2 (GE_EXPR, type,
9306 TREE_OPERAND (arg0, 0), tem),
9307 build2 (LE_EXPR, type,
9308 TREE_OPERAND (arg0, 0), arg1));
9310 /* Convert ABS_EXPR<x> >= 0 to true. */
9311 else if (code == GE_EXPR
9312 && tree_expr_nonnegative_p (arg0)
9313 && (integer_zerop (arg1)
9314 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
9315 && real_zerop (arg1))))
9316 return omit_one_operand (type, integer_one_node, arg0);
9318 /* Convert ABS_EXPR<x> < 0 to false. */
9319 else if (code == LT_EXPR
9320 && tree_expr_nonnegative_p (arg0)
9321 && (integer_zerop (arg1) || real_zerop (arg1)))
9322 return omit_one_operand (type, integer_zero_node, arg0);
9324 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
9325 else if ((code == EQ_EXPR || code == NE_EXPR)
9326 && TREE_CODE (arg0) == ABS_EXPR
9327 && (integer_zerop (arg1) || real_zerop (arg1)))
9328 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), arg1);
9330 /* If this is an EQ or NE comparison with zero and ARG0 is
9331 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
9332 two operations, but the latter can be done in one less insn
9333 on machines that have only two-operand insns or on which a
9334 constant cannot be the first operand. */
9335 if (integer_zerop (arg1) && (code == EQ_EXPR || code == NE_EXPR)
9336 && TREE_CODE (arg0) == BIT_AND_EXPR)
9338 tree arg00 = TREE_OPERAND (arg0, 0);
9339 tree arg01 = TREE_OPERAND (arg0, 1);
9340 if (TREE_CODE (arg00) == LSHIFT_EXPR
9341 && integer_onep (TREE_OPERAND (arg00, 0)))
9343 fold_build2 (code, type,
9344 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
9345 build2 (RSHIFT_EXPR, TREE_TYPE (arg00),
9346 arg01, TREE_OPERAND (arg00, 1)),
9347 fold_convert (TREE_TYPE (arg0),
9350 else if (TREE_CODE (TREE_OPERAND (arg0, 1)) == LSHIFT_EXPR
9351 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 1), 0)))
9353 fold_build2 (code, type,
9354 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
9355 build2 (RSHIFT_EXPR, TREE_TYPE (arg01),
9356 arg00, TREE_OPERAND (arg01, 1)),
9357 fold_convert (TREE_TYPE (arg0),
9362 /* If this is an NE or EQ comparison of zero against the result of a
9363 signed MOD operation whose second operand is a power of 2, make
9364 the MOD operation unsigned since it is simpler and equivalent. */
9365 if ((code == NE_EXPR || code == EQ_EXPR)
9366 && integer_zerop (arg1)
9367 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
9368 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
9369 || TREE_CODE (arg0) == CEIL_MOD_EXPR
9370 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
9371 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
9372 && integer_pow2p (TREE_OPERAND (arg0, 1)))
9374 tree newtype = lang_hooks.types.unsigned_type (TREE_TYPE (arg0));
9375 tree newmod = fold_build2 (TREE_CODE (arg0), newtype,
9376 fold_convert (newtype,
9377 TREE_OPERAND (arg0, 0)),
9378 fold_convert (newtype,
9379 TREE_OPERAND (arg0, 1)));
9381 return fold_build2 (code, type, newmod,
9382 fold_convert (newtype, arg1));
9385 /* If this is an NE comparison of zero with an AND of one, remove the
9386 comparison since the AND will give the correct value. */
9387 if (code == NE_EXPR && integer_zerop (arg1)
9388 && TREE_CODE (arg0) == BIT_AND_EXPR
9389 && integer_onep (TREE_OPERAND (arg0, 1)))
9390 return fold_convert (type, arg0);
9392 /* If we have (A & C) == C where C is a power of 2, convert this into
9393 (A & C) != 0. Similarly for NE_EXPR. */
9394 if ((code == EQ_EXPR || code == NE_EXPR)
9395 && TREE_CODE (arg0) == BIT_AND_EXPR
9396 && integer_pow2p (TREE_OPERAND (arg0, 1))
9397 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9398 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
9399 arg0, fold_convert (TREE_TYPE (arg0),
9400 integer_zero_node));
9402 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
9403 bit, then fold the expression into A < 0 or A >= 0. */
9404 tem = fold_single_bit_test_into_sign_test (code, arg0, arg1, type);
9408 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
9409 Similarly for NE_EXPR. */
9410 if ((code == EQ_EXPR || code == NE_EXPR)
9411 && TREE_CODE (arg0) == BIT_AND_EXPR
9412 && TREE_CODE (arg1) == INTEGER_CST
9413 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9415 tree notc = fold_build1 (BIT_NOT_EXPR,
9416 TREE_TYPE (TREE_OPERAND (arg0, 1)),
9417 TREE_OPERAND (arg0, 1));
9418 tree dandnotc = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
9420 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
9421 if (integer_nonzerop (dandnotc))
9422 return omit_one_operand (type, rslt, arg0);
9425 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
9426 Similarly for NE_EXPR. */
9427 if ((code == EQ_EXPR || code == NE_EXPR)
9428 && TREE_CODE (arg0) == BIT_IOR_EXPR
9429 && TREE_CODE (arg1) == INTEGER_CST
9430 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9432 tree notd = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
9433 tree candnotd = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
9434 TREE_OPERAND (arg0, 1), notd);
9435 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
9436 if (integer_nonzerop (candnotd))
9437 return omit_one_operand (type, rslt, arg0);
9440 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
9441 and similarly for >= into !=. */
9442 if ((code == LT_EXPR || code == GE_EXPR)
9443 && TYPE_UNSIGNED (TREE_TYPE (arg0))
9444 && TREE_CODE (arg1) == LSHIFT_EXPR
9445 && integer_onep (TREE_OPERAND (arg1, 0)))
9446 return build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
9447 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
9448 TREE_OPERAND (arg1, 1)),
9449 fold_convert (TREE_TYPE (arg0), integer_zero_node));
9451 else if ((code == LT_EXPR || code == GE_EXPR)
9452 && TYPE_UNSIGNED (TREE_TYPE (arg0))
9453 && (TREE_CODE (arg1) == NOP_EXPR
9454 || TREE_CODE (arg1) == CONVERT_EXPR)
9455 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
9456 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
9458 build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
9459 fold_convert (TREE_TYPE (arg0),
9460 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
9461 TREE_OPERAND (TREE_OPERAND (arg1, 0),
9463 fold_convert (TREE_TYPE (arg0), integer_zero_node));
9465 /* Simplify comparison of something with itself. (For IEEE
9466 floating-point, we can only do some of these simplifications.) */
9467 if (operand_equal_p (arg0, arg1, 0))
9472 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9473 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9474 return constant_boolean_node (1, type);
9479 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9480 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9481 return constant_boolean_node (1, type);
9482 return fold_build2 (EQ_EXPR, type, arg0, arg1);
9485 /* For NE, we can only do this simplification if integer
9486 or we don't honor IEEE floating point NaNs. */
9487 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
9488 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9490 /* ... fall through ... */
9493 return constant_boolean_node (0, type);
9499 /* If we are comparing an expression that just has comparisons
9500 of two integer values, arithmetic expressions of those comparisons,
9501 and constants, we can simplify it. There are only three cases
9502 to check: the two values can either be equal, the first can be
9503 greater, or the second can be greater. Fold the expression for
9504 those three values. Since each value must be 0 or 1, we have
9505 eight possibilities, each of which corresponds to the constant 0
9506 or 1 or one of the six possible comparisons.
9508 This handles common cases like (a > b) == 0 but also handles
9509 expressions like ((x > y) - (y > x)) > 0, which supposedly
9510 occur in macroized code. */
9512 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
9514 tree cval1 = 0, cval2 = 0;
9517 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
9518 /* Don't handle degenerate cases here; they should already
9519 have been handled anyway. */
9520 && cval1 != 0 && cval2 != 0
9521 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
9522 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
9523 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
9524 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
9525 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
9526 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
9527 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
9529 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
9530 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
9532 /* We can't just pass T to eval_subst in case cval1 or cval2
9533 was the same as ARG1. */
9536 = fold_build2 (code, type,
9537 eval_subst (arg0, cval1, maxval,
9541 = fold_build2 (code, type,
9542 eval_subst (arg0, cval1, maxval,
9546 = fold_build2 (code, type,
9547 eval_subst (arg0, cval1, minval,
9551 /* All three of these results should be 0 or 1. Confirm they
9552 are. Then use those values to select the proper code
9555 if ((integer_zerop (high_result)
9556 || integer_onep (high_result))
9557 && (integer_zerop (equal_result)
9558 || integer_onep (equal_result))
9559 && (integer_zerop (low_result)
9560 || integer_onep (low_result)))
9562 /* Make a 3-bit mask with the high-order bit being the
9563 value for `>', the next for '=', and the low for '<'. */
9564 switch ((integer_onep (high_result) * 4)
9565 + (integer_onep (equal_result) * 2)
9566 + integer_onep (low_result))
9570 return omit_one_operand (type, integer_zero_node, arg0);
9591 return omit_one_operand (type, integer_one_node, arg0);
9595 return save_expr (build2 (code, type, cval1, cval2));
9597 return fold_build2 (code, type, cval1, cval2);
9602 /* If this is a comparison of a field, we may be able to simplify it. */
9603 if (((TREE_CODE (arg0) == COMPONENT_REF
9604 && lang_hooks.can_use_bit_fields_p ())
9605 || TREE_CODE (arg0) == BIT_FIELD_REF)
9606 && (code == EQ_EXPR || code == NE_EXPR)
9607 /* Handle the constant case even without -O
9608 to make sure the warnings are given. */
9609 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
9611 t1 = optimize_bit_field_compare (code, type, arg0, arg1);
9616 /* Fold a comparison of the address of COMPONENT_REFs with the same
9617 type and component to a comparison of the address of the base
9618 object. In short, &x->a OP &y->a to x OP y and
9619 &x->a OP &y.a to x OP &y */
9620 if (TREE_CODE (arg0) == ADDR_EXPR
9621 && TREE_CODE (TREE_OPERAND (arg0, 0)) == COMPONENT_REF
9622 && TREE_CODE (arg1) == ADDR_EXPR
9623 && TREE_CODE (TREE_OPERAND (arg1, 0)) == COMPONENT_REF)
9625 tree cref0 = TREE_OPERAND (arg0, 0);
9626 tree cref1 = TREE_OPERAND (arg1, 0);
9627 if (TREE_OPERAND (cref0, 1) == TREE_OPERAND (cref1, 1))
9629 tree op0 = TREE_OPERAND (cref0, 0);
9630 tree op1 = TREE_OPERAND (cref1, 0);
9631 return fold_build2 (code, type,
9632 build_fold_addr_expr (op0),
9633 build_fold_addr_expr (op1));
9637 /* Optimize comparisons of strlen vs zero to a compare of the
9638 first character of the string vs zero. To wit,
9639 strlen(ptr) == 0 => *ptr == 0
9640 strlen(ptr) != 0 => *ptr != 0
9641 Other cases should reduce to one of these two (or a constant)
9642 due to the return value of strlen being unsigned. */
9643 if ((code == EQ_EXPR || code == NE_EXPR)
9644 && integer_zerop (arg1)
9645 && TREE_CODE (arg0) == CALL_EXPR)
9647 tree fndecl = get_callee_fndecl (arg0);
9651 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
9652 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
9653 && (arglist = TREE_OPERAND (arg0, 1))
9654 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE
9655 && ! TREE_CHAIN (arglist))
9657 tree iref = build_fold_indirect_ref (TREE_VALUE (arglist));
9658 return fold_build2 (code, type, iref,
9659 build_int_cst (TREE_TYPE (iref), 0));
9663 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
9664 into a single range test. */
9665 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
9666 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
9667 && TREE_CODE (arg1) == INTEGER_CST
9668 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9669 && !integer_zerop (TREE_OPERAND (arg0, 1))
9670 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9671 && !TREE_OVERFLOW (arg1))
9673 t1 = fold_div_compare (code, type, arg0, arg1);
9674 if (t1 != NULL_TREE)
9678 if ((code == EQ_EXPR || code == NE_EXPR)
9679 && !TREE_SIDE_EFFECTS (arg0)
9680 && integer_zerop (arg1)
9681 && tree_expr_nonzero_p (arg0))
9682 return constant_boolean_node (code==NE_EXPR, type);
9684 t1 = fold_relational_const (code, type, arg0, arg1);
9685 return t1 == NULL_TREE ? NULL_TREE : t1;
9687 case UNORDERED_EXPR:
9695 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
9697 t1 = fold_relational_const (code, type, arg0, arg1);
9698 if (t1 != NULL_TREE)
9702 /* If the first operand is NaN, the result is constant. */
9703 if (TREE_CODE (arg0) == REAL_CST
9704 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
9705 && (code != LTGT_EXPR || ! flag_trapping_math))
9707 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
9710 return omit_one_operand (type, t1, arg1);
9713 /* If the second operand is NaN, the result is constant. */
9714 if (TREE_CODE (arg1) == REAL_CST
9715 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
9716 && (code != LTGT_EXPR || ! flag_trapping_math))
9718 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
9721 return omit_one_operand (type, t1, arg0);
9724 /* Simplify unordered comparison of something with itself. */
9725 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
9726 && operand_equal_p (arg0, arg1, 0))
9727 return constant_boolean_node (1, type);
9729 if (code == LTGT_EXPR
9730 && !flag_trapping_math
9731 && operand_equal_p (arg0, arg1, 0))
9732 return constant_boolean_node (0, type);
9734 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
9736 tree targ0 = strip_float_extensions (arg0);
9737 tree targ1 = strip_float_extensions (arg1);
9738 tree newtype = TREE_TYPE (targ0);
9740 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
9741 newtype = TREE_TYPE (targ1);
9743 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
9744 return fold_build2 (code, type, fold_convert (newtype, targ0),
9745 fold_convert (newtype, targ1));
9751 /* When pedantic, a compound expression can be neither an lvalue
9752 nor an integer constant expression. */
9753 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
9755 /* Don't let (0, 0) be null pointer constant. */
9756 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
9757 : fold_convert (type, arg1);
9758 return pedantic_non_lvalue (tem);
9762 return build_complex (type, arg0, arg1);
9766 /* An ASSERT_EXPR should never be passed to fold_binary. */
9771 } /* switch (code) */
9774 /* Callback for walk_tree, looking for LABEL_EXPR.
9775 Returns tree TP if it is LABEL_EXPR. Otherwise it returns NULL_TREE.
9776 Do not check the sub-tree of GOTO_EXPR. */
9779 contains_label_1 (tree *tp,
9781 void *data ATTRIBUTE_UNUSED)
9783 switch (TREE_CODE (*tp))
9795 /* Checks whether the sub-tree ST contains a label LABEL_EXPR which is
9796 accessible from outside the sub-tree. Returns NULL_TREE if no
9797 addressable label is found. */
9800 contains_label_p (tree st)
9802 return (walk_tree (&st, contains_label_1 , NULL, NULL) != NULL_TREE);
9805 /* Fold a ternary expression of code CODE and type TYPE with operands
9806 OP0, OP1, and OP2. Return the folded expression if folding is
9807 successful. Otherwise, return NULL_TREE. */
9810 fold_ternary (enum tree_code code, tree type, tree op0, tree op1, tree op2)
9813 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
9814 enum tree_code_class kind = TREE_CODE_CLASS (code);
9816 gcc_assert (IS_EXPR_CODE_CLASS (kind)
9817 && TREE_CODE_LENGTH (code) == 3);
9819 /* Strip any conversions that don't change the mode. This is safe
9820 for every expression, except for a comparison expression because
9821 its signedness is derived from its operands. So, in the latter
9822 case, only strip conversions that don't change the signedness.
9824 Note that this is done as an internal manipulation within the
9825 constant folder, in order to find the simplest representation of
9826 the arguments so that their form can be studied. In any cases,
9827 the appropriate type conversions should be put back in the tree
9828 that will get out of the constant folder. */
9844 if (TREE_CODE (arg0) == CONSTRUCTOR
9845 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
9847 tree m = purpose_member (arg1, CONSTRUCTOR_ELTS (arg0));
9849 return TREE_VALUE (m);
9854 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
9855 so all simple results must be passed through pedantic_non_lvalue. */
9856 if (TREE_CODE (arg0) == INTEGER_CST)
9858 tree unused_op = integer_zerop (arg0) ? op1 : op2;
9859 tem = integer_zerop (arg0) ? op2 : op1;
9860 /* Only optimize constant conditions when the selected branch
9861 has the same type as the COND_EXPR. This avoids optimizing
9862 away "c ? x : throw", where the throw has a void type.
9863 Avoid throwing away that operand which contains label. */
9864 if ((!TREE_SIDE_EFFECTS (unused_op)
9865 || !contains_label_p (unused_op))
9866 && (! VOID_TYPE_P (TREE_TYPE (tem))
9867 || VOID_TYPE_P (type)))
9868 return pedantic_non_lvalue (tem);
9871 if (operand_equal_p (arg1, op2, 0))
9872 return pedantic_omit_one_operand (type, arg1, arg0);
9874 /* If we have A op B ? A : C, we may be able to convert this to a
9875 simpler expression, depending on the operation and the values
9876 of B and C. Signed zeros prevent all of these transformations,
9877 for reasons given above each one.
9879 Also try swapping the arguments and inverting the conditional. */
9880 if (COMPARISON_CLASS_P (arg0)
9881 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
9882 arg1, TREE_OPERAND (arg0, 1))
9883 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
9885 tem = fold_cond_expr_with_comparison (type, arg0, op1, op2);
9890 if (COMPARISON_CLASS_P (arg0)
9891 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
9893 TREE_OPERAND (arg0, 1))
9894 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
9896 tem = invert_truthvalue (arg0);
9897 if (COMPARISON_CLASS_P (tem))
9899 tem = fold_cond_expr_with_comparison (type, tem, op2, op1);
9905 /* If the second operand is simpler than the third, swap them
9906 since that produces better jump optimization results. */
9907 if (tree_swap_operands_p (op1, op2, false))
9909 /* See if this can be inverted. If it can't, possibly because
9910 it was a floating-point inequality comparison, don't do
9912 tem = invert_truthvalue (arg0);
9914 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
9915 return fold_build3 (code, type, tem, op2, op1);
9918 /* Convert A ? 1 : 0 to simply A. */
9919 if (integer_onep (op1)
9920 && integer_zerop (op2)
9921 /* If we try to convert OP0 to our type, the
9922 call to fold will try to move the conversion inside
9923 a COND, which will recurse. In that case, the COND_EXPR
9924 is probably the best choice, so leave it alone. */
9925 && type == TREE_TYPE (arg0))
9926 return pedantic_non_lvalue (arg0);
9928 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
9929 over COND_EXPR in cases such as floating point comparisons. */
9930 if (integer_zerop (op1)
9931 && integer_onep (op2)
9932 && truth_value_p (TREE_CODE (arg0)))
9933 return pedantic_non_lvalue (fold_convert (type,
9934 invert_truthvalue (arg0)));
9936 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
9937 if (TREE_CODE (arg0) == LT_EXPR
9938 && integer_zerop (TREE_OPERAND (arg0, 1))
9939 && integer_zerop (op2)
9940 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
9941 return fold_convert (type, fold_build2 (BIT_AND_EXPR,
9942 TREE_TYPE (tem), tem, arg1));
9944 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
9945 already handled above. */
9946 if (TREE_CODE (arg0) == BIT_AND_EXPR
9947 && integer_onep (TREE_OPERAND (arg0, 1))
9948 && integer_zerop (op2)
9949 && integer_pow2p (arg1))
9951 tree tem = TREE_OPERAND (arg0, 0);
9953 if (TREE_CODE (tem) == RSHIFT_EXPR
9954 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
9955 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
9956 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
9957 return fold_build2 (BIT_AND_EXPR, type,
9958 TREE_OPERAND (tem, 0), arg1);
9961 /* A & N ? N : 0 is simply A & N if N is a power of two. This
9962 is probably obsolete because the first operand should be a
9963 truth value (that's why we have the two cases above), but let's
9964 leave it in until we can confirm this for all front-ends. */
9965 if (integer_zerop (op2)
9966 && TREE_CODE (arg0) == NE_EXPR
9967 && integer_zerop (TREE_OPERAND (arg0, 1))
9968 && integer_pow2p (arg1)
9969 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
9970 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
9971 arg1, OEP_ONLY_CONST))
9972 return pedantic_non_lvalue (fold_convert (type,
9973 TREE_OPERAND (arg0, 0)));
9975 /* Convert A ? B : 0 into A && B if A and B are truth values. */
9976 if (integer_zerop (op2)
9977 && truth_value_p (TREE_CODE (arg0))
9978 && truth_value_p (TREE_CODE (arg1)))
9979 return fold_build2 (TRUTH_ANDIF_EXPR, type, arg0, arg1);
9981 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
9982 if (integer_onep (op2)
9983 && truth_value_p (TREE_CODE (arg0))
9984 && truth_value_p (TREE_CODE (arg1)))
9986 /* Only perform transformation if ARG0 is easily inverted. */
9987 tem = invert_truthvalue (arg0);
9988 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
9989 return fold_build2 (TRUTH_ORIF_EXPR, type, tem, arg1);
9992 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
9993 if (integer_zerop (arg1)
9994 && truth_value_p (TREE_CODE (arg0))
9995 && truth_value_p (TREE_CODE (op2)))
9997 /* Only perform transformation if ARG0 is easily inverted. */
9998 tem = invert_truthvalue (arg0);
9999 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
10000 return fold_build2 (TRUTH_ANDIF_EXPR, type, tem, op2);
10003 /* Convert A ? 1 : B into A || B if A and B are truth values. */
10004 if (integer_onep (arg1)
10005 && truth_value_p (TREE_CODE (arg0))
10006 && truth_value_p (TREE_CODE (op2)))
10007 return fold_build2 (TRUTH_ORIF_EXPR, type, arg0, op2);
10012 /* Check for a built-in function. */
10013 if (TREE_CODE (op0) == ADDR_EXPR
10014 && TREE_CODE (TREE_OPERAND (op0, 0)) == FUNCTION_DECL
10015 && DECL_BUILT_IN (TREE_OPERAND (op0, 0)))
10017 tree fndecl = TREE_OPERAND (op0, 0);
10018 tree arglist = op1;
10019 tree tmp = fold_builtin (fndecl, arglist, false);
10025 case BIT_FIELD_REF:
10026 if (TREE_CODE (arg0) == VECTOR_CST
10027 && type == TREE_TYPE (TREE_TYPE (arg0))
10028 && host_integerp (arg1, 1)
10029 && host_integerp (op2, 1))
10031 unsigned HOST_WIDE_INT width = tree_low_cst (arg1, 1);
10032 unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);
10035 && simple_cst_equal (arg1, TYPE_SIZE (type)) == 1
10036 && (idx % width) == 0
10037 && (idx = idx / width)
10038 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
10040 tree elements = TREE_VECTOR_CST_ELTS (arg0);
10041 while (idx-- > 0 && elements)
10042 elements = TREE_CHAIN (elements);
10044 return TREE_VALUE (elements);
10046 return fold_convert (type, integer_zero_node);
10053 } /* switch (code) */
10056 /* Perform constant folding and related simplification of EXPR.
10057 The related simplifications include x*1 => x, x*0 => 0, etc.,
10058 and application of the associative law.
10059 NOP_EXPR conversions may be removed freely (as long as we
10060 are careful not to change the type of the overall expression).
10061 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
10062 but we can constant-fold them if they have constant operands. */
10064 #ifdef ENABLE_FOLD_CHECKING
10065 # define fold(x) fold_1 (x)
10066 static tree fold_1 (tree);
10072 const tree t = expr;
10073 enum tree_code code = TREE_CODE (t);
10074 enum tree_code_class kind = TREE_CODE_CLASS (code);
10077 /* Return right away if a constant. */
10078 if (kind == tcc_constant)
10081 if (IS_EXPR_CODE_CLASS (kind))
10083 tree type = TREE_TYPE (t);
10084 tree op0, op1, op2;
10086 switch (TREE_CODE_LENGTH (code))
10089 op0 = TREE_OPERAND (t, 0);
10090 tem = fold_unary (code, type, op0);
10091 return tem ? tem : expr;
10093 op0 = TREE_OPERAND (t, 0);
10094 op1 = TREE_OPERAND (t, 1);
10095 tem = fold_binary (code, type, op0, op1);
10096 return tem ? tem : expr;
10098 op0 = TREE_OPERAND (t, 0);
10099 op1 = TREE_OPERAND (t, 1);
10100 op2 = TREE_OPERAND (t, 2);
10101 tem = fold_ternary (code, type, op0, op1, op2);
10102 return tem ? tem : expr;
10111 return fold (DECL_INITIAL (t));
10115 } /* switch (code) */
10118 #ifdef ENABLE_FOLD_CHECKING
10121 static void fold_checksum_tree (tree, struct md5_ctx *, htab_t);
10122 static void fold_check_failed (tree, tree);
10123 void print_fold_checksum (tree);
10125 /* When --enable-checking=fold, compute a digest of expr before
10126 and after actual fold call to see if fold did not accidentally
10127 change original expr. */
10133 struct md5_ctx ctx;
10134 unsigned char checksum_before[16], checksum_after[16];
10137 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
10138 md5_init_ctx (&ctx);
10139 fold_checksum_tree (expr, &ctx, ht);
10140 md5_finish_ctx (&ctx, checksum_before);
10143 ret = fold_1 (expr);
10145 md5_init_ctx (&ctx);
10146 fold_checksum_tree (expr, &ctx, ht);
10147 md5_finish_ctx (&ctx, checksum_after);
10150 if (memcmp (checksum_before, checksum_after, 16))
10151 fold_check_failed (expr, ret);
10157 print_fold_checksum (tree expr)
10159 struct md5_ctx ctx;
10160 unsigned char checksum[16], cnt;
10163 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
10164 md5_init_ctx (&ctx);
10165 fold_checksum_tree (expr, &ctx, ht);
10166 md5_finish_ctx (&ctx, checksum);
10168 for (cnt = 0; cnt < 16; ++cnt)
10169 fprintf (stderr, "%02x", checksum[cnt]);
10170 putc ('\n', stderr);
10174 fold_check_failed (tree expr ATTRIBUTE_UNUSED, tree ret ATTRIBUTE_UNUSED)
10176 internal_error ("fold check: original tree changed by fold");
10180 fold_checksum_tree (tree expr, struct md5_ctx *ctx, htab_t ht)
10183 enum tree_code code;
10184 char buf[sizeof (struct tree_decl)];
10189 gcc_assert ((sizeof (struct tree_exp) + 5 * sizeof (tree)
10190 <= sizeof (struct tree_decl))
10191 && sizeof (struct tree_type) <= sizeof (struct tree_decl));
10194 slot = htab_find_slot (ht, expr, INSERT);
10198 code = TREE_CODE (expr);
10199 if (TREE_CODE_CLASS (code) == tcc_declaration
10200 && DECL_ASSEMBLER_NAME_SET_P (expr))
10202 /* Allow DECL_ASSEMBLER_NAME to be modified. */
10203 memcpy (buf, expr, tree_size (expr));
10205 SET_DECL_ASSEMBLER_NAME (expr, NULL);
10207 else if (TREE_CODE_CLASS (code) == tcc_type
10208 && (TYPE_POINTER_TO (expr) || TYPE_REFERENCE_TO (expr)
10209 || TYPE_CACHED_VALUES_P (expr)
10210 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)))
10212 /* Allow these fields to be modified. */
10213 memcpy (buf, expr, tree_size (expr));
10215 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr) = 0;
10216 TYPE_POINTER_TO (expr) = NULL;
10217 TYPE_REFERENCE_TO (expr) = NULL;
10218 if (TYPE_CACHED_VALUES_P (expr))
10220 TYPE_CACHED_VALUES_P (expr) = 0;
10221 TYPE_CACHED_VALUES (expr) = NULL;
10224 md5_process_bytes (expr, tree_size (expr), ctx);
10225 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
10226 if (TREE_CODE_CLASS (code) != tcc_type
10227 && TREE_CODE_CLASS (code) != tcc_declaration
10228 && code != TREE_LIST)
10229 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
10230 switch (TREE_CODE_CLASS (code))
10236 md5_process_bytes (TREE_STRING_POINTER (expr),
10237 TREE_STRING_LENGTH (expr), ctx);
10240 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
10241 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
10244 fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
10250 case tcc_exceptional:
10254 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
10255 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
10256 expr = TREE_CHAIN (expr);
10257 goto recursive_label;
10260 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
10261 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
10267 case tcc_expression:
10268 case tcc_reference:
10269 case tcc_comparison:
10272 case tcc_statement:
10273 len = TREE_CODE_LENGTH (code);
10274 for (i = 0; i < len; ++i)
10275 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
10277 case tcc_declaration:
10278 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
10279 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
10280 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
10281 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
10282 fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
10283 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
10284 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
10285 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
10286 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
10287 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
10288 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
10291 if (TREE_CODE (expr) == ENUMERAL_TYPE)
10292 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
10293 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
10294 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
10295 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
10296 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
10297 if (INTEGRAL_TYPE_P (expr)
10298 || SCALAR_FLOAT_TYPE_P (expr))
10300 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
10301 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
10303 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
10304 if (TREE_CODE (expr) == RECORD_TYPE
10305 || TREE_CODE (expr) == UNION_TYPE
10306 || TREE_CODE (expr) == QUAL_UNION_TYPE)
10307 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
10308 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
10317 /* Fold a unary tree expression with code CODE of type TYPE with an
10318 operand OP0. Return a folded expression if successful. Otherwise,
10319 return a tree expression with code CODE of type TYPE with an
10323 fold_build1 (enum tree_code code, tree type, tree op0)
10325 tree tem = fold_unary (code, type, op0);
10329 return build1 (code, type, op0);
10332 /* Fold a binary tree expression with code CODE of type TYPE with
10333 operands OP0 and OP1. Return a folded expression if successful.
10334 Otherwise, return a tree expression with code CODE of type TYPE
10335 with operands OP0 and OP1. */
10338 fold_build2 (enum tree_code code, tree type, tree op0, tree op1)
10340 tree tem = fold_binary (code, type, op0, op1);
10344 return build2 (code, type, op0, op1);
10347 /* Fold a ternary tree expression with code CODE of type TYPE with
10348 operands OP0, OP1, and OP2. Return a folded expression if
10349 successful. Otherwise, return a tree expression with code CODE of
10350 type TYPE with operands OP0, OP1, and OP2. */
10353 fold_build3 (enum tree_code code, tree type, tree op0, tree op1, tree op2)
10355 tree tem = fold_ternary (code, type, op0, op1, op2);
10359 return build3 (code, type, op0, op1, op2);
10362 /* Perform constant folding and related simplification of initializer
10363 expression EXPR. This behaves identically to "fold" but ignores
10364 potential run-time traps and exceptions that fold must preserve. */
10367 fold_initializer (tree expr)
10369 int saved_signaling_nans = flag_signaling_nans;
10370 int saved_trapping_math = flag_trapping_math;
10371 int saved_rounding_math = flag_rounding_math;
10372 int saved_trapv = flag_trapv;
10375 flag_signaling_nans = 0;
10376 flag_trapping_math = 0;
10377 flag_rounding_math = 0;
10380 result = fold (expr);
10382 flag_signaling_nans = saved_signaling_nans;
10383 flag_trapping_math = saved_trapping_math;
10384 flag_rounding_math = saved_rounding_math;
10385 flag_trapv = saved_trapv;
10390 /* Determine if first argument is a multiple of second argument. Return 0 if
10391 it is not, or we cannot easily determined it to be.
10393 An example of the sort of thing we care about (at this point; this routine
10394 could surely be made more general, and expanded to do what the *_DIV_EXPR's
10395 fold cases do now) is discovering that
10397 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
10403 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
10405 This code also handles discovering that
10407 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
10409 is a multiple of 8 so we don't have to worry about dealing with a
10410 possible remainder.
10412 Note that we *look* inside a SAVE_EXPR only to determine how it was
10413 calculated; it is not safe for fold to do much of anything else with the
10414 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
10415 at run time. For example, the latter example above *cannot* be implemented
10416 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
10417 evaluation time of the original SAVE_EXPR is not necessarily the same at
10418 the time the new expression is evaluated. The only optimization of this
10419 sort that would be valid is changing
10421 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
10425 SAVE_EXPR (I) * SAVE_EXPR (J)
10427 (where the same SAVE_EXPR (J) is used in the original and the
10428 transformed version). */
10431 multiple_of_p (tree type, tree top, tree bottom)
10433 if (operand_equal_p (top, bottom, 0))
10436 if (TREE_CODE (type) != INTEGER_TYPE)
10439 switch (TREE_CODE (top))
10442 /* Bitwise and provides a power of two multiple. If the mask is
10443 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
10444 if (!integer_pow2p (bottom))
10449 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
10450 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
10454 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
10455 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
10458 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
10462 op1 = TREE_OPERAND (top, 1);
10463 /* const_binop may not detect overflow correctly,
10464 so check for it explicitly here. */
10465 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
10466 > TREE_INT_CST_LOW (op1)
10467 && TREE_INT_CST_HIGH (op1) == 0
10468 && 0 != (t1 = fold_convert (type,
10469 const_binop (LSHIFT_EXPR,
10472 && ! TREE_OVERFLOW (t1))
10473 return multiple_of_p (type, t1, bottom);
10478 /* Can't handle conversions from non-integral or wider integral type. */
10479 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
10480 || (TYPE_PRECISION (type)
10481 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
10484 /* .. fall through ... */
10487 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
10490 if (TREE_CODE (bottom) != INTEGER_CST
10491 || (TYPE_UNSIGNED (type)
10492 && (tree_int_cst_sgn (top) < 0
10493 || tree_int_cst_sgn (bottom) < 0)))
10495 return integer_zerop (const_binop (TRUNC_MOD_EXPR,
10503 /* Return true if `t' is known to be non-negative. */
10506 tree_expr_nonnegative_p (tree t)
10508 switch (TREE_CODE (t))
10514 return tree_int_cst_sgn (t) >= 0;
10517 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
10520 if (FLOAT_TYPE_P (TREE_TYPE (t)))
10521 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
10522 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10524 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
10525 both unsigned and at least 2 bits shorter than the result. */
10526 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
10527 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
10528 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
10530 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
10531 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
10532 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
10533 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
10535 unsigned int prec = MAX (TYPE_PRECISION (inner1),
10536 TYPE_PRECISION (inner2)) + 1;
10537 return prec < TYPE_PRECISION (TREE_TYPE (t));
10543 if (FLOAT_TYPE_P (TREE_TYPE (t)))
10545 /* x * x for floating point x is always non-negative. */
10546 if (operand_equal_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1), 0))
10548 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
10549 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10552 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
10553 both unsigned and their total bits is shorter than the result. */
10554 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
10555 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
10556 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
10558 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
10559 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
10560 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
10561 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
10562 return TYPE_PRECISION (inner1) + TYPE_PRECISION (inner2)
10563 < TYPE_PRECISION (TREE_TYPE (t));
10567 case TRUNC_DIV_EXPR:
10568 case CEIL_DIV_EXPR:
10569 case FLOOR_DIV_EXPR:
10570 case ROUND_DIV_EXPR:
10571 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
10572 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10574 case TRUNC_MOD_EXPR:
10575 case CEIL_MOD_EXPR:
10576 case FLOOR_MOD_EXPR:
10577 case ROUND_MOD_EXPR:
10578 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
10581 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
10582 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10585 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1))
10586 || tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
10589 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
10590 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10594 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
10595 tree outer_type = TREE_TYPE (t);
10597 if (TREE_CODE (outer_type) == REAL_TYPE)
10599 if (TREE_CODE (inner_type) == REAL_TYPE)
10600 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
10601 if (TREE_CODE (inner_type) == INTEGER_TYPE)
10603 if (TYPE_UNSIGNED (inner_type))
10605 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
10608 else if (TREE_CODE (outer_type) == INTEGER_TYPE)
10610 if (TREE_CODE (inner_type) == REAL_TYPE)
10611 return tree_expr_nonnegative_p (TREE_OPERAND (t,0));
10612 if (TREE_CODE (inner_type) == INTEGER_TYPE)
10613 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
10614 && TYPE_UNSIGNED (inner_type);
10620 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1))
10621 && tree_expr_nonnegative_p (TREE_OPERAND (t, 2));
10622 case COMPOUND_EXPR:
10623 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10625 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
10626 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10628 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
10629 || tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10631 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10633 return tree_expr_nonnegative_p (expr_last (TREE_OPERAND (t, 1)));
10635 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
10636 case NON_LVALUE_EXPR:
10637 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
10639 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
10643 tree temp = TARGET_EXPR_SLOT (t);
10644 t = TARGET_EXPR_INITIAL (t);
10646 /* If the initializer is non-void, then it's a normal expression
10647 that will be assigned to the slot. */
10648 if (!VOID_TYPE_P (t))
10649 return tree_expr_nonnegative_p (t);
10651 /* Otherwise, the initializer sets the slot in some way. One common
10652 way is an assignment statement at the end of the initializer. */
10655 if (TREE_CODE (t) == BIND_EXPR)
10656 t = expr_last (BIND_EXPR_BODY (t));
10657 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
10658 || TREE_CODE (t) == TRY_CATCH_EXPR)
10659 t = expr_last (TREE_OPERAND (t, 0));
10660 else if (TREE_CODE (t) == STATEMENT_LIST)
10665 if (TREE_CODE (t) == MODIFY_EXPR
10666 && TREE_OPERAND (t, 0) == temp)
10667 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10674 tree fndecl = get_callee_fndecl (t);
10675 tree arglist = TREE_OPERAND (t, 1);
10676 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
10677 switch (DECL_FUNCTION_CODE (fndecl))
10679 #define CASE_BUILTIN_F(BUILT_IN_FN) \
10680 case BUILT_IN_FN: case BUILT_IN_FN##F: case BUILT_IN_FN##L:
10681 #define CASE_BUILTIN_I(BUILT_IN_FN) \
10682 case BUILT_IN_FN: case BUILT_IN_FN##L: case BUILT_IN_FN##LL:
10684 CASE_BUILTIN_F (BUILT_IN_ACOS)
10685 CASE_BUILTIN_F (BUILT_IN_ACOSH)
10686 CASE_BUILTIN_F (BUILT_IN_CABS)
10687 CASE_BUILTIN_F (BUILT_IN_COSH)
10688 CASE_BUILTIN_F (BUILT_IN_ERFC)
10689 CASE_BUILTIN_F (BUILT_IN_EXP)
10690 CASE_BUILTIN_F (BUILT_IN_EXP10)
10691 CASE_BUILTIN_F (BUILT_IN_EXP2)
10692 CASE_BUILTIN_F (BUILT_IN_FABS)
10693 CASE_BUILTIN_F (BUILT_IN_FDIM)
10694 CASE_BUILTIN_F (BUILT_IN_FREXP)
10695 CASE_BUILTIN_F (BUILT_IN_HYPOT)
10696 CASE_BUILTIN_F (BUILT_IN_POW10)
10697 CASE_BUILTIN_I (BUILT_IN_FFS)
10698 CASE_BUILTIN_I (BUILT_IN_PARITY)
10699 CASE_BUILTIN_I (BUILT_IN_POPCOUNT)
10703 CASE_BUILTIN_F (BUILT_IN_SQRT)
10704 /* sqrt(-0.0) is -0.0. */
10705 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (t))))
10707 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
10709 CASE_BUILTIN_F (BUILT_IN_ASINH)
10710 CASE_BUILTIN_F (BUILT_IN_ATAN)
10711 CASE_BUILTIN_F (BUILT_IN_ATANH)
10712 CASE_BUILTIN_F (BUILT_IN_CBRT)
10713 CASE_BUILTIN_F (BUILT_IN_CEIL)
10714 CASE_BUILTIN_F (BUILT_IN_ERF)
10715 CASE_BUILTIN_F (BUILT_IN_EXPM1)
10716 CASE_BUILTIN_F (BUILT_IN_FLOOR)
10717 CASE_BUILTIN_F (BUILT_IN_FMOD)
10718 CASE_BUILTIN_F (BUILT_IN_LCEIL)
10719 CASE_BUILTIN_F (BUILT_IN_LDEXP)
10720 CASE_BUILTIN_F (BUILT_IN_LFLOOR)
10721 CASE_BUILTIN_F (BUILT_IN_LLCEIL)
10722 CASE_BUILTIN_F (BUILT_IN_LLFLOOR)
10723 CASE_BUILTIN_F (BUILT_IN_LLRINT)
10724 CASE_BUILTIN_F (BUILT_IN_LLROUND)
10725 CASE_BUILTIN_F (BUILT_IN_LRINT)
10726 CASE_BUILTIN_F (BUILT_IN_LROUND)
10727 CASE_BUILTIN_F (BUILT_IN_MODF)
10728 CASE_BUILTIN_F (BUILT_IN_NEARBYINT)
10729 CASE_BUILTIN_F (BUILT_IN_POW)
10730 CASE_BUILTIN_F (BUILT_IN_RINT)
10731 CASE_BUILTIN_F (BUILT_IN_ROUND)
10732 CASE_BUILTIN_F (BUILT_IN_SIGNBIT)
10733 CASE_BUILTIN_F (BUILT_IN_SINH)
10734 CASE_BUILTIN_F (BUILT_IN_TANH)
10735 CASE_BUILTIN_F (BUILT_IN_TRUNC)
10736 /* True if the 1st argument is nonnegative. */
10737 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
10739 CASE_BUILTIN_F (BUILT_IN_FMAX)
10740 /* True if the 1st OR 2nd arguments are nonnegative. */
10741 return tree_expr_nonnegative_p (TREE_VALUE (arglist))
10742 || tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
10744 CASE_BUILTIN_F (BUILT_IN_FMIN)
10745 /* True if the 1st AND 2nd arguments are nonnegative. */
10746 return tree_expr_nonnegative_p (TREE_VALUE (arglist))
10747 && tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
10749 CASE_BUILTIN_F (BUILT_IN_COPYSIGN)
10750 /* True if the 2nd argument is nonnegative. */
10751 return tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
10755 #undef CASE_BUILTIN_F
10756 #undef CASE_BUILTIN_I
10760 /* ... fall through ... */
10763 if (truth_value_p (TREE_CODE (t)))
10764 /* Truth values evaluate to 0 or 1, which is nonnegative. */
10768 /* We don't know sign of `t', so be conservative and return false. */
10772 /* Return true when T is an address and is known to be nonzero.
10773 For floating point we further ensure that T is not denormal.
10774 Similar logic is present in nonzero_address in rtlanal.h. */
10777 tree_expr_nonzero_p (tree t)
10779 tree type = TREE_TYPE (t);
10781 /* Doing something useful for floating point would need more work. */
10782 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
10785 switch (TREE_CODE (t))
10788 if (!TYPE_UNSIGNED (type) && !flag_wrapv)
10789 return tree_expr_nonzero_p (TREE_OPERAND (t, 0));
10792 /* We used to test for !integer_zerop here. This does not work correctly
10793 if TREE_CONSTANT_OVERFLOW (t). */
10794 return (TREE_INT_CST_LOW (t) != 0
10795 || TREE_INT_CST_HIGH (t) != 0);
10798 if (!TYPE_UNSIGNED (type) && !flag_wrapv)
10800 /* With the presence of negative values it is hard
10801 to say something. */
10802 if (!tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
10803 || !tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
10805 /* One of operands must be positive and the other non-negative. */
10806 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
10807 || tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
10812 if (!TYPE_UNSIGNED (type) && !flag_wrapv)
10814 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
10815 && tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
10821 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
10822 tree outer_type = TREE_TYPE (t);
10824 return (TYPE_PRECISION (inner_type) >= TYPE_PRECISION (outer_type)
10825 && tree_expr_nonzero_p (TREE_OPERAND (t, 0)));
10831 tree base = get_base_address (TREE_OPERAND (t, 0));
10836 /* Weak declarations may link to NULL. */
10838 return !DECL_WEAK (base);
10840 /* Constants are never weak. */
10841 if (CONSTANT_CLASS_P (base))
10848 return (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
10849 && tree_expr_nonzero_p (TREE_OPERAND (t, 2)));
10852 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
10853 && tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
10856 if (tree_expr_nonzero_p (TREE_OPERAND (t, 0)))
10858 /* When both operands are nonzero, then MAX must be too. */
10859 if (tree_expr_nonzero_p (TREE_OPERAND (t, 1)))
10862 /* MAX where operand 0 is positive is positive. */
10863 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
10865 /* MAX where operand 1 is positive is positive. */
10866 else if (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
10867 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
10871 case COMPOUND_EXPR:
10874 return tree_expr_nonzero_p (TREE_OPERAND (t, 1));
10877 case NON_LVALUE_EXPR:
10878 return tree_expr_nonzero_p (TREE_OPERAND (t, 0));
10881 return tree_expr_nonzero_p (TREE_OPERAND (t, 1))
10882 || tree_expr_nonzero_p (TREE_OPERAND (t, 0));
10890 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
10891 attempt to fold the expression to a constant without modifying TYPE,
10894 If the expression could be simplified to a constant, then return
10895 the constant. If the expression would not be simplified to a
10896 constant, then return NULL_TREE. */
10899 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
10901 tree tem = fold_binary (code, type, op0, op1);
10902 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
10905 /* Given the components of a unary expression CODE, TYPE and OP0,
10906 attempt to fold the expression to a constant without modifying
10909 If the expression could be simplified to a constant, then return
10910 the constant. If the expression would not be simplified to a
10911 constant, then return NULL_TREE. */
10914 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
10916 tree tem = fold_unary (code, type, op0);
10917 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
10920 /* If EXP represents referencing an element in a constant string
10921 (either via pointer arithmetic or array indexing), return the
10922 tree representing the value accessed, otherwise return NULL. */
10925 fold_read_from_constant_string (tree exp)
10927 if (TREE_CODE (exp) == INDIRECT_REF || TREE_CODE (exp) == ARRAY_REF)
10929 tree exp1 = TREE_OPERAND (exp, 0);
10933 if (TREE_CODE (exp) == INDIRECT_REF)
10934 string = string_constant (exp1, &index);
10937 tree low_bound = array_ref_low_bound (exp);
10938 index = fold_convert (sizetype, TREE_OPERAND (exp, 1));
10940 /* Optimize the special-case of a zero lower bound.
10942 We convert the low_bound to sizetype to avoid some problems
10943 with constant folding. (E.g. suppose the lower bound is 1,
10944 and its mode is QI. Without the conversion,l (ARRAY
10945 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
10946 +INDEX), which becomes (ARRAY+255+INDEX). Opps!) */
10947 if (! integer_zerop (low_bound))
10948 index = size_diffop (index, fold_convert (sizetype, low_bound));
10954 && TREE_TYPE (exp) == TREE_TYPE (TREE_TYPE (string))
10955 && TREE_CODE (string) == STRING_CST
10956 && TREE_CODE (index) == INTEGER_CST
10957 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
10958 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
10960 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
10961 return fold_convert (TREE_TYPE (exp),
10962 build_int_cst (NULL_TREE,
10963 (TREE_STRING_POINTER (string)
10964 [TREE_INT_CST_LOW (index)])));
10969 /* Return the tree for neg (ARG0) when ARG0 is known to be either
10970 an integer constant or real constant.
10972 TYPE is the type of the result. */
10975 fold_negate_const (tree arg0, tree type)
10977 tree t = NULL_TREE;
10979 switch (TREE_CODE (arg0))
10983 unsigned HOST_WIDE_INT low;
10984 HOST_WIDE_INT high;
10985 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
10986 TREE_INT_CST_HIGH (arg0),
10988 t = build_int_cst_wide (type, low, high);
10989 t = force_fit_type (t, 1,
10990 (overflow | TREE_OVERFLOW (arg0))
10991 && !TYPE_UNSIGNED (type),
10992 TREE_CONSTANT_OVERFLOW (arg0));
10997 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
11001 gcc_unreachable ();
11007 /* Return the tree for abs (ARG0) when ARG0 is known to be either
11008 an integer constant or real constant.
11010 TYPE is the type of the result. */
11013 fold_abs_const (tree arg0, tree type)
11015 tree t = NULL_TREE;
11017 switch (TREE_CODE (arg0))
11020 /* If the value is unsigned, then the absolute value is
11021 the same as the ordinary value. */
11022 if (TYPE_UNSIGNED (type))
11024 /* Similarly, if the value is non-negative. */
11025 else if (INT_CST_LT (integer_minus_one_node, arg0))
11027 /* If the value is negative, then the absolute value is
11031 unsigned HOST_WIDE_INT low;
11032 HOST_WIDE_INT high;
11033 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
11034 TREE_INT_CST_HIGH (arg0),
11036 t = build_int_cst_wide (type, low, high);
11037 t = force_fit_type (t, -1, overflow | TREE_OVERFLOW (arg0),
11038 TREE_CONSTANT_OVERFLOW (arg0));
11043 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
11044 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
11050 gcc_unreachable ();
11056 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
11057 constant. TYPE is the type of the result. */
11060 fold_not_const (tree arg0, tree type)
11062 tree t = NULL_TREE;
11064 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
11066 t = build_int_cst_wide (type,
11067 ~ TREE_INT_CST_LOW (arg0),
11068 ~ TREE_INT_CST_HIGH (arg0));
11069 t = force_fit_type (t, 0, TREE_OVERFLOW (arg0),
11070 TREE_CONSTANT_OVERFLOW (arg0));
11075 /* Given CODE, a relational operator, the target type, TYPE and two
11076 constant operands OP0 and OP1, return the result of the
11077 relational operation. If the result is not a compile time
11078 constant, then return NULL_TREE. */
11081 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
11083 int result, invert;
11085 /* From here on, the only cases we handle are when the result is
11086 known to be a constant. */
11088 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
11090 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
11091 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
11093 /* Handle the cases where either operand is a NaN. */
11094 if (real_isnan (c0) || real_isnan (c1))
11104 case UNORDERED_EXPR:
11118 if (flag_trapping_math)
11124 gcc_unreachable ();
11127 return constant_boolean_node (result, type);
11130 return constant_boolean_node (real_compare (code, c0, c1), type);
11133 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
11135 To compute GT, swap the arguments and do LT.
11136 To compute GE, do LT and invert the result.
11137 To compute LE, swap the arguments, do LT and invert the result.
11138 To compute NE, do EQ and invert the result.
11140 Therefore, the code below must handle only EQ and LT. */
11142 if (code == LE_EXPR || code == GT_EXPR)
11147 code = swap_tree_comparison (code);
11150 /* Note that it is safe to invert for real values here because we
11151 have already handled the one case that it matters. */
11154 if (code == NE_EXPR || code == GE_EXPR)
11157 code = invert_tree_comparison (code, false);
11160 /* Compute a result for LT or EQ if args permit;
11161 Otherwise return T. */
11162 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
11164 if (code == EQ_EXPR)
11165 result = tree_int_cst_equal (op0, op1);
11166 else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
11167 result = INT_CST_LT_UNSIGNED (op0, op1);
11169 result = INT_CST_LT (op0, op1);
11176 return constant_boolean_node (result, type);
11179 /* Build an expression for the a clean point containing EXPR with type TYPE.
11180 Don't build a cleanup point expression for EXPR which don't have side
11184 fold_build_cleanup_point_expr (tree type, tree expr)
11186 /* If the expression does not have side effects then we don't have to wrap
11187 it with a cleanup point expression. */
11188 if (!TREE_SIDE_EFFECTS (expr))
11191 /* If the expression is a return, check to see if the expression inside the
11192 return has no side effects or the right hand side of the modify expression
11193 inside the return. If either don't have side effects set we don't need to
11194 wrap the expression in a cleanup point expression. Note we don't check the
11195 left hand side of the modify because it should always be a return decl. */
11196 if (TREE_CODE (expr) == RETURN_EXPR)
11198 tree op = TREE_OPERAND (expr, 0);
11199 if (!op || !TREE_SIDE_EFFECTS (op))
11201 op = TREE_OPERAND (op, 1);
11202 if (!TREE_SIDE_EFFECTS (op))
11206 return build1 (CLEANUP_POINT_EXPR, type, expr);
11209 /* Build an expression for the address of T. Folds away INDIRECT_REF to
11210 avoid confusing the gimplify process. */
11213 build_fold_addr_expr_with_type (tree t, tree ptrtype)
11215 /* The size of the object is not relevant when talking about its address. */
11216 if (TREE_CODE (t) == WITH_SIZE_EXPR)
11217 t = TREE_OPERAND (t, 0);
11219 /* Note: doesn't apply to ALIGN_INDIRECT_REF */
11220 if (TREE_CODE (t) == INDIRECT_REF
11221 || TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
11223 t = TREE_OPERAND (t, 0);
11224 if (TREE_TYPE (t) != ptrtype)
11225 t = build1 (NOP_EXPR, ptrtype, t);
11231 while (handled_component_p (base))
11232 base = TREE_OPERAND (base, 0);
11234 TREE_ADDRESSABLE (base) = 1;
11236 t = build1 (ADDR_EXPR, ptrtype, t);
11243 build_fold_addr_expr (tree t)
11245 return build_fold_addr_expr_with_type (t, build_pointer_type (TREE_TYPE (t)));
11248 /* Given a pointer value OP0 and a type TYPE, return a simplified version
11249 of an indirection through OP0, or NULL_TREE if no simplification is
11253 fold_indirect_ref_1 (tree type, tree op0)
11259 subtype = TREE_TYPE (sub);
11260 if (!POINTER_TYPE_P (subtype))
11263 if (TREE_CODE (sub) == ADDR_EXPR)
11265 tree op = TREE_OPERAND (sub, 0);
11266 tree optype = TREE_TYPE (op);
11268 if (type == optype)
11270 /* *(foo *)&fooarray => fooarray[0] */
11271 else if (TREE_CODE (optype) == ARRAY_TYPE
11272 && type == TREE_TYPE (optype))
11274 tree type_domain = TYPE_DOMAIN (optype);
11275 tree min_val = size_zero_node;
11276 if (type_domain && TYPE_MIN_VALUE (type_domain))
11277 min_val = TYPE_MIN_VALUE (type_domain);
11278 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
11282 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
11283 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
11284 && type == TREE_TYPE (TREE_TYPE (subtype)))
11287 tree min_val = size_zero_node;
11288 sub = build_fold_indirect_ref (sub);
11289 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
11290 if (type_domain && TYPE_MIN_VALUE (type_domain))
11291 min_val = TYPE_MIN_VALUE (type_domain);
11292 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
11298 /* Builds an expression for an indirection through T, simplifying some
11302 build_fold_indirect_ref (tree t)
11304 tree type = TREE_TYPE (TREE_TYPE (t));
11305 tree sub = fold_indirect_ref_1 (type, t);
11310 return build1 (INDIRECT_REF, type, t);
11313 /* Given an INDIRECT_REF T, return either T or a simplified version. */
11316 fold_indirect_ref (tree t)
11318 tree sub = fold_indirect_ref_1 (TREE_TYPE (t), TREE_OPERAND (t, 0));
11326 /* Strip non-trapping, non-side-effecting tree nodes from an expression
11327 whose result is ignored. The type of the returned tree need not be
11328 the same as the original expression. */
11331 fold_ignored_result (tree t)
11333 if (!TREE_SIDE_EFFECTS (t))
11334 return integer_zero_node;
11337 switch (TREE_CODE_CLASS (TREE_CODE (t)))
11340 t = TREE_OPERAND (t, 0);
11344 case tcc_comparison:
11345 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
11346 t = TREE_OPERAND (t, 0);
11347 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
11348 t = TREE_OPERAND (t, 1);
11353 case tcc_expression:
11354 switch (TREE_CODE (t))
11356 case COMPOUND_EXPR:
11357 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
11359 t = TREE_OPERAND (t, 0);
11363 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
11364 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
11366 t = TREE_OPERAND (t, 0);
11379 /* Return the value of VALUE, rounded up to a multiple of DIVISOR.
11380 This can only be applied to objects of a sizetype. */
11383 round_up (tree value, int divisor)
11385 tree div = NULL_TREE;
11387 gcc_assert (divisor > 0);
11391 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
11392 have to do anything. Only do this when we are not given a const,
11393 because in that case, this check is more expensive than just
11395 if (TREE_CODE (value) != INTEGER_CST)
11397 div = build_int_cst (TREE_TYPE (value), divisor);
11399 if (multiple_of_p (TREE_TYPE (value), value, div))
11403 /* If divisor is a power of two, simplify this to bit manipulation. */
11404 if (divisor == (divisor & -divisor))
11408 t = build_int_cst (TREE_TYPE (value), divisor - 1);
11409 value = size_binop (PLUS_EXPR, value, t);
11410 t = build_int_cst (TREE_TYPE (value), -divisor);
11411 value = size_binop (BIT_AND_EXPR, value, t);
11416 div = build_int_cst (TREE_TYPE (value), divisor);
11417 value = size_binop (CEIL_DIV_EXPR, value, div);
11418 value = size_binop (MULT_EXPR, value, div);
11424 /* Likewise, but round down. */
11427 round_down (tree value, int divisor)
11429 tree div = NULL_TREE;
11431 gcc_assert (divisor > 0);
11435 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
11436 have to do anything. Only do this when we are not given a const,
11437 because in that case, this check is more expensive than just
11439 if (TREE_CODE (value) != INTEGER_CST)
11441 div = build_int_cst (TREE_TYPE (value), divisor);
11443 if (multiple_of_p (TREE_TYPE (value), value, div))
11447 /* If divisor is a power of two, simplify this to bit manipulation. */
11448 if (divisor == (divisor & -divisor))
11452 t = build_int_cst (TREE_TYPE (value), -divisor);
11453 value = size_binop (BIT_AND_EXPR, value, t);
11458 div = build_int_cst (TREE_TYPE (value), divisor);
11459 value = size_binop (FLOOR_DIV_EXPR, value, div);
11460 value = size_binop (MULT_EXPR, value, div);
11466 /* Returns the pointer to the base of the object addressed by EXP and
11467 extracts the information about the offset of the access, storing it
11468 to PBITPOS and POFFSET. */
11471 split_address_to_core_and_offset (tree exp,
11472 HOST_WIDE_INT *pbitpos, tree *poffset)
11475 enum machine_mode mode;
11476 int unsignedp, volatilep;
11477 HOST_WIDE_INT bitsize;
11479 if (TREE_CODE (exp) == ADDR_EXPR)
11481 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
11482 poffset, &mode, &unsignedp, &volatilep,
11484 core = build_fold_addr_expr (core);
11490 *poffset = NULL_TREE;
11496 /* Returns true if addresses of E1 and E2 differ by a constant, false
11497 otherwise. If they do, E1 - E2 is stored in *DIFF. */
11500 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
11503 HOST_WIDE_INT bitpos1, bitpos2;
11504 tree toffset1, toffset2, tdiff, type;
11506 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
11507 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
11509 if (bitpos1 % BITS_PER_UNIT != 0
11510 || bitpos2 % BITS_PER_UNIT != 0
11511 || !operand_equal_p (core1, core2, 0))
11514 if (toffset1 && toffset2)
11516 type = TREE_TYPE (toffset1);
11517 if (type != TREE_TYPE (toffset2))
11518 toffset2 = fold_convert (type, toffset2);
11520 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
11521 if (!host_integerp (tdiff, 0))
11524 *diff = tree_low_cst (tdiff, 0);
11526 else if (toffset1 || toffset2)
11528 /* If only one of the offsets is non-constant, the difference cannot
11535 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
11539 /* Simplify the floating point expression EXP when the sign of the
11540 result is not significant. Return NULL_TREE if no simplification
11544 fold_strip_sign_ops (tree exp)
11548 switch (TREE_CODE (exp))
11552 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
11553 return arg0 ? arg0 : TREE_OPERAND (exp, 0);
11557 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
11559 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
11560 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
11561 if (arg0 != NULL_TREE || arg1 != NULL_TREE)
11562 return fold_build2 (TREE_CODE (exp), TREE_TYPE (exp),
11563 arg0 ? arg0 : TREE_OPERAND (exp, 0),
11564 arg1 ? arg1 : TREE_OPERAND (exp, 1));