1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 /*@@ This file should be rewritten to use an arbitrary precision
23 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
24 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
25 @@ The routines that translate from the ap rep should
26 @@ warn if precision et. al. is lost.
27 @@ This would also make life easier when this technology is used
28 @@ for cross-compilers. */
30 /* The entry points in this file are fold, size_int_wide, size_binop
31 and force_fit_type_double.
33 fold takes a tree as argument and returns a simplified tree.
35 size_binop takes a tree code for an arithmetic operation
36 and two operands that are trees, and produces a tree for the
37 result, assuming the type comes from `sizetype'.
39 size_int takes an integer value, and creates a tree constant
40 with type from `sizetype'.
42 force_fit_type_double takes a constant, an overflowable flag and a
43 prior overflow indicator. It forces the value to fit the type and
46 Note: Since the folders get called on non-gimple code as well as
47 gimple code, we need to handle GIMPLE tuples as well as their
48 corresponding tree equivalents. */
52 #include "coretypes.h"
57 #include "fixed-value.h"
65 #include "langhooks.h"
68 /* Nonzero if we are folding constants inside an initializer; zero
70 int folding_initializer = 0;
72 /* The following constants represent a bit based encoding of GCC's
73 comparison operators. This encoding simplifies transformations
74 on relational comparison operators, such as AND and OR. */
75 enum comparison_code {
94 static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
95 static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *);
96 static bool negate_mathfn_p (enum built_in_function);
97 static bool negate_expr_p (tree);
98 static tree negate_expr (tree);
99 static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
100 static tree associate_trees (tree, tree, enum tree_code, tree);
101 static tree const_binop (enum tree_code, tree, tree, int);
102 static enum comparison_code comparison_to_compcode (enum tree_code);
103 static enum tree_code compcode_to_comparison (enum comparison_code);
104 static tree combine_comparisons (enum tree_code, enum tree_code,
105 enum tree_code, tree, tree, tree);
106 static int truth_value_p (enum tree_code);
107 static int operand_equal_for_comparison_p (tree, tree, tree);
108 static int twoval_comparison_p (tree, tree *, tree *, int *);
109 static tree eval_subst (tree, tree, tree, tree, tree);
110 static tree pedantic_omit_one_operand (tree, tree, tree);
111 static tree distribute_bit_expr (enum tree_code, tree, tree, tree);
112 static tree make_bit_field_ref (tree, tree, int, int, int);
113 static tree optimize_bit_field_compare (enum tree_code, tree, tree, tree);
114 static tree decode_field_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
115 enum machine_mode *, int *, int *,
117 static int all_ones_mask_p (const_tree, int);
118 static tree sign_bit_p (tree, const_tree);
119 static int simple_operand_p (const_tree);
120 static tree range_binop (enum tree_code, tree, tree, int, tree, int);
121 static tree range_predecessor (tree);
122 static tree range_successor (tree);
123 static tree make_range (tree, int *, tree *, tree *, bool *);
124 static tree build_range_check (tree, tree, int, tree, tree);
125 static int merge_ranges (int *, tree *, tree *, int, tree, tree, int, tree,
127 static tree fold_range_test (enum tree_code, tree, tree, tree);
128 static tree fold_cond_expr_with_comparison (tree, tree, tree, tree);
129 static tree unextend (tree, int, int, tree);
130 static tree fold_truthop (enum tree_code, tree, tree, tree);
131 static tree optimize_minmax_comparison (enum tree_code, tree, tree, tree);
132 static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
133 static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
134 static tree fold_binary_op_with_conditional_arg (enum tree_code, tree,
137 static tree fold_mathfn_compare (enum built_in_function, enum tree_code,
139 static tree fold_inf_compare (enum tree_code, tree, tree, tree);
140 static tree fold_div_compare (enum tree_code, tree, tree, tree);
141 static bool reorder_operands_p (const_tree, const_tree);
142 static tree fold_negate_const (tree, tree);
143 static tree fold_not_const (tree, tree);
144 static tree fold_relational_const (enum tree_code, tree, tree, tree);
147 /* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
148 overflow. Suppose A, B and SUM have the same respective signs as A1, B1,
149 and SUM1. Then this yields nonzero if overflow occurred during the
152 Overflow occurs if A and B have the same sign, but A and SUM differ in
153 sign. Use `^' to test whether signs differ, and `< 0' to isolate the
155 #define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
157 /* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
158 We do that by representing the two-word integer in 4 words, with only
159 HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
160 number. The value of the word is LOWPART + HIGHPART * BASE. */
163 ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
164 #define HIGHPART(x) \
165 ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
166 #define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)
168 /* Unpack a two-word integer into 4 words.
169 LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
170 WORDS points to the array of HOST_WIDE_INTs. */
173 encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
175 words[0] = LOWPART (low);
176 words[1] = HIGHPART (low);
177 words[2] = LOWPART (hi);
178 words[3] = HIGHPART (hi);
181 /* Pack an array of 4 words into a two-word integer.
182 WORDS points to the array of words.
183 The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces. */
186 decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
189 *low = words[0] + words[1] * BASE;
190 *hi = words[2] + words[3] * BASE;
193 /* Force the double-word integer L1, H1 to be within the range of the
194 integer type TYPE. Stores the properly truncated and sign-extended
195 double-word integer in *LV, *HV. Returns true if the operation
196 overflows, that is, argument and result are different. */
199 fit_double_type (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
200 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, const_tree type)
202 unsigned HOST_WIDE_INT low0 = l1;
203 HOST_WIDE_INT high0 = h1;
205 int sign_extended_type;
207 if (POINTER_TYPE_P (type)
208 || TREE_CODE (type) == OFFSET_TYPE)
211 prec = TYPE_PRECISION (type);
213 /* Size types *are* sign extended. */
214 sign_extended_type = (!TYPE_UNSIGNED (type)
215 || (TREE_CODE (type) == INTEGER_TYPE
216 && TYPE_IS_SIZETYPE (type)));
218 /* First clear all bits that are beyond the type's precision. */
219 if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
221 else if (prec > HOST_BITS_PER_WIDE_INT)
222 h1 &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
226 if (prec < HOST_BITS_PER_WIDE_INT)
227 l1 &= ~((HOST_WIDE_INT) (-1) << prec);
230 /* Then do sign extension if necessary. */
231 if (!sign_extended_type)
232 /* No sign extension */;
233 else if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
234 /* Correct width already. */;
235 else if (prec > HOST_BITS_PER_WIDE_INT)
237 /* Sign extend top half? */
238 if (h1 & ((unsigned HOST_WIDE_INT)1
239 << (prec - HOST_BITS_PER_WIDE_INT - 1)))
240 h1 |= (HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT);
242 else if (prec == HOST_BITS_PER_WIDE_INT)
244 if ((HOST_WIDE_INT)l1 < 0)
249 /* Sign extend bottom half? */
250 if (l1 & ((unsigned HOST_WIDE_INT)1 << (prec - 1)))
253 l1 |= (HOST_WIDE_INT)(-1) << prec;
260 /* If the value didn't fit, signal overflow. */
261 return l1 != low0 || h1 != high0;
264 /* We force the double-int HIGH:LOW to the range of the type TYPE by
265 sign or zero extending it.
266 OVERFLOWABLE indicates if we are interested
267 in overflow of the value, when >0 we are only interested in signed
268 overflow, for <0 we are interested in any overflow. OVERFLOWED
269 indicates whether overflow has already occurred. CONST_OVERFLOWED
270 indicates whether constant overflow has already occurred. We force
271 T's value to be within range of T's type (by setting to 0 or 1 all
272 the bits outside the type's range). We set TREE_OVERFLOWED if,
273 OVERFLOWED is nonzero,
274 or OVERFLOWABLE is >0 and signed overflow occurs
275 or OVERFLOWABLE is <0 and any overflow occurs
276 We return a new tree node for the extended double-int. The node
277 is shared if no overflow flags are set. */
280 force_fit_type_double (tree type, unsigned HOST_WIDE_INT low,
281 HOST_WIDE_INT high, int overflowable,
284 int sign_extended_type;
287 /* Size types *are* sign extended. */
288 sign_extended_type = (!TYPE_UNSIGNED (type)
289 || (TREE_CODE (type) == INTEGER_TYPE
290 && TYPE_IS_SIZETYPE (type)));
292 overflow = fit_double_type (low, high, &low, &high, type);
294 /* If we need to set overflow flags, return a new unshared node. */
295 if (overflowed || overflow)
299 || (overflowable > 0 && sign_extended_type))
301 tree t = make_node (INTEGER_CST);
302 TREE_INT_CST_LOW (t) = low;
303 TREE_INT_CST_HIGH (t) = high;
304 TREE_TYPE (t) = type;
305 TREE_OVERFLOW (t) = 1;
310 /* Else build a shared node. */
311 return build_int_cst_wide (type, low, high);
314 /* Add two doubleword integers with doubleword result.
315 Return nonzero if the operation overflows according to UNSIGNED_P.
316 Each argument is given as two `HOST_WIDE_INT' pieces.
317 One argument is L1 and H1; the other, L2 and H2.
318 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
321 add_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
322 unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
323 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
326 unsigned HOST_WIDE_INT l;
330 h = h1 + h2 + (l < l1);
336 return (unsigned HOST_WIDE_INT) h < (unsigned HOST_WIDE_INT) h1;
338 return OVERFLOW_SUM_SIGN (h1, h2, h);
341 /* Negate a doubleword integer with doubleword result.
342 Return nonzero if the operation overflows, assuming it's signed.
343 The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
344 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
347 neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
348 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
354 return (*hv & h1) < 0;
364 /* Multiply two doubleword integers with doubleword result.
365 Return nonzero if the operation overflows according to UNSIGNED_P.
366 Each argument is given as two `HOST_WIDE_INT' pieces.
367 One argument is L1 and H1; the other, L2 and H2.
368 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
371 mul_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
372 unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
373 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
376 HOST_WIDE_INT arg1[4];
377 HOST_WIDE_INT arg2[4];
378 HOST_WIDE_INT prod[4 * 2];
379 unsigned HOST_WIDE_INT carry;
381 unsigned HOST_WIDE_INT toplow, neglow;
382 HOST_WIDE_INT tophigh, neghigh;
384 encode (arg1, l1, h1);
385 encode (arg2, l2, h2);
387 memset (prod, 0, sizeof prod);
389 for (i = 0; i < 4; i++)
392 for (j = 0; j < 4; j++)
395 /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000. */
396 carry += arg1[i] * arg2[j];
397 /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF. */
399 prod[k] = LOWPART (carry);
400 carry = HIGHPART (carry);
405 decode (prod, lv, hv);
406 decode (prod + 4, &toplow, &tophigh);
408 /* Unsigned overflow is immediate. */
410 return (toplow | tophigh) != 0;
412 /* Check for signed overflow by calculating the signed representation of the
413 top half of the result; it should agree with the low half's sign bit. */
416 neg_double (l2, h2, &neglow, &neghigh);
417 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
421 neg_double (l1, h1, &neglow, &neghigh);
422 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
424 return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
427 /* Shift the doubleword integer in L1, H1 left by COUNT places
428 keeping only PREC bits of result.
429 Shift right if COUNT is negative.
430 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
431 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
434 lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
435 HOST_WIDE_INT count, unsigned int prec,
436 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
438 unsigned HOST_WIDE_INT signmask;
442 rshift_double (l1, h1, -count, prec, lv, hv, arith);
446 if (SHIFT_COUNT_TRUNCATED)
449 if (count >= 2 * HOST_BITS_PER_WIDE_INT)
451 /* Shifting by the host word size is undefined according to the
452 ANSI standard, so we must handle this as a special case. */
456 else if (count >= HOST_BITS_PER_WIDE_INT)
458 *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
463 *hv = (((unsigned HOST_WIDE_INT) h1 << count)
464 | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
468 /* Sign extend all bits that are beyond the precision. */
470 signmask = -((prec > HOST_BITS_PER_WIDE_INT
471 ? ((unsigned HOST_WIDE_INT) *hv
472 >> (prec - HOST_BITS_PER_WIDE_INT - 1))
473 : (*lv >> (prec - 1))) & 1);
475 if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
477 else if (prec >= HOST_BITS_PER_WIDE_INT)
479 *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
480 *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
485 *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
486 *lv |= signmask << prec;
490 /* Shift the doubleword integer in L1, H1 right by COUNT places
491 keeping only PREC bits of result. COUNT must be positive.
492 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
493 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
496 rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
497 HOST_WIDE_INT count, unsigned int prec,
498 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
501 unsigned HOST_WIDE_INT signmask;
504 ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
507 if (SHIFT_COUNT_TRUNCATED)
510 if (count >= 2 * HOST_BITS_PER_WIDE_INT)
512 /* Shifting by the host word size is undefined according to the
513 ANSI standard, so we must handle this as a special case. */
517 else if (count >= HOST_BITS_PER_WIDE_INT)
520 *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
524 *hv = (unsigned HOST_WIDE_INT) h1 >> count;
526 | ((unsigned HOST_WIDE_INT) h1 << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
529 /* Zero / sign extend all bits that are beyond the precision. */
531 if (count >= (HOST_WIDE_INT)prec)
536 else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
538 else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
540 *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
541 *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
546 *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
547 *lv |= signmask << (prec - count);
551 /* Rotate the doubleword integer in L1, H1 left by COUNT places
552 keeping only PREC bits of result.
553 Rotate right if COUNT is negative.
554 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
557 lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
558 HOST_WIDE_INT count, unsigned int prec,
559 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
561 unsigned HOST_WIDE_INT s1l, s2l;
562 HOST_WIDE_INT s1h, s2h;
568 lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
569 rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
574 /* Rotate the doubleword integer in L1, H1 left by COUNT places
575 keeping only PREC bits of result. COUNT must be positive.
576 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
579 rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
580 HOST_WIDE_INT count, unsigned int prec,
581 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
583 unsigned HOST_WIDE_INT s1l, s2l;
584 HOST_WIDE_INT s1h, s2h;
590 rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
591 lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
596 /* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
597 for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
598 CODE is a tree code for a kind of division, one of
599 TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
601 It controls how the quotient is rounded to an integer.
602 Return nonzero if the operation overflows.
603 UNS nonzero says do unsigned division. */
606 div_and_round_double (enum tree_code code, int uns,
607 unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
608 HOST_WIDE_INT hnum_orig,
609 unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
610 HOST_WIDE_INT hden_orig,
611 unsigned HOST_WIDE_INT *lquo,
612 HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
616 HOST_WIDE_INT num[4 + 1]; /* extra element for scaling. */
617 HOST_WIDE_INT den[4], quo[4];
619 unsigned HOST_WIDE_INT work;
620 unsigned HOST_WIDE_INT carry = 0;
621 unsigned HOST_WIDE_INT lnum = lnum_orig;
622 HOST_WIDE_INT hnum = hnum_orig;
623 unsigned HOST_WIDE_INT lden = lden_orig;
624 HOST_WIDE_INT hden = hden_orig;
627 if (hden == 0 && lden == 0)
628 overflow = 1, lden = 1;
630 /* Calculate quotient sign and convert operands to unsigned. */
636 /* (minimum integer) / (-1) is the only overflow case. */
637 if (neg_double (lnum, hnum, &lnum, &hnum)
638 && ((HOST_WIDE_INT) lden & hden) == -1)
644 neg_double (lden, hden, &lden, &hden);
648 if (hnum == 0 && hden == 0)
649 { /* single precision */
651 /* This unsigned division rounds toward zero. */
657 { /* trivial case: dividend < divisor */
658 /* hden != 0 already checked. */
665 memset (quo, 0, sizeof quo);
667 memset (num, 0, sizeof num); /* to zero 9th element */
668 memset (den, 0, sizeof den);
670 encode (num, lnum, hnum);
671 encode (den, lden, hden);
673 /* Special code for when the divisor < BASE. */
674 if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
676 /* hnum != 0 already checked. */
677 for (i = 4 - 1; i >= 0; i--)
679 work = num[i] + carry * BASE;
680 quo[i] = work / lden;
686 /* Full double precision division,
687 with thanks to Don Knuth's "Seminumerical Algorithms". */
688 int num_hi_sig, den_hi_sig;
689 unsigned HOST_WIDE_INT quo_est, scale;
691 /* Find the highest nonzero divisor digit. */
692 for (i = 4 - 1;; i--)
699 /* Insure that the first digit of the divisor is at least BASE/2.
700 This is required by the quotient digit estimation algorithm. */
702 scale = BASE / (den[den_hi_sig] + 1);
704 { /* scale divisor and dividend */
706 for (i = 0; i <= 4 - 1; i++)
708 work = (num[i] * scale) + carry;
709 num[i] = LOWPART (work);
710 carry = HIGHPART (work);
715 for (i = 0; i <= 4 - 1; i++)
717 work = (den[i] * scale) + carry;
718 den[i] = LOWPART (work);
719 carry = HIGHPART (work);
720 if (den[i] != 0) den_hi_sig = i;
727 for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
729 /* Guess the next quotient digit, quo_est, by dividing the first
730 two remaining dividend digits by the high order quotient digit.
731 quo_est is never low and is at most 2 high. */
732 unsigned HOST_WIDE_INT tmp;
734 num_hi_sig = i + den_hi_sig + 1;
735 work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
736 if (num[num_hi_sig] != den[den_hi_sig])
737 quo_est = work / den[den_hi_sig];
741 /* Refine quo_est so it's usually correct, and at most one high. */
742 tmp = work - quo_est * den[den_hi_sig];
744 && (den[den_hi_sig - 1] * quo_est
745 > (tmp * BASE + num[num_hi_sig - 2])))
748 /* Try QUO_EST as the quotient digit, by multiplying the
749 divisor by QUO_EST and subtracting from the remaining dividend.
750 Keep in mind that QUO_EST is the I - 1st digit. */
753 for (j = 0; j <= den_hi_sig; j++)
755 work = quo_est * den[j] + carry;
756 carry = HIGHPART (work);
757 work = num[i + j] - LOWPART (work);
758 num[i + j] = LOWPART (work);
759 carry += HIGHPART (work) != 0;
762 /* If quo_est was high by one, then num[i] went negative and
763 we need to correct things. */
764 if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
767 carry = 0; /* add divisor back in */
768 for (j = 0; j <= den_hi_sig; j++)
770 work = num[i + j] + den[j] + carry;
771 carry = HIGHPART (work);
772 num[i + j] = LOWPART (work);
775 num [num_hi_sig] += carry;
778 /* Store the quotient digit. */
783 decode (quo, lquo, hquo);
786 /* If result is negative, make it so. */
788 neg_double (*lquo, *hquo, lquo, hquo);
790 /* Compute trial remainder: rem = num - (quo * den) */
791 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
792 neg_double (*lrem, *hrem, lrem, hrem);
793 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
798 case TRUNC_MOD_EXPR: /* round toward zero */
799 case EXACT_DIV_EXPR: /* for this one, it shouldn't matter */
803 case FLOOR_MOD_EXPR: /* round toward negative infinity */
804 if (quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio < 0 && rem != 0 */
807 add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
815 case CEIL_MOD_EXPR: /* round toward positive infinity */
816 if (!quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio > 0 && rem != 0 */
818 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
826 case ROUND_MOD_EXPR: /* round to closest integer */
828 unsigned HOST_WIDE_INT labs_rem = *lrem;
829 HOST_WIDE_INT habs_rem = *hrem;
830 unsigned HOST_WIDE_INT labs_den = lden, ltwice;
831 HOST_WIDE_INT habs_den = hden, htwice;
833 /* Get absolute values. */
835 neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
837 neg_double (lden, hden, &labs_den, &habs_den);
839 /* If (2 * abs (lrem) >= abs (lden)) */
840 mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
841 labs_rem, habs_rem, <wice, &htwice);
843 if (((unsigned HOST_WIDE_INT) habs_den
844 < (unsigned HOST_WIDE_INT) htwice)
845 || (((unsigned HOST_WIDE_INT) habs_den
846 == (unsigned HOST_WIDE_INT) htwice)
847 && (labs_den < ltwice)))
851 add_double (*lquo, *hquo,
852 (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
855 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
867 /* Compute true remainder: rem = num - (quo * den) */
868 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
869 neg_double (*lrem, *hrem, lrem, hrem);
870 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
874 /* If ARG2 divides ARG1 with zero remainder, carries out the division
875 of type CODE and returns the quotient.
876 Otherwise returns NULL_TREE. */
879 div_if_zero_remainder (enum tree_code code, const_tree arg1, const_tree arg2)
881 unsigned HOST_WIDE_INT int1l, int2l;
882 HOST_WIDE_INT int1h, int2h;
883 unsigned HOST_WIDE_INT quol, reml;
884 HOST_WIDE_INT quoh, remh;
885 tree type = TREE_TYPE (arg1);
886 int uns = TYPE_UNSIGNED (type);
888 int1l = TREE_INT_CST_LOW (arg1);
889 int1h = TREE_INT_CST_HIGH (arg1);
890 /* &obj[0] + -128 really should be compiled as &obj[-8] rather than
891 &obj[some_exotic_number]. */
892 if (POINTER_TYPE_P (type))
895 type = signed_type_for (type);
896 fit_double_type (int1l, int1h, &int1l, &int1h,
900 fit_double_type (int1l, int1h, &int1l, &int1h, type);
901 int2l = TREE_INT_CST_LOW (arg2);
902 int2h = TREE_INT_CST_HIGH (arg2);
904 div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
905 &quol, &quoh, &reml, &remh);
906 if (remh != 0 || reml != 0)
909 return build_int_cst_wide (type, quol, quoh);
912 /* This is nonzero if we should defer warnings about undefined
913 overflow. This facility exists because these warnings are a
914 special case. The code to estimate loop iterations does not want
915 to issue any warnings, since it works with expressions which do not
916 occur in user code. Various bits of cleanup code call fold(), but
917 only use the result if it has certain characteristics (e.g., is a
918 constant); that code only wants to issue a warning if the result is
921 static int fold_deferring_overflow_warnings;
923 /* If a warning about undefined overflow is deferred, this is the
924 warning. Note that this may cause us to turn two warnings into
925 one, but that is fine since it is sufficient to only give one
926 warning per expression. */
928 static const char* fold_deferred_overflow_warning;
930 /* If a warning about undefined overflow is deferred, this is the
931 level at which the warning should be emitted. */
933 static enum warn_strict_overflow_code fold_deferred_overflow_code;
935 /* Start deferring overflow warnings. We could use a stack here to
936 permit nested calls, but at present it is not necessary. */
939 fold_defer_overflow_warnings (void)
941 ++fold_deferring_overflow_warnings;
944 /* Stop deferring overflow warnings. If there is a pending warning,
945 and ISSUE is true, then issue the warning if appropriate. STMT is
946 the statement with which the warning should be associated (used for
947 location information); STMT may be NULL. CODE is the level of the
948 warning--a warn_strict_overflow_code value. This function will use
949 the smaller of CODE and the deferred code when deciding whether to
950 issue the warning. CODE may be zero to mean to always use the
954 fold_undefer_overflow_warnings (bool issue, const_tree stmt, int code)
959 gcc_assert (fold_deferring_overflow_warnings > 0);
960 --fold_deferring_overflow_warnings;
961 if (fold_deferring_overflow_warnings > 0)
963 if (fold_deferred_overflow_warning != NULL
965 && code < (int) fold_deferred_overflow_code)
966 fold_deferred_overflow_code = code;
970 warnmsg = fold_deferred_overflow_warning;
971 fold_deferred_overflow_warning = NULL;
973 if (!issue || warnmsg == NULL)
976 if (stmt != NULL_TREE && TREE_NO_WARNING (stmt))
979 /* Use the smallest code level when deciding to issue the
981 if (code == 0 || code > (int) fold_deferred_overflow_code)
982 code = fold_deferred_overflow_code;
984 if (!issue_strict_overflow_warning (code))
987 if (stmt == NULL_TREE || !expr_has_location (stmt))
988 locus = input_location;
990 locus = expr_location (stmt);
991 warning (OPT_Wstrict_overflow, "%H%s", &locus, warnmsg);
994 /* Stop deferring overflow warnings, ignoring any deferred
998 fold_undefer_and_ignore_overflow_warnings (void)
1000 fold_undefer_overflow_warnings (false, NULL_TREE, 0);
1003 /* Whether we are deferring overflow warnings. */
1006 fold_deferring_overflow_warnings_p (void)
1008 return fold_deferring_overflow_warnings > 0;
1011 /* This is called when we fold something based on the fact that signed
1012 overflow is undefined. */
1015 fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
1017 gcc_assert (!flag_wrapv && !flag_trapv);
1018 if (fold_deferring_overflow_warnings > 0)
1020 if (fold_deferred_overflow_warning == NULL
1021 || wc < fold_deferred_overflow_code)
1023 fold_deferred_overflow_warning = gmsgid;
1024 fold_deferred_overflow_code = wc;
1027 else if (issue_strict_overflow_warning (wc))
1028 warning (OPT_Wstrict_overflow, gmsgid);
1031 /* Return true if the built-in mathematical function specified by CODE
1032 is odd, i.e. -f(x) == f(-x). */
1035 negate_mathfn_p (enum built_in_function code)
1039 CASE_FLT_FN (BUILT_IN_ASIN):
1040 CASE_FLT_FN (BUILT_IN_ASINH):
1041 CASE_FLT_FN (BUILT_IN_ATAN):
1042 CASE_FLT_FN (BUILT_IN_ATANH):
1043 CASE_FLT_FN (BUILT_IN_CASIN):
1044 CASE_FLT_FN (BUILT_IN_CASINH):
1045 CASE_FLT_FN (BUILT_IN_CATAN):
1046 CASE_FLT_FN (BUILT_IN_CATANH):
1047 CASE_FLT_FN (BUILT_IN_CBRT):
1048 CASE_FLT_FN (BUILT_IN_CPROJ):
1049 CASE_FLT_FN (BUILT_IN_CSIN):
1050 CASE_FLT_FN (BUILT_IN_CSINH):
1051 CASE_FLT_FN (BUILT_IN_CTAN):
1052 CASE_FLT_FN (BUILT_IN_CTANH):
1053 CASE_FLT_FN (BUILT_IN_ERF):
1054 CASE_FLT_FN (BUILT_IN_LLROUND):
1055 CASE_FLT_FN (BUILT_IN_LROUND):
1056 CASE_FLT_FN (BUILT_IN_ROUND):
1057 CASE_FLT_FN (BUILT_IN_SIN):
1058 CASE_FLT_FN (BUILT_IN_SINH):
1059 CASE_FLT_FN (BUILT_IN_TAN):
1060 CASE_FLT_FN (BUILT_IN_TANH):
1061 CASE_FLT_FN (BUILT_IN_TRUNC):
1064 CASE_FLT_FN (BUILT_IN_LLRINT):
1065 CASE_FLT_FN (BUILT_IN_LRINT):
1066 CASE_FLT_FN (BUILT_IN_NEARBYINT):
1067 CASE_FLT_FN (BUILT_IN_RINT):
1068 return !flag_rounding_math;
1076 /* Check whether we may negate an integer constant T without causing
1080 may_negate_without_overflow_p (const_tree t)
1082 unsigned HOST_WIDE_INT val;
1086 gcc_assert (TREE_CODE (t) == INTEGER_CST);
1088 type = TREE_TYPE (t);
1089 if (TYPE_UNSIGNED (type))
1092 prec = TYPE_PRECISION (type);
1093 if (prec > HOST_BITS_PER_WIDE_INT)
1095 if (TREE_INT_CST_LOW (t) != 0)
1097 prec -= HOST_BITS_PER_WIDE_INT;
1098 val = TREE_INT_CST_HIGH (t);
1101 val = TREE_INT_CST_LOW (t);
1102 if (prec < HOST_BITS_PER_WIDE_INT)
1103 val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
1104 return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
1107 /* Determine whether an expression T can be cheaply negated using
1108 the function negate_expr without introducing undefined overflow. */
1111 negate_expr_p (tree t)
1118 type = TREE_TYPE (t);
1120 STRIP_SIGN_NOPS (t);
1121 switch (TREE_CODE (t))
1124 if (TYPE_OVERFLOW_WRAPS (type))
1127 /* Check that -CST will not overflow type. */
1128 return may_negate_without_overflow_p (t);
1130 return (INTEGRAL_TYPE_P (type)
1131 && TYPE_OVERFLOW_WRAPS (type));
1139 return negate_expr_p (TREE_REALPART (t))
1140 && negate_expr_p (TREE_IMAGPART (t));
1143 return negate_expr_p (TREE_OPERAND (t, 0))
1144 && negate_expr_p (TREE_OPERAND (t, 1));
1147 return negate_expr_p (TREE_OPERAND (t, 0));
1150 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
1151 || HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
1153 /* -(A + B) -> (-B) - A. */
1154 if (negate_expr_p (TREE_OPERAND (t, 1))
1155 && reorder_operands_p (TREE_OPERAND (t, 0),
1156 TREE_OPERAND (t, 1)))
1158 /* -(A + B) -> (-A) - B. */
1159 return negate_expr_p (TREE_OPERAND (t, 0));
1162 /* We can't turn -(A-B) into B-A when we honor signed zeros. */
1163 return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
1164 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
1165 && reorder_operands_p (TREE_OPERAND (t, 0),
1166 TREE_OPERAND (t, 1));
1169 if (TYPE_UNSIGNED (TREE_TYPE (t)))
1175 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
1176 return negate_expr_p (TREE_OPERAND (t, 1))
1177 || negate_expr_p (TREE_OPERAND (t, 0));
1180 case TRUNC_DIV_EXPR:
1181 case ROUND_DIV_EXPR:
1182 case FLOOR_DIV_EXPR:
1184 case EXACT_DIV_EXPR:
1185 /* In general we can't negate A / B, because if A is INT_MIN and
1186 B is 1, we may turn this into INT_MIN / -1 which is undefined
1187 and actually traps on some architectures. But if overflow is
1188 undefined, we can negate, because - (INT_MIN / 1) is an
1190 if (INTEGRAL_TYPE_P (TREE_TYPE (t))
1191 && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
1193 return negate_expr_p (TREE_OPERAND (t, 1))
1194 || negate_expr_p (TREE_OPERAND (t, 0));
1197 /* Negate -((double)float) as (double)(-float). */
1198 if (TREE_CODE (type) == REAL_TYPE)
1200 tree tem = strip_float_extensions (t);
1202 return negate_expr_p (tem);
1207 /* Negate -f(x) as f(-x). */
1208 if (negate_mathfn_p (builtin_mathfn_code (t)))
1209 return negate_expr_p (CALL_EXPR_ARG (t, 0));
1213 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
1214 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
1216 tree op1 = TREE_OPERAND (t, 1);
1217 if (TREE_INT_CST_HIGH (op1) == 0
1218 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
1219 == TREE_INT_CST_LOW (op1))
1230 /* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
1231 simplification is possible.
1232 If negate_expr_p would return true for T, NULL_TREE will never be
1236 fold_negate_expr (tree t)
1238 tree type = TREE_TYPE (t);
1241 switch (TREE_CODE (t))
1243 /* Convert - (~A) to A + 1. */
1245 if (INTEGRAL_TYPE_P (type))
1246 return fold_build2 (PLUS_EXPR, type, TREE_OPERAND (t, 0),
1247 build_int_cst (type, 1));
1251 tem = fold_negate_const (t, type);
1252 if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
1253 || !TYPE_OVERFLOW_TRAPS (type))
1258 tem = fold_negate_const (t, type);
1259 /* Two's complement FP formats, such as c4x, may overflow. */
1260 if (!TREE_OVERFLOW (tem) || !flag_trapping_math)
1265 tem = fold_negate_const (t, type);
1270 tree rpart = negate_expr (TREE_REALPART (t));
1271 tree ipart = negate_expr (TREE_IMAGPART (t));
1273 if ((TREE_CODE (rpart) == REAL_CST
1274 && TREE_CODE (ipart) == REAL_CST)
1275 || (TREE_CODE (rpart) == INTEGER_CST
1276 && TREE_CODE (ipart) == INTEGER_CST))
1277 return build_complex (type, rpart, ipart);
1282 if (negate_expr_p (t))
1283 return fold_build2 (COMPLEX_EXPR, type,
1284 fold_negate_expr (TREE_OPERAND (t, 0)),
1285 fold_negate_expr (TREE_OPERAND (t, 1)));
1289 if (negate_expr_p (t))
1290 return fold_build1 (CONJ_EXPR, type,
1291 fold_negate_expr (TREE_OPERAND (t, 0)));
1295 return TREE_OPERAND (t, 0);
1298 if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
1299 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
1301 /* -(A + B) -> (-B) - A. */
1302 if (negate_expr_p (TREE_OPERAND (t, 1))
1303 && reorder_operands_p (TREE_OPERAND (t, 0),
1304 TREE_OPERAND (t, 1)))
1306 tem = negate_expr (TREE_OPERAND (t, 1));
1307 return fold_build2 (MINUS_EXPR, type,
1308 tem, TREE_OPERAND (t, 0));
1311 /* -(A + B) -> (-A) - B. */
1312 if (negate_expr_p (TREE_OPERAND (t, 0)))
1314 tem = negate_expr (TREE_OPERAND (t, 0));
1315 return fold_build2 (MINUS_EXPR, type,
1316 tem, TREE_OPERAND (t, 1));
1322 /* - (A - B) -> B - A */
1323 if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
1324 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
1325 && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
1326 return fold_build2 (MINUS_EXPR, type,
1327 TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
1331 if (TYPE_UNSIGNED (type))
1337 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
1339 tem = TREE_OPERAND (t, 1);
1340 if (negate_expr_p (tem))
1341 return fold_build2 (TREE_CODE (t), type,
1342 TREE_OPERAND (t, 0), negate_expr (tem));
1343 tem = TREE_OPERAND (t, 0);
1344 if (negate_expr_p (tem))
1345 return fold_build2 (TREE_CODE (t), type,
1346 negate_expr (tem), TREE_OPERAND (t, 1));
1350 case TRUNC_DIV_EXPR:
1351 case ROUND_DIV_EXPR:
1352 case FLOOR_DIV_EXPR:
1354 case EXACT_DIV_EXPR:
1355 /* In general we can't negate A / B, because if A is INT_MIN and
1356 B is 1, we may turn this into INT_MIN / -1 which is undefined
1357 and actually traps on some architectures. But if overflow is
1358 undefined, we can negate, because - (INT_MIN / 1) is an
1360 if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
1362 const char * const warnmsg = G_("assuming signed overflow does not "
1363 "occur when negating a division");
1364 tem = TREE_OPERAND (t, 1);
1365 if (negate_expr_p (tem))
1367 if (INTEGRAL_TYPE_P (type)
1368 && (TREE_CODE (tem) != INTEGER_CST
1369 || integer_onep (tem)))
1370 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
1371 return fold_build2 (TREE_CODE (t), type,
1372 TREE_OPERAND (t, 0), negate_expr (tem));
1374 tem = TREE_OPERAND (t, 0);
1375 if (negate_expr_p (tem))
1377 if (INTEGRAL_TYPE_P (type)
1378 && (TREE_CODE (tem) != INTEGER_CST
1379 || tree_int_cst_equal (tem, TYPE_MIN_VALUE (type))))
1380 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
1381 return fold_build2 (TREE_CODE (t), type,
1382 negate_expr (tem), TREE_OPERAND (t, 1));
1388 /* Convert -((double)float) into (double)(-float). */
1389 if (TREE_CODE (type) == REAL_TYPE)
1391 tem = strip_float_extensions (t);
1392 if (tem != t && negate_expr_p (tem))
1393 return fold_convert (type, negate_expr (tem));
1398 /* Negate -f(x) as f(-x). */
1399 if (negate_mathfn_p (builtin_mathfn_code (t))
1400 && negate_expr_p (CALL_EXPR_ARG (t, 0)))
1404 fndecl = get_callee_fndecl (t);
1405 arg = negate_expr (CALL_EXPR_ARG (t, 0));
1406 return build_call_expr (fndecl, 1, arg);
1411 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
1412 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
1414 tree op1 = TREE_OPERAND (t, 1);
1415 if (TREE_INT_CST_HIGH (op1) == 0
1416 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
1417 == TREE_INT_CST_LOW (op1))
1419 tree ntype = TYPE_UNSIGNED (type)
1420 ? signed_type_for (type)
1421 : unsigned_type_for (type);
1422 tree temp = fold_convert (ntype, TREE_OPERAND (t, 0));
1423 temp = fold_build2 (RSHIFT_EXPR, ntype, temp, op1);
1424 return fold_convert (type, temp);
1436 /* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T can not be
1437 negated in a simpler way. Also allow for T to be NULL_TREE, in which case
1438 return NULL_TREE. */
1441 negate_expr (tree t)
1448 type = TREE_TYPE (t);
1449 STRIP_SIGN_NOPS (t);
1451 tem = fold_negate_expr (t);
1453 tem = build1 (NEGATE_EXPR, TREE_TYPE (t), t);
1454 return fold_convert (type, tem);
1457 /* Split a tree IN into a constant, literal and variable parts that could be
1458 combined with CODE to make IN. "constant" means an expression with
1459 TREE_CONSTANT but that isn't an actual constant. CODE must be a
1460 commutative arithmetic operation. Store the constant part into *CONP,
1461 the literal in *LITP and return the variable part. If a part isn't
1462 present, set it to null. If the tree does not decompose in this way,
1463 return the entire tree as the variable part and the other parts as null.
1465 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR. In that
1466 case, we negate an operand that was subtracted. Except if it is a
1467 literal for which we use *MINUS_LITP instead.
1469 If NEGATE_P is true, we are negating all of IN, again except a literal
1470 for which we use *MINUS_LITP instead.
1472 If IN is itself a literal or constant, return it as appropriate.
1474 Note that we do not guarantee that any of the three values will be the
1475 same type as IN, but they will have the same signedness and mode. */
1478 split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
1479 tree *minus_litp, int negate_p)
1487 /* Strip any conversions that don't change the machine mode or signedness. */
1488 STRIP_SIGN_NOPS (in);
1490 if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
1491 || TREE_CODE (in) == FIXED_CST)
1493 else if (TREE_CODE (in) == code
1494 || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
1495 && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
1496 /* We can associate addition and subtraction together (even
1497 though the C standard doesn't say so) for integers because
1498 the value is not affected. For reals, the value might be
1499 affected, so we can't. */
1500 && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
1501 || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
1503 tree op0 = TREE_OPERAND (in, 0);
1504 tree op1 = TREE_OPERAND (in, 1);
1505 int neg1_p = TREE_CODE (in) == MINUS_EXPR;
1506 int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;
1508 /* First see if either of the operands is a literal, then a constant. */
1509 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
1510 || TREE_CODE (op0) == FIXED_CST)
1511 *litp = op0, op0 = 0;
1512 else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
1513 || TREE_CODE (op1) == FIXED_CST)
1514 *litp = op1, neg_litp_p = neg1_p, op1 = 0;
1516 if (op0 != 0 && TREE_CONSTANT (op0))
1517 *conp = op0, op0 = 0;
1518 else if (op1 != 0 && TREE_CONSTANT (op1))
1519 *conp = op1, neg_conp_p = neg1_p, op1 = 0;
1521 /* If we haven't dealt with either operand, this is not a case we can
1522 decompose. Otherwise, VAR is either of the ones remaining, if any. */
1523 if (op0 != 0 && op1 != 0)
1528 var = op1, neg_var_p = neg1_p;
1530 /* Now do any needed negations. */
1532 *minus_litp = *litp, *litp = 0;
1534 *conp = negate_expr (*conp);
1536 var = negate_expr (var);
1538 else if (TREE_CONSTANT (in))
1546 *minus_litp = *litp, *litp = 0;
1547 else if (*minus_litp)
1548 *litp = *minus_litp, *minus_litp = 0;
1549 *conp = negate_expr (*conp);
1550 var = negate_expr (var);
1556 /* Re-associate trees split by the above function. T1 and T2 are either
1557 expressions to associate or null. Return the new expression, if any. If
1558 we build an operation, do it in TYPE and with CODE. */
1561 associate_trees (tree t1, tree t2, enum tree_code code, tree type)
1568 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
1569 try to fold this since we will have infinite recursion. But do
1570 deal with any NEGATE_EXPRs. */
1571 if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
1572 || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
1574 if (code == PLUS_EXPR)
1576 if (TREE_CODE (t1) == NEGATE_EXPR)
1577 return build2 (MINUS_EXPR, type, fold_convert (type, t2),
1578 fold_convert (type, TREE_OPERAND (t1, 0)));
1579 else if (TREE_CODE (t2) == NEGATE_EXPR)
1580 return build2 (MINUS_EXPR, type, fold_convert (type, t1),
1581 fold_convert (type, TREE_OPERAND (t2, 0)));
1582 else if (integer_zerop (t2))
1583 return fold_convert (type, t1);
1585 else if (code == MINUS_EXPR)
1587 if (integer_zerop (t2))
1588 return fold_convert (type, t1);
1591 return build2 (code, type, fold_convert (type, t1),
1592 fold_convert (type, t2));
1595 return fold_build2 (code, type, fold_convert (type, t1),
1596 fold_convert (type, t2));
1599 /* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
1600 for use in int_const_binop, size_binop and size_diffop. */
1603 int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
1605 if (TREE_CODE (type1) != INTEGER_TYPE && !POINTER_TYPE_P (type1))
1607 if (TREE_CODE (type2) != INTEGER_TYPE && !POINTER_TYPE_P (type2))
1622 return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
1623 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
1624 && TYPE_MODE (type1) == TYPE_MODE (type2);
1628 /* Combine two integer constants ARG1 and ARG2 under operation CODE
1629 to produce a new constant. Return NULL_TREE if we don't know how
1630 to evaluate CODE at compile-time.
1632 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
1635 int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2, int notrunc)
1637 unsigned HOST_WIDE_INT int1l, int2l;
1638 HOST_WIDE_INT int1h, int2h;
1639 unsigned HOST_WIDE_INT low;
1641 unsigned HOST_WIDE_INT garbagel;
1642 HOST_WIDE_INT garbageh;
1644 tree type = TREE_TYPE (arg1);
1645 int uns = TYPE_UNSIGNED (type);
1647 = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
1650 int1l = TREE_INT_CST_LOW (arg1);
1651 int1h = TREE_INT_CST_HIGH (arg1);
1652 int2l = TREE_INT_CST_LOW (arg2);
1653 int2h = TREE_INT_CST_HIGH (arg2);
1658 low = int1l | int2l, hi = int1h | int2h;
1662 low = int1l ^ int2l, hi = int1h ^ int2h;
1666 low = int1l & int2l, hi = int1h & int2h;
1672 /* It's unclear from the C standard whether shifts can overflow.
1673 The following code ignores overflow; perhaps a C standard
1674 interpretation ruling is needed. */
1675 lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
1682 lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
1687 overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
1691 neg_double (int2l, int2h, &low, &hi);
1692 add_double (int1l, int1h, low, hi, &low, &hi);
1693 overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
1697 overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
1700 case TRUNC_DIV_EXPR:
1701 case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
1702 case EXACT_DIV_EXPR:
1703 /* This is a shortcut for a common special case. */
1704 if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1705 && !TREE_OVERFLOW (arg1)
1706 && !TREE_OVERFLOW (arg2)
1707 && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1709 if (code == CEIL_DIV_EXPR)
1712 low = int1l / int2l, hi = 0;
1716 /* ... fall through ... */
1718 case ROUND_DIV_EXPR:
1719 if (int2h == 0 && int2l == 0)
1721 if (int2h == 0 && int2l == 1)
1723 low = int1l, hi = int1h;
1726 if (int1l == int2l && int1h == int2h
1727 && ! (int1l == 0 && int1h == 0))
1732 overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
1733 &low, &hi, &garbagel, &garbageh);
1736 case TRUNC_MOD_EXPR:
1737 case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
1738 /* This is a shortcut for a common special case. */
1739 if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1740 && !TREE_OVERFLOW (arg1)
1741 && !TREE_OVERFLOW (arg2)
1742 && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1744 if (code == CEIL_MOD_EXPR)
1746 low = int1l % int2l, hi = 0;
1750 /* ... fall through ... */
1752 case ROUND_MOD_EXPR:
1753 if (int2h == 0 && int2l == 0)
1755 overflow = div_and_round_double (code, uns,
1756 int1l, int1h, int2l, int2h,
1757 &garbagel, &garbageh, &low, &hi);
1763 low = (((unsigned HOST_WIDE_INT) int1h
1764 < (unsigned HOST_WIDE_INT) int2h)
1765 || (((unsigned HOST_WIDE_INT) int1h
1766 == (unsigned HOST_WIDE_INT) int2h)
1769 low = (int1h < int2h
1770 || (int1h == int2h && int1l < int2l));
1772 if (low == (code == MIN_EXPR))
1773 low = int1l, hi = int1h;
1775 low = int2l, hi = int2h;
1784 t = build_int_cst_wide (TREE_TYPE (arg1), low, hi);
1786 /* Propagate overflow flags ourselves. */
1787 if (((!uns || is_sizetype) && overflow)
1788 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
1791 TREE_OVERFLOW (t) = 1;
1795 t = force_fit_type_double (TREE_TYPE (arg1), low, hi, 1,
1796 ((!uns || is_sizetype) && overflow)
1797 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
1802 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1803 constant. We assume ARG1 and ARG2 have the same data type, or at least
1804 are the same kind of constant and the same machine mode. Return zero if
1805 combining the constants is not allowed in the current operating mode.
1807 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
1810 const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
1812 /* Sanity check for the recursive cases. */
1819 if (TREE_CODE (arg1) == INTEGER_CST)
1820 return int_const_binop (code, arg1, arg2, notrunc);
1822 if (TREE_CODE (arg1) == REAL_CST)
1824 enum machine_mode mode;
1827 REAL_VALUE_TYPE value;
1828 REAL_VALUE_TYPE result;
1832 /* The following codes are handled by real_arithmetic. */
1847 d1 = TREE_REAL_CST (arg1);
1848 d2 = TREE_REAL_CST (arg2);
1850 type = TREE_TYPE (arg1);
1851 mode = TYPE_MODE (type);
1853 /* Don't perform operation if we honor signaling NaNs and
1854 either operand is a NaN. */
1855 if (HONOR_SNANS (mode)
1856 && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
1859 /* Don't perform operation if it would raise a division
1860 by zero exception. */
1861 if (code == RDIV_EXPR
1862 && REAL_VALUES_EQUAL (d2, dconst0)
1863 && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
1866 /* If either operand is a NaN, just return it. Otherwise, set up
1867 for floating-point trap; we return an overflow. */
1868 if (REAL_VALUE_ISNAN (d1))
1870 else if (REAL_VALUE_ISNAN (d2))
1873 inexact = real_arithmetic (&value, code, &d1, &d2);
1874 real_convert (&result, mode, &value);
1876 /* Don't constant fold this floating point operation if
1877 the result has overflowed and flag_trapping_math. */
1878 if (flag_trapping_math
1879 && MODE_HAS_INFINITIES (mode)
1880 && REAL_VALUE_ISINF (result)
1881 && !REAL_VALUE_ISINF (d1)
1882 && !REAL_VALUE_ISINF (d2))
1885 /* Don't constant fold this floating point operation if the
1886 result may dependent upon the run-time rounding mode and
1887 flag_rounding_math is set, or if GCC's software emulation
1888 is unable to accurately represent the result. */
1889 if ((flag_rounding_math
1890 || (REAL_MODE_FORMAT_COMPOSITE_P (mode)
1891 && !flag_unsafe_math_optimizations))
1892 && (inexact || !real_identical (&result, &value)))
1895 t = build_real (type, result);
1897 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
1901 if (TREE_CODE (arg1) == FIXED_CST)
1903 FIXED_VALUE_TYPE f1;
1904 FIXED_VALUE_TYPE f2;
1905 FIXED_VALUE_TYPE result;
1910 /* The following codes are handled by fixed_arithmetic. */
1916 case TRUNC_DIV_EXPR:
1917 f2 = TREE_FIXED_CST (arg2);
1922 f2.data.high = TREE_INT_CST_HIGH (arg2);
1923 f2.data.low = TREE_INT_CST_LOW (arg2);
1931 f1 = TREE_FIXED_CST (arg1);
1932 type = TREE_TYPE (arg1);
1933 sat_p = TYPE_SATURATING (type);
1934 overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
1935 t = build_fixed (type, result);
1936 /* Propagate overflow flags. */
1937 if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
1939 TREE_OVERFLOW (t) = 1;
1940 TREE_CONSTANT_OVERFLOW (t) = 1;
1942 else if (TREE_CONSTANT_OVERFLOW (arg1) | TREE_CONSTANT_OVERFLOW (arg2))
1943 TREE_CONSTANT_OVERFLOW (t) = 1;
1947 if (TREE_CODE (arg1) == COMPLEX_CST)
1949 tree type = TREE_TYPE (arg1);
1950 tree r1 = TREE_REALPART (arg1);
1951 tree i1 = TREE_IMAGPART (arg1);
1952 tree r2 = TREE_REALPART (arg2);
1953 tree i2 = TREE_IMAGPART (arg2);
1960 real = const_binop (code, r1, r2, notrunc);
1961 imag = const_binop (code, i1, i2, notrunc);
1965 real = const_binop (MINUS_EXPR,
1966 const_binop (MULT_EXPR, r1, r2, notrunc),
1967 const_binop (MULT_EXPR, i1, i2, notrunc),
1969 imag = const_binop (PLUS_EXPR,
1970 const_binop (MULT_EXPR, r1, i2, notrunc),
1971 const_binop (MULT_EXPR, i1, r2, notrunc),
1978 = const_binop (PLUS_EXPR,
1979 const_binop (MULT_EXPR, r2, r2, notrunc),
1980 const_binop (MULT_EXPR, i2, i2, notrunc),
1983 = const_binop (PLUS_EXPR,
1984 const_binop (MULT_EXPR, r1, r2, notrunc),
1985 const_binop (MULT_EXPR, i1, i2, notrunc),
1988 = const_binop (MINUS_EXPR,
1989 const_binop (MULT_EXPR, i1, r2, notrunc),
1990 const_binop (MULT_EXPR, r1, i2, notrunc),
1993 if (INTEGRAL_TYPE_P (TREE_TYPE (r1)))
1994 code = TRUNC_DIV_EXPR;
1996 real = const_binop (code, t1, magsquared, notrunc);
1997 imag = const_binop (code, t2, magsquared, notrunc);
2006 return build_complex (type, real, imag);
2012 /* Create a size type INT_CST node with NUMBER sign extended. KIND
2013 indicates which particular sizetype to create. */
2016 size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
2018 return build_int_cst (sizetype_tab[(int) kind], number);
2021 /* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE
2022 is a tree code. The type of the result is taken from the operands.
2023 Both must be equivalent integer types, ala int_binop_types_match_p.
2024 If the operands are constant, so is the result. */
2027 size_binop (enum tree_code code, tree arg0, tree arg1)
2029 tree type = TREE_TYPE (arg0);
2031 if (arg0 == error_mark_node || arg1 == error_mark_node)
2032 return error_mark_node;
2034 gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
2037 /* Handle the special case of two integer constants faster. */
2038 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2040 /* And some specific cases even faster than that. */
2041 if (code == PLUS_EXPR)
2043 if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
2045 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
2048 else if (code == MINUS_EXPR)
2050 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
2053 else if (code == MULT_EXPR)
2055 if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
2059 /* Handle general case of two integer constants. */
2060 return int_const_binop (code, arg0, arg1, 0);
2063 return fold_build2 (code, type, arg0, arg1);
2066 /* Given two values, either both of sizetype or both of bitsizetype,
2067 compute the difference between the two values. Return the value
2068 in signed type corresponding to the type of the operands. */
2071 size_diffop (tree arg0, tree arg1)
2073 tree type = TREE_TYPE (arg0);
2076 gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
2079 /* If the type is already signed, just do the simple thing. */
2080 if (!TYPE_UNSIGNED (type))
2081 return size_binop (MINUS_EXPR, arg0, arg1);
2083 if (type == sizetype)
2085 else if (type == bitsizetype)
2086 ctype = sbitsizetype;
2088 ctype = signed_type_for (type);
2090 /* If either operand is not a constant, do the conversions to the signed
2091 type and subtract. The hardware will do the right thing with any
2092 overflow in the subtraction. */
2093 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
2094 return size_binop (MINUS_EXPR, fold_convert (ctype, arg0),
2095 fold_convert (ctype, arg1));
2097 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
2098 Otherwise, subtract the other way, convert to CTYPE (we know that can't
2099 overflow) and negate (which can't either). Special-case a result
2100 of zero while we're here. */
2101 if (tree_int_cst_equal (arg0, arg1))
2102 return build_int_cst (ctype, 0);
2103 else if (tree_int_cst_lt (arg1, arg0))
2104 return fold_convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
2106 return size_binop (MINUS_EXPR, build_int_cst (ctype, 0),
2107 fold_convert (ctype, size_binop (MINUS_EXPR,
2111 /* A subroutine of fold_convert_const handling conversions of an
2112 INTEGER_CST to another integer type. */
2115 fold_convert_const_int_from_int (tree type, const_tree arg1)
2119 /* Given an integer constant, make new constant with new type,
2120 appropriately sign-extended or truncated. */
2121 t = force_fit_type_double (type, TREE_INT_CST_LOW (arg1),
2122 TREE_INT_CST_HIGH (arg1),
2123 /* Don't set the overflow when
2124 converting from a pointer, */
2125 !POINTER_TYPE_P (TREE_TYPE (arg1))
2126 /* or to a sizetype with same signedness
2127 and the precision is unchanged.
2128 ??? sizetype is always sign-extended,
2129 but its signedness depends on the
2130 frontend. Thus we see spurious overflows
2131 here if we do not check this. */
2132 && !((TYPE_PRECISION (TREE_TYPE (arg1))
2133 == TYPE_PRECISION (type))
2134 && (TYPE_UNSIGNED (TREE_TYPE (arg1))
2135 == TYPE_UNSIGNED (type))
2136 && ((TREE_CODE (TREE_TYPE (arg1)) == INTEGER_TYPE
2137 && TYPE_IS_SIZETYPE (TREE_TYPE (arg1)))
2138 || (TREE_CODE (type) == INTEGER_TYPE
2139 && TYPE_IS_SIZETYPE (type)))),
2140 (TREE_INT_CST_HIGH (arg1) < 0
2141 && (TYPE_UNSIGNED (type)
2142 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
2143 | TREE_OVERFLOW (arg1));
2148 /* A subroutine of fold_convert_const handling conversions a REAL_CST
2149 to an integer type. */
2152 fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
2157 /* The following code implements the floating point to integer
2158 conversion rules required by the Java Language Specification,
2159 that IEEE NaNs are mapped to zero and values that overflow
2160 the target precision saturate, i.e. values greater than
2161 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
2162 are mapped to INT_MIN. These semantics are allowed by the
2163 C and C++ standards that simply state that the behavior of
2164 FP-to-integer conversion is unspecified upon overflow. */
2166 HOST_WIDE_INT high, low;
2168 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
2172 case FIX_TRUNC_EXPR:
2173 real_trunc (&r, VOIDmode, &x);
2180 /* If R is NaN, return zero and show we have an overflow. */
2181 if (REAL_VALUE_ISNAN (r))
2188 /* See if R is less than the lower bound or greater than the
2193 tree lt = TYPE_MIN_VALUE (type);
2194 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
2195 if (REAL_VALUES_LESS (r, l))
2198 high = TREE_INT_CST_HIGH (lt);
2199 low = TREE_INT_CST_LOW (lt);
2205 tree ut = TYPE_MAX_VALUE (type);
2208 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
2209 if (REAL_VALUES_LESS (u, r))
2212 high = TREE_INT_CST_HIGH (ut);
2213 low = TREE_INT_CST_LOW (ut);
2219 REAL_VALUE_TO_INT (&low, &high, r);
2221 t = force_fit_type_double (type, low, high, -1,
2222 overflow | TREE_OVERFLOW (arg1));
2226 /* A subroutine of fold_convert_const handling conversions of a
2227 FIXED_CST to an integer type. */
2230 fold_convert_const_int_from_fixed (tree type, const_tree arg1)
2233 double_int temp, temp_trunc;
2236 /* Right shift FIXED_CST to temp by fbit. */
2237 temp = TREE_FIXED_CST (arg1).data;
2238 mode = TREE_FIXED_CST (arg1).mode;
2239 if (GET_MODE_FBIT (mode) < 2 * HOST_BITS_PER_WIDE_INT)
2241 lshift_double (temp.low, temp.high,
2242 - GET_MODE_FBIT (mode), 2 * HOST_BITS_PER_WIDE_INT,
2243 &temp.low, &temp.high, SIGNED_FIXED_POINT_MODE_P (mode));
2245 /* Left shift temp to temp_trunc by fbit. */
2246 lshift_double (temp.low, temp.high,
2247 GET_MODE_FBIT (mode), 2 * HOST_BITS_PER_WIDE_INT,
2248 &temp_trunc.low, &temp_trunc.high,
2249 SIGNED_FIXED_POINT_MODE_P (mode));
2256 temp_trunc.high = 0;
2259 /* If FIXED_CST is negative, we need to round the value toward 0.
2260 By checking if the fractional bits are not zero to add 1 to temp. */
2261 if (SIGNED_FIXED_POINT_MODE_P (mode) && temp_trunc.high < 0
2262 && !double_int_equal_p (TREE_FIXED_CST (arg1).data, temp_trunc))
2267 temp = double_int_add (temp, one);
2270 /* Given a fixed-point constant, make new constant with new type,
2271 appropriately sign-extended or truncated. */
2272 t = force_fit_type_double (type, temp.low, temp.high, -1,
2274 && (TYPE_UNSIGNED (type)
2275 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
2276 | TREE_OVERFLOW (arg1));
2281 /* A subroutine of fold_convert_const handling conversions a REAL_CST
2282 to another floating point type. */
2285 fold_convert_const_real_from_real (tree type, const_tree arg1)
2287 REAL_VALUE_TYPE value;
2290 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
2291 t = build_real (type, value);
2293 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2297 /* A subroutine of fold_convert_const handling conversions a FIXED_CST
2298 to a floating point type. */
2301 fold_convert_const_real_from_fixed (tree type, const_tree arg1)
2303 REAL_VALUE_TYPE value;
2306 real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
2307 t = build_real (type, value);
2309 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2310 TREE_CONSTANT_OVERFLOW (t)
2311 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
2315 /* A subroutine of fold_convert_const handling conversions a FIXED_CST
2316 to another fixed-point type. */
2319 fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
2321 FIXED_VALUE_TYPE value;
2325 overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
2326 TYPE_SATURATING (type));
2327 t = build_fixed (type, value);
2329 /* Propagate overflow flags. */
2330 if (overflow_p | TREE_OVERFLOW (arg1))
2332 TREE_OVERFLOW (t) = 1;
2333 TREE_CONSTANT_OVERFLOW (t) = 1;
2335 else if (TREE_CONSTANT_OVERFLOW (arg1))
2336 TREE_CONSTANT_OVERFLOW (t) = 1;
2340 /* A subroutine of fold_convert_const handling conversions an INTEGER_CST
2341 to a fixed-point type. */
2344 fold_convert_const_fixed_from_int (tree type, const_tree arg1)
2346 FIXED_VALUE_TYPE value;
2350 overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type),
2351 TREE_INT_CST (arg1),
2352 TYPE_UNSIGNED (TREE_TYPE (arg1)),
2353 TYPE_SATURATING (type));
2354 t = build_fixed (type, value);
2356 /* Propagate overflow flags. */
2357 if (overflow_p | TREE_OVERFLOW (arg1))
2359 TREE_OVERFLOW (t) = 1;
2360 TREE_CONSTANT_OVERFLOW (t) = 1;
2362 else if (TREE_CONSTANT_OVERFLOW (arg1))
2363 TREE_CONSTANT_OVERFLOW (t) = 1;
2367 /* A subroutine of fold_convert_const handling conversions a REAL_CST
2368 to a fixed-point type. */
2371 fold_convert_const_fixed_from_real (tree type, const_tree arg1)
2373 FIXED_VALUE_TYPE value;
2377 overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
2378 &TREE_REAL_CST (arg1),
2379 TYPE_SATURATING (type));
2380 t = build_fixed (type, value);
2382 /* Propagate overflow flags. */
2383 if (overflow_p | TREE_OVERFLOW (arg1))
2385 TREE_OVERFLOW (t) = 1;
2386 TREE_CONSTANT_OVERFLOW (t) = 1;
2388 else if (TREE_CONSTANT_OVERFLOW (arg1))
2389 TREE_CONSTANT_OVERFLOW (t) = 1;
2393 /* Attempt to fold type conversion operation CODE of expression ARG1 to
2394 type TYPE. If no simplification can be done return NULL_TREE. */
2397 fold_convert_const (enum tree_code code, tree type, tree arg1)
2399 if (TREE_TYPE (arg1) == type)
2402 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
2404 if (TREE_CODE (arg1) == INTEGER_CST)
2405 return fold_convert_const_int_from_int (type, arg1);
2406 else if (TREE_CODE (arg1) == REAL_CST)
2407 return fold_convert_const_int_from_real (code, type, arg1);
2408 else if (TREE_CODE (arg1) == FIXED_CST)
2409 return fold_convert_const_int_from_fixed (type, arg1);
2411 else if (TREE_CODE (type) == REAL_TYPE)
2413 if (TREE_CODE (arg1) == INTEGER_CST)
2414 return build_real_from_int_cst (type, arg1);
2415 else if (TREE_CODE (arg1) == REAL_CST)
2416 return fold_convert_const_real_from_real (type, arg1);
2417 else if (TREE_CODE (arg1) == FIXED_CST)
2418 return fold_convert_const_real_from_fixed (type, arg1);
2420 else if (TREE_CODE (type) == FIXED_POINT_TYPE)
2422 if (TREE_CODE (arg1) == FIXED_CST)
2423 return fold_convert_const_fixed_from_fixed (type, arg1);
2424 else if (TREE_CODE (arg1) == INTEGER_CST)
2425 return fold_convert_const_fixed_from_int (type, arg1);
2426 else if (TREE_CODE (arg1) == REAL_CST)
2427 return fold_convert_const_fixed_from_real (type, arg1);
2432 /* Construct a vector of zero elements of vector type TYPE. */
2435 build_zero_vector (tree type)
2440 elem = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2441 units = TYPE_VECTOR_SUBPARTS (type);
2444 for (i = 0; i < units; i++)
2445 list = tree_cons (NULL_TREE, elem, list);
2446 return build_vector (type, list);
2449 /* Returns true, if ARG is convertible to TYPE using a NOP_EXPR. */
2452 fold_convertible_p (const_tree type, const_tree arg)
2454 tree orig = TREE_TYPE (arg);
2459 if (TREE_CODE (arg) == ERROR_MARK
2460 || TREE_CODE (type) == ERROR_MARK
2461 || TREE_CODE (orig) == ERROR_MARK)
2464 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2467 switch (TREE_CODE (type))
2469 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2470 case POINTER_TYPE: case REFERENCE_TYPE:
2472 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2473 || TREE_CODE (orig) == OFFSET_TYPE)
2475 return (TREE_CODE (orig) == VECTOR_TYPE
2476 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2479 return TREE_CODE (type) == TREE_CODE (orig);
2483 /* Convert expression ARG to type TYPE. Used by the middle-end for
2484 simple conversions in preference to calling the front-end's convert. */
2487 fold_convert (tree type, tree arg)
2489 tree orig = TREE_TYPE (arg);
2495 if (TREE_CODE (arg) == ERROR_MARK
2496 || TREE_CODE (type) == ERROR_MARK
2497 || TREE_CODE (orig) == ERROR_MARK)
2498 return error_mark_node;
2500 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2501 return fold_build1 (NOP_EXPR, type, arg);
2503 switch (TREE_CODE (type))
2505 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2506 case POINTER_TYPE: case REFERENCE_TYPE:
2508 if (TREE_CODE (arg) == INTEGER_CST)
2510 tem = fold_convert_const (NOP_EXPR, type, arg);
2511 if (tem != NULL_TREE)
2514 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2515 || TREE_CODE (orig) == OFFSET_TYPE)
2516 return fold_build1 (NOP_EXPR, type, arg);
2517 if (TREE_CODE (orig) == COMPLEX_TYPE)
2519 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2520 return fold_convert (type, tem);
2522 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
2523 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2524 return fold_build1 (NOP_EXPR, type, arg);
2527 if (TREE_CODE (arg) == INTEGER_CST)
2529 tem = fold_convert_const (FLOAT_EXPR, type, arg);
2530 if (tem != NULL_TREE)
2533 else if (TREE_CODE (arg) == REAL_CST)
2535 tem = fold_convert_const (NOP_EXPR, type, arg);
2536 if (tem != NULL_TREE)
2539 else if (TREE_CODE (arg) == FIXED_CST)
2541 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2542 if (tem != NULL_TREE)
2546 switch (TREE_CODE (orig))
2549 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2550 case POINTER_TYPE: case REFERENCE_TYPE:
2551 return fold_build1 (FLOAT_EXPR, type, arg);
2554 return fold_build1 (NOP_EXPR, type, arg);
2556 case FIXED_POINT_TYPE:
2557 return fold_build1 (FIXED_CONVERT_EXPR, type, arg);
2560 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2561 return fold_convert (type, tem);
2567 case FIXED_POINT_TYPE:
2568 if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
2569 || TREE_CODE (arg) == REAL_CST)
2571 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2572 if (tem != NULL_TREE)
2576 switch (TREE_CODE (orig))
2578 case FIXED_POINT_TYPE:
2583 return fold_build1 (FIXED_CONVERT_EXPR, type, arg);
2586 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2587 return fold_convert (type, tem);
2594 switch (TREE_CODE (orig))
2597 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2598 case POINTER_TYPE: case REFERENCE_TYPE:
2600 case FIXED_POINT_TYPE:
2601 return build2 (COMPLEX_EXPR, type,
2602 fold_convert (TREE_TYPE (type), arg),
2603 fold_convert (TREE_TYPE (type), integer_zero_node));
2608 if (TREE_CODE (arg) == COMPLEX_EXPR)
2610 rpart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 0));
2611 ipart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 1));
2612 return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
2615 arg = save_expr (arg);
2616 rpart = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2617 ipart = fold_build1 (IMAGPART_EXPR, TREE_TYPE (orig), arg);
2618 rpart = fold_convert (TREE_TYPE (type), rpart);
2619 ipart = fold_convert (TREE_TYPE (type), ipart);
2620 return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
2628 if (integer_zerop (arg))
2629 return build_zero_vector (type);
2630 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2631 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2632 || TREE_CODE (orig) == VECTOR_TYPE);
2633 return fold_build1 (VIEW_CONVERT_EXPR, type, arg);
2636 tem = fold_ignored_result (arg);
2637 if (TREE_CODE (tem) == GIMPLE_MODIFY_STMT)
2639 return fold_build1 (NOP_EXPR, type, tem);
2646 /* Return false if expr can be assumed not to be an lvalue, true
2650 maybe_lvalue_p (const_tree x)
2652 /* We only need to wrap lvalue tree codes. */
2653 switch (TREE_CODE (x))
2664 case ALIGN_INDIRECT_REF:
2665 case MISALIGNED_INDIRECT_REF:
2667 case ARRAY_RANGE_REF:
2673 case PREINCREMENT_EXPR:
2674 case PREDECREMENT_EXPR:
2676 case TRY_CATCH_EXPR:
2677 case WITH_CLEANUP_EXPR:
2680 case GIMPLE_MODIFY_STMT:
2689 /* Assume the worst for front-end tree codes. */
2690 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2698 /* Return an expr equal to X but certainly not valid as an lvalue. */
2703 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2708 if (! maybe_lvalue_p (x))
2710 return build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
2713 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2714 Zero means allow extended lvalues. */
2716 int pedantic_lvalues;
2718 /* When pedantic, return an expr equal to X but certainly not valid as a
2719 pedantic lvalue. Otherwise, return X. */
2722 pedantic_non_lvalue (tree x)
2724 if (pedantic_lvalues)
2725 return non_lvalue (x);
2730 /* Given a tree comparison code, return the code that is the logical inverse
2731 of the given code. It is not safe to do this for floating-point
2732 comparisons, except for NE_EXPR and EQ_EXPR, so we receive a machine mode
2733 as well: if reversing the comparison is unsafe, return ERROR_MARK. */
2736 invert_tree_comparison (enum tree_code code, bool honor_nans)
2738 if (honor_nans && flag_trapping_math)
2748 return honor_nans ? UNLE_EXPR : LE_EXPR;
2750 return honor_nans ? UNLT_EXPR : LT_EXPR;
2752 return honor_nans ? UNGE_EXPR : GE_EXPR;
2754 return honor_nans ? UNGT_EXPR : GT_EXPR;
2768 return UNORDERED_EXPR;
2769 case UNORDERED_EXPR:
2770 return ORDERED_EXPR;
2776 /* Similar, but return the comparison that results if the operands are
2777 swapped. This is safe for floating-point. */
2780 swap_tree_comparison (enum tree_code code)
2787 case UNORDERED_EXPR:
2813 /* Convert a comparison tree code from an enum tree_code representation
2814 into a compcode bit-based encoding. This function is the inverse of
2815 compcode_to_comparison. */
2817 static enum comparison_code
2818 comparison_to_compcode (enum tree_code code)
2835 return COMPCODE_ORD;
2836 case UNORDERED_EXPR:
2837 return COMPCODE_UNORD;
2839 return COMPCODE_UNLT;
2841 return COMPCODE_UNEQ;
2843 return COMPCODE_UNLE;
2845 return COMPCODE_UNGT;
2847 return COMPCODE_LTGT;
2849 return COMPCODE_UNGE;
2855 /* Convert a compcode bit-based encoding of a comparison operator back
2856 to GCC's enum tree_code representation. This function is the
2857 inverse of comparison_to_compcode. */
2859 static enum tree_code
2860 compcode_to_comparison (enum comparison_code code)
2877 return ORDERED_EXPR;
2878 case COMPCODE_UNORD:
2879 return UNORDERED_EXPR;
2897 /* Return a tree for the comparison which is the combination of
2898 doing the AND or OR (depending on CODE) of the two operations LCODE
2899 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2900 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2901 if this makes the transformation invalid. */
2904 combine_comparisons (enum tree_code code, enum tree_code lcode,
2905 enum tree_code rcode, tree truth_type,
2906 tree ll_arg, tree lr_arg)
2908 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2909 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2910 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2911 enum comparison_code compcode;
2915 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2916 compcode = lcompcode & rcompcode;
2919 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2920 compcode = lcompcode | rcompcode;
2929 /* Eliminate unordered comparisons, as well as LTGT and ORD
2930 which are not used unless the mode has NaNs. */
2931 compcode &= ~COMPCODE_UNORD;
2932 if (compcode == COMPCODE_LTGT)
2933 compcode = COMPCODE_NE;
2934 else if (compcode == COMPCODE_ORD)
2935 compcode = COMPCODE_TRUE;
2937 else if (flag_trapping_math)
2939 /* Check that the original operation and the optimized ones will trap
2940 under the same condition. */
2941 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2942 && (lcompcode != COMPCODE_EQ)
2943 && (lcompcode != COMPCODE_ORD);
2944 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2945 && (rcompcode != COMPCODE_EQ)
2946 && (rcompcode != COMPCODE_ORD);
2947 bool trap = (compcode & COMPCODE_UNORD) == 0
2948 && (compcode != COMPCODE_EQ)
2949 && (compcode != COMPCODE_ORD);
2951 /* In a short-circuited boolean expression the LHS might be
2952 such that the RHS, if evaluated, will never trap. For
2953 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2954 if neither x nor y is NaN. (This is a mixed blessing: for
2955 example, the expression above will never trap, hence
2956 optimizing it to x < y would be invalid). */
2957 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2958 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2961 /* If the comparison was short-circuited, and only the RHS
2962 trapped, we may now generate a spurious trap. */
2964 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2967 /* If we changed the conditions that cause a trap, we lose. */
2968 if ((ltrap || rtrap) != trap)
2972 if (compcode == COMPCODE_TRUE)
2973 return constant_boolean_node (true, truth_type);
2974 else if (compcode == COMPCODE_FALSE)
2975 return constant_boolean_node (false, truth_type);
2977 return fold_build2 (compcode_to_comparison (compcode),
2978 truth_type, ll_arg, lr_arg);
2981 /* Return nonzero if CODE is a tree code that represents a truth value. */
2984 truth_value_p (enum tree_code code)
2986 return (TREE_CODE_CLASS (code) == tcc_comparison
2987 || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
2988 || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
2989 || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
2992 /* Return nonzero if two operands (typically of the same tree node)
2993 are necessarily equal. If either argument has side-effects this
2994 function returns zero. FLAGS modifies behavior as follows:
2996 If OEP_ONLY_CONST is set, only return nonzero for constants.
2997 This function tests whether the operands are indistinguishable;
2998 it does not test whether they are equal using C's == operation.
2999 The distinction is important for IEEE floating point, because
3000 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
3001 (2) two NaNs may be indistinguishable, but NaN!=NaN.
3003 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
3004 even though it may hold multiple values during a function.
3005 This is because a GCC tree node guarantees that nothing else is
3006 executed between the evaluation of its "operands" (which may often
3007 be evaluated in arbitrary order). Hence if the operands themselves
3008 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
3009 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
3010 unset means assuming isochronic (or instantaneous) tree equivalence.
3011 Unless comparing arbitrary expression trees, such as from different
3012 statements, this flag can usually be left unset.
3014 If OEP_PURE_SAME is set, then pure functions with identical arguments
3015 are considered the same. It is used when the caller has other ways
3016 to ensure that global memory is unchanged in between. */
3019 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
3021 /* If either is ERROR_MARK, they aren't equal. */
3022 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK)
3025 /* If both types don't have the same signedness, then we can't consider
3026 them equal. We must check this before the STRIP_NOPS calls
3027 because they may change the signedness of the arguments. */
3028 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1)))
3031 /* If both types don't have the same precision, then it is not safe
3033 if (TYPE_PRECISION (TREE_TYPE (arg0)) != TYPE_PRECISION (TREE_TYPE (arg1)))
3039 /* In case both args are comparisons but with different comparison
3040 code, try to swap the comparison operands of one arg to produce
3041 a match and compare that variant. */
3042 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3043 && COMPARISON_CLASS_P (arg0)
3044 && COMPARISON_CLASS_P (arg1))
3046 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
3048 if (TREE_CODE (arg0) == swap_code)
3049 return operand_equal_p (TREE_OPERAND (arg0, 0),
3050 TREE_OPERAND (arg1, 1), flags)
3051 && operand_equal_p (TREE_OPERAND (arg0, 1),
3052 TREE_OPERAND (arg1, 0), flags);
3055 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3056 /* This is needed for conversions and for COMPONENT_REF.
3057 Might as well play it safe and always test this. */
3058 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
3059 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
3060 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
3063 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
3064 We don't care about side effects in that case because the SAVE_EXPR
3065 takes care of that for us. In all other cases, two expressions are
3066 equal if they have no side effects. If we have two identical
3067 expressions with side effects that should be treated the same due
3068 to the only side effects being identical SAVE_EXPR's, that will
3069 be detected in the recursive calls below. */
3070 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
3071 && (TREE_CODE (arg0) == SAVE_EXPR
3072 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
3075 /* Next handle constant cases, those for which we can return 1 even
3076 if ONLY_CONST is set. */
3077 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
3078 switch (TREE_CODE (arg0))
3081 return tree_int_cst_equal (arg0, arg1);
3084 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
3085 TREE_FIXED_CST (arg1));
3088 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
3089 TREE_REAL_CST (arg1)))
3093 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
3095 /* If we do not distinguish between signed and unsigned zero,
3096 consider them equal. */
3097 if (real_zerop (arg0) && real_zerop (arg1))
3106 v1 = TREE_VECTOR_CST_ELTS (arg0);
3107 v2 = TREE_VECTOR_CST_ELTS (arg1);
3110 if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
3113 v1 = TREE_CHAIN (v1);
3114 v2 = TREE_CHAIN (v2);
3121 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
3123 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
3127 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
3128 && ! memcmp (TREE_STRING_POINTER (arg0),
3129 TREE_STRING_POINTER (arg1),
3130 TREE_STRING_LENGTH (arg0)));
3133 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
3139 if (flags & OEP_ONLY_CONST)
3142 /* Define macros to test an operand from arg0 and arg1 for equality and a
3143 variant that allows null and views null as being different from any
3144 non-null value. In the latter case, if either is null, the both
3145 must be; otherwise, do the normal comparison. */
3146 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
3147 TREE_OPERAND (arg1, N), flags)
3149 #define OP_SAME_WITH_NULL(N) \
3150 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
3151 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
3153 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
3156 /* Two conversions are equal only if signedness and modes match. */
3157 switch (TREE_CODE (arg0))
3161 case FIX_TRUNC_EXPR:
3162 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
3163 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
3173 case tcc_comparison:
3175 if (OP_SAME (0) && OP_SAME (1))
3178 /* For commutative ops, allow the other order. */
3179 return (commutative_tree_code (TREE_CODE (arg0))
3180 && operand_equal_p (TREE_OPERAND (arg0, 0),
3181 TREE_OPERAND (arg1, 1), flags)
3182 && operand_equal_p (TREE_OPERAND (arg0, 1),
3183 TREE_OPERAND (arg1, 0), flags));
3186 /* If either of the pointer (or reference) expressions we are
3187 dereferencing contain a side effect, these cannot be equal. */
3188 if (TREE_SIDE_EFFECTS (arg0)
3189 || TREE_SIDE_EFFECTS (arg1))
3192 switch (TREE_CODE (arg0))
3195 case ALIGN_INDIRECT_REF:
3196 case MISALIGNED_INDIRECT_REF:
3202 case ARRAY_RANGE_REF:
3203 /* Operands 2 and 3 may be null.
3204 Compare the array index by value if it is constant first as we
3205 may have different types but same value here. */
3207 && (tree_int_cst_equal (TREE_OPERAND (arg0, 1),
3208 TREE_OPERAND (arg1, 1))
3210 && OP_SAME_WITH_NULL (2)
3211 && OP_SAME_WITH_NULL (3));
3214 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
3215 may be NULL when we're called to compare MEM_EXPRs. */
3216 return OP_SAME_WITH_NULL (0)
3218 && OP_SAME_WITH_NULL (2);
3221 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
3227 case tcc_expression:
3228 switch (TREE_CODE (arg0))
3231 case TRUTH_NOT_EXPR:
3234 case TRUTH_ANDIF_EXPR:
3235 case TRUTH_ORIF_EXPR:
3236 return OP_SAME (0) && OP_SAME (1);
3238 case TRUTH_AND_EXPR:
3240 case TRUTH_XOR_EXPR:
3241 if (OP_SAME (0) && OP_SAME (1))
3244 /* Otherwise take into account this is a commutative operation. */
3245 return (operand_equal_p (TREE_OPERAND (arg0, 0),
3246 TREE_OPERAND (arg1, 1), flags)
3247 && operand_equal_p (TREE_OPERAND (arg0, 1),
3248 TREE_OPERAND (arg1, 0), flags));
3255 switch (TREE_CODE (arg0))
3258 /* If the CALL_EXPRs call different functions, then they
3259 clearly can not be equal. */
3260 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
3265 unsigned int cef = call_expr_flags (arg0);
3266 if (flags & OEP_PURE_SAME)
3267 cef &= ECF_CONST | ECF_PURE;
3274 /* Now see if all the arguments are the same. */
3276 const_call_expr_arg_iterator iter0, iter1;
3278 for (a0 = first_const_call_expr_arg (arg0, &iter0),
3279 a1 = first_const_call_expr_arg (arg1, &iter1);
3281 a0 = next_const_call_expr_arg (&iter0),
3282 a1 = next_const_call_expr_arg (&iter1))
3283 if (! operand_equal_p (a0, a1, flags))
3286 /* If we get here and both argument lists are exhausted
3287 then the CALL_EXPRs are equal. */
3288 return ! (a0 || a1);
3294 case tcc_declaration:
3295 /* Consider __builtin_sqrt equal to sqrt. */
3296 return (TREE_CODE (arg0) == FUNCTION_DECL
3297 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
3298 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
3299 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
3306 #undef OP_SAME_WITH_NULL
3309 /* Similar to operand_equal_p, but see if ARG0 might have been made by
3310 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
3312 When in doubt, return 0. */
3315 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
3317 int unsignedp1, unsignedpo;
3318 tree primarg0, primarg1, primother;
3319 unsigned int correct_width;
3321 if (operand_equal_p (arg0, arg1, 0))
3324 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
3325 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
3328 /* Discard any conversions that don't change the modes of ARG0 and ARG1
3329 and see if the inner values are the same. This removes any
3330 signedness comparison, which doesn't matter here. */
3331 primarg0 = arg0, primarg1 = arg1;
3332 STRIP_NOPS (primarg0);
3333 STRIP_NOPS (primarg1);
3334 if (operand_equal_p (primarg0, primarg1, 0))
3337 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
3338 actual comparison operand, ARG0.
3340 First throw away any conversions to wider types
3341 already present in the operands. */
3343 primarg1 = get_narrower (arg1, &unsignedp1);
3344 primother = get_narrower (other, &unsignedpo);
3346 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
3347 if (unsignedp1 == unsignedpo
3348 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
3349 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
3351 tree type = TREE_TYPE (arg0);
3353 /* Make sure shorter operand is extended the right way
3354 to match the longer operand. */
3355 primarg1 = fold_convert (signed_or_unsigned_type_for
3356 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
3358 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
3365 /* See if ARG is an expression that is either a comparison or is performing
3366 arithmetic on comparisons. The comparisons must only be comparing
3367 two different values, which will be stored in *CVAL1 and *CVAL2; if
3368 they are nonzero it means that some operands have already been found.
3369 No variables may be used anywhere else in the expression except in the
3370 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
3371 the expression and save_expr needs to be called with CVAL1 and CVAL2.
3373 If this is true, return 1. Otherwise, return zero. */
3376 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
3378 enum tree_code code = TREE_CODE (arg);
3379 enum tree_code_class class = TREE_CODE_CLASS (code);
3381 /* We can handle some of the tcc_expression cases here. */
3382 if (class == tcc_expression && code == TRUTH_NOT_EXPR)
3384 else if (class == tcc_expression
3385 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
3386 || code == COMPOUND_EXPR))
3389 else if (class == tcc_expression && code == SAVE_EXPR
3390 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
3392 /* If we've already found a CVAL1 or CVAL2, this expression is
3393 two complex to handle. */
3394 if (*cval1 || *cval2)
3404 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
3407 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
3408 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3409 cval1, cval2, save_p));
3414 case tcc_expression:
3415 if (code == COND_EXPR)
3416 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
3417 cval1, cval2, save_p)
3418 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3419 cval1, cval2, save_p)
3420 && twoval_comparison_p (TREE_OPERAND (arg, 2),
3421 cval1, cval2, save_p));
3424 case tcc_comparison:
3425 /* First see if we can handle the first operand, then the second. For
3426 the second operand, we know *CVAL1 can't be zero. It must be that
3427 one side of the comparison is each of the values; test for the
3428 case where this isn't true by failing if the two operands
3431 if (operand_equal_p (TREE_OPERAND (arg, 0),
3432 TREE_OPERAND (arg, 1), 0))
3436 *cval1 = TREE_OPERAND (arg, 0);
3437 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
3439 else if (*cval2 == 0)
3440 *cval2 = TREE_OPERAND (arg, 0);
3441 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
3446 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
3448 else if (*cval2 == 0)
3449 *cval2 = TREE_OPERAND (arg, 1);
3450 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
3462 /* ARG is a tree that is known to contain just arithmetic operations and
3463 comparisons. Evaluate the operations in the tree substituting NEW0 for
3464 any occurrence of OLD0 as an operand of a comparison and likewise for
3468 eval_subst (tree arg, tree old0, tree new0, tree old1, tree new1)
3470 tree type = TREE_TYPE (arg);
3471 enum tree_code code = TREE_CODE (arg);
3472 enum tree_code_class class = TREE_CODE_CLASS (code);
3474 /* We can handle some of the tcc_expression cases here. */
3475 if (class == tcc_expression && code == TRUTH_NOT_EXPR)
3477 else if (class == tcc_expression
3478 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
3484 return fold_build1 (code, type,
3485 eval_subst (TREE_OPERAND (arg, 0),
3486 old0, new0, old1, new1));
3489 return fold_build2 (code, type,
3490 eval_subst (TREE_OPERAND (arg, 0),
3491 old0, new0, old1, new1),
3492 eval_subst (TREE_OPERAND (arg, 1),
3493 old0, new0, old1, new1));
3495 case tcc_expression:
3499 return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);
3502 return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);
3505 return fold_build3 (code, type,
3506 eval_subst (TREE_OPERAND (arg, 0),
3507 old0, new0, old1, new1),
3508 eval_subst (TREE_OPERAND (arg, 1),
3509 old0, new0, old1, new1),
3510 eval_subst (TREE_OPERAND (arg, 2),
3511 old0, new0, old1, new1));
3515 /* Fall through - ??? */
3517 case tcc_comparison:
3519 tree arg0 = TREE_OPERAND (arg, 0);
3520 tree arg1 = TREE_OPERAND (arg, 1);
3522 /* We need to check both for exact equality and tree equality. The
3523 former will be true if the operand has a side-effect. In that
3524 case, we know the operand occurred exactly once. */
3526 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
3528 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
3531 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
3533 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
3536 return fold_build2 (code, type, arg0, arg1);
3544 /* Return a tree for the case when the result of an expression is RESULT
3545 converted to TYPE and OMITTED was previously an operand of the expression
3546 but is now not needed (e.g., we folded OMITTED * 0).
3548 If OMITTED has side effects, we must evaluate it. Otherwise, just do
3549 the conversion of RESULT to TYPE. */
3552 omit_one_operand (tree type, tree result, tree omitted)
3554 tree t = fold_convert (type, result);
3556 /* If the resulting operand is an empty statement, just return the omitted
3557 statement casted to void. */
3558 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3559 return build1 (NOP_EXPR, void_type_node, fold_ignored_result (omitted));
3561 if (TREE_SIDE_EFFECTS (omitted))
3562 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
3564 return non_lvalue (t);
3567 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
3570 pedantic_omit_one_operand (tree type, tree result, tree omitted)
3572 tree t = fold_convert (type, result);
3574 /* If the resulting operand is an empty statement, just return the omitted
3575 statement casted to void. */
3576 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3577 return build1 (NOP_EXPR, void_type_node, fold_ignored_result (omitted));
3579 if (TREE_SIDE_EFFECTS (omitted))
3580 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
3582 return pedantic_non_lvalue (t);
3585 /* Return a tree for the case when the result of an expression is RESULT
3586 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3587 of the expression but are now not needed.
3589 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3590 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3591 evaluated before OMITTED2. Otherwise, if neither has side effects,
3592 just do the conversion of RESULT to TYPE. */
3595 omit_two_operands (tree type, tree result, tree omitted1, tree omitted2)
3597 tree t = fold_convert (type, result);
3599 if (TREE_SIDE_EFFECTS (omitted2))
3600 t = build2 (COMPOUND_EXPR, type, omitted2, t);
3601 if (TREE_SIDE_EFFECTS (omitted1))
3602 t = build2 (COMPOUND_EXPR, type, omitted1, t);
3604 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue (t) : t;
3608 /* Return a simplified tree node for the truth-negation of ARG. This
3609 never alters ARG itself. We assume that ARG is an operation that
3610 returns a truth value (0 or 1).
3612 FIXME: one would think we would fold the result, but it causes
3613 problems with the dominator optimizer. */
3616 fold_truth_not_expr (tree arg)
3618 tree type = TREE_TYPE (arg);
3619 enum tree_code code = TREE_CODE (arg);
3621 /* If this is a comparison, we can simply invert it, except for
3622 floating-point non-equality comparisons, in which case we just
3623 enclose a TRUTH_NOT_EXPR around what we have. */
3625 if (TREE_CODE_CLASS (code) == tcc_comparison)
3627 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3628 if (FLOAT_TYPE_P (op_type)
3629 && flag_trapping_math
3630 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3631 && code != NE_EXPR && code != EQ_EXPR)
3635 code = invert_tree_comparison (code,
3636 HONOR_NANS (TYPE_MODE (op_type)));
3637 if (code == ERROR_MARK)
3640 return build2 (code, type,
3641 TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
3648 return constant_boolean_node (integer_zerop (arg), type);
3650 case TRUTH_AND_EXPR:
3651 return build2 (TRUTH_OR_EXPR, type,
3652 invert_truthvalue (TREE_OPERAND (arg, 0)),
3653 invert_truthvalue (TREE_OPERAND (arg, 1)));
3656 return build2 (TRUTH_AND_EXPR, type,
3657 invert_truthvalue (TREE_OPERAND (arg, 0)),
3658 invert_truthvalue (TREE_OPERAND (arg, 1)));
3660 case TRUTH_XOR_EXPR:
3661 /* Here we can invert either operand. We invert the first operand
3662 unless the second operand is a TRUTH_NOT_EXPR in which case our
3663 result is the XOR of the first operand with the inside of the
3664 negation of the second operand. */
3666 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3667 return build2 (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3668 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3670 return build2 (TRUTH_XOR_EXPR, type,
3671 invert_truthvalue (TREE_OPERAND (arg, 0)),
3672 TREE_OPERAND (arg, 1));
3674 case TRUTH_ANDIF_EXPR:
3675 return build2 (TRUTH_ORIF_EXPR, type,
3676 invert_truthvalue (TREE_OPERAND (arg, 0)),
3677 invert_truthvalue (TREE_OPERAND (arg, 1)));
3679 case TRUTH_ORIF_EXPR:
3680 return build2 (TRUTH_ANDIF_EXPR, type,
3681 invert_truthvalue (TREE_OPERAND (arg, 0)),
3682 invert_truthvalue (TREE_OPERAND (arg, 1)));
3684 case TRUTH_NOT_EXPR:
3685 return TREE_OPERAND (arg, 0);
3689 tree arg1 = TREE_OPERAND (arg, 1);
3690 tree arg2 = TREE_OPERAND (arg, 2);
3691 /* A COND_EXPR may have a throw as one operand, which
3692 then has void type. Just leave void operands
3694 return build3 (COND_EXPR, type, TREE_OPERAND (arg, 0),
3695 VOID_TYPE_P (TREE_TYPE (arg1))
3696 ? arg1 : invert_truthvalue (arg1),
3697 VOID_TYPE_P (TREE_TYPE (arg2))
3698 ? arg2 : invert_truthvalue (arg2));
3702 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
3703 invert_truthvalue (TREE_OPERAND (arg, 1)));
3705 case NON_LVALUE_EXPR:
3706 return invert_truthvalue (TREE_OPERAND (arg, 0));
3709 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3710 return build1 (TRUTH_NOT_EXPR, type, arg);
3714 return build1 (TREE_CODE (arg), type,
3715 invert_truthvalue (TREE_OPERAND (arg, 0)));
3718 if (!integer_onep (TREE_OPERAND (arg, 1)))
3720 return build2 (EQ_EXPR, type, arg,
3721 build_int_cst (type, 0));
3724 return build1 (TRUTH_NOT_EXPR, type, arg);
3726 case CLEANUP_POINT_EXPR:
3727 return build1 (CLEANUP_POINT_EXPR, type,
3728 invert_truthvalue (TREE_OPERAND (arg, 0)));
3737 /* Return a simplified tree node for the truth-negation of ARG. This
3738 never alters ARG itself. We assume that ARG is an operation that
3739 returns a truth value (0 or 1).
3741 FIXME: one would think we would fold the result, but it causes
3742 problems with the dominator optimizer. */
3745 invert_truthvalue (tree arg)
3749 if (TREE_CODE (arg) == ERROR_MARK)
3752 tem = fold_truth_not_expr (arg);
3754 tem = build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg), arg);
3759 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3760 operands are another bit-wise operation with a common input. If so,
3761 distribute the bit operations to save an operation and possibly two if
3762 constants are involved. For example, convert
3763 (A | B) & (A | C) into A | (B & C)
3764 Further simplification will occur if B and C are constants.
3766 If this optimization cannot be done, 0 will be returned. */
3769 distribute_bit_expr (enum tree_code code, tree type, tree arg0, tree arg1)
3774 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3775 || TREE_CODE (arg0) == code
3776 || (TREE_CODE (arg0) != BIT_AND_EXPR
3777 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3780 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3782 common = TREE_OPERAND (arg0, 0);
3783 left = TREE_OPERAND (arg0, 1);
3784 right = TREE_OPERAND (arg1, 1);
3786 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3788 common = TREE_OPERAND (arg0, 0);
3789 left = TREE_OPERAND (arg0, 1);
3790 right = TREE_OPERAND (arg1, 0);
3792 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3794 common = TREE_OPERAND (arg0, 1);
3795 left = TREE_OPERAND (arg0, 0);
3796 right = TREE_OPERAND (arg1, 1);
3798 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3800 common = TREE_OPERAND (arg0, 1);
3801 left = TREE_OPERAND (arg0, 0);
3802 right = TREE_OPERAND (arg1, 0);
3807 return fold_build2 (TREE_CODE (arg0), type, common,
3808 fold_build2 (code, type, left, right));
3811 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3812 with code CODE. This optimization is unsafe. */
3814 distribute_real_division (enum tree_code code, tree type, tree arg0, tree arg1)
3816 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3817 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3819 /* (A / C) +- (B / C) -> (A +- B) / C. */
3821 && operand_equal_p (TREE_OPERAND (arg0, 1),
3822 TREE_OPERAND (arg1, 1), 0))
3823 return fold_build2 (mul0 ? MULT_EXPR : RDIV_EXPR, type,
3824 fold_build2 (code, type,
3825 TREE_OPERAND (arg0, 0),
3826 TREE_OPERAND (arg1, 0)),
3827 TREE_OPERAND (arg0, 1));
3829 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3830 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3831 TREE_OPERAND (arg1, 0), 0)
3832 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3833 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3835 REAL_VALUE_TYPE r0, r1;
3836 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3837 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3839 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3841 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3842 real_arithmetic (&r0, code, &r0, &r1);
3843 return fold_build2 (MULT_EXPR, type,
3844 TREE_OPERAND (arg0, 0),
3845 build_real (type, r0));
3851 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3852 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3855 make_bit_field_ref (tree inner, tree type, int bitsize, int bitpos,
3862 tree size = TYPE_SIZE (TREE_TYPE (inner));
3863 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3864 || POINTER_TYPE_P (TREE_TYPE (inner)))
3865 && host_integerp (size, 0)
3866 && tree_low_cst (size, 0) == bitsize)
3867 return fold_convert (type, inner);
3870 result = build3 (BIT_FIELD_REF, type, inner,
3871 size_int (bitsize), bitsize_int (bitpos));
3873 BIT_FIELD_REF_UNSIGNED (result) = unsignedp;
3878 /* Optimize a bit-field compare.
3880 There are two cases: First is a compare against a constant and the
3881 second is a comparison of two items where the fields are at the same
3882 bit position relative to the start of a chunk (byte, halfword, word)
3883 large enough to contain it. In these cases we can avoid the shift
3884 implicit in bitfield extractions.
3886 For constants, we emit a compare of the shifted constant with the
3887 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3888 compared. For two fields at the same position, we do the ANDs with the
3889 similar mask and compare the result of the ANDs.
3891 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3892 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3893 are the left and right operands of the comparison, respectively.
3895 If the optimization described above can be done, we return the resulting
3896 tree. Otherwise we return zero. */
3899 optimize_bit_field_compare (enum tree_code code, tree compare_type,
3902 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3903 tree type = TREE_TYPE (lhs);
3904 tree signed_type, unsigned_type;
3905 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3906 enum machine_mode lmode, rmode, nmode;
3907 int lunsignedp, runsignedp;
3908 int lvolatilep = 0, rvolatilep = 0;
3909 tree linner, rinner = NULL_TREE;
3913 /* Get all the information about the extractions being done. If the bit size
3914 if the same as the size of the underlying object, we aren't doing an
3915 extraction at all and so can do nothing. We also don't want to
3916 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3917 then will no longer be able to replace it. */
3918 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3919 &lunsignedp, &lvolatilep, false);
3920 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3921 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
3926 /* If this is not a constant, we can only do something if bit positions,
3927 sizes, and signedness are the same. */
3928 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3929 &runsignedp, &rvolatilep, false);
3931 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3932 || lunsignedp != runsignedp || offset != 0
3933 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
3937 /* See if we can find a mode to refer to this field. We should be able to,
3938 but fail if we can't. */
3939 nmode = get_best_mode (lbitsize, lbitpos,
3940 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3941 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3942 TYPE_ALIGN (TREE_TYPE (rinner))),
3943 word_mode, lvolatilep || rvolatilep);
3944 if (nmode == VOIDmode)
3947 /* Set signed and unsigned types of the precision of this mode for the
3949 signed_type = lang_hooks.types.type_for_mode (nmode, 0);
3950 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3952 /* Compute the bit position and size for the new reference and our offset
3953 within it. If the new reference is the same size as the original, we
3954 won't optimize anything, so return zero. */
3955 nbitsize = GET_MODE_BITSIZE (nmode);
3956 nbitpos = lbitpos & ~ (nbitsize - 1);
3958 if (nbitsize == lbitsize)
3961 if (BYTES_BIG_ENDIAN)
3962 lbitpos = nbitsize - lbitsize - lbitpos;
3964 /* Make the mask to be used against the extracted field. */
3965 mask = build_int_cst_type (unsigned_type, -1);
3966 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
3967 mask = const_binop (RSHIFT_EXPR, mask,
3968 size_int (nbitsize - lbitsize - lbitpos), 0);
3971 /* If not comparing with constant, just rework the comparison
3973 return fold_build2 (code, compare_type,
3974 fold_build2 (BIT_AND_EXPR, unsigned_type,
3975 make_bit_field_ref (linner,
3980 fold_build2 (BIT_AND_EXPR, unsigned_type,
3981 make_bit_field_ref (rinner,
3987 /* Otherwise, we are handling the constant case. See if the constant is too
3988 big for the field. Warn and return a tree of for 0 (false) if so. We do
3989 this not only for its own sake, but to avoid having to test for this
3990 error case below. If we didn't, we might generate wrong code.
3992 For unsigned fields, the constant shifted right by the field length should
3993 be all zero. For signed fields, the high-order bits should agree with
3998 if (! integer_zerop (const_binop (RSHIFT_EXPR,
3999 fold_convert (unsigned_type, rhs),
4000 size_int (lbitsize), 0)))
4002 warning (0, "comparison is always %d due to width of bit-field",
4004 return constant_boolean_node (code == NE_EXPR, compare_type);
4009 tree tem = const_binop (RSHIFT_EXPR, fold_convert (signed_type, rhs),
4010 size_int (lbitsize - 1), 0);
4011 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
4013 warning (0, "comparison is always %d due to width of bit-field",
4015 return constant_boolean_node (code == NE_EXPR, compare_type);
4019 /* Single-bit compares should always be against zero. */
4020 if (lbitsize == 1 && ! integer_zerop (rhs))
4022 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
4023 rhs = build_int_cst (type, 0);
4026 /* Make a new bitfield reference, shift the constant over the
4027 appropriate number of bits and mask it with the computed mask
4028 (in case this was a signed field). If we changed it, make a new one. */
4029 lhs = make_bit_field_ref (linner, unsigned_type, nbitsize, nbitpos, 1);
4032 TREE_SIDE_EFFECTS (lhs) = 1;
4033 TREE_THIS_VOLATILE (lhs) = 1;
4036 rhs = const_binop (BIT_AND_EXPR,
4037 const_binop (LSHIFT_EXPR,
4038 fold_convert (unsigned_type, rhs),
4039 size_int (lbitpos), 0),
4042 return build2 (code, compare_type,
4043 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask),
4047 /* Subroutine for fold_truthop: decode a field reference.
4049 If EXP is a comparison reference, we return the innermost reference.
4051 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
4052 set to the starting bit number.
4054 If the innermost field can be completely contained in a mode-sized
4055 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
4057 *PVOLATILEP is set to 1 if the any expression encountered is volatile;
4058 otherwise it is not changed.
4060 *PUNSIGNEDP is set to the signedness of the field.
4062 *PMASK is set to the mask used. This is either contained in a
4063 BIT_AND_EXPR or derived from the width of the field.
4065 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
4067 Return 0 if this is not a component reference or is one that we can't
4068 do anything with. */
4071 decode_field_reference (tree exp, HOST_WIDE_INT *pbitsize,
4072 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
4073 int *punsignedp, int *pvolatilep,
4074 tree *pmask, tree *pand_mask)
4076 tree outer_type = 0;
4078 tree mask, inner, offset;
4080 unsigned int precision;
4082 /* All the optimizations using this function assume integer fields.
4083 There are problems with FP fields since the type_for_size call
4084 below can fail for, e.g., XFmode. */
4085 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
4088 /* We are interested in the bare arrangement of bits, so strip everything
4089 that doesn't affect the machine mode. However, record the type of the
4090 outermost expression if it may matter below. */
4091 if (TREE_CODE (exp) == NOP_EXPR
4092 || TREE_CODE (exp) == CONVERT_EXPR
4093 || TREE_CODE (exp) == NON_LVALUE_EXPR)
4094 outer_type = TREE_TYPE (exp);
4097 if (TREE_CODE (exp) == BIT_AND_EXPR)
4099 and_mask = TREE_OPERAND (exp, 1);
4100 exp = TREE_OPERAND (exp, 0);
4101 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
4102 if (TREE_CODE (and_mask) != INTEGER_CST)
4106 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
4107 punsignedp, pvolatilep, false);
4108 if ((inner == exp && and_mask == 0)
4109 || *pbitsize < 0 || offset != 0
4110 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
4113 /* If the number of bits in the reference is the same as the bitsize of
4114 the outer type, then the outer type gives the signedness. Otherwise
4115 (in case of a small bitfield) the signedness is unchanged. */
4116 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
4117 *punsignedp = TYPE_UNSIGNED (outer_type);
4119 /* Compute the mask to access the bitfield. */
4120 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
4121 precision = TYPE_PRECISION (unsigned_type);
4123 mask = build_int_cst_type (unsigned_type, -1);
4125 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
4126 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
4128 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
4130 mask = fold_build2 (BIT_AND_EXPR, unsigned_type,
4131 fold_convert (unsigned_type, and_mask), mask);
4134 *pand_mask = and_mask;
4138 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
4142 all_ones_mask_p (const_tree mask, int size)
4144 tree type = TREE_TYPE (mask);
4145 unsigned int precision = TYPE_PRECISION (type);
4148 tmask = build_int_cst_type (signed_type_for (type), -1);
4151 tree_int_cst_equal (mask,
4152 const_binop (RSHIFT_EXPR,
4153 const_binop (LSHIFT_EXPR, tmask,
4154 size_int (precision - size),
4156 size_int (precision - size), 0));
4159 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
4160 represents the sign bit of EXP's type. If EXP represents a sign
4161 or zero extension, also test VAL against the unextended type.
4162 The return value is the (sub)expression whose sign bit is VAL,
4163 or NULL_TREE otherwise. */
4166 sign_bit_p (tree exp, const_tree val)
4168 unsigned HOST_WIDE_INT mask_lo, lo;
4169 HOST_WIDE_INT mask_hi, hi;
4173 /* Tree EXP must have an integral type. */
4174 t = TREE_TYPE (exp);
4175 if (! INTEGRAL_TYPE_P (t))
4178 /* Tree VAL must be an integer constant. */
4179 if (TREE_CODE (val) != INTEGER_CST
4180 || TREE_OVERFLOW (val))
4183 width = TYPE_PRECISION (t);
4184 if (width > HOST_BITS_PER_WIDE_INT)
4186 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
4189 mask_hi = ((unsigned HOST_WIDE_INT) -1
4190 >> (2 * HOST_BITS_PER_WIDE_INT - width));
4196 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
4199 mask_lo = ((unsigned HOST_WIDE_INT) -1
4200 >> (HOST_BITS_PER_WIDE_INT - width));
4203 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
4204 treat VAL as if it were unsigned. */
4205 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
4206 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
4209 /* Handle extension from a narrower type. */
4210 if (TREE_CODE (exp) == NOP_EXPR
4211 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
4212 return sign_bit_p (TREE_OPERAND (exp, 0), val);
4217 /* Subroutine for fold_truthop: determine if an operand is simple enough
4218 to be evaluated unconditionally. */
4221 simple_operand_p (const_tree exp)
4223 /* Strip any conversions that don't change the machine mode. */
4226 return (CONSTANT_CLASS_P (exp)
4227 || TREE_CODE (exp) == SSA_NAME
4229 && ! TREE_ADDRESSABLE (exp)
4230 && ! TREE_THIS_VOLATILE (exp)
4231 && ! DECL_NONLOCAL (exp)
4232 /* Don't regard global variables as simple. They may be
4233 allocated in ways unknown to the compiler (shared memory,
4234 #pragma weak, etc). */
4235 && ! TREE_PUBLIC (exp)
4236 && ! DECL_EXTERNAL (exp)
4237 /* Loading a static variable is unduly expensive, but global
4238 registers aren't expensive. */
4239 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
4242 /* The following functions are subroutines to fold_range_test and allow it to
4243 try to change a logical combination of comparisons into a range test.
4246 X == 2 || X == 3 || X == 4 || X == 5
4250 (unsigned) (X - 2) <= 3
4252 We describe each set of comparisons as being either inside or outside
4253 a range, using a variable named like IN_P, and then describe the
4254 range with a lower and upper bound. If one of the bounds is omitted,
4255 it represents either the highest or lowest value of the type.
4257 In the comments below, we represent a range by two numbers in brackets
4258 preceded by a "+" to designate being inside that range, or a "-" to
4259 designate being outside that range, so the condition can be inverted by
4260 flipping the prefix. An omitted bound is represented by a "-". For
4261 example, "- [-, 10]" means being outside the range starting at the lowest
4262 possible value and ending at 10, in other words, being greater than 10.
4263 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
4266 We set up things so that the missing bounds are handled in a consistent
4267 manner so neither a missing bound nor "true" and "false" need to be
4268 handled using a special case. */
4270 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
4271 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
4272 and UPPER1_P are nonzero if the respective argument is an upper bound
4273 and zero for a lower. TYPE, if nonzero, is the type of the result; it
4274 must be specified for a comparison. ARG1 will be converted to ARG0's
4275 type if both are specified. */
4278 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
4279 tree arg1, int upper1_p)
4285 /* If neither arg represents infinity, do the normal operation.
4286 Else, if not a comparison, return infinity. Else handle the special
4287 comparison rules. Note that most of the cases below won't occur, but
4288 are handled for consistency. */
4290 if (arg0 != 0 && arg1 != 0)
4292 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
4293 arg0, fold_convert (TREE_TYPE (arg0), arg1));
4295 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
4298 if (TREE_CODE_CLASS (code) != tcc_comparison)
4301 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
4302 for neither. In real maths, we cannot assume open ended ranges are
4303 the same. But, this is computer arithmetic, where numbers are finite.
4304 We can therefore make the transformation of any unbounded range with
4305 the value Z, Z being greater than any representable number. This permits
4306 us to treat unbounded ranges as equal. */
4307 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
4308 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
4312 result = sgn0 == sgn1;
4315 result = sgn0 != sgn1;
4318 result = sgn0 < sgn1;
4321 result = sgn0 <= sgn1;
4324 result = sgn0 > sgn1;
4327 result = sgn0 >= sgn1;
4333 return constant_boolean_node (result, type);
4336 /* Given EXP, a logical expression, set the range it is testing into
4337 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
4338 actually being tested. *PLOW and *PHIGH will be made of the same
4339 type as the returned expression. If EXP is not a comparison, we
4340 will most likely not be returning a useful value and range. Set
4341 *STRICT_OVERFLOW_P to true if the return value is only valid
4342 because signed overflow is undefined; otherwise, do not change
4343 *STRICT_OVERFLOW_P. */
4346 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
4347 bool *strict_overflow_p)
4349 enum tree_code code;
4350 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
4351 tree exp_type = NULL_TREE, arg0_type = NULL_TREE;
4353 tree low, high, n_low, n_high;
4355 /* Start with simply saying "EXP != 0" and then look at the code of EXP
4356 and see if we can refine the range. Some of the cases below may not
4357 happen, but it doesn't seem worth worrying about this. We "continue"
4358 the outer loop when we've changed something; otherwise we "break"
4359 the switch, which will "break" the while. */
4362 low = high = build_int_cst (TREE_TYPE (exp), 0);
4366 code = TREE_CODE (exp);
4367 exp_type = TREE_TYPE (exp);
4369 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
4371 if (TREE_OPERAND_LENGTH (exp) > 0)
4372 arg0 = TREE_OPERAND (exp, 0);
4373 if (TREE_CODE_CLASS (code) == tcc_comparison
4374 || TREE_CODE_CLASS (code) == tcc_unary
4375 || TREE_CODE_CLASS (code) == tcc_binary)
4376 arg0_type = TREE_TYPE (arg0);
4377 if (TREE_CODE_CLASS (code) == tcc_binary
4378 || TREE_CODE_CLASS (code) == tcc_comparison
4379 || (TREE_CODE_CLASS (code) == tcc_expression
4380 && TREE_OPERAND_LENGTH (exp) > 1))
4381 arg1 = TREE_OPERAND (exp, 1);
4386 case TRUTH_NOT_EXPR:
4387 in_p = ! in_p, exp = arg0;
4390 case EQ_EXPR: case NE_EXPR:
4391 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
4392 /* We can only do something if the range is testing for zero
4393 and if the second operand is an integer constant. Note that
4394 saying something is "in" the range we make is done by
4395 complementing IN_P since it will set in the initial case of
4396 being not equal to zero; "out" is leaving it alone. */
4397 if (low == 0 || high == 0
4398 || ! integer_zerop (low) || ! integer_zerop (high)
4399 || TREE_CODE (arg1) != INTEGER_CST)
4404 case NE_EXPR: /* - [c, c] */
4407 case EQ_EXPR: /* + [c, c] */
4408 in_p = ! in_p, low = high = arg1;
4410 case GT_EXPR: /* - [-, c] */
4411 low = 0, high = arg1;
4413 case GE_EXPR: /* + [c, -] */
4414 in_p = ! in_p, low = arg1, high = 0;
4416 case LT_EXPR: /* - [c, -] */
4417 low = arg1, high = 0;
4419 case LE_EXPR: /* + [-, c] */
4420 in_p = ! in_p, low = 0, high = arg1;
4426 /* If this is an unsigned comparison, we also know that EXP is
4427 greater than or equal to zero. We base the range tests we make
4428 on that fact, so we record it here so we can parse existing
4429 range tests. We test arg0_type since often the return type
4430 of, e.g. EQ_EXPR, is boolean. */
4431 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
4433 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4435 build_int_cst (arg0_type, 0),
4439 in_p = n_in_p, low = n_low, high = n_high;
4441 /* If the high bound is missing, but we have a nonzero low
4442 bound, reverse the range so it goes from zero to the low bound
4444 if (high == 0 && low && ! integer_zerop (low))
4447 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
4448 integer_one_node, 0);
4449 low = build_int_cst (arg0_type, 0);
4457 /* (-x) IN [a,b] -> x in [-b, -a] */
4458 n_low = range_binop (MINUS_EXPR, exp_type,
4459 build_int_cst (exp_type, 0),
4461 n_high = range_binop (MINUS_EXPR, exp_type,
4462 build_int_cst (exp_type, 0),
4464 low = n_low, high = n_high;
4470 exp = build2 (MINUS_EXPR, exp_type, negate_expr (arg0),
4471 build_int_cst (exp_type, 1));
4474 case PLUS_EXPR: case MINUS_EXPR:
4475 if (TREE_CODE (arg1) != INTEGER_CST)
4478 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
4479 move a constant to the other side. */
4480 if (!TYPE_UNSIGNED (arg0_type)
4481 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4484 /* If EXP is signed, any overflow in the computation is undefined,
4485 so we don't worry about it so long as our computations on
4486 the bounds don't overflow. For unsigned, overflow is defined
4487 and this is exactly the right thing. */
4488 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4489 arg0_type, low, 0, arg1, 0);
4490 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4491 arg0_type, high, 1, arg1, 0);
4492 if ((n_low != 0 && TREE_OVERFLOW (n_low))
4493 || (n_high != 0 && TREE_OVERFLOW (n_high)))
4496 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
4497 *strict_overflow_p = true;
4499 /* Check for an unsigned range which has wrapped around the maximum
4500 value thus making n_high < n_low, and normalize it. */
4501 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
4503 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
4504 integer_one_node, 0);
4505 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
4506 integer_one_node, 0);
4508 /* If the range is of the form +/- [ x+1, x ], we won't
4509 be able to normalize it. But then, it represents the
4510 whole range or the empty set, so make it
4512 if (tree_int_cst_equal (n_low, low)
4513 && tree_int_cst_equal (n_high, high))
4519 low = n_low, high = n_high;
4524 case NOP_EXPR: case NON_LVALUE_EXPR: case CONVERT_EXPR:
4525 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4528 if (! INTEGRAL_TYPE_P (arg0_type)
4529 || (low != 0 && ! int_fits_type_p (low, arg0_type))
4530 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4533 n_low = low, n_high = high;
4536 n_low = fold_convert (arg0_type, n_low);
4539 n_high = fold_convert (arg0_type, n_high);
4542 /* If we're converting arg0 from an unsigned type, to exp,
4543 a signed type, we will be doing the comparison as unsigned.
4544 The tests above have already verified that LOW and HIGH
4547 So we have to ensure that we will handle large unsigned
4548 values the same way that the current signed bounds treat
4551 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4555 /* For fixed-point modes, we need to pass the saturating flag
4556 as the 2nd parameter. */
4557 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
4558 equiv_type = lang_hooks.types.type_for_mode
4559 (TYPE_MODE (arg0_type),
4560 TYPE_SATURATING (arg0_type));
4562 equiv_type = lang_hooks.types.type_for_mode
4563 (TYPE_MODE (arg0_type), 1);
4565 /* A range without an upper bound is, naturally, unbounded.
4566 Since convert would have cropped a very large value, use
4567 the max value for the destination type. */
4569 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4570 : TYPE_MAX_VALUE (arg0_type);
4572 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4573 high_positive = fold_build2 (RSHIFT_EXPR, arg0_type,
4574 fold_convert (arg0_type,
4576 build_int_cst (arg0_type, 1));
4578 /* If the low bound is specified, "and" the range with the
4579 range for which the original unsigned value will be
4583 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4584 1, n_low, n_high, 1,
4585 fold_convert (arg0_type,
4590 in_p = (n_in_p == in_p);
4594 /* Otherwise, "or" the range with the range of the input
4595 that will be interpreted as negative. */
4596 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4597 0, n_low, n_high, 1,
4598 fold_convert (arg0_type,
4603 in_p = (in_p != n_in_p);
4608 low = n_low, high = n_high;
4618 /* If EXP is a constant, we can evaluate whether this is true or false. */
4619 if (TREE_CODE (exp) == INTEGER_CST)
4621 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4623 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4629 *pin_p = in_p, *plow = low, *phigh = high;
4633 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4634 type, TYPE, return an expression to test if EXP is in (or out of, depending
4635 on IN_P) the range. Return 0 if the test couldn't be created. */
4638 build_range_check (tree type, tree exp, int in_p, tree low, tree high)
4640 tree etype = TREE_TYPE (exp);
4643 #ifdef HAVE_canonicalize_funcptr_for_compare
4644 /* Disable this optimization for function pointer expressions
4645 on targets that require function pointer canonicalization. */
4646 if (HAVE_canonicalize_funcptr_for_compare
4647 && TREE_CODE (etype) == POINTER_TYPE
4648 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4654 value = build_range_check (type, exp, 1, low, high);
4656 return invert_truthvalue (value);
4661 if (low == 0 && high == 0)
4662 return build_int_cst (type, 1);
4665 return fold_build2 (LE_EXPR, type, exp,
4666 fold_convert (etype, high));
4669 return fold_build2 (GE_EXPR, type, exp,
4670 fold_convert (etype, low));
4672 if (operand_equal_p (low, high, 0))
4673 return fold_build2 (EQ_EXPR, type, exp,
4674 fold_convert (etype, low));
4676 if (integer_zerop (low))
4678 if (! TYPE_UNSIGNED (etype))
4680 etype = unsigned_type_for (etype);
4681 high = fold_convert (etype, high);
4682 exp = fold_convert (etype, exp);
4684 return build_range_check (type, exp, 1, 0, high);
4687 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4688 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4690 unsigned HOST_WIDE_INT lo;
4694 prec = TYPE_PRECISION (etype);
4695 if (prec <= HOST_BITS_PER_WIDE_INT)
4698 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
4702 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
4703 lo = (unsigned HOST_WIDE_INT) -1;
4706 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
4708 if (TYPE_UNSIGNED (etype))
4710 etype = signed_type_for (etype);
4711 exp = fold_convert (etype, exp);
4713 return fold_build2 (GT_EXPR, type, exp,
4714 build_int_cst (etype, 0));
4718 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4719 This requires wrap-around arithmetics for the type of the expression. */
4720 switch (TREE_CODE (etype))
4723 /* There is no requirement that LOW be within the range of ETYPE
4724 if the latter is a subtype. It must, however, be within the base
4725 type of ETYPE. So be sure we do the subtraction in that type. */
4726 if (TREE_TYPE (etype))
4727 etype = TREE_TYPE (etype);
4732 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4733 TYPE_UNSIGNED (etype));
4740 /* If we don't have wrap-around arithmetics upfront, try to force it. */
4741 if (TREE_CODE (etype) == INTEGER_TYPE
4742 && !TYPE_OVERFLOW_WRAPS (etype))
4744 tree utype, minv, maxv;
4746 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4747 for the type in question, as we rely on this here. */
4748 utype = unsigned_type_for (etype);
4749 maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
4750 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4751 integer_one_node, 1);
4752 minv = fold_convert (utype, TYPE_MIN_VALUE (etype));
4754 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4761 high = fold_convert (etype, high);
4762 low = fold_convert (etype, low);
4763 exp = fold_convert (etype, exp);
4765 value = const_binop (MINUS_EXPR, high, low, 0);
4768 if (POINTER_TYPE_P (etype))
4770 if (value != 0 && !TREE_OVERFLOW (value))
4772 low = fold_convert (sizetype, low);
4773 low = fold_build1 (NEGATE_EXPR, sizetype, low);
4774 return build_range_check (type,
4775 fold_build2 (POINTER_PLUS_EXPR, etype, exp, low),
4776 1, build_int_cst (etype, 0), value);
4781 if (value != 0 && !TREE_OVERFLOW (value))
4782 return build_range_check (type,
4783 fold_build2 (MINUS_EXPR, etype, exp, low),
4784 1, build_int_cst (etype, 0), value);
4789 /* Return the predecessor of VAL in its type, handling the infinite case. */
4792 range_predecessor (tree val)
4794 tree type = TREE_TYPE (val);
4796 if (INTEGRAL_TYPE_P (type)
4797 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4800 return range_binop (MINUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4803 /* Return the successor of VAL in its type, handling the infinite case. */
4806 range_successor (tree val)
4808 tree type = TREE_TYPE (val);
4810 if (INTEGRAL_TYPE_P (type)
4811 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4814 return range_binop (PLUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4817 /* Given two ranges, see if we can merge them into one. Return 1 if we
4818 can, 0 if we can't. Set the output range into the specified parameters. */
4821 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4822 tree high0, int in1_p, tree low1, tree high1)
4830 int lowequal = ((low0 == 0 && low1 == 0)
4831 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4832 low0, 0, low1, 0)));
4833 int highequal = ((high0 == 0 && high1 == 0)
4834 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4835 high0, 1, high1, 1)));
4837 /* Make range 0 be the range that starts first, or ends last if they
4838 start at the same value. Swap them if it isn't. */
4839 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4842 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4843 high1, 1, high0, 1))))
4845 temp = in0_p, in0_p = in1_p, in1_p = temp;
4846 tem = low0, low0 = low1, low1 = tem;
4847 tem = high0, high0 = high1, high1 = tem;
4850 /* Now flag two cases, whether the ranges are disjoint or whether the
4851 second range is totally subsumed in the first. Note that the tests
4852 below are simplified by the ones above. */
4853 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4854 high0, 1, low1, 0));
4855 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4856 high1, 1, high0, 1));
4858 /* We now have four cases, depending on whether we are including or
4859 excluding the two ranges. */
4862 /* If they don't overlap, the result is false. If the second range
4863 is a subset it is the result. Otherwise, the range is from the start
4864 of the second to the end of the first. */
4866 in_p = 0, low = high = 0;
4868 in_p = 1, low = low1, high = high1;
4870 in_p = 1, low = low1, high = high0;
4873 else if (in0_p && ! in1_p)
4875 /* If they don't overlap, the result is the first range. If they are
4876 equal, the result is false. If the second range is a subset of the
4877 first, and the ranges begin at the same place, we go from just after
4878 the end of the second range to the end of the first. If the second
4879 range is not a subset of the first, or if it is a subset and both
4880 ranges end at the same place, the range starts at the start of the
4881 first range and ends just before the second range.
4882 Otherwise, we can't describe this as a single range. */
4884 in_p = 1, low = low0, high = high0;
4885 else if (lowequal && highequal)
4886 in_p = 0, low = high = 0;
4887 else if (subset && lowequal)
4889 low = range_successor (high1);
4894 /* We are in the weird situation where high0 > high1 but
4895 high1 has no successor. Punt. */
4899 else if (! subset || highequal)
4902 high = range_predecessor (low1);
4906 /* low0 < low1 but low1 has no predecessor. Punt. */
4914 else if (! in0_p && in1_p)
4916 /* If they don't overlap, the result is the second range. If the second
4917 is a subset of the first, the result is false. Otherwise,
4918 the range starts just after the first range and ends at the
4919 end of the second. */
4921 in_p = 1, low = low1, high = high1;
4922 else if (subset || highequal)
4923 in_p = 0, low = high = 0;
4926 low = range_successor (high0);
4931 /* high1 > high0 but high0 has no successor. Punt. */
4939 /* The case where we are excluding both ranges. Here the complex case
4940 is if they don't overlap. In that case, the only time we have a
4941 range is if they are adjacent. If the second is a subset of the
4942 first, the result is the first. Otherwise, the range to exclude
4943 starts at the beginning of the first range and ends at the end of the
4947 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4948 range_successor (high0),
4950 in_p = 0, low = low0, high = high1;
4953 /* Canonicalize - [min, x] into - [-, x]. */
4954 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4955 switch (TREE_CODE (TREE_TYPE (low0)))
4958 if (TYPE_PRECISION (TREE_TYPE (low0))
4959 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4963 if (tree_int_cst_equal (low0,
4964 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4968 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4969 && integer_zerop (low0))
4976 /* Canonicalize - [x, max] into - [x, -]. */
4977 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4978 switch (TREE_CODE (TREE_TYPE (high1)))
4981 if (TYPE_PRECISION (TREE_TYPE (high1))
4982 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4986 if (tree_int_cst_equal (high1,
4987 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4991 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4992 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4994 integer_one_node, 1)))
5001 /* The ranges might be also adjacent between the maximum and
5002 minimum values of the given type. For
5003 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
5004 return + [x + 1, y - 1]. */
5005 if (low0 == 0 && high1 == 0)
5007 low = range_successor (high0);
5008 high = range_predecessor (low1);
5009 if (low == 0 || high == 0)
5019 in_p = 0, low = low0, high = high0;
5021 in_p = 0, low = low0, high = high1;
5024 *pin_p = in_p, *plow = low, *phigh = high;
5029 /* Subroutine of fold, looking inside expressions of the form
5030 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
5031 of the COND_EXPR. This function is being used also to optimize
5032 A op B ? C : A, by reversing the comparison first.
5034 Return a folded expression whose code is not a COND_EXPR
5035 anymore, or NULL_TREE if no folding opportunity is found. */
5038 fold_cond_expr_with_comparison (tree type, tree arg0, tree arg1, tree arg2)
5040 enum tree_code comp_code = TREE_CODE (arg0);
5041 tree arg00 = TREE_OPERAND (arg0, 0);
5042 tree arg01 = TREE_OPERAND (arg0, 1);
5043 tree arg1_type = TREE_TYPE (arg1);
5049 /* If we have A op 0 ? A : -A, consider applying the following
5052 A == 0? A : -A same as -A
5053 A != 0? A : -A same as A
5054 A >= 0? A : -A same as abs (A)
5055 A > 0? A : -A same as abs (A)
5056 A <= 0? A : -A same as -abs (A)
5057 A < 0? A : -A same as -abs (A)
5059 None of these transformations work for modes with signed
5060 zeros. If A is +/-0, the first two transformations will
5061 change the sign of the result (from +0 to -0, or vice
5062 versa). The last four will fix the sign of the result,
5063 even though the original expressions could be positive or
5064 negative, depending on the sign of A.
5066 Note that all these transformations are correct if A is
5067 NaN, since the two alternatives (A and -A) are also NaNs. */
5068 if ((FLOAT_TYPE_P (TREE_TYPE (arg01))
5069 ? real_zerop (arg01)
5070 : integer_zerop (arg01))
5071 && ((TREE_CODE (arg2) == NEGATE_EXPR
5072 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
5073 /* In the case that A is of the form X-Y, '-A' (arg2) may
5074 have already been folded to Y-X, check for that. */
5075 || (TREE_CODE (arg1) == MINUS_EXPR
5076 && TREE_CODE (arg2) == MINUS_EXPR
5077 && operand_equal_p (TREE_OPERAND (arg1, 0),
5078 TREE_OPERAND (arg2, 1), 0)
5079 && operand_equal_p (TREE_OPERAND (arg1, 1),
5080 TREE_OPERAND (arg2, 0), 0))))
5085 tem = fold_convert (arg1_type, arg1);
5086 return pedantic_non_lvalue (fold_convert (type, negate_expr (tem)));
5089 return pedantic_non_lvalue (fold_convert (type, arg1));
5092 if (flag_trapping_math)
5097 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
5098 arg1 = fold_convert (signed_type_for
5099 (TREE_TYPE (arg1)), arg1);
5100 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
5101 return pedantic_non_lvalue (fold_convert (type, tem));
5104 if (flag_trapping_math)
5108 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
5109 arg1 = fold_convert (signed_type_for
5110 (TREE_TYPE (arg1)), arg1);
5111 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
5112 return negate_expr (fold_convert (type, tem));
5114 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5118 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
5119 A == 0 ? A : 0 is always 0 unless A is -0. Note that
5120 both transformations are correct when A is NaN: A != 0
5121 is then true, and A == 0 is false. */
5123 if (integer_zerop (arg01) && integer_zerop (arg2))
5125 if (comp_code == NE_EXPR)
5126 return pedantic_non_lvalue (fold_convert (type, arg1));
5127 else if (comp_code == EQ_EXPR)
5128 return build_int_cst (type, 0);
5131 /* Try some transformations of A op B ? A : B.
5133 A == B? A : B same as B
5134 A != B? A : B same as A
5135 A >= B? A : B same as max (A, B)
5136 A > B? A : B same as max (B, A)
5137 A <= B? A : B same as min (A, B)
5138 A < B? A : B same as min (B, A)
5140 As above, these transformations don't work in the presence
5141 of signed zeros. For example, if A and B are zeros of
5142 opposite sign, the first two transformations will change
5143 the sign of the result. In the last four, the original
5144 expressions give different results for (A=+0, B=-0) and
5145 (A=-0, B=+0), but the transformed expressions do not.
5147 The first two transformations are correct if either A or B
5148 is a NaN. In the first transformation, the condition will
5149 be false, and B will indeed be chosen. In the case of the
5150 second transformation, the condition A != B will be true,
5151 and A will be chosen.
5153 The conversions to max() and min() are not correct if B is
5154 a number and A is not. The conditions in the original
5155 expressions will be false, so all four give B. The min()
5156 and max() versions would give a NaN instead. */
5157 if (operand_equal_for_comparison_p (arg01, arg2, arg00)
5158 /* Avoid these transformations if the COND_EXPR may be used
5159 as an lvalue in the C++ front-end. PR c++/19199. */
5161 || (strcmp (lang_hooks.name, "GNU C++") != 0
5162 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
5163 || ! maybe_lvalue_p (arg1)
5164 || ! maybe_lvalue_p (arg2)))
5166 tree comp_op0 = arg00;
5167 tree comp_op1 = arg01;
5168 tree comp_type = TREE_TYPE (comp_op0);
5170 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
5171 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
5181 return pedantic_non_lvalue (fold_convert (type, arg2));
5183 return pedantic_non_lvalue (fold_convert (type, arg1));
5188 /* In C++ a ?: expression can be an lvalue, so put the
5189 operand which will be used if they are equal first
5190 so that we can convert this back to the
5191 corresponding COND_EXPR. */
5192 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
5194 comp_op0 = fold_convert (comp_type, comp_op0);
5195 comp_op1 = fold_convert (comp_type, comp_op1);
5196 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
5197 ? fold_build2 (MIN_EXPR, comp_type, comp_op0, comp_op1)
5198 : fold_build2 (MIN_EXPR, comp_type, comp_op1, comp_op0);
5199 return pedantic_non_lvalue (fold_convert (type, tem));
5206 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
5208 comp_op0 = fold_convert (comp_type, comp_op0);
5209 comp_op1 = fold_convert (comp_type, comp_op1);
5210 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
5211 ? fold_build2 (MAX_EXPR, comp_type, comp_op0, comp_op1)
5212 : fold_build2 (MAX_EXPR, comp_type, comp_op1, comp_op0);
5213 return pedantic_non_lvalue (fold_convert (type, tem));
5217 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
5218 return pedantic_non_lvalue (fold_convert (type, arg2));
5221 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
5222 return pedantic_non_lvalue (fold_convert (type, arg1));
5225 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5230 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
5231 we might still be able to simplify this. For example,
5232 if C1 is one less or one more than C2, this might have started
5233 out as a MIN or MAX and been transformed by this function.
5234 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
5236 if (INTEGRAL_TYPE_P (type)
5237 && TREE_CODE (arg01) == INTEGER_CST
5238 && TREE_CODE (arg2) == INTEGER_CST)
5242 /* We can replace A with C1 in this case. */
5243 arg1 = fold_convert (type, arg01);
5244 return fold_build3 (COND_EXPR, type, arg0, arg1, arg2);
5247 /* If C1 is C2 + 1, this is min(A, C2). */
5248 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
5250 && operand_equal_p (arg01,
5251 const_binop (PLUS_EXPR, arg2,
5252 build_int_cst (type, 1), 0),
5254 return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
5256 fold_convert (type, arg1),
5261 /* If C1 is C2 - 1, this is min(A, C2). */
5262 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
5264 && operand_equal_p (arg01,
5265 const_binop (MINUS_EXPR, arg2,
5266 build_int_cst (type, 1), 0),
5268 return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
5270 fold_convert (type, arg1),
5275 /* If C1 is C2 - 1, this is max(A, C2). */
5276 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
5278 && operand_equal_p (arg01,
5279 const_binop (MINUS_EXPR, arg2,
5280 build_int_cst (type, 1), 0),
5282 return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
5284 fold_convert (type, arg1),
5289 /* If C1 is C2 + 1, this is max(A, C2). */
5290 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
5292 && operand_equal_p (arg01,
5293 const_binop (PLUS_EXPR, arg2,
5294 build_int_cst (type, 1), 0),
5296 return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
5298 fold_convert (type, arg1),
5312 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
5313 #define LOGICAL_OP_NON_SHORT_CIRCUIT (BRANCH_COST >= 2)
5316 /* EXP is some logical combination of boolean tests. See if we can
5317 merge it into some range test. Return the new tree if so. */
5320 fold_range_test (enum tree_code code, tree type, tree op0, tree op1)
5322 int or_op = (code == TRUTH_ORIF_EXPR
5323 || code == TRUTH_OR_EXPR);
5324 int in0_p, in1_p, in_p;
5325 tree low0, low1, low, high0, high1, high;
5326 bool strict_overflow_p = false;
5327 tree lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
5328 tree rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
5330 const char * const warnmsg = G_("assuming signed overflow does not occur "
5331 "when simplifying range test");
5333 /* If this is an OR operation, invert both sides; we will invert
5334 again at the end. */
5336 in0_p = ! in0_p, in1_p = ! in1_p;
5338 /* If both expressions are the same, if we can merge the ranges, and we
5339 can build the range test, return it or it inverted. If one of the
5340 ranges is always true or always false, consider it to be the same
5341 expression as the other. */
5342 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
5343 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
5345 && 0 != (tem = (build_range_check (type,
5347 : rhs != 0 ? rhs : integer_zero_node,
5350 if (strict_overflow_p)
5351 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
5352 return or_op ? invert_truthvalue (tem) : tem;
5355 /* On machines where the branch cost is expensive, if this is a
5356 short-circuited branch and the underlying object on both sides
5357 is the same, make a non-short-circuit operation. */
5358 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
5359 && lhs != 0 && rhs != 0
5360 && (code == TRUTH_ANDIF_EXPR
5361 || code == TRUTH_ORIF_EXPR)
5362 && operand_equal_p (lhs, rhs, 0))
5364 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
5365 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
5366 which cases we can't do this. */
5367 if (simple_operand_p (lhs))
5368 return build2 (code == TRUTH_ANDIF_EXPR
5369 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5372 else if (lang_hooks.decls.global_bindings_p () == 0
5373 && ! CONTAINS_PLACEHOLDER_P (lhs))
5375 tree common = save_expr (lhs);
5377 if (0 != (lhs = build_range_check (type, common,
5378 or_op ? ! in0_p : in0_p,
5380 && (0 != (rhs = build_range_check (type, common,
5381 or_op ? ! in1_p : in1_p,
5384 if (strict_overflow_p)
5385 fold_overflow_warning (warnmsg,
5386 WARN_STRICT_OVERFLOW_COMPARISON);
5387 return build2 (code == TRUTH_ANDIF_EXPR
5388 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5397 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
5398 bit value. Arrange things so the extra bits will be set to zero if and
5399 only if C is signed-extended to its full width. If MASK is nonzero,
5400 it is an INTEGER_CST that should be AND'ed with the extra bits. */
5403 unextend (tree c, int p, int unsignedp, tree mask)
5405 tree type = TREE_TYPE (c);
5406 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
5409 if (p == modesize || unsignedp)
5412 /* We work by getting just the sign bit into the low-order bit, then
5413 into the high-order bit, then sign-extend. We then XOR that value
5415 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
5416 temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
5418 /* We must use a signed type in order to get an arithmetic right shift.
5419 However, we must also avoid introducing accidental overflows, so that
5420 a subsequent call to integer_zerop will work. Hence we must
5421 do the type conversion here. At this point, the constant is either
5422 zero or one, and the conversion to a signed type can never overflow.
5423 We could get an overflow if this conversion is done anywhere else. */
5424 if (TYPE_UNSIGNED (type))
5425 temp = fold_convert (signed_type_for (type), temp);
5427 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
5428 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
5430 temp = const_binop (BIT_AND_EXPR, temp,
5431 fold_convert (TREE_TYPE (c), mask), 0);
5432 /* If necessary, convert the type back to match the type of C. */
5433 if (TYPE_UNSIGNED (type))
5434 temp = fold_convert (type, temp);
5436 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
5439 /* Find ways of folding logical expressions of LHS and RHS:
5440 Try to merge two comparisons to the same innermost item.
5441 Look for range tests like "ch >= '0' && ch <= '9'".
5442 Look for combinations of simple terms on machines with expensive branches
5443 and evaluate the RHS unconditionally.
5445 For example, if we have p->a == 2 && p->b == 4 and we can make an
5446 object large enough to span both A and B, we can do this with a comparison
5447 against the object ANDed with the a mask.
5449 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5450 operations to do this with one comparison.
5452 We check for both normal comparisons and the BIT_AND_EXPRs made this by
5453 function and the one above.
5455 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5456 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5458 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5461 We return the simplified tree or 0 if no optimization is possible. */
5464 fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs)
5466 /* If this is the "or" of two comparisons, we can do something if
5467 the comparisons are NE_EXPR. If this is the "and", we can do something
5468 if the comparisons are EQ_EXPR. I.e.,
5469 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5471 WANTED_CODE is this operation code. For single bit fields, we can
5472 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5473 comparison for one-bit fields. */
5475 enum tree_code wanted_code;
5476 enum tree_code lcode, rcode;
5477 tree ll_arg, lr_arg, rl_arg, rr_arg;
5478 tree ll_inner, lr_inner, rl_inner, rr_inner;
5479 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5480 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5481 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5482 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5483 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5484 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5485 enum machine_mode lnmode, rnmode;
5486 tree ll_mask, lr_mask, rl_mask, rr_mask;
5487 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5488 tree l_const, r_const;
5489 tree lntype, rntype, result;
5490 int first_bit, end_bit;
5492 tree orig_lhs = lhs, orig_rhs = rhs;
5493 enum tree_code orig_code = code;
5495 /* Start by getting the comparison codes. Fail if anything is volatile.
5496 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5497 it were surrounded with a NE_EXPR. */
5499 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5502 lcode = TREE_CODE (lhs);
5503 rcode = TREE_CODE (rhs);
5505 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5507 lhs = build2 (NE_EXPR, truth_type, lhs,
5508 build_int_cst (TREE_TYPE (lhs), 0));
5512 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5514 rhs = build2 (NE_EXPR, truth_type, rhs,
5515 build_int_cst (TREE_TYPE (rhs), 0));
5519 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5520 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5523 ll_arg = TREE_OPERAND (lhs, 0);
5524 lr_arg = TREE_OPERAND (lhs, 1);
5525 rl_arg = TREE_OPERAND (rhs, 0);
5526 rr_arg = TREE_OPERAND (rhs, 1);
5528 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5529 if (simple_operand_p (ll_arg)
5530 && simple_operand_p (lr_arg))
5533 if (operand_equal_p (ll_arg, rl_arg, 0)
5534 && operand_equal_p (lr_arg, rr_arg, 0))
5536 result = combine_comparisons (code, lcode, rcode,
5537 truth_type, ll_arg, lr_arg);
5541 else if (operand_equal_p (ll_arg, rr_arg, 0)
5542 && operand_equal_p (lr_arg, rl_arg, 0))
5544 result = combine_comparisons (code, lcode,
5545 swap_tree_comparison (rcode),
5546 truth_type, ll_arg, lr_arg);
5552 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5553 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5555 /* If the RHS can be evaluated unconditionally and its operands are
5556 simple, it wins to evaluate the RHS unconditionally on machines
5557 with expensive branches. In this case, this isn't a comparison
5558 that can be merged. Avoid doing this if the RHS is a floating-point
5559 comparison since those can trap. */
5561 if (BRANCH_COST >= 2
5562 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5563 && simple_operand_p (rl_arg)
5564 && simple_operand_p (rr_arg))
5566 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5567 if (code == TRUTH_OR_EXPR
5568 && lcode == NE_EXPR && integer_zerop (lr_arg)
5569 && rcode == NE_EXPR && integer_zerop (rr_arg)
5570 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
5571 return build2 (NE_EXPR, truth_type,
5572 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5574 build_int_cst (TREE_TYPE (ll_arg), 0));
5576 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5577 if (code == TRUTH_AND_EXPR
5578 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5579 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5580 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
5581 return build2 (EQ_EXPR, truth_type,
5582 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5584 build_int_cst (TREE_TYPE (ll_arg), 0));
5586 if (LOGICAL_OP_NON_SHORT_CIRCUIT)
5588 if (code != orig_code || lhs != orig_lhs || rhs != orig_rhs)
5589 return build2 (code, truth_type, lhs, rhs);
5594 /* See if the comparisons can be merged. Then get all the parameters for
5597 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5598 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5602 ll_inner = decode_field_reference (ll_arg,
5603 &ll_bitsize, &ll_bitpos, &ll_mode,
5604 &ll_unsignedp, &volatilep, &ll_mask,
5606 lr_inner = decode_field_reference (lr_arg,
5607 &lr_bitsize, &lr_bitpos, &lr_mode,
5608 &lr_unsignedp, &volatilep, &lr_mask,
5610 rl_inner = decode_field_reference (rl_arg,
5611 &rl_bitsize, &rl_bitpos, &rl_mode,
5612 &rl_unsignedp, &volatilep, &rl_mask,
5614 rr_inner = decode_field_reference (rr_arg,
5615 &rr_bitsize, &rr_bitpos, &rr_mode,
5616 &rr_unsignedp, &volatilep, &rr_mask,
5619 /* It must be true that the inner operation on the lhs of each
5620 comparison must be the same if we are to be able to do anything.
5621 Then see if we have constants. If not, the same must be true for
5623 if (volatilep || ll_inner == 0 || rl_inner == 0
5624 || ! operand_equal_p (ll_inner, rl_inner, 0))
5627 if (TREE_CODE (lr_arg) == INTEGER_CST
5628 && TREE_CODE (rr_arg) == INTEGER_CST)
5629 l_const = lr_arg, r_const = rr_arg;
5630 else if (lr_inner == 0 || rr_inner == 0
5631 || ! operand_equal_p (lr_inner, rr_inner, 0))
5634 l_const = r_const = 0;
5636 /* If either comparison code is not correct for our logical operation,
5637 fail. However, we can convert a one-bit comparison against zero into
5638 the opposite comparison against that bit being set in the field. */
5640 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5641 if (lcode != wanted_code)
5643 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5645 /* Make the left operand unsigned, since we are only interested
5646 in the value of one bit. Otherwise we are doing the wrong
5655 /* This is analogous to the code for l_const above. */
5656 if (rcode != wanted_code)
5658 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5667 /* See if we can find a mode that contains both fields being compared on
5668 the left. If we can't, fail. Otherwise, update all constants and masks
5669 to be relative to a field of that size. */
5670 first_bit = MIN (ll_bitpos, rl_bitpos);
5671 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5672 lnmode = get_best_mode (end_bit - first_bit, first_bit,
5673 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5675 if (lnmode == VOIDmode)
5678 lnbitsize = GET_MODE_BITSIZE (lnmode);
5679 lnbitpos = first_bit & ~ (lnbitsize - 1);
5680 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5681 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5683 if (BYTES_BIG_ENDIAN)
5685 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5686 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5689 ll_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, ll_mask),
5690 size_int (xll_bitpos), 0);
5691 rl_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, rl_mask),
5692 size_int (xrl_bitpos), 0);
5696 l_const = fold_convert (lntype, l_const);
5697 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5698 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
5699 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5700 fold_build1 (BIT_NOT_EXPR,
5704 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5706 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5711 r_const = fold_convert (lntype, r_const);
5712 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5713 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
5714 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5715 fold_build1 (BIT_NOT_EXPR,
5719 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5721 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5725 /* If the right sides are not constant, do the same for it. Also,
5726 disallow this optimization if a size or signedness mismatch occurs
5727 between the left and right sides. */
5730 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5731 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5732 /* Make sure the two fields on the right
5733 correspond to the left without being swapped. */
5734 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5737 first_bit = MIN (lr_bitpos, rr_bitpos);
5738 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5739 rnmode = get_best_mode (end_bit - first_bit, first_bit,
5740 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5742 if (rnmode == VOIDmode)
5745 rnbitsize = GET_MODE_BITSIZE (rnmode);
5746 rnbitpos = first_bit & ~ (rnbitsize - 1);
5747 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5748 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5750 if (BYTES_BIG_ENDIAN)
5752 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5753 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5756 lr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, lr_mask),
5757 size_int (xlr_bitpos), 0);
5758 rr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, rr_mask),
5759 size_int (xrr_bitpos), 0);
5761 /* Make a mask that corresponds to both fields being compared.
5762 Do this for both items being compared. If the operands are the
5763 same size and the bits being compared are in the same position
5764 then we can do this by masking both and comparing the masked
5766 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
5767 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
5768 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5770 lhs = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
5771 ll_unsignedp || rl_unsignedp);
5772 if (! all_ones_mask_p (ll_mask, lnbitsize))
5773 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5775 rhs = make_bit_field_ref (lr_inner, rntype, rnbitsize, rnbitpos,
5776 lr_unsignedp || rr_unsignedp);
5777 if (! all_ones_mask_p (lr_mask, rnbitsize))
5778 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5780 return build2 (wanted_code, truth_type, lhs, rhs);
5783 /* There is still another way we can do something: If both pairs of
5784 fields being compared are adjacent, we may be able to make a wider
5785 field containing them both.
5787 Note that we still must mask the lhs/rhs expressions. Furthermore,
5788 the mask must be shifted to account for the shift done by
5789 make_bit_field_ref. */
5790 if ((ll_bitsize + ll_bitpos == rl_bitpos
5791 && lr_bitsize + lr_bitpos == rr_bitpos)
5792 || (ll_bitpos == rl_bitpos + rl_bitsize
5793 && lr_bitpos == rr_bitpos + rr_bitsize))
5797 lhs = make_bit_field_ref (ll_inner, lntype, ll_bitsize + rl_bitsize,
5798 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5799 rhs = make_bit_field_ref (lr_inner, rntype, lr_bitsize + rr_bitsize,
5800 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5802 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5803 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
5804 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5805 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);
5807 /* Convert to the smaller type before masking out unwanted bits. */
5809 if (lntype != rntype)
5811 if (lnbitsize > rnbitsize)
5813 lhs = fold_convert (rntype, lhs);
5814 ll_mask = fold_convert (rntype, ll_mask);
5817 else if (lnbitsize < rnbitsize)
5819 rhs = fold_convert (lntype, rhs);
5820 lr_mask = fold_convert (lntype, lr_mask);
5825 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5826 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5828 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5829 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5831 return build2 (wanted_code, truth_type, lhs, rhs);
5837 /* Handle the case of comparisons with constants. If there is something in
5838 common between the masks, those bits of the constants must be the same.
5839 If not, the condition is always false. Test for this to avoid generating
5840 incorrect code below. */
5841 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
5842 if (! integer_zerop (result)
5843 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
5844 const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
5846 if (wanted_code == NE_EXPR)
5848 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5849 return constant_boolean_node (true, truth_type);
5853 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5854 return constant_boolean_node (false, truth_type);
5858 /* Construct the expression we will return. First get the component
5859 reference we will make. Unless the mask is all ones the width of
5860 that field, perform the mask operation. Then compare with the
5862 result = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
5863 ll_unsignedp || rl_unsignedp);
5865 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
5866 if (! all_ones_mask_p (ll_mask, lnbitsize))
5867 result = build2 (BIT_AND_EXPR, lntype, result, ll_mask);
5869 return build2 (wanted_code, truth_type, result,
5870 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
5873 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5877 optimize_minmax_comparison (enum tree_code code, tree type, tree op0, tree op1)
5880 enum tree_code op_code;
5881 tree comp_const = op1;
5883 int consts_equal, consts_lt;
5886 STRIP_SIGN_NOPS (arg0);
5888 op_code = TREE_CODE (arg0);
5889 minmax_const = TREE_OPERAND (arg0, 1);
5890 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5891 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5892 inner = TREE_OPERAND (arg0, 0);
5894 /* If something does not permit us to optimize, return the original tree. */
5895 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5896 || TREE_CODE (comp_const) != INTEGER_CST
5897 || TREE_OVERFLOW (comp_const)
5898 || TREE_CODE (minmax_const) != INTEGER_CST
5899 || TREE_OVERFLOW (minmax_const))
5902 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5903 and GT_EXPR, doing the rest with recursive calls using logical
5907 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5909 tree tem = optimize_minmax_comparison (invert_tree_comparison (code, false),
5912 return invert_truthvalue (tem);
5918 fold_build2 (TRUTH_ORIF_EXPR, type,
5919 optimize_minmax_comparison
5920 (EQ_EXPR, type, arg0, comp_const),
5921 optimize_minmax_comparison
5922 (GT_EXPR, type, arg0, comp_const));
5925 if (op_code == MAX_EXPR && consts_equal)
5926 /* MAX (X, 0) == 0 -> X <= 0 */
5927 return fold_build2 (LE_EXPR, type, inner, comp_const);
5929 else if (op_code == MAX_EXPR && consts_lt)
5930 /* MAX (X, 0) == 5 -> X == 5 */
5931 return fold_build2 (EQ_EXPR, type, inner, comp_const);
5933 else if (op_code == MAX_EXPR)
5934 /* MAX (X, 0) == -1 -> false */
5935 return omit_one_operand (type, integer_zero_node, inner);
5937 else if (consts_equal)
5938 /* MIN (X, 0) == 0 -> X >= 0 */
5939 return fold_build2 (GE_EXPR, type, inner, comp_const);
5942 /* MIN (X, 0) == 5 -> false */
5943 return omit_one_operand (type, integer_zero_node, inner);
5946 /* MIN (X, 0) == -1 -> X == -1 */
5947 return fold_build2 (EQ_EXPR, type, inner, comp_const);
5950 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5951 /* MAX (X, 0) > 0 -> X > 0
5952 MAX (X, 0) > 5 -> X > 5 */
5953 return fold_build2 (GT_EXPR, type, inner, comp_const);
5955 else if (op_code == MAX_EXPR)
5956 /* MAX (X, 0) > -1 -> true */
5957 return omit_one_operand (type, integer_one_node, inner);
5959 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5960 /* MIN (X, 0) > 0 -> false
5961 MIN (X, 0) > 5 -> false */
5962 return omit_one_operand (type, integer_zero_node, inner);
5965 /* MIN (X, 0) > -1 -> X > -1 */
5966 return fold_build2 (GT_EXPR, type, inner, comp_const);
5973 /* T is an integer expression that is being multiplied, divided, or taken a
5974 modulus (CODE says which and what kind of divide or modulus) by a
5975 constant C. See if we can eliminate that operation by folding it with
5976 other operations already in T. WIDE_TYPE, if non-null, is a type that
5977 should be used for the computation if wider than our type.
5979 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5980 (X * 2) + (Y * 4). We must, however, be assured that either the original
5981 expression would not overflow or that overflow is undefined for the type
5982 in the language in question.
5984 We also canonicalize (X + 7) * 4 into X * 4 + 28 in the hope that either
5985 the machine has a multiply-accumulate insn or that this is part of an
5986 addressing calculation.
5988 If we return a non-null expression, it is an equivalent form of the
5989 original computation, but need not be in the original type.
5991 We set *STRICT_OVERFLOW_P to true if the return values depends on
5992 signed overflow being undefined. Otherwise we do not change
5993 *STRICT_OVERFLOW_P. */
5996 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
5997 bool *strict_overflow_p)
5999 /* To avoid exponential search depth, refuse to allow recursion past
6000 three levels. Beyond that (1) it's highly unlikely that we'll find
6001 something interesting and (2) we've probably processed it before
6002 when we built the inner expression. */
6011 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
6018 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
6019 bool *strict_overflow_p)
6021 tree type = TREE_TYPE (t);
6022 enum tree_code tcode = TREE_CODE (t);
6023 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
6024 > GET_MODE_SIZE (TYPE_MODE (type)))
6025 ? wide_type : type);
6027 int same_p = tcode == code;
6028 tree op0 = NULL_TREE, op1 = NULL_TREE;
6029 bool sub_strict_overflow_p;
6031 /* Don't deal with constants of zero here; they confuse the code below. */
6032 if (integer_zerop (c))
6035 if (TREE_CODE_CLASS (tcode) == tcc_unary)
6036 op0 = TREE_OPERAND (t, 0);
6038 if (TREE_CODE_CLASS (tcode) == tcc_binary)
6039 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
6041 /* Note that we need not handle conditional operations here since fold
6042 already handles those cases. So just do arithmetic here. */
6046 /* For a constant, we can always simplify if we are a multiply
6047 or (for divide and modulus) if it is a multiple of our constant. */
6048 if (code == MULT_EXPR
6049 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
6050 return const_binop (code, fold_convert (ctype, t),
6051 fold_convert (ctype, c), 0);
6054 case CONVERT_EXPR: case NON_LVALUE_EXPR: case NOP_EXPR:
6055 /* If op0 is an expression ... */
6056 if ((COMPARISON_CLASS_P (op0)
6057 || UNARY_CLASS_P (op0)
6058 || BINARY_CLASS_P (op0)
6059 || VL_EXP_CLASS_P (op0)
6060 || EXPRESSION_CLASS_P (op0))
6061 /* ... and is unsigned, and its type is smaller than ctype,
6062 then we cannot pass through as widening. */
6063 && ((TYPE_UNSIGNED (TREE_TYPE (op0))
6064 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
6065 && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
6066 && (GET_MODE_SIZE (TYPE_MODE (ctype))
6067 > GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0)))))
6068 /* ... or this is a truncation (t is narrower than op0),
6069 then we cannot pass through this narrowing. */
6070 || (GET_MODE_SIZE (TYPE_MODE (type))
6071 < GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0))))
6072 /* ... or signedness changes for division or modulus,
6073 then we cannot pass through this conversion. */
6074 || (code != MULT_EXPR
6075 && (TYPE_UNSIGNED (ctype)
6076 != TYPE_UNSIGNED (TREE_TYPE (op0))))
6077 /* ... or has undefined overflow while the converted to
6078 type has not, we cannot do the operation in the inner type
6079 as that would introduce undefined overflow. */
6080 || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
6081 && !TYPE_OVERFLOW_UNDEFINED (type))))
6084 /* Pass the constant down and see if we can make a simplification. If
6085 we can, replace this expression with the inner simplification for
6086 possible later conversion to our or some other type. */
6087 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
6088 && TREE_CODE (t2) == INTEGER_CST
6089 && !TREE_OVERFLOW (t2)
6090 && (0 != (t1 = extract_muldiv (op0, t2, code,
6092 ? ctype : NULL_TREE,
6093 strict_overflow_p))))
6098 /* If widening the type changes it from signed to unsigned, then we
6099 must avoid building ABS_EXPR itself as unsigned. */
6100 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
6102 tree cstype = (*signed_type_for) (ctype);
6103 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
6106 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
6107 return fold_convert (ctype, t1);
6111 /* If the constant is negative, we cannot simplify this. */
6112 if (tree_int_cst_sgn (c) == -1)
6116 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
6118 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
6121 case MIN_EXPR: case MAX_EXPR:
6122 /* If widening the type changes the signedness, then we can't perform
6123 this optimization as that changes the result. */
6124 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
6127 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
6128 sub_strict_overflow_p = false;
6129 if ((t1 = extract_muldiv (op0, c, code, wide_type,
6130 &sub_strict_overflow_p)) != 0
6131 && (t2 = extract_muldiv (op1, c, code, wide_type,
6132 &sub_strict_overflow_p)) != 0)
6134 if (tree_int_cst_sgn (c) < 0)
6135 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
6136 if (sub_strict_overflow_p)
6137 *strict_overflow_p = true;
6138 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6139 fold_convert (ctype, t2));
6143 case LSHIFT_EXPR: case RSHIFT_EXPR:
6144 /* If the second operand is constant, this is a multiplication
6145 or floor division, by a power of two, so we can treat it that
6146 way unless the multiplier or divisor overflows. Signed
6147 left-shift overflow is implementation-defined rather than
6148 undefined in C90, so do not convert signed left shift into
6150 if (TREE_CODE (op1) == INTEGER_CST
6151 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
6152 /* const_binop may not detect overflow correctly,
6153 so check for it explicitly here. */
6154 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
6155 && TREE_INT_CST_HIGH (op1) == 0
6156 && 0 != (t1 = fold_convert (ctype,
6157 const_binop (LSHIFT_EXPR,
6160 && !TREE_OVERFLOW (t1))
6161 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
6162 ? MULT_EXPR : FLOOR_DIV_EXPR,
6163 ctype, fold_convert (ctype, op0), t1),
6164 c, code, wide_type, strict_overflow_p);
6167 case PLUS_EXPR: case MINUS_EXPR:
6168 /* See if we can eliminate the operation on both sides. If we can, we
6169 can return a new PLUS or MINUS. If we can't, the only remaining
6170 cases where we can do anything are if the second operand is a
6172 sub_strict_overflow_p = false;
6173 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
6174 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
6175 if (t1 != 0 && t2 != 0
6176 && (code == MULT_EXPR
6177 /* If not multiplication, we can only do this if both operands
6178 are divisible by c. */
6179 || (multiple_of_p (ctype, op0, c)
6180 && multiple_of_p (ctype, op1, c))))
6182 if (sub_strict_overflow_p)
6183 *strict_overflow_p = true;
6184 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6185 fold_convert (ctype, t2));
6188 /* If this was a subtraction, negate OP1 and set it to be an addition.
6189 This simplifies the logic below. */
6190 if (tcode == MINUS_EXPR)
6191 tcode = PLUS_EXPR, op1 = negate_expr (op1);
6193 if (TREE_CODE (op1) != INTEGER_CST)
6196 /* If either OP1 or C are negative, this optimization is not safe for
6197 some of the division and remainder types while for others we need
6198 to change the code. */
6199 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
6201 if (code == CEIL_DIV_EXPR)
6202 code = FLOOR_DIV_EXPR;
6203 else if (code == FLOOR_DIV_EXPR)
6204 code = CEIL_DIV_EXPR;
6205 else if (code != MULT_EXPR
6206 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
6210 /* If it's a multiply or a division/modulus operation of a multiple
6211 of our constant, do the operation and verify it doesn't overflow. */
6212 if (code == MULT_EXPR
6213 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
6215 op1 = const_binop (code, fold_convert (ctype, op1),
6216 fold_convert (ctype, c), 0);
6217 /* We allow the constant to overflow with wrapping semantics. */
6219 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
6225 /* If we have an unsigned type is not a sizetype, we cannot widen
6226 the operation since it will change the result if the original
6227 computation overflowed. */
6228 if (TYPE_UNSIGNED (ctype)
6229 && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
6233 /* If we were able to eliminate our operation from the first side,
6234 apply our operation to the second side and reform the PLUS. */
6235 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
6236 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
6238 /* The last case is if we are a multiply. In that case, we can
6239 apply the distributive law to commute the multiply and addition
6240 if the multiplication of the constants doesn't overflow. */
6241 if (code == MULT_EXPR)
6242 return fold_build2 (tcode, ctype,
6243 fold_build2 (code, ctype,
6244 fold_convert (ctype, op0),
6245 fold_convert (ctype, c)),
6251 /* We have a special case here if we are doing something like
6252 (C * 8) % 4 since we know that's zero. */
6253 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
6254 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
6255 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
6256 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
6257 return omit_one_operand (type, integer_zero_node, op0);
6259 /* ... fall through ... */
6261 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
6262 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
6263 /* If we can extract our operation from the LHS, do so and return a
6264 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
6265 do something only if the second operand is a constant. */
6267 && (t1 = extract_muldiv (op0, c, code, wide_type,
6268 strict_overflow_p)) != 0)
6269 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6270 fold_convert (ctype, op1));
6271 else if (tcode == MULT_EXPR && code == MULT_EXPR
6272 && (t1 = extract_muldiv (op1, c, code, wide_type,
6273 strict_overflow_p)) != 0)
6274 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6275 fold_convert (ctype, t1));
6276 else if (TREE_CODE (op1) != INTEGER_CST)
6279 /* If these are the same operation types, we can associate them
6280 assuming no overflow. */
6282 && 0 != (t1 = const_binop (MULT_EXPR, fold_convert (ctype, op1),
6283 fold_convert (ctype, c), 0))
6284 && !TREE_OVERFLOW (t1))
6285 return fold_build2 (tcode, ctype, fold_convert (ctype, op0), t1);
6287 /* If these operations "cancel" each other, we have the main
6288 optimizations of this pass, which occur when either constant is a
6289 multiple of the other, in which case we replace this with either an
6290 operation or CODE or TCODE.
6292 If we have an unsigned type that is not a sizetype, we cannot do
6293 this since it will change the result if the original computation
6295 if ((TYPE_OVERFLOW_UNDEFINED (ctype)
6296 || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
6297 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
6298 || (tcode == MULT_EXPR
6299 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
6300 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
6301 && code != MULT_EXPR)))
6303 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
6305 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6306 *strict_overflow_p = true;
6307 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6308 fold_convert (ctype,
6309 const_binop (TRUNC_DIV_EXPR,
6312 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
6314 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6315 *strict_overflow_p = true;
6316 return fold_build2 (code, ctype, fold_convert (ctype, op0),
6317 fold_convert (ctype,
6318 const_binop (TRUNC_DIV_EXPR,
6331 /* Return a node which has the indicated constant VALUE (either 0 or
6332 1), and is of the indicated TYPE. */
6335 constant_boolean_node (int value, tree type)
6337 if (type == integer_type_node)
6338 return value ? integer_one_node : integer_zero_node;
6339 else if (type == boolean_type_node)
6340 return value ? boolean_true_node : boolean_false_node;
6342 return build_int_cst (type, value);
6346 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
6347 Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
6348 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
6349 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
6350 COND is the first argument to CODE; otherwise (as in the example
6351 given here), it is the second argument. TYPE is the type of the
6352 original expression. Return NULL_TREE if no simplification is
6356 fold_binary_op_with_conditional_arg (enum tree_code code,
6357 tree type, tree op0, tree op1,
6358 tree cond, tree arg, int cond_first_p)
6360 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
6361 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
6362 tree test, true_value, false_value;
6363 tree lhs = NULL_TREE;
6364 tree rhs = NULL_TREE;
6366 /* This transformation is only worthwhile if we don't have to wrap
6367 arg in a SAVE_EXPR, and the operation can be simplified on at least
6368 one of the branches once its pushed inside the COND_EXPR. */
6369 if (!TREE_CONSTANT (arg))
6372 if (TREE_CODE (cond) == COND_EXPR)
6374 test = TREE_OPERAND (cond, 0);
6375 true_value = TREE_OPERAND (cond, 1);
6376 false_value = TREE_OPERAND (cond, 2);
6377 /* If this operand throws an expression, then it does not make
6378 sense to try to perform a logical or arithmetic operation
6380 if (VOID_TYPE_P (TREE_TYPE (true_value)))
6382 if (VOID_TYPE_P (TREE_TYPE (false_value)))
6387 tree testtype = TREE_TYPE (cond);
6389 true_value = constant_boolean_node (true, testtype);
6390 false_value = constant_boolean_node (false, testtype);
6393 arg = fold_convert (arg_type, arg);
6396 true_value = fold_convert (cond_type, true_value);
6398 lhs = fold_build2 (code, type, true_value, arg);
6400 lhs = fold_build2 (code, type, arg, true_value);
6404 false_value = fold_convert (cond_type, false_value);
6406 rhs = fold_build2 (code, type, false_value, arg);
6408 rhs = fold_build2 (code, type, arg, false_value);
6411 test = fold_build3 (COND_EXPR, type, test, lhs, rhs);
6412 return fold_convert (type, test);
6416 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6418 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6419 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6420 ADDEND is the same as X.
6422 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6423 and finite. The problematic cases are when X is zero, and its mode
6424 has signed zeros. In the case of rounding towards -infinity,
6425 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6426 modes, X + 0 is not the same as X because -0 + 0 is 0. */
6429 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
6431 if (!real_zerop (addend))
6434 /* Don't allow the fold with -fsignaling-nans. */
6435 if (HONOR_SNANS (TYPE_MODE (type)))
6438 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6439 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
6442 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6443 if (TREE_CODE (addend) == REAL_CST
6444 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6447 /* The mode has signed zeros, and we have to honor their sign.
6448 In this situation, there is only one case we can return true for.
6449 X - 0 is the same as X unless rounding towards -infinity is
6451 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
6454 /* Subroutine of fold() that checks comparisons of built-in math
6455 functions against real constants.
6457 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
6458 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
6459 is the type of the result and ARG0 and ARG1 are the operands of the
6460 comparison. ARG1 must be a TREE_REAL_CST.
6462 The function returns the constant folded tree if a simplification
6463 can be made, and NULL_TREE otherwise. */
6466 fold_mathfn_compare (enum built_in_function fcode, enum tree_code code,
6467 tree type, tree arg0, tree arg1)
6471 if (BUILTIN_SQRT_P (fcode))
6473 tree arg = CALL_EXPR_ARG (arg0, 0);
6474 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
6476 c = TREE_REAL_CST (arg1);
6477 if (REAL_VALUE_NEGATIVE (c))
6479 /* sqrt(x) < y is always false, if y is negative. */
6480 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
6481 return omit_one_operand (type, integer_zero_node, arg);
6483 /* sqrt(x) > y is always true, if y is negative and we
6484 don't care about NaNs, i.e. negative values of x. */
6485 if (code == NE_EXPR || !HONOR_NANS (mode))
6486 return omit_one_operand (type, integer_one_node, arg);
6488 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
6489 return fold_build2 (GE_EXPR, type, arg,
6490 build_real (TREE_TYPE (arg), dconst0));
6492 else if (code == GT_EXPR || code == GE_EXPR)
6496 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6497 real_convert (&c2, mode, &c2);
6499 if (REAL_VALUE_ISINF (c2))
6501 /* sqrt(x) > y is x == +Inf, when y is very large. */
6502 if (HONOR_INFINITIES (mode))
6503 return fold_build2 (EQ_EXPR, type, arg,
6504 build_real (TREE_TYPE (arg), c2));
6506 /* sqrt(x) > y is always false, when y is very large
6507 and we don't care about infinities. */
6508 return omit_one_operand (type, integer_zero_node, arg);
6511 /* sqrt(x) > c is the same as x > c*c. */
6512 return fold_build2 (code, type, arg,
6513 build_real (TREE_TYPE (arg), c2));
6515 else if (code == LT_EXPR || code == LE_EXPR)
6519 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6520 real_convert (&c2, mode, &c2);
6522 if (REAL_VALUE_ISINF (c2))
6524 /* sqrt(x) < y is always true, when y is a very large
6525 value and we don't care about NaNs or Infinities. */
6526 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
6527 return omit_one_operand (type, integer_one_node, arg);
6529 /* sqrt(x) < y is x != +Inf when y is very large and we
6530 don't care about NaNs. */
6531 if (! HONOR_NANS (mode))
6532 return fold_build2 (NE_EXPR, type, arg,
6533 build_real (TREE_TYPE (arg), c2));
6535 /* sqrt(x) < y is x >= 0 when y is very large and we
6536 don't care about Infinities. */
6537 if (! HONOR_INFINITIES (mode))
6538 return fold_build2 (GE_EXPR, type, arg,
6539 build_real (TREE_TYPE (arg), dconst0));
6541 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
6542 if (lang_hooks.decls.global_bindings_p () != 0
6543 || CONTAINS_PLACEHOLDER_P (arg))
6546 arg = save_expr (arg);
6547 return fold_build2 (TRUTH_ANDIF_EXPR, type,
6548 fold_build2 (GE_EXPR, type, arg,
6549 build_real (TREE_TYPE (arg),
6551 fold_build2 (NE_EXPR, type, arg,
6552 build_real (TREE_TYPE (arg),
6556 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
6557 if (! HONOR_NANS (mode))
6558 return fold_build2 (code, type, arg,
6559 build_real (TREE_TYPE (arg), c2));
6561 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
6562 if (lang_hooks.decls.global_bindings_p () == 0
6563 && ! CONTAINS_PLACEHOLDER_P (arg))
6565 arg = save_expr (arg);
6566 return fold_build2 (TRUTH_ANDIF_EXPR, type,
6567 fold_build2 (GE_EXPR, type, arg,
6568 build_real (TREE_TYPE (arg),
6570 fold_build2 (code, type, arg,
6571 build_real (TREE_TYPE (arg),
6580 /* Subroutine of fold() that optimizes comparisons against Infinities,
6581 either +Inf or -Inf.
6583 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6584 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6585 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6587 The function returns the constant folded tree if a simplification
6588 can be made, and NULL_TREE otherwise. */
6591 fold_inf_compare (enum tree_code code, tree type, tree arg0, tree arg1)
6593 enum machine_mode mode;
6594 REAL_VALUE_TYPE max;
6598 mode = TYPE_MODE (TREE_TYPE (arg0));
6600 /* For negative infinity swap the sense of the comparison. */
6601 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
6603 code = swap_tree_comparison (code);
6608 /* x > +Inf is always false, if with ignore sNANs. */
6609 if (HONOR_SNANS (mode))
6611 return omit_one_operand (type, integer_zero_node, arg0);
6614 /* x <= +Inf is always true, if we don't case about NaNs. */
6615 if (! HONOR_NANS (mode))
6616 return omit_one_operand (type, integer_one_node, arg0);
6618 /* x <= +Inf is the same as x == x, i.e. isfinite(x). */
6619 if (lang_hooks.decls.global_bindings_p () == 0
6620 && ! CONTAINS_PLACEHOLDER_P (arg0))
6622 arg0 = save_expr (arg0);
6623 return fold_build2 (EQ_EXPR, type, arg0, arg0);
6629 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6630 real_maxval (&max, neg, mode);
6631 return fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
6632 arg0, build_real (TREE_TYPE (arg0), max));
6635 /* x < +Inf is always equal to x <= DBL_MAX. */
6636 real_maxval (&max, neg, mode);
6637 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
6638 arg0, build_real (TREE_TYPE (arg0), max));
6641 /* x != +Inf is always equal to !(x > DBL_MAX). */
6642 real_maxval (&max, neg, mode);
6643 if (! HONOR_NANS (mode))
6644 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
6645 arg0, build_real (TREE_TYPE (arg0), max));
6647 temp = fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
6648 arg0, build_real (TREE_TYPE (arg0), max));
6649 return fold_build1 (TRUTH_NOT_EXPR, type, temp);
6658 /* Subroutine of fold() that optimizes comparisons of a division by
6659 a nonzero integer constant against an integer constant, i.e.
6662 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6663 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6664 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6666 The function returns the constant folded tree if a simplification
6667 can be made, and NULL_TREE otherwise. */
6670 fold_div_compare (enum tree_code code, tree type, tree arg0, tree arg1)
6672 tree prod, tmp, hi, lo;
6673 tree arg00 = TREE_OPERAND (arg0, 0);
6674 tree arg01 = TREE_OPERAND (arg0, 1);
6675 unsigned HOST_WIDE_INT lpart;
6676 HOST_WIDE_INT hpart;
6677 bool unsigned_p = TYPE_UNSIGNED (TREE_TYPE (arg0));
6681 /* We have to do this the hard way to detect unsigned overflow.
6682 prod = int_const_binop (MULT_EXPR, arg01, arg1, 0); */
6683 overflow = mul_double_with_sign (TREE_INT_CST_LOW (arg01),
6684 TREE_INT_CST_HIGH (arg01),
6685 TREE_INT_CST_LOW (arg1),
6686 TREE_INT_CST_HIGH (arg1),
6687 &lpart, &hpart, unsigned_p);
6688 prod = force_fit_type_double (TREE_TYPE (arg00), lpart, hpart,
6690 neg_overflow = false;
6694 tmp = int_const_binop (MINUS_EXPR, arg01,
6695 build_int_cst (TREE_TYPE (arg01), 1), 0);
6698 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0). */
6699 overflow = add_double_with_sign (TREE_INT_CST_LOW (prod),
6700 TREE_INT_CST_HIGH (prod),
6701 TREE_INT_CST_LOW (tmp),
6702 TREE_INT_CST_HIGH (tmp),
6703 &lpart, &hpart, unsigned_p);
6704 hi = force_fit_type_double (TREE_TYPE (arg00), lpart, hpart,
6705 -1, overflow | TREE_OVERFLOW (prod));
6707 else if (tree_int_cst_sgn (arg01) >= 0)
6709 tmp = int_const_binop (MINUS_EXPR, arg01,
6710 build_int_cst (TREE_TYPE (arg01), 1), 0);
6711 switch (tree_int_cst_sgn (arg1))
6714 neg_overflow = true;
6715 lo = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6720 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6725 hi = int_const_binop (PLUS_EXPR, prod, tmp, 0);
6735 /* A negative divisor reverses the relational operators. */
6736 code = swap_tree_comparison (code);
6738 tmp = int_const_binop (PLUS_EXPR, arg01,
6739 build_int_cst (TREE_TYPE (arg01), 1), 0);
6740 switch (tree_int_cst_sgn (arg1))
6743 hi = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6748 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6753 neg_overflow = true;
6754 lo = int_const_binop (PLUS_EXPR, prod, tmp, 0);
6766 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6767 return omit_one_operand (type, integer_zero_node, arg00);
6768 if (TREE_OVERFLOW (hi))
6769 return fold_build2 (GE_EXPR, type, arg00, lo);
6770 if (TREE_OVERFLOW (lo))
6771 return fold_build2 (LE_EXPR, type, arg00, hi);
6772 return build_range_check (type, arg00, 1, lo, hi);
6775 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6776 return omit_one_operand (type, integer_one_node, arg00);
6777 if (TREE_OVERFLOW (hi))
6778 return fold_build2 (LT_EXPR, type, arg00, lo);
6779 if (TREE_OVERFLOW (lo))
6780 return fold_build2 (GT_EXPR, type, arg00, hi);
6781 return build_range_check (type, arg00, 0, lo, hi);
6784 if (TREE_OVERFLOW (lo))
6786 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6787 return omit_one_operand (type, tmp, arg00);
6789 return fold_build2 (LT_EXPR, type, arg00, lo);
6792 if (TREE_OVERFLOW (hi))
6794 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6795 return omit_one_operand (type, tmp, arg00);
6797 return fold_build2 (LE_EXPR, type, arg00, hi);
6800 if (TREE_OVERFLOW (hi))
6802 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6803 return omit_one_operand (type, tmp, arg00);
6805 return fold_build2 (GT_EXPR, type, arg00, hi);
6808 if (TREE_OVERFLOW (lo))
6810 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6811 return omit_one_operand (type, tmp, arg00);
6813 return fold_build2 (GE_EXPR, type, arg00, lo);
6823 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6824 equality/inequality test, then return a simplified form of the test
6825 using a sign testing. Otherwise return NULL. TYPE is the desired
6829 fold_single_bit_test_into_sign_test (enum tree_code code, tree arg0, tree arg1,
6832 /* If this is testing a single bit, we can optimize the test. */
6833 if ((code == NE_EXPR || code == EQ_EXPR)
6834 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6835 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6837 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6838 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6839 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6841 if (arg00 != NULL_TREE
6842 /* This is only a win if casting to a signed type is cheap,
6843 i.e. when arg00's type is not a partial mode. */
6844 && TYPE_PRECISION (TREE_TYPE (arg00))
6845 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
6847 tree stype = signed_type_for (TREE_TYPE (arg00));
6848 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6849 result_type, fold_convert (stype, arg00),
6850 build_int_cst (stype, 0));
6857 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6858 equality/inequality test, then return a simplified form of
6859 the test using shifts and logical operations. Otherwise return
6860 NULL. TYPE is the desired result type. */
6863 fold_single_bit_test (enum tree_code code, tree arg0, tree arg1,
6866 /* If this is testing a single bit, we can optimize the test. */
6867 if ((code == NE_EXPR || code == EQ_EXPR)
6868 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6869 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6871 tree inner = TREE_OPERAND (arg0, 0);
6872 tree type = TREE_TYPE (arg0);
6873 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6874 enum machine_mode operand_mode = TYPE_MODE (type);
6876 tree signed_type, unsigned_type, intermediate_type;
6879 /* First, see if we can fold the single bit test into a sign-bit
6881 tem = fold_single_bit_test_into_sign_test (code, arg0, arg1,
6886 /* Otherwise we have (A & C) != 0 where C is a single bit,
6887 convert that into ((A >> C2) & 1). Where C2 = log2(C).
6888 Similarly for (A & C) == 0. */
6890 /* If INNER is a right shift of a constant and it plus BITNUM does
6891 not overflow, adjust BITNUM and INNER. */
6892 if (TREE_CODE (inner) == RSHIFT_EXPR
6893 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6894 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
6895 && bitnum < TYPE_PRECISION (type)
6896 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
6897 bitnum - TYPE_PRECISION (type)))
6899 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
6900 inner = TREE_OPERAND (inner, 0);
6903 /* If we are going to be able to omit the AND below, we must do our
6904 operations as unsigned. If we must use the AND, we have a choice.
6905 Normally unsigned is faster, but for some machines signed is. */
6906 #ifdef LOAD_EXTEND_OP
6907 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6908 && !flag_syntax_only) ? 0 : 1;
6913 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6914 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6915 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6916 inner = fold_convert (intermediate_type, inner);
6919 inner = build2 (RSHIFT_EXPR, intermediate_type,
6920 inner, size_int (bitnum));
6922 one = build_int_cst (intermediate_type, 1);
6924 if (code == EQ_EXPR)
6925 inner = fold_build2 (BIT_XOR_EXPR, intermediate_type, inner, one);
6927 /* Put the AND last so it can combine with more things. */
6928 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6930 /* Make sure to return the proper type. */
6931 inner = fold_convert (result_type, inner);
6938 /* Check whether we are allowed to reorder operands arg0 and arg1,
6939 such that the evaluation of arg1 occurs before arg0. */
6942 reorder_operands_p (const_tree arg0, const_tree arg1)
6944 if (! flag_evaluation_order)
6946 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6948 return ! TREE_SIDE_EFFECTS (arg0)
6949 && ! TREE_SIDE_EFFECTS (arg1);
6952 /* Test whether it is preferable two swap two operands, ARG0 and
6953 ARG1, for example because ARG0 is an integer constant and ARG1
6954 isn't. If REORDER is true, only recommend swapping if we can
6955 evaluate the operands in reverse order. */
6958 tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
6960 STRIP_SIGN_NOPS (arg0);
6961 STRIP_SIGN_NOPS (arg1);
6963 if (TREE_CODE (arg1) == INTEGER_CST)
6965 if (TREE_CODE (arg0) == INTEGER_CST)
6968 if (TREE_CODE (arg1) == REAL_CST)
6970 if (TREE_CODE (arg0) == REAL_CST)
6973 if (TREE_CODE (arg1) == FIXED_CST)
6975 if (TREE_CODE (arg0) == FIXED_CST)
6978 if (TREE_CODE (arg1) == COMPLEX_CST)
6980 if (TREE_CODE (arg0) == COMPLEX_CST)
6983 if (TREE_CONSTANT (arg1))
6985 if (TREE_CONSTANT (arg0))
6991 if (reorder && flag_evaluation_order
6992 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6995 /* It is preferable to swap two SSA_NAME to ensure a canonical form
6996 for commutative and comparison operators. Ensuring a canonical
6997 form allows the optimizers to find additional redundancies without
6998 having to explicitly check for both orderings. */
6999 if (TREE_CODE (arg0) == SSA_NAME
7000 && TREE_CODE (arg1) == SSA_NAME
7001 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
7004 /* Put SSA_NAMEs last. */
7005 if (TREE_CODE (arg1) == SSA_NAME)
7007 if (TREE_CODE (arg0) == SSA_NAME)
7010 /* Put variables last. */
7019 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
7020 ARG0 is extended to a wider type. */
7023 fold_widened_comparison (enum tree_code code, tree type, tree arg0, tree arg1)
7025 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
7027 tree shorter_type, outer_type;
7031 if (arg0_unw == arg0)
7033 shorter_type = TREE_TYPE (arg0_unw);
7035 #ifdef HAVE_canonicalize_funcptr_for_compare
7036 /* Disable this optimization if we're casting a function pointer
7037 type on targets that require function pointer canonicalization. */
7038 if (HAVE_canonicalize_funcptr_for_compare
7039 && TREE_CODE (shorter_type) == POINTER_TYPE
7040 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
7044 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
7047 arg1_unw = get_unwidened (arg1, NULL_TREE);
7049 /* If possible, express the comparison in the shorter mode. */
7050 if ((code == EQ_EXPR || code == NE_EXPR
7051 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
7052 && (TREE_TYPE (arg1_unw) == shorter_type
7053 || (TYPE_PRECISION (shorter_type)
7054 >= TYPE_PRECISION (TREE_TYPE (arg1_unw)))
7055 || (TREE_CODE (arg1_unw) == INTEGER_CST
7056 && (TREE_CODE (shorter_type) == INTEGER_TYPE
7057 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
7058 && int_fits_type_p (arg1_unw, shorter_type))))
7059 return fold_build2 (code, type, arg0_unw,
7060 fold_convert (shorter_type, arg1_unw));
7062 if (TREE_CODE (arg1_unw) != INTEGER_CST
7063 || TREE_CODE (shorter_type) != INTEGER_TYPE
7064 || !int_fits_type_p (arg1_unw, shorter_type))
7067 /* If we are comparing with the integer that does not fit into the range
7068 of the shorter type, the result is known. */
7069 outer_type = TREE_TYPE (arg1_unw);
7070 min = lower_bound_in_type (outer_type, shorter_type);
7071 max = upper_bound_in_type (outer_type, shorter_type);
7073 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
7075 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
7082 return omit_one_operand (type, integer_zero_node, arg0);
7087 return omit_one_operand (type, integer_one_node, arg0);
7093 return omit_one_operand (type, integer_one_node, arg0);
7095 return omit_one_operand (type, integer_zero_node, arg0);
7100 return omit_one_operand (type, integer_zero_node, arg0);
7102 return omit_one_operand (type, integer_one_node, arg0);
7111 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
7112 ARG0 just the signedness is changed. */
7115 fold_sign_changed_comparison (enum tree_code code, tree type,
7116 tree arg0, tree arg1)
7119 tree inner_type, outer_type;
7121 if (TREE_CODE (arg0) != NOP_EXPR
7122 && TREE_CODE (arg0) != CONVERT_EXPR)
7125 outer_type = TREE_TYPE (arg0);
7126 arg0_inner = TREE_OPERAND (arg0, 0);
7127 inner_type = TREE_TYPE (arg0_inner);
7129 #ifdef HAVE_canonicalize_funcptr_for_compare
7130 /* Disable this optimization if we're casting a function pointer
7131 type on targets that require function pointer canonicalization. */
7132 if (HAVE_canonicalize_funcptr_for_compare
7133 && TREE_CODE (inner_type) == POINTER_TYPE
7134 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
7138 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
7141 if (TREE_CODE (arg1) != INTEGER_CST
7142 && !((TREE_CODE (arg1) == NOP_EXPR
7143 || TREE_CODE (arg1) == CONVERT_EXPR)
7144 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
7147 if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
7152 if (TREE_CODE (arg1) == INTEGER_CST)
7153 arg1 = force_fit_type_double (inner_type, TREE_INT_CST_LOW (arg1),
7154 TREE_INT_CST_HIGH (arg1), 0,
7155 TREE_OVERFLOW (arg1));
7157 arg1 = fold_convert (inner_type, arg1);
7159 return fold_build2 (code, type, arg0_inner, arg1);
7162 /* Tries to replace &a[idx] p+ s * delta with &a[idx + delta], if s is
7163 step of the array. Reconstructs s and delta in the case of s * delta
7164 being an integer constant (and thus already folded).
7165 ADDR is the address. MULT is the multiplicative expression.
7166 If the function succeeds, the new address expression is returned. Otherwise
7167 NULL_TREE is returned. */
7170 try_move_mult_to_index (tree addr, tree op1)
7172 tree s, delta, step;
7173 tree ref = TREE_OPERAND (addr, 0), pref;
7178 /* Strip the nops that might be added when converting op1 to sizetype. */
7181 /* Canonicalize op1 into a possibly non-constant delta
7182 and an INTEGER_CST s. */
7183 if (TREE_CODE (op1) == MULT_EXPR)
7185 tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);
7190 if (TREE_CODE (arg0) == INTEGER_CST)
7195 else if (TREE_CODE (arg1) == INTEGER_CST)
7203 else if (TREE_CODE (op1) == INTEGER_CST)
7210 /* Simulate we are delta * 1. */
7212 s = integer_one_node;
7215 for (;; ref = TREE_OPERAND (ref, 0))
7217 if (TREE_CODE (ref) == ARRAY_REF)
7219 /* Remember if this was a multi-dimensional array. */
7220 if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
7223 itype = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
7227 step = array_ref_element_size (ref);
7228 if (TREE_CODE (step) != INTEGER_CST)
7233 if (! tree_int_cst_equal (step, s))
7238 /* Try if delta is a multiple of step. */
7239 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, delta, step);
7245 /* Only fold here if we can verify we do not overflow one
7246 dimension of a multi-dimensional array. */
7251 if (TREE_CODE (TREE_OPERAND (ref, 1)) != INTEGER_CST
7252 || !INTEGRAL_TYPE_P (itype)
7253 || !TYPE_MAX_VALUE (itype)
7254 || TREE_CODE (TYPE_MAX_VALUE (itype)) != INTEGER_CST)
7257 tmp = fold_binary (PLUS_EXPR, itype,
7258 fold_convert (itype,
7259 TREE_OPERAND (ref, 1)),
7260 fold_convert (itype, delta));
7262 || TREE_CODE (tmp) != INTEGER_CST
7263 || tree_int_cst_lt (TYPE_MAX_VALUE (itype), tmp))
7272 if (!handled_component_p (ref))
7276 /* We found the suitable array reference. So copy everything up to it,
7277 and replace the index. */
7279 pref = TREE_OPERAND (addr, 0);
7280 ret = copy_node (pref);
7285 pref = TREE_OPERAND (pref, 0);
7286 TREE_OPERAND (pos, 0) = copy_node (pref);
7287 pos = TREE_OPERAND (pos, 0);
7290 TREE_OPERAND (pos, 1) = fold_build2 (PLUS_EXPR, itype,
7291 fold_convert (itype,
7292 TREE_OPERAND (pos, 1)),
7293 fold_convert (itype, delta));
7295 return fold_build1 (ADDR_EXPR, TREE_TYPE (addr), ret);
7299 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
7300 means A >= Y && A != MAX, but in this case we know that
7301 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
7304 fold_to_nonsharp_ineq_using_bound (tree ineq, tree bound)
7306 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
7308 if (TREE_CODE (bound) == LT_EXPR)
7309 a = TREE_OPERAND (bound, 0);
7310 else if (TREE_CODE (bound) == GT_EXPR)
7311 a = TREE_OPERAND (bound, 1);
7315 typea = TREE_TYPE (a);
7316 if (!INTEGRAL_TYPE_P (typea)
7317 && !POINTER_TYPE_P (typea))
7320 if (TREE_CODE (ineq) == LT_EXPR)
7322 a1 = TREE_OPERAND (ineq, 1);
7323 y = TREE_OPERAND (ineq, 0);
7325 else if (TREE_CODE (ineq) == GT_EXPR)
7327 a1 = TREE_OPERAND (ineq, 0);
7328 y = TREE_OPERAND (ineq, 1);
7333 if (TREE_TYPE (a1) != typea)
7336 if (POINTER_TYPE_P (typea))
7338 /* Convert the pointer types into integer before taking the difference. */
7339 tree ta = fold_convert (ssizetype, a);
7340 tree ta1 = fold_convert (ssizetype, a1);
7341 diff = fold_binary (MINUS_EXPR, ssizetype, ta1, ta);
7344 diff = fold_binary (MINUS_EXPR, typea, a1, a);
7346 if (!diff || !integer_onep (diff))
7349 return fold_build2 (GE_EXPR, type, a, y);
7352 /* Fold a sum or difference of at least one multiplication.
7353 Returns the folded tree or NULL if no simplification could be made. */
7356 fold_plusminus_mult_expr (enum tree_code code, tree type, tree arg0, tree arg1)
7358 tree arg00, arg01, arg10, arg11;
7359 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
7361 /* (A * C) +- (B * C) -> (A+-B) * C.
7362 (A * C) +- A -> A * (C+-1).
7363 We are most concerned about the case where C is a constant,
7364 but other combinations show up during loop reduction. Since
7365 it is not difficult, try all four possibilities. */
7367 if (TREE_CODE (arg0) == MULT_EXPR)
7369 arg00 = TREE_OPERAND (arg0, 0);
7370 arg01 = TREE_OPERAND (arg0, 1);
7372 else if (TREE_CODE (arg0) == INTEGER_CST)
7374 arg00 = build_one_cst (type);
7379 /* We cannot generate constant 1 for fract. */
7380 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7383 arg01 = build_one_cst (type);
7385 if (TREE_CODE (arg1) == MULT_EXPR)
7387 arg10 = TREE_OPERAND (arg1, 0);
7388 arg11 = TREE_OPERAND (arg1, 1);
7390 else if (TREE_CODE (arg1) == INTEGER_CST)
7392 arg10 = build_one_cst (type);
7397 /* We cannot generate constant 1 for fract. */
7398 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7401 arg11 = build_one_cst (type);
7405 if (operand_equal_p (arg01, arg11, 0))
7406 same = arg01, alt0 = arg00, alt1 = arg10;
7407 else if (operand_equal_p (arg00, arg10, 0))
7408 same = arg00, alt0 = arg01, alt1 = arg11;
7409 else if (operand_equal_p (arg00, arg11, 0))
7410 same = arg00, alt0 = arg01, alt1 = arg10;
7411 else if (operand_equal_p (arg01, arg10, 0))
7412 same = arg01, alt0 = arg00, alt1 = arg11;
7414 /* No identical multiplicands; see if we can find a common
7415 power-of-two factor in non-power-of-two multiplies. This
7416 can help in multi-dimensional array access. */
7417 else if (host_integerp (arg01, 0)
7418 && host_integerp (arg11, 0))
7420 HOST_WIDE_INT int01, int11, tmp;
7423 int01 = TREE_INT_CST_LOW (arg01);
7424 int11 = TREE_INT_CST_LOW (arg11);
7426 /* Move min of absolute values to int11. */
7427 if ((int01 >= 0 ? int01 : -int01)
7428 < (int11 >= 0 ? int11 : -int11))
7430 tmp = int01, int01 = int11, int11 = tmp;
7431 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7438 if (exact_log2 (abs (int11)) > 0 && int01 % int11 == 0)
7440 alt0 = fold_build2 (MULT_EXPR, TREE_TYPE (arg00), arg00,
7441 build_int_cst (TREE_TYPE (arg00),
7446 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7451 return fold_build2 (MULT_EXPR, type,
7452 fold_build2 (code, type,
7453 fold_convert (type, alt0),
7454 fold_convert (type, alt1)),
7455 fold_convert (type, same));
7460 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7461 specified by EXPR into the buffer PTR of length LEN bytes.
7462 Return the number of bytes placed in the buffer, or zero
7466 native_encode_int (const_tree expr, unsigned char *ptr, int len)
7468 tree type = TREE_TYPE (expr);
7469 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7470 int byte, offset, word, words;
7471 unsigned char value;
7473 if (total_bytes > len)
7475 words = total_bytes / UNITS_PER_WORD;
7477 for (byte = 0; byte < total_bytes; byte++)
7479 int bitpos = byte * BITS_PER_UNIT;
7480 if (bitpos < HOST_BITS_PER_WIDE_INT)
7481 value = (unsigned char) (TREE_INT_CST_LOW (expr) >> bitpos);
7483 value = (unsigned char) (TREE_INT_CST_HIGH (expr)
7484 >> (bitpos - HOST_BITS_PER_WIDE_INT));
7486 if (total_bytes > UNITS_PER_WORD)
7488 word = byte / UNITS_PER_WORD;
7489 if (WORDS_BIG_ENDIAN)
7490 word = (words - 1) - word;
7491 offset = word * UNITS_PER_WORD;
7492 if (BYTES_BIG_ENDIAN)
7493 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7495 offset += byte % UNITS_PER_WORD;
7498 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7499 ptr[offset] = value;
7505 /* Subroutine of native_encode_expr. Encode the REAL_CST
7506 specified by EXPR into the buffer PTR of length LEN bytes.
7507 Return the number of bytes placed in the buffer, or zero
7511 native_encode_real (const_tree expr, unsigned char *ptr, int len)
7513 tree type = TREE_TYPE (expr);
7514 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7515 int byte, offset, word, words, bitpos;
7516 unsigned char value;
7518 /* There are always 32 bits in each long, no matter the size of
7519 the hosts long. We handle floating point representations with
7523 if (total_bytes > len)
7525 words = 32 / UNITS_PER_WORD;
7527 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7529 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7530 bitpos += BITS_PER_UNIT)
7532 byte = (bitpos / BITS_PER_UNIT) & 3;
7533 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7535 if (UNITS_PER_WORD < 4)
7537 word = byte / UNITS_PER_WORD;
7538 if (WORDS_BIG_ENDIAN)
7539 word = (words - 1) - word;
7540 offset = word * UNITS_PER_WORD;
7541 if (BYTES_BIG_ENDIAN)
7542 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7544 offset += byte % UNITS_PER_WORD;
7547 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7548 ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)] = value;
7553 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7554 specified by EXPR into the buffer PTR of length LEN bytes.
7555 Return the number of bytes placed in the buffer, or zero
7559 native_encode_complex (const_tree expr, unsigned char *ptr, int len)
7564 part = TREE_REALPART (expr);
7565 rsize = native_encode_expr (part, ptr, len);
7568 part = TREE_IMAGPART (expr);
7569 isize = native_encode_expr (part, ptr+rsize, len-rsize);
7572 return rsize + isize;
7576 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7577 specified by EXPR into the buffer PTR of length LEN bytes.
7578 Return the number of bytes placed in the buffer, or zero
7582 native_encode_vector (const_tree expr, unsigned char *ptr, int len)
7584 int i, size, offset, count;
7585 tree itype, elem, elements;
7588 elements = TREE_VECTOR_CST_ELTS (expr);
7589 count = TYPE_VECTOR_SUBPARTS (TREE_TYPE (expr));
7590 itype = TREE_TYPE (TREE_TYPE (expr));
7591 size = GET_MODE_SIZE (TYPE_MODE (itype));
7592 for (i = 0; i < count; i++)
7596 elem = TREE_VALUE (elements);
7597 elements = TREE_CHAIN (elements);
7604 if (native_encode_expr (elem, ptr+offset, len-offset) != size)
7609 if (offset + size > len)
7611 memset (ptr+offset, 0, size);
7619 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7620 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7621 buffer PTR of length LEN bytes. Return the number of bytes
7622 placed in the buffer, or zero upon failure. */
7625 native_encode_expr (const_tree expr, unsigned char *ptr, int len)
7627 switch (TREE_CODE (expr))
7630 return native_encode_int (expr, ptr, len);
7633 return native_encode_real (expr, ptr, len);
7636 return native_encode_complex (expr, ptr, len);
7639 return native_encode_vector (expr, ptr, len);
7647 /* Subroutine of native_interpret_expr. Interpret the contents of
7648 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7649 If the buffer cannot be interpreted, return NULL_TREE. */
7652 native_interpret_int (tree type, const unsigned char *ptr, int len)
7654 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7655 int byte, offset, word, words;
7656 unsigned char value;
7657 unsigned int HOST_WIDE_INT lo = 0;
7658 HOST_WIDE_INT hi = 0;
7660 if (total_bytes > len)
7662 if (total_bytes * BITS_PER_UNIT > 2 * HOST_BITS_PER_WIDE_INT)
7664 words = total_bytes / UNITS_PER_WORD;
7666 for (byte = 0; byte < total_bytes; byte++)
7668 int bitpos = byte * BITS_PER_UNIT;
7669 if (total_bytes > UNITS_PER_WORD)
7671 word = byte / UNITS_PER_WORD;
7672 if (WORDS_BIG_ENDIAN)
7673 word = (words - 1) - word;
7674 offset = word * UNITS_PER_WORD;
7675 if (BYTES_BIG_ENDIAN)
7676 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7678 offset += byte % UNITS_PER_WORD;
7681 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7682 value = ptr[offset];
7684 if (bitpos < HOST_BITS_PER_WIDE_INT)
7685 lo |= (unsigned HOST_WIDE_INT) value << bitpos;
7687 hi |= (unsigned HOST_WIDE_INT) value
7688 << (bitpos - HOST_BITS_PER_WIDE_INT);
7691 return build_int_cst_wide_type (type, lo, hi);
7695 /* Subroutine of native_interpret_expr. Interpret the contents of
7696 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7697 If the buffer cannot be interpreted, return NULL_TREE. */
7700 native_interpret_real (tree type, const unsigned char *ptr, int len)
7702 enum machine_mode mode = TYPE_MODE (type);
7703 int total_bytes = GET_MODE_SIZE (mode);
7704 int byte, offset, word, words, bitpos;
7705 unsigned char value;
7706 /* There are always 32 bits in each long, no matter the size of
7707 the hosts long. We handle floating point representations with
7712 total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7713 if (total_bytes > len || total_bytes > 24)
7715 words = 32 / UNITS_PER_WORD;
7717 memset (tmp, 0, sizeof (tmp));
7718 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7719 bitpos += BITS_PER_UNIT)
7721 byte = (bitpos / BITS_PER_UNIT) & 3;
7722 if (UNITS_PER_WORD < 4)
7724 word = byte / UNITS_PER_WORD;
7725 if (WORDS_BIG_ENDIAN)
7726 word = (words - 1) - word;
7727 offset = word * UNITS_PER_WORD;
7728 if (BYTES_BIG_ENDIAN)
7729 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7731 offset += byte % UNITS_PER_WORD;
7734 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7735 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7737 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7740 real_from_target (&r, tmp, mode);
7741 return build_real (type, r);
7745 /* Subroutine of native_interpret_expr. Interpret the contents of
7746 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7747 If the buffer cannot be interpreted, return NULL_TREE. */
7750 native_interpret_complex (tree type, const unsigned char *ptr, int len)
7752 tree etype, rpart, ipart;
7755 etype = TREE_TYPE (type);
7756 size = GET_MODE_SIZE (TYPE_MODE (etype));
7759 rpart = native_interpret_expr (etype, ptr, size);
7762 ipart = native_interpret_expr (etype, ptr+size, size);
7765 return build_complex (type, rpart, ipart);
7769 /* Subroutine of native_interpret_expr. Interpret the contents of
7770 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7771 If the buffer cannot be interpreted, return NULL_TREE. */
7774 native_interpret_vector (tree type, const unsigned char *ptr, int len)
7776 tree etype, elem, elements;
7779 etype = TREE_TYPE (type);
7780 size = GET_MODE_SIZE (TYPE_MODE (etype));
7781 count = TYPE_VECTOR_SUBPARTS (type);
7782 if (size * count > len)
7785 elements = NULL_TREE;
7786 for (i = count - 1; i >= 0; i--)
7788 elem = native_interpret_expr (etype, ptr+(i*size), size);
7791 elements = tree_cons (NULL_TREE, elem, elements);
7793 return build_vector (type, elements);
7797 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7798 the buffer PTR of length LEN as a constant of type TYPE. For
7799 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7800 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7801 return NULL_TREE. */
7804 native_interpret_expr (tree type, const unsigned char *ptr, int len)
7806 switch (TREE_CODE (type))
7811 return native_interpret_int (type, ptr, len);
7814 return native_interpret_real (type, ptr, len);
7817 return native_interpret_complex (type, ptr, len);
7820 return native_interpret_vector (type, ptr, len);
7828 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7829 TYPE at compile-time. If we're unable to perform the conversion
7830 return NULL_TREE. */
7833 fold_view_convert_expr (tree type, tree expr)
7835 /* We support up to 512-bit values (for V8DFmode). */
7836 unsigned char buffer[64];
7839 /* Check that the host and target are sane. */
7840 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7843 len = native_encode_expr (expr, buffer, sizeof (buffer));
7847 return native_interpret_expr (type, buffer, len);
7850 /* Build an expression for the address of T. Folds away INDIRECT_REF
7851 to avoid confusing the gimplify process. When IN_FOLD is true
7852 avoid modifications of T. */
7855 build_fold_addr_expr_with_type_1 (tree t, tree ptrtype, bool in_fold)
7857 /* The size of the object is not relevant when talking about its address. */
7858 if (TREE_CODE (t) == WITH_SIZE_EXPR)
7859 t = TREE_OPERAND (t, 0);
7861 /* Note: doesn't apply to ALIGN_INDIRECT_REF */
7862 if (TREE_CODE (t) == INDIRECT_REF
7863 || TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
7865 t = TREE_OPERAND (t, 0);
7867 if (TREE_TYPE (t) != ptrtype)
7868 t = build1 (NOP_EXPR, ptrtype, t);
7874 while (handled_component_p (base))
7875 base = TREE_OPERAND (base, 0);
7878 TREE_ADDRESSABLE (base) = 1;
7880 t = build1 (ADDR_EXPR, ptrtype, t);
7883 t = build1 (ADDR_EXPR, ptrtype, t);
7888 /* Build an expression for the address of T with type PTRTYPE. This
7889 function modifies the input parameter 'T' by sometimes setting the
7890 TREE_ADDRESSABLE flag. */
7893 build_fold_addr_expr_with_type (tree t, tree ptrtype)
7895 return build_fold_addr_expr_with_type_1 (t, ptrtype, false);
7898 /* Build an expression for the address of T. This function modifies
7899 the input parameter 'T' by sometimes setting the TREE_ADDRESSABLE
7900 flag. When called from fold functions, use fold_addr_expr instead. */
7903 build_fold_addr_expr (tree t)
7905 return build_fold_addr_expr_with_type_1 (t,
7906 build_pointer_type (TREE_TYPE (t)),
7910 /* Same as build_fold_addr_expr, builds an expression for the address
7911 of T, but avoids touching the input node 't'. Fold functions
7912 should use this version. */
7915 fold_addr_expr (tree t)
7917 tree ptrtype = build_pointer_type (TREE_TYPE (t));
7919 return build_fold_addr_expr_with_type_1 (t, ptrtype, true);
7922 /* Fold a unary expression of code CODE and type TYPE with operand
7923 OP0. Return the folded expression if folding is successful.
7924 Otherwise, return NULL_TREE. */
7927 fold_unary (enum tree_code code, tree type, tree op0)
7931 enum tree_code_class kind = TREE_CODE_CLASS (code);
7933 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7934 && TREE_CODE_LENGTH (code) == 1);
7939 if (code == NOP_EXPR || code == CONVERT_EXPR
7940 || code == FLOAT_EXPR || code == ABS_EXPR)
7942 /* Don't use STRIP_NOPS, because signedness of argument type
7944 STRIP_SIGN_NOPS (arg0);
7948 /* Strip any conversions that don't change the mode. This
7949 is safe for every expression, except for a comparison
7950 expression because its signedness is derived from its
7953 Note that this is done as an internal manipulation within
7954 the constant folder, in order to find the simplest
7955 representation of the arguments so that their form can be
7956 studied. In any cases, the appropriate type conversions
7957 should be put back in the tree that will get out of the
7963 if (TREE_CODE_CLASS (code) == tcc_unary)
7965 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7966 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7967 fold_build1 (code, type, TREE_OPERAND (arg0, 1)));
7968 else if (TREE_CODE (arg0) == COND_EXPR)
7970 tree arg01 = TREE_OPERAND (arg0, 1);
7971 tree arg02 = TREE_OPERAND (arg0, 2);
7972 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7973 arg01 = fold_build1 (code, type, arg01);
7974 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7975 arg02 = fold_build1 (code, type, arg02);
7976 tem = fold_build3 (COND_EXPR, type, TREE_OPERAND (arg0, 0),
7979 /* If this was a conversion, and all we did was to move into
7980 inside the COND_EXPR, bring it back out. But leave it if
7981 it is a conversion from integer to integer and the
7982 result precision is no wider than a word since such a
7983 conversion is cheap and may be optimized away by combine,
7984 while it couldn't if it were outside the COND_EXPR. Then return
7985 so we don't get into an infinite recursion loop taking the
7986 conversion out and then back in. */
7988 if ((code == NOP_EXPR || code == CONVERT_EXPR
7989 || code == NON_LVALUE_EXPR)
7990 && TREE_CODE (tem) == COND_EXPR
7991 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7992 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7993 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7994 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7995 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7996 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7997 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7999 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
8000 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
8001 || flag_syntax_only))
8002 tem = build1 (code, type,
8004 TREE_TYPE (TREE_OPERAND
8005 (TREE_OPERAND (tem, 1), 0)),
8006 TREE_OPERAND (tem, 0),
8007 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
8008 TREE_OPERAND (TREE_OPERAND (tem, 2), 0)));
8011 else if (COMPARISON_CLASS_P (arg0))
8013 if (TREE_CODE (type) == BOOLEAN_TYPE)
8015 arg0 = copy_node (arg0);
8016 TREE_TYPE (arg0) = type;
8019 else if (TREE_CODE (type) != INTEGER_TYPE)
8020 return fold_build3 (COND_EXPR, type, arg0,
8021 fold_build1 (code, type,
8023 fold_build1 (code, type,
8024 integer_zero_node));
8031 /* Re-association barriers around constants and other re-association
8032 barriers can be removed. */
8033 if (CONSTANT_CLASS_P (op0)
8034 || TREE_CODE (op0) == PAREN_EXPR)
8035 return fold_convert (type, op0);
8041 case FIX_TRUNC_EXPR:
8042 if (TREE_TYPE (op0) == type)
8045 /* If we have (type) (a CMP b) and type is an integral type, return
8046 new expression involving the new type. */
8047 if (COMPARISON_CLASS_P (op0) && INTEGRAL_TYPE_P (type))
8048 return fold_build2 (TREE_CODE (op0), type, TREE_OPERAND (op0, 0),
8049 TREE_OPERAND (op0, 1));
8051 /* Handle cases of two conversions in a row. */
8052 if (TREE_CODE (op0) == NOP_EXPR
8053 || TREE_CODE (op0) == CONVERT_EXPR)
8055 tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
8056 tree inter_type = TREE_TYPE (op0);
8057 int inside_int = INTEGRAL_TYPE_P (inside_type);
8058 int inside_ptr = POINTER_TYPE_P (inside_type);
8059 int inside_float = FLOAT_TYPE_P (inside_type);
8060 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
8061 unsigned int inside_prec = TYPE_PRECISION (inside_type);
8062 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
8063 int inter_int = INTEGRAL_TYPE_P (inter_type);
8064 int inter_ptr = POINTER_TYPE_P (inter_type);
8065 int inter_float = FLOAT_TYPE_P (inter_type);
8066 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
8067 unsigned int inter_prec = TYPE_PRECISION (inter_type);
8068 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
8069 int final_int = INTEGRAL_TYPE_P (type);
8070 int final_ptr = POINTER_TYPE_P (type);
8071 int final_float = FLOAT_TYPE_P (type);
8072 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
8073 unsigned int final_prec = TYPE_PRECISION (type);
8074 int final_unsignedp = TYPE_UNSIGNED (type);
8076 /* In addition to the cases of two conversions in a row
8077 handled below, if we are converting something to its own
8078 type via an object of identical or wider precision, neither
8079 conversion is needed. */
8080 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
8081 && (((inter_int || inter_ptr) && final_int)
8082 || (inter_float && final_float))
8083 && inter_prec >= final_prec)
8084 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
8086 /* Likewise, if the intermediate and final types are either both
8087 float or both integer, we don't need the middle conversion if
8088 it is wider than the final type and doesn't change the signedness
8089 (for integers). Avoid this if the final type is a pointer
8090 since then we sometimes need the inner conversion. Likewise if
8091 the outer has a precision not equal to the size of its mode. */
8092 if (((inter_int && inside_int)
8093 || (inter_float && inside_float)
8094 || (inter_vec && inside_vec))
8095 && inter_prec >= inside_prec
8096 && (inter_float || inter_vec
8097 || inter_unsignedp == inside_unsignedp)
8098 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
8099 && TYPE_MODE (type) == TYPE_MODE (inter_type))
8101 && (! final_vec || inter_prec == inside_prec))
8102 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
8104 /* If we have a sign-extension of a zero-extended value, we can
8105 replace that by a single zero-extension. */
8106 if (inside_int && inter_int && final_int
8107 && inside_prec < inter_prec && inter_prec < final_prec
8108 && inside_unsignedp && !inter_unsignedp)
8109 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
8111 /* Two conversions in a row are not needed unless:
8112 - some conversion is floating-point (overstrict for now), or
8113 - some conversion is a vector (overstrict for now), or
8114 - the intermediate type is narrower than both initial and
8116 - the intermediate type and innermost type differ in signedness,
8117 and the outermost type is wider than the intermediate, or
8118 - the initial type is a pointer type and the precisions of the
8119 intermediate and final types differ, or
8120 - the final type is a pointer type and the precisions of the
8121 initial and intermediate types differ.
8122 - the initial type is a pointer to an array and the final type
8124 if (! inside_float && ! inter_float && ! final_float
8125 && ! inside_vec && ! inter_vec && ! final_vec
8126 && (inter_prec >= inside_prec || inter_prec >= final_prec)
8127 && ! (inside_int && inter_int
8128 && inter_unsignedp != inside_unsignedp
8129 && inter_prec < final_prec)
8130 && ((inter_unsignedp && inter_prec > inside_prec)
8131 == (final_unsignedp && final_prec > inter_prec))
8132 && ! (inside_ptr && inter_prec != final_prec)
8133 && ! (final_ptr && inside_prec != inter_prec)
8134 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
8135 && TYPE_MODE (type) == TYPE_MODE (inter_type))
8136 && ! (inside_ptr && final_ptr
8137 && TREE_CODE (TREE_TYPE (inside_type)) == ARRAY_TYPE
8138 && TREE_CODE (TREE_TYPE (type)) != ARRAY_TYPE))
8139 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
8142 /* Handle (T *)&A.B.C for A being of type T and B and C
8143 living at offset zero. This occurs frequently in
8144 C++ upcasting and then accessing the base. */
8145 if (TREE_CODE (op0) == ADDR_EXPR
8146 && POINTER_TYPE_P (type)
8147 && handled_component_p (TREE_OPERAND (op0, 0)))
8149 HOST_WIDE_INT bitsize, bitpos;
8151 enum machine_mode mode;
8152 int unsignedp, volatilep;
8153 tree base = TREE_OPERAND (op0, 0);
8154 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
8155 &mode, &unsignedp, &volatilep, false);
8156 /* If the reference was to a (constant) zero offset, we can use
8157 the address of the base if it has the same base type
8158 as the result type. */
8159 if (! offset && bitpos == 0
8160 && TYPE_MAIN_VARIANT (TREE_TYPE (type))
8161 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
8162 return fold_convert (type, fold_addr_expr (base));
8165 if ((TREE_CODE (op0) == MODIFY_EXPR
8166 || TREE_CODE (op0) == GIMPLE_MODIFY_STMT)
8167 && TREE_CONSTANT (GENERIC_TREE_OPERAND (op0, 1))
8168 /* Detect assigning a bitfield. */
8169 && !(TREE_CODE (GENERIC_TREE_OPERAND (op0, 0)) == COMPONENT_REF
8171 (TREE_OPERAND (GENERIC_TREE_OPERAND (op0, 0), 1))))
8173 /* Don't leave an assignment inside a conversion
8174 unless assigning a bitfield. */
8175 tem = fold_build1 (code, type, GENERIC_TREE_OPERAND (op0, 1));
8176 /* First do the assignment, then return converted constant. */
8177 tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
8178 TREE_NO_WARNING (tem) = 1;
8179 TREE_USED (tem) = 1;
8183 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
8184 constants (if x has signed type, the sign bit cannot be set
8185 in c). This folds extension into the BIT_AND_EXPR. */
8186 if (INTEGRAL_TYPE_P (type)
8187 && TREE_CODE (type) != BOOLEAN_TYPE
8188 && TREE_CODE (op0) == BIT_AND_EXPR
8189 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
8192 tree and0 = TREE_OPERAND (and, 0), and1 = TREE_OPERAND (and, 1);
8195 if (TYPE_UNSIGNED (TREE_TYPE (and))
8196 || (TYPE_PRECISION (type)
8197 <= TYPE_PRECISION (TREE_TYPE (and))))
8199 else if (TYPE_PRECISION (TREE_TYPE (and1))
8200 <= HOST_BITS_PER_WIDE_INT
8201 && host_integerp (and1, 1))
8203 unsigned HOST_WIDE_INT cst;
8205 cst = tree_low_cst (and1, 1);
8206 cst &= (HOST_WIDE_INT) -1
8207 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
8208 change = (cst == 0);
8209 #ifdef LOAD_EXTEND_OP
8211 && !flag_syntax_only
8212 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
8215 tree uns = unsigned_type_for (TREE_TYPE (and0));
8216 and0 = fold_convert (uns, and0);
8217 and1 = fold_convert (uns, and1);
8223 tem = force_fit_type_double (type, TREE_INT_CST_LOW (and1),
8224 TREE_INT_CST_HIGH (and1), 0,
8225 TREE_OVERFLOW (and1));
8226 return fold_build2 (BIT_AND_EXPR, type,
8227 fold_convert (type, and0), tem);
8231 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
8232 when one of the new casts will fold away. Conservatively we assume
8233 that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST. */
8234 if (POINTER_TYPE_P (type)
8235 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
8236 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8237 || TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
8238 || TREE_CODE (TREE_OPERAND (arg0, 1)) == NOP_EXPR))
8240 tree arg00 = TREE_OPERAND (arg0, 0);
8241 tree arg01 = TREE_OPERAND (arg0, 1);
8243 return fold_build2 (TREE_CODE (arg0), type, fold_convert (type, arg00),
8244 fold_convert (sizetype, arg01));
8247 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
8248 of the same precision, and X is an integer type not narrower than
8249 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
8250 if (INTEGRAL_TYPE_P (type)
8251 && TREE_CODE (op0) == BIT_NOT_EXPR
8252 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
8253 && (TREE_CODE (TREE_OPERAND (op0, 0)) == NOP_EXPR
8254 || TREE_CODE (TREE_OPERAND (op0, 0)) == CONVERT_EXPR)
8255 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
8257 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
8258 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
8259 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
8260 return fold_build1 (BIT_NOT_EXPR, type, fold_convert (type, tem));
8263 tem = fold_convert_const (code, type, op0);
8264 return tem ? tem : NULL_TREE;
8266 case FIXED_CONVERT_EXPR:
8267 tem = fold_convert_const (code, type, arg0);
8268 return tem ? tem : NULL_TREE;
8270 case VIEW_CONVERT_EXPR:
8271 if (TREE_TYPE (op0) == type)
8273 if (TREE_CODE (op0) == VIEW_CONVERT_EXPR
8274 || (TREE_CODE (op0) == NOP_EXPR
8275 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
8276 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0)))
8277 && TYPE_PRECISION (TREE_TYPE (op0))
8278 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op0, 0)))))
8279 return fold_build1 (VIEW_CONVERT_EXPR, type, TREE_OPERAND (op0, 0));
8280 return fold_view_convert_expr (type, op0);
8283 tem = fold_negate_expr (arg0);
8285 return fold_convert (type, tem);
8289 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
8290 return fold_abs_const (arg0, type);
8291 else if (TREE_CODE (arg0) == NEGATE_EXPR)
8292 return fold_build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0));
8293 /* Convert fabs((double)float) into (double)fabsf(float). */
8294 else if (TREE_CODE (arg0) == NOP_EXPR
8295 && TREE_CODE (type) == REAL_TYPE)
8297 tree targ0 = strip_float_extensions (arg0);
8299 return fold_convert (type, fold_build1 (ABS_EXPR,
8303 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
8304 else if (TREE_CODE (arg0) == ABS_EXPR)
8306 else if (tree_expr_nonnegative_p (arg0))
8309 /* Strip sign ops from argument. */
8310 if (TREE_CODE (type) == REAL_TYPE)
8312 tem = fold_strip_sign_ops (arg0);
8314 return fold_build1 (ABS_EXPR, type, fold_convert (type, tem));
8319 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8320 return fold_convert (type, arg0);
8321 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8323 tree itype = TREE_TYPE (type);
8324 tree rpart = fold_convert (itype, TREE_OPERAND (arg0, 0));
8325 tree ipart = fold_convert (itype, TREE_OPERAND (arg0, 1));
8326 return fold_build2 (COMPLEX_EXPR, type, rpart, negate_expr (ipart));
8328 if (TREE_CODE (arg0) == COMPLEX_CST)
8330 tree itype = TREE_TYPE (type);
8331 tree rpart = fold_convert (itype, TREE_REALPART (arg0));
8332 tree ipart = fold_convert (itype, TREE_IMAGPART (arg0));
8333 return build_complex (type, rpart, negate_expr (ipart));
8335 if (TREE_CODE (arg0) == CONJ_EXPR)
8336 return fold_convert (type, TREE_OPERAND (arg0, 0));
8340 if (TREE_CODE (arg0) == INTEGER_CST)
8341 return fold_not_const (arg0, type);
8342 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
8343 return fold_convert (type, TREE_OPERAND (arg0, 0));
8344 /* Convert ~ (-A) to A - 1. */
8345 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
8346 return fold_build2 (MINUS_EXPR, type,
8347 fold_convert (type, TREE_OPERAND (arg0, 0)),
8348 build_int_cst (type, 1));
8349 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
8350 else if (INTEGRAL_TYPE_P (type)
8351 && ((TREE_CODE (arg0) == MINUS_EXPR
8352 && integer_onep (TREE_OPERAND (arg0, 1)))
8353 || (TREE_CODE (arg0) == PLUS_EXPR
8354 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
8355 return fold_build1 (NEGATE_EXPR, type,
8356 fold_convert (type, TREE_OPERAND (arg0, 0)));
8357 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
8358 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8359 && (tem = fold_unary (BIT_NOT_EXPR, type,
8361 TREE_OPERAND (arg0, 0)))))
8362 return fold_build2 (BIT_XOR_EXPR, type, tem,
8363 fold_convert (type, TREE_OPERAND (arg0, 1)));
8364 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8365 && (tem = fold_unary (BIT_NOT_EXPR, type,
8367 TREE_OPERAND (arg0, 1)))))
8368 return fold_build2 (BIT_XOR_EXPR, type,
8369 fold_convert (type, TREE_OPERAND (arg0, 0)), tem);
8370 /* Perform BIT_NOT_EXPR on each element individually. */
8371 else if (TREE_CODE (arg0) == VECTOR_CST)
8373 tree elements = TREE_VECTOR_CST_ELTS (arg0), elem, list = NULL_TREE;
8374 int count = TYPE_VECTOR_SUBPARTS (type), i;
8376 for (i = 0; i < count; i++)
8380 elem = TREE_VALUE (elements);
8381 elem = fold_unary (BIT_NOT_EXPR, TREE_TYPE (type), elem);
8382 if (elem == NULL_TREE)
8384 elements = TREE_CHAIN (elements);
8387 elem = build_int_cst (TREE_TYPE (type), -1);
8388 list = tree_cons (NULL_TREE, elem, list);
8391 return build_vector (type, nreverse (list));
8396 case TRUTH_NOT_EXPR:
8397 /* The argument to invert_truthvalue must have Boolean type. */
8398 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
8399 arg0 = fold_convert (boolean_type_node, arg0);
8401 /* Note that the operand of this must be an int
8402 and its values must be 0 or 1.
8403 ("true" is a fixed value perhaps depending on the language,
8404 but we don't handle values other than 1 correctly yet.) */
8405 tem = fold_truth_not_expr (arg0);
8408 return fold_convert (type, tem);
8411 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8412 return fold_convert (type, arg0);
8413 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8414 return omit_one_operand (type, TREE_OPERAND (arg0, 0),
8415 TREE_OPERAND (arg0, 1));
8416 if (TREE_CODE (arg0) == COMPLEX_CST)
8417 return fold_convert (type, TREE_REALPART (arg0));
8418 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8420 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8421 tem = fold_build2 (TREE_CODE (arg0), itype,
8422 fold_build1 (REALPART_EXPR, itype,
8423 TREE_OPERAND (arg0, 0)),
8424 fold_build1 (REALPART_EXPR, itype,
8425 TREE_OPERAND (arg0, 1)));
8426 return fold_convert (type, tem);
8428 if (TREE_CODE (arg0) == CONJ_EXPR)
8430 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8431 tem = fold_build1 (REALPART_EXPR, itype, TREE_OPERAND (arg0, 0));
8432 return fold_convert (type, tem);
8434 if (TREE_CODE (arg0) == CALL_EXPR)
8436 tree fn = get_callee_fndecl (arg0);
8437 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8438 switch (DECL_FUNCTION_CODE (fn))
8440 CASE_FLT_FN (BUILT_IN_CEXPI):
8441 fn = mathfn_built_in (type, BUILT_IN_COS);
8443 return build_call_expr (fn, 1, CALL_EXPR_ARG (arg0, 0));
8453 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8454 return fold_convert (type, integer_zero_node);
8455 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8456 return omit_one_operand (type, TREE_OPERAND (arg0, 1),
8457 TREE_OPERAND (arg0, 0));
8458 if (TREE_CODE (arg0) == COMPLEX_CST)
8459 return fold_convert (type, TREE_IMAGPART (arg0));
8460 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8462 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8463 tem = fold_build2 (TREE_CODE (arg0), itype,
8464 fold_build1 (IMAGPART_EXPR, itype,
8465 TREE_OPERAND (arg0, 0)),
8466 fold_build1 (IMAGPART_EXPR, itype,
8467 TREE_OPERAND (arg0, 1)));
8468 return fold_convert (type, tem);
8470 if (TREE_CODE (arg0) == CONJ_EXPR)
8472 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8473 tem = fold_build1 (IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
8474 return fold_convert (type, negate_expr (tem));
8476 if (TREE_CODE (arg0) == CALL_EXPR)
8478 tree fn = get_callee_fndecl (arg0);
8479 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8480 switch (DECL_FUNCTION_CODE (fn))
8482 CASE_FLT_FN (BUILT_IN_CEXPI):
8483 fn = mathfn_built_in (type, BUILT_IN_SIN);
8485 return build_call_expr (fn, 1, CALL_EXPR_ARG (arg0, 0));
8496 } /* switch (code) */
8499 /* Fold a binary expression of code CODE and type TYPE with operands
8500 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
8501 Return the folded expression if folding is successful. Otherwise,
8502 return NULL_TREE. */
8505 fold_minmax (enum tree_code code, tree type, tree op0, tree op1)
8507 enum tree_code compl_code;
8509 if (code == MIN_EXPR)
8510 compl_code = MAX_EXPR;
8511 else if (code == MAX_EXPR)
8512 compl_code = MIN_EXPR;
8516 /* MIN (MAX (a, b), b) == b. */
8517 if (TREE_CODE (op0) == compl_code
8518 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
8519 return omit_one_operand (type, op1, TREE_OPERAND (op0, 0));
8521 /* MIN (MAX (b, a), b) == b. */
8522 if (TREE_CODE (op0) == compl_code
8523 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
8524 && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
8525 return omit_one_operand (type, op1, TREE_OPERAND (op0, 1));
8527 /* MIN (a, MAX (a, b)) == a. */
8528 if (TREE_CODE (op1) == compl_code
8529 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
8530 && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
8531 return omit_one_operand (type, op0, TREE_OPERAND (op1, 1));
8533 /* MIN (a, MAX (b, a)) == a. */
8534 if (TREE_CODE (op1) == compl_code
8535 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
8536 && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
8537 return omit_one_operand (type, op0, TREE_OPERAND (op1, 0));
8542 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8543 by changing CODE to reduce the magnitude of constants involved in
8544 ARG0 of the comparison.
8545 Returns a canonicalized comparison tree if a simplification was
8546 possible, otherwise returns NULL_TREE.
8547 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8548 valid if signed overflow is undefined. */
8551 maybe_canonicalize_comparison_1 (enum tree_code code, tree type,
8552 tree arg0, tree arg1,
8553 bool *strict_overflow_p)
8555 enum tree_code code0 = TREE_CODE (arg0);
8556 tree t, cst0 = NULL_TREE;
8560 /* Match A +- CST code arg1 and CST code arg1. */
8561 if (!(((code0 == MINUS_EXPR
8562 || code0 == PLUS_EXPR)
8563 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8564 || code0 == INTEGER_CST))
8567 /* Identify the constant in arg0 and its sign. */
8568 if (code0 == INTEGER_CST)
8571 cst0 = TREE_OPERAND (arg0, 1);
8572 sgn0 = tree_int_cst_sgn (cst0);
8574 /* Overflowed constants and zero will cause problems. */
8575 if (integer_zerop (cst0)
8576 || TREE_OVERFLOW (cst0))
8579 /* See if we can reduce the magnitude of the constant in
8580 arg0 by changing the comparison code. */
8581 if (code0 == INTEGER_CST)
8583 /* CST <= arg1 -> CST-1 < arg1. */
8584 if (code == LE_EXPR && sgn0 == 1)
8586 /* -CST < arg1 -> -CST-1 <= arg1. */
8587 else if (code == LT_EXPR && sgn0 == -1)
8589 /* CST > arg1 -> CST-1 >= arg1. */
8590 else if (code == GT_EXPR && sgn0 == 1)
8592 /* -CST >= arg1 -> -CST-1 > arg1. */
8593 else if (code == GE_EXPR && sgn0 == -1)
8597 /* arg1 code' CST' might be more canonical. */
8602 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8604 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8606 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8607 else if (code == GT_EXPR
8608 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8610 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8611 else if (code == LE_EXPR
8612 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8614 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8615 else if (code == GE_EXPR
8616 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8620 *strict_overflow_p = true;
8623 /* Now build the constant reduced in magnitude. */
8624 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8625 cst0, build_int_cst (TREE_TYPE (cst0), 1), 0);
8626 if (code0 != INTEGER_CST)
8627 t = fold_build2 (code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
8629 /* If swapping might yield to a more canonical form, do so. */
8631 return fold_build2 (swap_tree_comparison (code), type, arg1, t);
8633 return fold_build2 (code, type, t, arg1);
8636 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8637 overflow further. Try to decrease the magnitude of constants involved
8638 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8639 and put sole constants at the second argument position.
8640 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
8643 maybe_canonicalize_comparison (enum tree_code code, tree type,
8644 tree arg0, tree arg1)
8647 bool strict_overflow_p;
8648 const char * const warnmsg = G_("assuming signed overflow does not occur "
8649 "when reducing constant in comparison");
8651 /* In principle pointers also have undefined overflow behavior,
8652 but that causes problems elsewhere. */
8653 if (!TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8654 || POINTER_TYPE_P (TREE_TYPE (arg0)))
8657 /* Try canonicalization by simplifying arg0. */
8658 strict_overflow_p = false;
8659 t = maybe_canonicalize_comparison_1 (code, type, arg0, arg1,
8660 &strict_overflow_p);
8663 if (strict_overflow_p)
8664 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8668 /* Try canonicalization by simplifying arg1 using the swapped
8670 code = swap_tree_comparison (code);
8671 strict_overflow_p = false;
8672 t = maybe_canonicalize_comparison_1 (code, type, arg1, arg0,
8673 &strict_overflow_p);
8674 if (t && strict_overflow_p)
8675 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8679 /* Subroutine of fold_binary. This routine performs all of the
8680 transformations that are common to the equality/inequality
8681 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8682 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
8683 fold_binary should call fold_binary. Fold a comparison with
8684 tree code CODE and type TYPE with operands OP0 and OP1. Return
8685 the folded comparison or NULL_TREE. */
8688 fold_comparison (enum tree_code code, tree type, tree op0, tree op1)
8690 tree arg0, arg1, tem;
8695 STRIP_SIGN_NOPS (arg0);
8696 STRIP_SIGN_NOPS (arg1);
8698 tem = fold_relational_const (code, type, arg0, arg1);
8699 if (tem != NULL_TREE)
8702 /* If one arg is a real or integer constant, put it last. */
8703 if (tree_swap_operands_p (arg0, arg1, true))
8704 return fold_build2 (swap_tree_comparison (code), type, op1, op0);
8706 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1. */
8707 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8708 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8709 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8710 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
8711 && (TREE_CODE (arg1) == INTEGER_CST
8712 && !TREE_OVERFLOW (arg1)))
8714 tree const1 = TREE_OPERAND (arg0, 1);
8716 tree variable = TREE_OPERAND (arg0, 0);
8719 lhs_add = TREE_CODE (arg0) != PLUS_EXPR;
8721 lhs = fold_build2 (lhs_add ? PLUS_EXPR : MINUS_EXPR,
8722 TREE_TYPE (arg1), const2, const1);
8724 /* If the constant operation overflowed this can be
8725 simplified as a comparison against INT_MAX/INT_MIN. */
8726 if (TREE_CODE (lhs) == INTEGER_CST
8727 && TREE_OVERFLOW (lhs))
8729 int const1_sgn = tree_int_cst_sgn (const1);
8730 enum tree_code code2 = code;
8732 /* Get the sign of the constant on the lhs if the
8733 operation were VARIABLE + CONST1. */
8734 if (TREE_CODE (arg0) == MINUS_EXPR)
8735 const1_sgn = -const1_sgn;
8737 /* The sign of the constant determines if we overflowed
8738 INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
8739 Canonicalize to the INT_MIN overflow by swapping the comparison
8741 if (const1_sgn == -1)
8742 code2 = swap_tree_comparison (code);
8744 /* We now can look at the canonicalized case
8745 VARIABLE + 1 CODE2 INT_MIN
8746 and decide on the result. */
8747 if (code2 == LT_EXPR
8749 || code2 == EQ_EXPR)
8750 return omit_one_operand (type, boolean_false_node, variable);
8751 else if (code2 == NE_EXPR
8753 || code2 == GT_EXPR)
8754 return omit_one_operand (type, boolean_true_node, variable);
8757 if (TREE_CODE (lhs) == TREE_CODE (arg1)
8758 && (TREE_CODE (lhs) != INTEGER_CST
8759 || !TREE_OVERFLOW (lhs)))
8761 fold_overflow_warning (("assuming signed overflow does not occur "
8762 "when changing X +- C1 cmp C2 to "
8764 WARN_STRICT_OVERFLOW_COMPARISON);
8765 return fold_build2 (code, type, variable, lhs);
8769 /* For comparisons of pointers we can decompose it to a compile time
8770 comparison of the base objects and the offsets into the object.
8771 This requires at least one operand being an ADDR_EXPR or a
8772 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
8773 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8774 && (TREE_CODE (arg0) == ADDR_EXPR
8775 || TREE_CODE (arg1) == ADDR_EXPR
8776 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
8777 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
8779 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
8780 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
8781 enum machine_mode mode;
8782 int volatilep, unsignedp;
8783 bool indirect_base0 = false;
8785 /* Get base and offset for the access. Strip ADDR_EXPR for
8786 get_inner_reference, but put it back by stripping INDIRECT_REF
8787 off the base object if possible. */
8789 if (TREE_CODE (arg0) == ADDR_EXPR)
8791 base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
8792 &bitsize, &bitpos0, &offset0, &mode,
8793 &unsignedp, &volatilep, false);
8794 if (TREE_CODE (base0) == INDIRECT_REF)
8795 base0 = TREE_OPERAND (base0, 0);
8797 indirect_base0 = true;
8799 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
8801 base0 = TREE_OPERAND (arg0, 0);
8802 offset0 = TREE_OPERAND (arg0, 1);
8806 if (TREE_CODE (arg1) == ADDR_EXPR)
8808 base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
8809 &bitsize, &bitpos1, &offset1, &mode,
8810 &unsignedp, &volatilep, false);
8811 /* We have to make sure to have an indirect/non-indirect base1
8812 just the same as we did for base0. */
8813 if (TREE_CODE (base1) == INDIRECT_REF
8815 base1 = TREE_OPERAND (base1, 0);
8816 else if (!indirect_base0)
8819 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
8821 base1 = TREE_OPERAND (arg1, 0);
8822 offset1 = TREE_OPERAND (arg1, 1);
8824 else if (indirect_base0)
8827 /* If we have equivalent bases we might be able to simplify. */
8829 && operand_equal_p (base0, base1, 0))
8831 /* We can fold this expression to a constant if the non-constant
8832 offset parts are equal. */
8833 if (offset0 == offset1
8834 || (offset0 && offset1
8835 && operand_equal_p (offset0, offset1, 0)))
8840 return build_int_cst (boolean_type_node, bitpos0 == bitpos1);
8842 return build_int_cst (boolean_type_node, bitpos0 != bitpos1);
8844 return build_int_cst (boolean_type_node, bitpos0 < bitpos1);
8846 return build_int_cst (boolean_type_node, bitpos0 <= bitpos1);
8848 return build_int_cst (boolean_type_node, bitpos0 >= bitpos1);
8850 return build_int_cst (boolean_type_node, bitpos0 > bitpos1);
8854 /* We can simplify the comparison to a comparison of the variable
8855 offset parts if the constant offset parts are equal.
8856 Be careful to use signed size type here because otherwise we
8857 mess with array offsets in the wrong way. This is possible
8858 because pointer arithmetic is restricted to retain within an
8859 object and overflow on pointer differences is undefined as of
8860 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
8861 else if (bitpos0 == bitpos1)
8863 tree signed_size_type_node;
8864 signed_size_type_node = signed_type_for (size_type_node);
8866 /* By converting to signed size type we cover middle-end pointer
8867 arithmetic which operates on unsigned pointer types of size
8868 type size and ARRAY_REF offsets which are properly sign or
8869 zero extended from their type in case it is narrower than
8871 if (offset0 == NULL_TREE)
8872 offset0 = build_int_cst (signed_size_type_node, 0);
8874 offset0 = fold_convert (signed_size_type_node, offset0);
8875 if (offset1 == NULL_TREE)
8876 offset1 = build_int_cst (signed_size_type_node, 0);
8878 offset1 = fold_convert (signed_size_type_node, offset1);
8880 return fold_build2 (code, type, offset0, offset1);
8885 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
8886 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
8887 the resulting offset is smaller in absolute value than the
8889 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8890 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8891 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8892 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
8893 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
8894 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
8895 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
8897 tree const1 = TREE_OPERAND (arg0, 1);
8898 tree const2 = TREE_OPERAND (arg1, 1);
8899 tree variable1 = TREE_OPERAND (arg0, 0);
8900 tree variable2 = TREE_OPERAND (arg1, 0);
8902 const char * const warnmsg = G_("assuming signed overflow does not "
8903 "occur when combining constants around "
8906 /* Put the constant on the side where it doesn't overflow and is
8907 of lower absolute value than before. */
8908 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8909 ? MINUS_EXPR : PLUS_EXPR,
8911 if (!TREE_OVERFLOW (cst)
8912 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2))
8914 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8915 return fold_build2 (code, type,
8917 fold_build2 (TREE_CODE (arg1), TREE_TYPE (arg1),
8921 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8922 ? MINUS_EXPR : PLUS_EXPR,
8924 if (!TREE_OVERFLOW (cst)
8925 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1))
8927 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8928 return fold_build2 (code, type,
8929 fold_build2 (TREE_CODE (arg0), TREE_TYPE (arg0),
8935 /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
8936 signed arithmetic case. That form is created by the compiler
8937 often enough for folding it to be of value. One example is in
8938 computing loop trip counts after Operator Strength Reduction. */
8939 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8940 && TREE_CODE (arg0) == MULT_EXPR
8941 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8942 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
8943 && integer_zerop (arg1))
8945 tree const1 = TREE_OPERAND (arg0, 1);
8946 tree const2 = arg1; /* zero */
8947 tree variable1 = TREE_OPERAND (arg0, 0);
8948 enum tree_code cmp_code = code;
8950 gcc_assert (!integer_zerop (const1));
8952 fold_overflow_warning (("assuming signed overflow does not occur when "
8953 "eliminating multiplication in comparison "
8955 WARN_STRICT_OVERFLOW_COMPARISON);
8957 /* If const1 is negative we swap the sense of the comparison. */
8958 if (tree_int_cst_sgn (const1) < 0)
8959 cmp_code = swap_tree_comparison (cmp_code);
8961 return fold_build2 (cmp_code, type, variable1, const2);
8964 tem = maybe_canonicalize_comparison (code, type, op0, op1);
8968 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
8970 tree targ0 = strip_float_extensions (arg0);
8971 tree targ1 = strip_float_extensions (arg1);
8972 tree newtype = TREE_TYPE (targ0);
8974 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
8975 newtype = TREE_TYPE (targ1);
8977 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
8978 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
8979 return fold_build2 (code, type, fold_convert (newtype, targ0),
8980 fold_convert (newtype, targ1));
8982 /* (-a) CMP (-b) -> b CMP a */
8983 if (TREE_CODE (arg0) == NEGATE_EXPR
8984 && TREE_CODE (arg1) == NEGATE_EXPR)
8985 return fold_build2 (code, type, TREE_OPERAND (arg1, 0),
8986 TREE_OPERAND (arg0, 0));
8988 if (TREE_CODE (arg1) == REAL_CST)
8990 REAL_VALUE_TYPE cst;
8991 cst = TREE_REAL_CST (arg1);
8993 /* (-a) CMP CST -> a swap(CMP) (-CST) */
8994 if (TREE_CODE (arg0) == NEGATE_EXPR)
8995 return fold_build2 (swap_tree_comparison (code), type,
8996 TREE_OPERAND (arg0, 0),
8997 build_real (TREE_TYPE (arg1),
8998 REAL_VALUE_NEGATE (cst)));
9000 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
9001 /* a CMP (-0) -> a CMP 0 */
9002 if (REAL_VALUE_MINUS_ZERO (cst))
9003 return fold_build2 (code, type, arg0,
9004 build_real (TREE_TYPE (arg1), dconst0));
9006 /* x != NaN is always true, other ops are always false. */
9007 if (REAL_VALUE_ISNAN (cst)
9008 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
9010 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
9011 return omit_one_operand (type, tem, arg0);
9014 /* Fold comparisons against infinity. */
9015 if (REAL_VALUE_ISINF (cst))
9017 tem = fold_inf_compare (code, type, arg0, arg1);
9018 if (tem != NULL_TREE)
9023 /* If this is a comparison of a real constant with a PLUS_EXPR
9024 or a MINUS_EXPR of a real constant, we can convert it into a
9025 comparison with a revised real constant as long as no overflow
9026 occurs when unsafe_math_optimizations are enabled. */
9027 if (flag_unsafe_math_optimizations
9028 && TREE_CODE (arg1) == REAL_CST
9029 && (TREE_CODE (arg0) == PLUS_EXPR
9030 || TREE_CODE (arg0) == MINUS_EXPR)
9031 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
9032 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
9033 ? MINUS_EXPR : PLUS_EXPR,
9034 arg1, TREE_OPERAND (arg0, 1), 0))
9035 && !TREE_OVERFLOW (tem))
9036 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
9038 /* Likewise, we can simplify a comparison of a real constant with
9039 a MINUS_EXPR whose first operand is also a real constant, i.e.
9040 (c1 - x) < c2 becomes x > c1-c2. Reordering is allowed on
9041 floating-point types only if -fassociative-math is set. */
9042 if (flag_associative_math
9043 && TREE_CODE (arg1) == REAL_CST
9044 && TREE_CODE (arg0) == MINUS_EXPR
9045 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
9046 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
9048 && !TREE_OVERFLOW (tem))
9049 return fold_build2 (swap_tree_comparison (code), type,
9050 TREE_OPERAND (arg0, 1), tem);
9052 /* Fold comparisons against built-in math functions. */
9053 if (TREE_CODE (arg1) == REAL_CST
9054 && flag_unsafe_math_optimizations
9055 && ! flag_errno_math)
9057 enum built_in_function fcode = builtin_mathfn_code (arg0);
9059 if (fcode != END_BUILTINS)
9061 tem = fold_mathfn_compare (fcode, code, type, arg0, arg1);
9062 if (tem != NULL_TREE)
9068 if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
9069 && (TREE_CODE (arg0) == NOP_EXPR
9070 || TREE_CODE (arg0) == CONVERT_EXPR))
9072 /* If we are widening one operand of an integer comparison,
9073 see if the other operand is similarly being widened. Perhaps we
9074 can do the comparison in the narrower type. */
9075 tem = fold_widened_comparison (code, type, arg0, arg1);
9079 /* Or if we are changing signedness. */
9080 tem = fold_sign_changed_comparison (code, type, arg0, arg1);
9085 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
9086 constant, we can simplify it. */
9087 if (TREE_CODE (arg1) == INTEGER_CST
9088 && (TREE_CODE (arg0) == MIN_EXPR
9089 || TREE_CODE (arg0) == MAX_EXPR)
9090 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9092 tem = optimize_minmax_comparison (code, type, op0, op1);
9097 /* Simplify comparison of something with itself. (For IEEE
9098 floating-point, we can only do some of these simplifications.) */
9099 if (operand_equal_p (arg0, arg1, 0))
9104 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9105 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9106 return constant_boolean_node (1, type);
9111 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9112 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9113 return constant_boolean_node (1, type);
9114 return fold_build2 (EQ_EXPR, type, arg0, arg1);
9117 /* For NE, we can only do this simplification if integer
9118 or we don't honor IEEE floating point NaNs. */
9119 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
9120 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9122 /* ... fall through ... */
9125 return constant_boolean_node (0, type);
9131 /* If we are comparing an expression that just has comparisons
9132 of two integer values, arithmetic expressions of those comparisons,
9133 and constants, we can simplify it. There are only three cases
9134 to check: the two values can either be equal, the first can be
9135 greater, or the second can be greater. Fold the expression for
9136 those three values. Since each value must be 0 or 1, we have
9137 eight possibilities, each of which corresponds to the constant 0
9138 or 1 or one of the six possible comparisons.
9140 This handles common cases like (a > b) == 0 but also handles
9141 expressions like ((x > y) - (y > x)) > 0, which supposedly
9142 occur in macroized code. */
9144 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
9146 tree cval1 = 0, cval2 = 0;
9149 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
9150 /* Don't handle degenerate cases here; they should already
9151 have been handled anyway. */
9152 && cval1 != 0 && cval2 != 0
9153 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
9154 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
9155 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
9156 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
9157 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
9158 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
9159 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
9161 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
9162 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
9164 /* We can't just pass T to eval_subst in case cval1 or cval2
9165 was the same as ARG1. */
9168 = fold_build2 (code, type,
9169 eval_subst (arg0, cval1, maxval,
9173 = fold_build2 (code, type,
9174 eval_subst (arg0, cval1, maxval,
9178 = fold_build2 (code, type,
9179 eval_subst (arg0, cval1, minval,
9183 /* All three of these results should be 0 or 1. Confirm they are.
9184 Then use those values to select the proper code to use. */
9186 if (TREE_CODE (high_result) == INTEGER_CST
9187 && TREE_CODE (equal_result) == INTEGER_CST
9188 && TREE_CODE (low_result) == INTEGER_CST)
9190 /* Make a 3-bit mask with the high-order bit being the
9191 value for `>', the next for '=', and the low for '<'. */
9192 switch ((integer_onep (high_result) * 4)
9193 + (integer_onep (equal_result) * 2)
9194 + integer_onep (low_result))
9198 return omit_one_operand (type, integer_zero_node, arg0);
9219 return omit_one_operand (type, integer_one_node, arg0);
9223 return save_expr (build2 (code, type, cval1, cval2));
9224 return fold_build2 (code, type, cval1, cval2);
9229 /* Fold a comparison of the address of COMPONENT_REFs with the same
9230 type and component to a comparison of the address of the base
9231 object. In short, &x->a OP &y->a to x OP y and
9232 &x->a OP &y.a to x OP &y */
9233 if (TREE_CODE (arg0) == ADDR_EXPR
9234 && TREE_CODE (TREE_OPERAND (arg0, 0)) == COMPONENT_REF
9235 && TREE_CODE (arg1) == ADDR_EXPR
9236 && TREE_CODE (TREE_OPERAND (arg1, 0)) == COMPONENT_REF)
9238 tree cref0 = TREE_OPERAND (arg0, 0);
9239 tree cref1 = TREE_OPERAND (arg1, 0);
9240 if (TREE_OPERAND (cref0, 1) == TREE_OPERAND (cref1, 1))
9242 tree op0 = TREE_OPERAND (cref0, 0);
9243 tree op1 = TREE_OPERAND (cref1, 0);
9244 return fold_build2 (code, type,
9245 fold_addr_expr (op0),
9246 fold_addr_expr (op1));
9250 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
9251 into a single range test. */
9252 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
9253 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
9254 && TREE_CODE (arg1) == INTEGER_CST
9255 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9256 && !integer_zerop (TREE_OPERAND (arg0, 1))
9257 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9258 && !TREE_OVERFLOW (arg1))
9260 tem = fold_div_compare (code, type, arg0, arg1);
9261 if (tem != NULL_TREE)
9265 /* Fold ~X op ~Y as Y op X. */
9266 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9267 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9269 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9270 return fold_build2 (code, type,
9271 fold_convert (cmp_type, TREE_OPERAND (arg1, 0)),
9272 TREE_OPERAND (arg0, 0));
9275 /* Fold ~X op C as X op' ~C, where op' is the swapped comparison. */
9276 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9277 && TREE_CODE (arg1) == INTEGER_CST)
9279 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9280 return fold_build2 (swap_tree_comparison (code), type,
9281 TREE_OPERAND (arg0, 0),
9282 fold_build1 (BIT_NOT_EXPR, cmp_type,
9283 fold_convert (cmp_type, arg1)));
9290 /* Subroutine of fold_binary. Optimize complex multiplications of the
9291 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
9292 argument EXPR represents the expression "z" of type TYPE. */
9295 fold_mult_zconjz (tree type, tree expr)
9297 tree itype = TREE_TYPE (type);
9298 tree rpart, ipart, tem;
9300 if (TREE_CODE (expr) == COMPLEX_EXPR)
9302 rpart = TREE_OPERAND (expr, 0);
9303 ipart = TREE_OPERAND (expr, 1);
9305 else if (TREE_CODE (expr) == COMPLEX_CST)
9307 rpart = TREE_REALPART (expr);
9308 ipart = TREE_IMAGPART (expr);
9312 expr = save_expr (expr);
9313 rpart = fold_build1 (REALPART_EXPR, itype, expr);
9314 ipart = fold_build1 (IMAGPART_EXPR, itype, expr);
9317 rpart = save_expr (rpart);
9318 ipart = save_expr (ipart);
9319 tem = fold_build2 (PLUS_EXPR, itype,
9320 fold_build2 (MULT_EXPR, itype, rpart, rpart),
9321 fold_build2 (MULT_EXPR, itype, ipart, ipart));
9322 return fold_build2 (COMPLEX_EXPR, type, tem,
9323 fold_convert (itype, integer_zero_node));
9327 /* Subroutine of fold_binary. If P is the value of EXPR, computes
9328 power-of-two M and (arbitrary) N such that M divides (P-N). This condition
9329 guarantees that P and N have the same least significant log2(M) bits.
9330 N is not otherwise constrained. In particular, N is not normalized to
9331 0 <= N < M as is common. In general, the precise value of P is unknown.
9332 M is chosen as large as possible such that constant N can be determined.
9334 Returns M and sets *RESIDUE to N. */
9336 static unsigned HOST_WIDE_INT
9337 get_pointer_modulus_and_residue (tree expr, unsigned HOST_WIDE_INT *residue)
9339 enum tree_code code;
9343 code = TREE_CODE (expr);
9344 if (code == ADDR_EXPR)
9346 expr = TREE_OPERAND (expr, 0);
9347 if (handled_component_p (expr))
9349 HOST_WIDE_INT bitsize, bitpos;
9351 enum machine_mode mode;
9352 int unsignedp, volatilep;
9354 expr = get_inner_reference (expr, &bitsize, &bitpos, &offset,
9355 &mode, &unsignedp, &volatilep, false);
9356 *residue = bitpos / BITS_PER_UNIT;
9359 if (TREE_CODE (offset) == INTEGER_CST)
9360 *residue += TREE_INT_CST_LOW (offset);
9362 /* We don't handle more complicated offset expressions. */
9368 return DECL_ALIGN_UNIT (expr);
9370 else if (code == POINTER_PLUS_EXPR)
9373 unsigned HOST_WIDE_INT modulus;
9374 enum tree_code inner_code;
9376 op0 = TREE_OPERAND (expr, 0);
9378 modulus = get_pointer_modulus_and_residue (op0, residue);
9380 op1 = TREE_OPERAND (expr, 1);
9382 inner_code = TREE_CODE (op1);
9383 if (inner_code == INTEGER_CST)
9385 *residue += TREE_INT_CST_LOW (op1);
9388 else if (inner_code == MULT_EXPR)
9390 op1 = TREE_OPERAND (op1, 1);
9391 if (TREE_CODE (op1) == INTEGER_CST)
9393 unsigned HOST_WIDE_INT align;
9395 /* Compute the greatest power-of-2 divisor of op1. */
9396 align = TREE_INT_CST_LOW (op1);
9399 /* If align is non-zero and less than *modulus, replace
9400 *modulus with align., If align is 0, then either op1 is 0
9401 or the greatest power-of-2 divisor of op1 doesn't fit in an
9402 unsigned HOST_WIDE_INT. In either case, no additional
9403 constraint is imposed. */
9405 modulus = MIN (modulus, align);
9412 /* If we get here, we were unable to determine anything useful about the
9418 /* Fold a binary expression of code CODE and type TYPE with operands
9419 OP0 and OP1. Return the folded expression if folding is
9420 successful. Otherwise, return NULL_TREE. */
9423 fold_binary (enum tree_code code, tree type, tree op0, tree op1)
9425 enum tree_code_class kind = TREE_CODE_CLASS (code);
9426 tree arg0, arg1, tem;
9427 tree t1 = NULL_TREE;
9428 bool strict_overflow_p;
9430 gcc_assert ((IS_EXPR_CODE_CLASS (kind)
9431 || IS_GIMPLE_STMT_CODE_CLASS (kind))
9432 && TREE_CODE_LENGTH (code) == 2
9434 && op1 != NULL_TREE);
9439 /* Strip any conversions that don't change the mode. This is
9440 safe for every expression, except for a comparison expression
9441 because its signedness is derived from its operands. So, in
9442 the latter case, only strip conversions that don't change the
9445 Note that this is done as an internal manipulation within the
9446 constant folder, in order to find the simplest representation
9447 of the arguments so that their form can be studied. In any
9448 cases, the appropriate type conversions should be put back in
9449 the tree that will get out of the constant folder. */
9451 if (kind == tcc_comparison)
9453 STRIP_SIGN_NOPS (arg0);
9454 STRIP_SIGN_NOPS (arg1);
9462 /* Note that TREE_CONSTANT isn't enough: static var addresses are
9463 constant but we can't do arithmetic on them. */
9464 if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9465 || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
9466 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == FIXED_CST)
9467 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == INTEGER_CST)
9468 || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
9469 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST))
9471 if (kind == tcc_binary)
9473 /* Make sure type and arg0 have the same saturating flag. */
9474 gcc_assert (TYPE_SATURATING (type)
9475 == TYPE_SATURATING (TREE_TYPE (arg0)));
9476 tem = const_binop (code, arg0, arg1, 0);
9478 else if (kind == tcc_comparison)
9479 tem = fold_relational_const (code, type, arg0, arg1);
9483 if (tem != NULL_TREE)
9485 if (TREE_TYPE (tem) != type)
9486 tem = fold_convert (type, tem);
9491 /* If this is a commutative operation, and ARG0 is a constant, move it
9492 to ARG1 to reduce the number of tests below. */
9493 if (commutative_tree_code (code)
9494 && tree_swap_operands_p (arg0, arg1, true))
9495 return fold_build2 (code, type, op1, op0);
9497 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
9499 First check for cases where an arithmetic operation is applied to a
9500 compound, conditional, or comparison operation. Push the arithmetic
9501 operation inside the compound or conditional to see if any folding
9502 can then be done. Convert comparison to conditional for this purpose.
9503 The also optimizes non-constant cases that used to be done in
9506 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
9507 one of the operands is a comparison and the other is a comparison, a
9508 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
9509 code below would make the expression more complex. Change it to a
9510 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
9511 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
9513 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
9514 || code == EQ_EXPR || code == NE_EXPR)
9515 && ((truth_value_p (TREE_CODE (arg0))
9516 && (truth_value_p (TREE_CODE (arg1))
9517 || (TREE_CODE (arg1) == BIT_AND_EXPR
9518 && integer_onep (TREE_OPERAND (arg1, 1)))))
9519 || (truth_value_p (TREE_CODE (arg1))
9520 && (truth_value_p (TREE_CODE (arg0))
9521 || (TREE_CODE (arg0) == BIT_AND_EXPR
9522 && integer_onep (TREE_OPERAND (arg0, 1)))))))
9524 tem = fold_build2 (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
9525 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
9528 fold_convert (boolean_type_node, arg0),
9529 fold_convert (boolean_type_node, arg1));
9531 if (code == EQ_EXPR)
9532 tem = invert_truthvalue (tem);
9534 return fold_convert (type, tem);
9537 if (TREE_CODE_CLASS (code) == tcc_binary
9538 || TREE_CODE_CLASS (code) == tcc_comparison)
9540 if (TREE_CODE (arg0) == COMPOUND_EXPR)
9541 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
9542 fold_build2 (code, type,
9543 fold_convert (TREE_TYPE (op0),
9544 TREE_OPERAND (arg0, 1)),
9546 if (TREE_CODE (arg1) == COMPOUND_EXPR
9547 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9548 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
9549 fold_build2 (code, type, op0,
9550 fold_convert (TREE_TYPE (op1),
9551 TREE_OPERAND (arg1, 1))));
9553 if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
9555 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
9557 /*cond_first_p=*/1);
9558 if (tem != NULL_TREE)
9562 if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1))
9564 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
9566 /*cond_first_p=*/0);
9567 if (tem != NULL_TREE)
9574 case POINTER_PLUS_EXPR:
9575 /* 0 +p index -> (type)index */
9576 if (integer_zerop (arg0))
9577 return non_lvalue (fold_convert (type, arg1));
9579 /* PTR +p 0 -> PTR */
9580 if (integer_zerop (arg1))
9581 return non_lvalue (fold_convert (type, arg0));
9583 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
9584 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9585 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9586 return fold_convert (type, fold_build2 (PLUS_EXPR, sizetype,
9587 fold_convert (sizetype, arg1),
9588 fold_convert (sizetype, arg0)));
9590 /* index +p PTR -> PTR +p index */
9591 if (POINTER_TYPE_P (TREE_TYPE (arg1))
9592 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9593 return fold_build2 (POINTER_PLUS_EXPR, type,
9594 fold_convert (type, arg1),
9595 fold_convert (sizetype, arg0));
9597 /* (PTR +p B) +p A -> PTR +p (B + A) */
9598 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
9601 tree arg01 = fold_convert (sizetype, TREE_OPERAND (arg0, 1));
9602 tree arg00 = TREE_OPERAND (arg0, 0);
9603 inner = fold_build2 (PLUS_EXPR, sizetype,
9604 arg01, fold_convert (sizetype, arg1));
9605 return fold_convert (type,
9606 fold_build2 (POINTER_PLUS_EXPR,
9607 TREE_TYPE (arg00), arg00, inner));
9610 /* PTR_CST +p CST -> CST1 */
9611 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9612 return fold_build2 (PLUS_EXPR, type, arg0, fold_convert (type, arg1));
9614 /* Try replacing &a[i1] +p c * i2 with &a[i1 + i2], if c is step
9615 of the array. Loop optimizer sometimes produce this type of
9617 if (TREE_CODE (arg0) == ADDR_EXPR)
9619 tem = try_move_mult_to_index (arg0, fold_convert (sizetype, arg1));
9621 return fold_convert (type, tem);
9627 /* PTR + INT -> (INT)(PTR p+ INT) */
9628 if (POINTER_TYPE_P (TREE_TYPE (arg0))
9629 && INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
9630 return fold_convert (type, fold_build2 (POINTER_PLUS_EXPR,
9633 fold_convert (sizetype, arg1)));
9634 /* INT + PTR -> (INT)(PTR p+ INT) */
9635 if (POINTER_TYPE_P (TREE_TYPE (arg1))
9636 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9637 return fold_convert (type, fold_build2 (POINTER_PLUS_EXPR,
9640 fold_convert (sizetype, arg0)));
9641 /* A + (-B) -> A - B */
9642 if (TREE_CODE (arg1) == NEGATE_EXPR)
9643 return fold_build2 (MINUS_EXPR, type,
9644 fold_convert (type, arg0),
9645 fold_convert (type, TREE_OPERAND (arg1, 0)));
9646 /* (-A) + B -> B - A */
9647 if (TREE_CODE (arg0) == NEGATE_EXPR
9648 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
9649 return fold_build2 (MINUS_EXPR, type,
9650 fold_convert (type, arg1),
9651 fold_convert (type, TREE_OPERAND (arg0, 0)));
9653 if (INTEGRAL_TYPE_P (type))
9655 /* Convert ~A + 1 to -A. */
9656 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9657 && integer_onep (arg1))
9658 return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));
9661 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9662 && !TYPE_OVERFLOW_TRAPS (type))
9664 tree tem = TREE_OPERAND (arg0, 0);
9667 if (operand_equal_p (tem, arg1, 0))
9669 t1 = build_int_cst_type (type, -1);
9670 return omit_one_operand (type, t1, arg1);
9675 if (TREE_CODE (arg1) == BIT_NOT_EXPR
9676 && !TYPE_OVERFLOW_TRAPS (type))
9678 tree tem = TREE_OPERAND (arg1, 0);
9681 if (operand_equal_p (arg0, tem, 0))
9683 t1 = build_int_cst_type (type, -1);
9684 return omit_one_operand (type, t1, arg0);
9688 /* X + (X / CST) * -CST is X % CST. */
9689 if (TREE_CODE (arg1) == MULT_EXPR
9690 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
9691 && operand_equal_p (arg0,
9692 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
9694 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
9695 tree cst1 = TREE_OPERAND (arg1, 1);
9696 tree sum = fold_binary (PLUS_EXPR, TREE_TYPE (cst1), cst1, cst0);
9697 if (sum && integer_zerop (sum))
9698 return fold_convert (type,
9699 fold_build2 (TRUNC_MOD_EXPR,
9700 TREE_TYPE (arg0), arg0, cst0));
9704 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the
9705 same or one. Make sure type is not saturating.
9706 fold_plusminus_mult_expr will re-associate. */
9707 if ((TREE_CODE (arg0) == MULT_EXPR
9708 || TREE_CODE (arg1) == MULT_EXPR)
9709 && !TYPE_SATURATING (type)
9710 && (!FLOAT_TYPE_P (type) || flag_associative_math))
9712 tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1);
9717 if (! FLOAT_TYPE_P (type))
9719 if (integer_zerop (arg1))
9720 return non_lvalue (fold_convert (type, arg0));
9722 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
9723 with a constant, and the two constants have no bits in common,
9724 we should treat this as a BIT_IOR_EXPR since this may produce more
9726 if (TREE_CODE (arg0) == BIT_AND_EXPR
9727 && TREE_CODE (arg1) == BIT_AND_EXPR
9728 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9729 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9730 && integer_zerop (const_binop (BIT_AND_EXPR,
9731 TREE_OPERAND (arg0, 1),
9732 TREE_OPERAND (arg1, 1), 0)))
9734 code = BIT_IOR_EXPR;
9738 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
9739 (plus (plus (mult) (mult)) (foo)) so that we can
9740 take advantage of the factoring cases below. */
9741 if (((TREE_CODE (arg0) == PLUS_EXPR
9742 || TREE_CODE (arg0) == MINUS_EXPR)
9743 && TREE_CODE (arg1) == MULT_EXPR)
9744 || ((TREE_CODE (arg1) == PLUS_EXPR
9745 || TREE_CODE (arg1) == MINUS_EXPR)
9746 && TREE_CODE (arg0) == MULT_EXPR))
9748 tree parg0, parg1, parg, marg;
9749 enum tree_code pcode;
9751 if (TREE_CODE (arg1) == MULT_EXPR)
9752 parg = arg0, marg = arg1;
9754 parg = arg1, marg = arg0;
9755 pcode = TREE_CODE (parg);
9756 parg0 = TREE_OPERAND (parg, 0);
9757 parg1 = TREE_OPERAND (parg, 1);
9761 if (TREE_CODE (parg0) == MULT_EXPR
9762 && TREE_CODE (parg1) != MULT_EXPR)
9763 return fold_build2 (pcode, type,
9764 fold_build2 (PLUS_EXPR, type,
9765 fold_convert (type, parg0),
9766 fold_convert (type, marg)),
9767 fold_convert (type, parg1));
9768 if (TREE_CODE (parg0) != MULT_EXPR
9769 && TREE_CODE (parg1) == MULT_EXPR)
9770 return fold_build2 (PLUS_EXPR, type,
9771 fold_convert (type, parg0),
9772 fold_build2 (pcode, type,
9773 fold_convert (type, marg),
9780 /* See if ARG1 is zero and X + ARG1 reduces to X. */
9781 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
9782 return non_lvalue (fold_convert (type, arg0));
9784 /* Likewise if the operands are reversed. */
9785 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
9786 return non_lvalue (fold_convert (type, arg1));
9788 /* Convert X + -C into X - C. */
9789 if (TREE_CODE (arg1) == REAL_CST
9790 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
9792 tem = fold_negate_const (arg1, type);
9793 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
9794 return fold_build2 (MINUS_EXPR, type,
9795 fold_convert (type, arg0),
9796 fold_convert (type, tem));
9799 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
9800 to __complex__ ( x, y ). This is not the same for SNaNs or
9801 if signed zeros are involved. */
9802 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9803 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
9804 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
9806 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9807 tree arg0r = fold_unary (REALPART_EXPR, rtype, arg0);
9808 tree arg0i = fold_unary (IMAGPART_EXPR, rtype, arg0);
9809 bool arg0rz = false, arg0iz = false;
9810 if ((arg0r && (arg0rz = real_zerop (arg0r)))
9811 || (arg0i && (arg0iz = real_zerop (arg0i))))
9813 tree arg1r = fold_unary (REALPART_EXPR, rtype, arg1);
9814 tree arg1i = fold_unary (IMAGPART_EXPR, rtype, arg1);
9815 if (arg0rz && arg1i && real_zerop (arg1i))
9817 tree rp = arg1r ? arg1r
9818 : build1 (REALPART_EXPR, rtype, arg1);
9819 tree ip = arg0i ? arg0i
9820 : build1 (IMAGPART_EXPR, rtype, arg0);
9821 return fold_build2 (COMPLEX_EXPR, type, rp, ip);
9823 else if (arg0iz && arg1r && real_zerop (arg1r))
9825 tree rp = arg0r ? arg0r
9826 : build1 (REALPART_EXPR, rtype, arg0);
9827 tree ip = arg1i ? arg1i
9828 : build1 (IMAGPART_EXPR, rtype, arg1);
9829 return fold_build2 (COMPLEX_EXPR, type, rp, ip);
9834 if (flag_unsafe_math_optimizations
9835 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
9836 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
9837 && (tem = distribute_real_division (code, type, arg0, arg1)))
9840 /* Convert x+x into x*2.0. */
9841 if (operand_equal_p (arg0, arg1, 0)
9842 && SCALAR_FLOAT_TYPE_P (type))
9843 return fold_build2 (MULT_EXPR, type, arg0,
9844 build_real (type, dconst2));
9846 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
9847 We associate floats only if the user has specified
9848 -fassociative-math. */
9849 if (flag_associative_math
9850 && TREE_CODE (arg1) == PLUS_EXPR
9851 && TREE_CODE (arg0) != MULT_EXPR)
9853 tree tree10 = TREE_OPERAND (arg1, 0);
9854 tree tree11 = TREE_OPERAND (arg1, 1);
9855 if (TREE_CODE (tree11) == MULT_EXPR
9856 && TREE_CODE (tree10) == MULT_EXPR)
9859 tree0 = fold_build2 (PLUS_EXPR, type, arg0, tree10);
9860 return fold_build2 (PLUS_EXPR, type, tree0, tree11);
9863 /* Convert (b*c + d*e) + a into b*c + (d*e +a).
9864 We associate floats only if the user has specified
9865 -fassociative-math. */
9866 if (flag_associative_math
9867 && TREE_CODE (arg0) == PLUS_EXPR
9868 && TREE_CODE (arg1) != MULT_EXPR)
9870 tree tree00 = TREE_OPERAND (arg0, 0);
9871 tree tree01 = TREE_OPERAND (arg0, 1);
9872 if (TREE_CODE (tree01) == MULT_EXPR
9873 && TREE_CODE (tree00) == MULT_EXPR)
9876 tree0 = fold_build2 (PLUS_EXPR, type, tree01, arg1);
9877 return fold_build2 (PLUS_EXPR, type, tree00, tree0);
9883 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
9884 is a rotate of A by C1 bits. */
9885 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
9886 is a rotate of A by B bits. */
9888 enum tree_code code0, code1;
9890 code0 = TREE_CODE (arg0);
9891 code1 = TREE_CODE (arg1);
9892 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
9893 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
9894 && operand_equal_p (TREE_OPERAND (arg0, 0),
9895 TREE_OPERAND (arg1, 0), 0)
9896 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
9897 TYPE_UNSIGNED (rtype))
9898 /* Only create rotates in complete modes. Other cases are not
9899 expanded properly. */
9900 && TYPE_PRECISION (rtype) == GET_MODE_PRECISION (TYPE_MODE (rtype)))
9902 tree tree01, tree11;
9903 enum tree_code code01, code11;
9905 tree01 = TREE_OPERAND (arg0, 1);
9906 tree11 = TREE_OPERAND (arg1, 1);
9907 STRIP_NOPS (tree01);
9908 STRIP_NOPS (tree11);
9909 code01 = TREE_CODE (tree01);
9910 code11 = TREE_CODE (tree11);
9911 if (code01 == INTEGER_CST
9912 && code11 == INTEGER_CST
9913 && TREE_INT_CST_HIGH (tree01) == 0
9914 && TREE_INT_CST_HIGH (tree11) == 0
9915 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
9916 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
9917 return build2 (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
9918 code0 == LSHIFT_EXPR ? tree01 : tree11);
9919 else if (code11 == MINUS_EXPR)
9921 tree tree110, tree111;
9922 tree110 = TREE_OPERAND (tree11, 0);
9923 tree111 = TREE_OPERAND (tree11, 1);
9924 STRIP_NOPS (tree110);
9925 STRIP_NOPS (tree111);
9926 if (TREE_CODE (tree110) == INTEGER_CST
9927 && 0 == compare_tree_int (tree110,
9929 (TREE_TYPE (TREE_OPERAND
9931 && operand_equal_p (tree01, tree111, 0))
9932 return build2 ((code0 == LSHIFT_EXPR
9935 type, TREE_OPERAND (arg0, 0), tree01);
9937 else if (code01 == MINUS_EXPR)
9939 tree tree010, tree011;
9940 tree010 = TREE_OPERAND (tree01, 0);
9941 tree011 = TREE_OPERAND (tree01, 1);
9942 STRIP_NOPS (tree010);
9943 STRIP_NOPS (tree011);
9944 if (TREE_CODE (tree010) == INTEGER_CST
9945 && 0 == compare_tree_int (tree010,
9947 (TREE_TYPE (TREE_OPERAND
9949 && operand_equal_p (tree11, tree011, 0))
9950 return build2 ((code0 != LSHIFT_EXPR
9953 type, TREE_OPERAND (arg0, 0), tree11);
9959 /* In most languages, can't associate operations on floats through
9960 parentheses. Rather than remember where the parentheses were, we
9961 don't associate floats at all, unless the user has specified
9963 And, we need to make sure type is not saturating. */
9965 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
9966 && !TYPE_SATURATING (type))
9968 tree var0, con0, lit0, minus_lit0;
9969 tree var1, con1, lit1, minus_lit1;
9972 /* Split both trees into variables, constants, and literals. Then
9973 associate each group together, the constants with literals,
9974 then the result with variables. This increases the chances of
9975 literals being recombined later and of generating relocatable
9976 expressions for the sum of a constant and literal. */
9977 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
9978 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
9979 code == MINUS_EXPR);
9981 /* With undefined overflow we can only associate constants
9982 with one variable. */
9983 if ((POINTER_TYPE_P (type)
9984 || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
9990 if (TREE_CODE (tmp0) == NEGATE_EXPR)
9991 tmp0 = TREE_OPERAND (tmp0, 0);
9992 if (TREE_CODE (tmp1) == NEGATE_EXPR)
9993 tmp1 = TREE_OPERAND (tmp1, 0);
9994 /* The only case we can still associate with two variables
9995 is if they are the same, modulo negation. */
9996 if (!operand_equal_p (tmp0, tmp1, 0))
10000 /* Only do something if we found more than two objects. Otherwise,
10001 nothing has changed and we risk infinite recursion. */
10003 && (2 < ((var0 != 0) + (var1 != 0)
10004 + (con0 != 0) + (con1 != 0)
10005 + (lit0 != 0) + (lit1 != 0)
10006 + (minus_lit0 != 0) + (minus_lit1 != 0))))
10008 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
10009 if (code == MINUS_EXPR)
10012 var0 = associate_trees (var0, var1, code, type);
10013 con0 = associate_trees (con0, con1, code, type);
10014 lit0 = associate_trees (lit0, lit1, code, type);
10015 minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type);
10017 /* Preserve the MINUS_EXPR if the negative part of the literal is
10018 greater than the positive part. Otherwise, the multiplicative
10019 folding code (i.e extract_muldiv) may be fooled in case
10020 unsigned constants are subtracted, like in the following
10021 example: ((X*2 + 4) - 8U)/2. */
10022 if (minus_lit0 && lit0)
10024 if (TREE_CODE (lit0) == INTEGER_CST
10025 && TREE_CODE (minus_lit0) == INTEGER_CST
10026 && tree_int_cst_lt (lit0, minus_lit0))
10028 minus_lit0 = associate_trees (minus_lit0, lit0,
10034 lit0 = associate_trees (lit0, minus_lit0,
10042 return fold_convert (type,
10043 associate_trees (var0, minus_lit0,
10044 MINUS_EXPR, type));
10047 con0 = associate_trees (con0, minus_lit0,
10049 return fold_convert (type,
10050 associate_trees (var0, con0,
10055 con0 = associate_trees (con0, lit0, code, type);
10056 return fold_convert (type, associate_trees (var0, con0,
10064 /* Pointer simplifications for subtraction, simple reassociations. */
10065 if (POINTER_TYPE_P (TREE_TYPE (arg1)) && POINTER_TYPE_P (TREE_TYPE (arg0)))
10067 /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
10068 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
10069 && TREE_CODE (arg1) == POINTER_PLUS_EXPR)
10071 tree arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));
10072 tree arg01 = fold_convert (type, TREE_OPERAND (arg0, 1));
10073 tree arg10 = fold_convert (type, TREE_OPERAND (arg1, 0));
10074 tree arg11 = fold_convert (type, TREE_OPERAND (arg1, 1));
10075 return fold_build2 (PLUS_EXPR, type,
10076 fold_build2 (MINUS_EXPR, type, arg00, arg10),
10077 fold_build2 (MINUS_EXPR, type, arg01, arg11));
10079 /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1 simplifies. */
10080 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10082 tree arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));
10083 tree arg01 = fold_convert (type, TREE_OPERAND (arg0, 1));
10084 tree tmp = fold_binary (MINUS_EXPR, type, arg00, fold_convert (type, arg1));
10086 return fold_build2 (PLUS_EXPR, type, tmp, arg01);
10089 /* A - (-B) -> A + B */
10090 if (TREE_CODE (arg1) == NEGATE_EXPR)
10091 return fold_build2 (PLUS_EXPR, type, op0,
10092 fold_convert (type, TREE_OPERAND (arg1, 0)));
10093 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
10094 if (TREE_CODE (arg0) == NEGATE_EXPR
10095 && (FLOAT_TYPE_P (type)
10096 || INTEGRAL_TYPE_P (type))
10097 && negate_expr_p (arg1)
10098 && reorder_operands_p (arg0, arg1))
10099 return fold_build2 (MINUS_EXPR, type,
10100 fold_convert (type, negate_expr (arg1)),
10101 fold_convert (type, TREE_OPERAND (arg0, 0)));
10102 /* Convert -A - 1 to ~A. */
10103 if (INTEGRAL_TYPE_P (type)
10104 && TREE_CODE (arg0) == NEGATE_EXPR
10105 && integer_onep (arg1)
10106 && !TYPE_OVERFLOW_TRAPS (type))
10107 return fold_build1 (BIT_NOT_EXPR, type,
10108 fold_convert (type, TREE_OPERAND (arg0, 0)));
10110 /* Convert -1 - A to ~A. */
10111 if (INTEGRAL_TYPE_P (type)
10112 && integer_all_onesp (arg0))
10113 return fold_build1 (BIT_NOT_EXPR, type, op1);
10116 /* X - (X / CST) * CST is X % CST. */
10117 if (INTEGRAL_TYPE_P (type)
10118 && TREE_CODE (arg1) == MULT_EXPR
10119 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10120 && operand_equal_p (arg0,
10121 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0)
10122 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg1, 0), 1),
10123 TREE_OPERAND (arg1, 1), 0))
10124 return fold_convert (type,
10125 fold_build2 (TRUNC_MOD_EXPR, TREE_TYPE (arg0),
10126 arg0, TREE_OPERAND (arg1, 1)));
10128 if (! FLOAT_TYPE_P (type))
10130 if (integer_zerop (arg0))
10131 return negate_expr (fold_convert (type, arg1));
10132 if (integer_zerop (arg1))
10133 return non_lvalue (fold_convert (type, arg0));
10135 /* Fold A - (A & B) into ~B & A. */
10136 if (!TREE_SIDE_EFFECTS (arg0)
10137 && TREE_CODE (arg1) == BIT_AND_EXPR)
10139 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
10141 tree arg10 = fold_convert (type, TREE_OPERAND (arg1, 0));
10142 return fold_build2 (BIT_AND_EXPR, type,
10143 fold_build1 (BIT_NOT_EXPR, type, arg10),
10144 fold_convert (type, arg0));
10146 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10148 tree arg11 = fold_convert (type, TREE_OPERAND (arg1, 1));
10149 return fold_build2 (BIT_AND_EXPR, type,
10150 fold_build1 (BIT_NOT_EXPR, type, arg11),
10151 fold_convert (type, arg0));
10155 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
10156 any power of 2 minus 1. */
10157 if (TREE_CODE (arg0) == BIT_AND_EXPR
10158 && TREE_CODE (arg1) == BIT_AND_EXPR
10159 && operand_equal_p (TREE_OPERAND (arg0, 0),
10160 TREE_OPERAND (arg1, 0), 0))
10162 tree mask0 = TREE_OPERAND (arg0, 1);
10163 tree mask1 = TREE_OPERAND (arg1, 1);
10164 tree tem = fold_build1 (BIT_NOT_EXPR, type, mask0);
10166 if (operand_equal_p (tem, mask1, 0))
10168 tem = fold_build2 (BIT_XOR_EXPR, type,
10169 TREE_OPERAND (arg0, 0), mask1);
10170 return fold_build2 (MINUS_EXPR, type, tem, mask1);
10175 /* See if ARG1 is zero and X - ARG1 reduces to X. */
10176 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
10177 return non_lvalue (fold_convert (type, arg0));
10179 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
10180 ARG0 is zero and X + ARG0 reduces to X, since that would mean
10181 (-ARG1 + ARG0) reduces to -ARG1. */
10182 else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10183 return negate_expr (fold_convert (type, arg1));
10185 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
10186 __complex__ ( x, -y ). This is not the same for SNaNs or if
10187 signed zeros are involved. */
10188 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10189 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10190 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10192 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10193 tree arg0r = fold_unary (REALPART_EXPR, rtype, arg0);
10194 tree arg0i = fold_unary (IMAGPART_EXPR, rtype, arg0);
10195 bool arg0rz = false, arg0iz = false;
10196 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10197 || (arg0i && (arg0iz = real_zerop (arg0i))))
10199 tree arg1r = fold_unary (REALPART_EXPR, rtype, arg1);
10200 tree arg1i = fold_unary (IMAGPART_EXPR, rtype, arg1);
10201 if (arg0rz && arg1i && real_zerop (arg1i))
10203 tree rp = fold_build1 (NEGATE_EXPR, rtype,
10205 : build1 (REALPART_EXPR, rtype, arg1));
10206 tree ip = arg0i ? arg0i
10207 : build1 (IMAGPART_EXPR, rtype, arg0);
10208 return fold_build2 (COMPLEX_EXPR, type, rp, ip);
10210 else if (arg0iz && arg1r && real_zerop (arg1r))
10212 tree rp = arg0r ? arg0r
10213 : build1 (REALPART_EXPR, rtype, arg0);
10214 tree ip = fold_build1 (NEGATE_EXPR, rtype,
10216 : build1 (IMAGPART_EXPR, rtype, arg1));
10217 return fold_build2 (COMPLEX_EXPR, type, rp, ip);
10222 /* Fold &x - &x. This can happen from &x.foo - &x.
10223 This is unsafe for certain floats even in non-IEEE formats.
10224 In IEEE, it is unsafe because it does wrong for NaNs.
10225 Also note that operand_equal_p is always false if an operand
10228 if ((!FLOAT_TYPE_P (type) || !HONOR_NANS (TYPE_MODE (type)))
10229 && operand_equal_p (arg0, arg1, 0))
10230 return fold_convert (type, integer_zero_node);
10232 /* A - B -> A + (-B) if B is easily negatable. */
10233 if (negate_expr_p (arg1)
10234 && ((FLOAT_TYPE_P (type)
10235 /* Avoid this transformation if B is a positive REAL_CST. */
10236 && (TREE_CODE (arg1) != REAL_CST
10237 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
10238 || INTEGRAL_TYPE_P (type)))
10239 return fold_build2 (PLUS_EXPR, type,
10240 fold_convert (type, arg0),
10241 fold_convert (type, negate_expr (arg1)));
10243 /* Try folding difference of addresses. */
10245 HOST_WIDE_INT diff;
10247 if ((TREE_CODE (arg0) == ADDR_EXPR
10248 || TREE_CODE (arg1) == ADDR_EXPR)
10249 && ptr_difference_const (arg0, arg1, &diff))
10250 return build_int_cst_type (type, diff);
10253 /* Fold &a[i] - &a[j] to i-j. */
10254 if (TREE_CODE (arg0) == ADDR_EXPR
10255 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
10256 && TREE_CODE (arg1) == ADDR_EXPR
10257 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
10259 tree aref0 = TREE_OPERAND (arg0, 0);
10260 tree aref1 = TREE_OPERAND (arg1, 0);
10261 if (operand_equal_p (TREE_OPERAND (aref0, 0),
10262 TREE_OPERAND (aref1, 0), 0))
10264 tree op0 = fold_convert (type, TREE_OPERAND (aref0, 1));
10265 tree op1 = fold_convert (type, TREE_OPERAND (aref1, 1));
10266 tree esz = array_ref_element_size (aref0);
10267 tree diff = build2 (MINUS_EXPR, type, op0, op1);
10268 return fold_build2 (MULT_EXPR, type, diff,
10269 fold_convert (type, esz));
10274 if (flag_unsafe_math_optimizations
10275 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10276 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10277 && (tem = distribute_real_division (code, type, arg0, arg1)))
10280 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the
10281 same or one. Make sure type is not saturating.
10282 fold_plusminus_mult_expr will re-associate. */
10283 if ((TREE_CODE (arg0) == MULT_EXPR
10284 || TREE_CODE (arg1) == MULT_EXPR)
10285 && !TYPE_SATURATING (type)
10286 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10288 tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1);
10296 /* (-A) * (-B) -> A * B */
10297 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10298 return fold_build2 (MULT_EXPR, type,
10299 fold_convert (type, TREE_OPERAND (arg0, 0)),
10300 fold_convert (type, negate_expr (arg1)));
10301 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10302 return fold_build2 (MULT_EXPR, type,
10303 fold_convert (type, negate_expr (arg0)),
10304 fold_convert (type, TREE_OPERAND (arg1, 0)));
10306 if (! FLOAT_TYPE_P (type))
10308 if (integer_zerop (arg1))
10309 return omit_one_operand (type, arg1, arg0);
10310 if (integer_onep (arg1))
10311 return non_lvalue (fold_convert (type, arg0));
10312 /* Transform x * -1 into -x. Make sure to do the negation
10313 on the original operand with conversions not stripped
10314 because we can only strip non-sign-changing conversions. */
10315 if (integer_all_onesp (arg1))
10316 return fold_convert (type, negate_expr (op0));
10317 /* Transform x * -C into -x * C if x is easily negatable. */
10318 if (TREE_CODE (arg1) == INTEGER_CST
10319 && tree_int_cst_sgn (arg1) == -1
10320 && negate_expr_p (arg0)
10321 && (tem = negate_expr (arg1)) != arg1
10322 && !TREE_OVERFLOW (tem))
10323 return fold_build2 (MULT_EXPR, type,
10324 fold_convert (type, negate_expr (arg0)), tem);
10326 /* (a * (1 << b)) is (a << b) */
10327 if (TREE_CODE (arg1) == LSHIFT_EXPR
10328 && integer_onep (TREE_OPERAND (arg1, 0)))
10329 return fold_build2 (LSHIFT_EXPR, type, op0,
10330 TREE_OPERAND (arg1, 1));
10331 if (TREE_CODE (arg0) == LSHIFT_EXPR
10332 && integer_onep (TREE_OPERAND (arg0, 0)))
10333 return fold_build2 (LSHIFT_EXPR, type, op1,
10334 TREE_OPERAND (arg0, 1));
10336 strict_overflow_p = false;
10337 if (TREE_CODE (arg1) == INTEGER_CST
10338 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10339 &strict_overflow_p)))
10341 if (strict_overflow_p)
10342 fold_overflow_warning (("assuming signed overflow does not "
10343 "occur when simplifying "
10345 WARN_STRICT_OVERFLOW_MISC);
10346 return fold_convert (type, tem);
10349 /* Optimize z * conj(z) for integer complex numbers. */
10350 if (TREE_CODE (arg0) == CONJ_EXPR
10351 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10352 return fold_mult_zconjz (type, arg1);
10353 if (TREE_CODE (arg1) == CONJ_EXPR
10354 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10355 return fold_mult_zconjz (type, arg0);
10359 /* Maybe fold x * 0 to 0. The expressions aren't the same
10360 when x is NaN, since x * 0 is also NaN. Nor are they the
10361 same in modes with signed zeros, since multiplying a
10362 negative value by 0 gives -0, not +0. */
10363 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10364 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10365 && real_zerop (arg1))
10366 return omit_one_operand (type, arg1, arg0);
10367 /* In IEEE floating point, x*1 is not equivalent to x for snans. */
10368 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10369 && real_onep (arg1))
10370 return non_lvalue (fold_convert (type, arg0));
10372 /* Transform x * -1.0 into -x. */
10373 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10374 && real_minus_onep (arg1))
10375 return fold_convert (type, negate_expr (arg0));
10377 /* Convert (C1/X)*C2 into (C1*C2)/X. This transformation may change
10378 the result for floating point types due to rounding so it is applied
10379 only if -fassociative-math was specify. */
10380 if (flag_associative_math
10381 && TREE_CODE (arg0) == RDIV_EXPR
10382 && TREE_CODE (arg1) == REAL_CST
10383 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
10385 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
10388 return fold_build2 (RDIV_EXPR, type, tem,
10389 TREE_OPERAND (arg0, 1));
10392 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
10393 if (operand_equal_p (arg0, arg1, 0))
10395 tree tem = fold_strip_sign_ops (arg0);
10396 if (tem != NULL_TREE)
10398 tem = fold_convert (type, tem);
10399 return fold_build2 (MULT_EXPR, type, tem, tem);
10403 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
10404 This is not the same for NaNs or if signed zeros are
10406 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10407 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10408 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
10409 && TREE_CODE (arg1) == COMPLEX_CST
10410 && real_zerop (TREE_REALPART (arg1)))
10412 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10413 if (real_onep (TREE_IMAGPART (arg1)))
10414 return fold_build2 (COMPLEX_EXPR, type,
10415 negate_expr (fold_build1 (IMAGPART_EXPR,
10417 fold_build1 (REALPART_EXPR, rtype, arg0));
10418 else if (real_minus_onep (TREE_IMAGPART (arg1)))
10419 return fold_build2 (COMPLEX_EXPR, type,
10420 fold_build1 (IMAGPART_EXPR, rtype, arg0),
10421 negate_expr (fold_build1 (REALPART_EXPR,
10425 /* Optimize z * conj(z) for floating point complex numbers.
10426 Guarded by flag_unsafe_math_optimizations as non-finite
10427 imaginary components don't produce scalar results. */
10428 if (flag_unsafe_math_optimizations
10429 && TREE_CODE (arg0) == CONJ_EXPR
10430 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10431 return fold_mult_zconjz (type, arg1);
10432 if (flag_unsafe_math_optimizations
10433 && TREE_CODE (arg1) == CONJ_EXPR
10434 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10435 return fold_mult_zconjz (type, arg0);
10437 if (flag_unsafe_math_optimizations)
10439 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
10440 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
10442 /* Optimizations of root(...)*root(...). */
10443 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
10446 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10447 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10449 /* Optimize sqrt(x)*sqrt(x) as x. */
10450 if (BUILTIN_SQRT_P (fcode0)
10451 && operand_equal_p (arg00, arg10, 0)
10452 && ! HONOR_SNANS (TYPE_MODE (type)))
10455 /* Optimize root(x)*root(y) as root(x*y). */
10456 rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10457 arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
10458 return build_call_expr (rootfn, 1, arg);
10461 /* Optimize expN(x)*expN(y) as expN(x+y). */
10462 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
10464 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10465 tree arg = fold_build2 (PLUS_EXPR, type,
10466 CALL_EXPR_ARG (arg0, 0),
10467 CALL_EXPR_ARG (arg1, 0));
10468 return build_call_expr (expfn, 1, arg);
10471 /* Optimizations of pow(...)*pow(...). */
10472 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
10473 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
10474 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
10476 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10477 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10478 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10479 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10481 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
10482 if (operand_equal_p (arg01, arg11, 0))
10484 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10485 tree arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
10486 return build_call_expr (powfn, 2, arg, arg01);
10489 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
10490 if (operand_equal_p (arg00, arg10, 0))
10492 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10493 tree arg = fold_build2 (PLUS_EXPR, type, arg01, arg11);
10494 return build_call_expr (powfn, 2, arg00, arg);
10498 /* Optimize tan(x)*cos(x) as sin(x). */
10499 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
10500 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
10501 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
10502 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
10503 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
10504 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
10505 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
10506 CALL_EXPR_ARG (arg1, 0), 0))
10508 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
10510 if (sinfn != NULL_TREE)
10511 return build_call_expr (sinfn, 1, CALL_EXPR_ARG (arg0, 0));
10514 /* Optimize x*pow(x,c) as pow(x,c+1). */
10515 if (fcode1 == BUILT_IN_POW
10516 || fcode1 == BUILT_IN_POWF
10517 || fcode1 == BUILT_IN_POWL)
10519 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10520 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10521 if (TREE_CODE (arg11) == REAL_CST
10522 && !TREE_OVERFLOW (arg11)
10523 && operand_equal_p (arg0, arg10, 0))
10525 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
10529 c = TREE_REAL_CST (arg11);
10530 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
10531 arg = build_real (type, c);
10532 return build_call_expr (powfn, 2, arg0, arg);
10536 /* Optimize pow(x,c)*x as pow(x,c+1). */
10537 if (fcode0 == BUILT_IN_POW
10538 || fcode0 == BUILT_IN_POWF
10539 || fcode0 == BUILT_IN_POWL)
10541 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10542 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10543 if (TREE_CODE (arg01) == REAL_CST
10544 && !TREE_OVERFLOW (arg01)
10545 && operand_equal_p (arg1, arg00, 0))
10547 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10551 c = TREE_REAL_CST (arg01);
10552 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
10553 arg = build_real (type, c);
10554 return build_call_expr (powfn, 2, arg1, arg);
10558 /* Optimize x*x as pow(x,2.0), which is expanded as x*x. */
10559 if (! optimize_size
10560 && operand_equal_p (arg0, arg1, 0))
10562 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
10566 tree arg = build_real (type, dconst2);
10567 return build_call_expr (powfn, 2, arg0, arg);
10576 if (integer_all_onesp (arg1))
10577 return omit_one_operand (type, arg1, arg0);
10578 if (integer_zerop (arg1))
10579 return non_lvalue (fold_convert (type, arg0));
10580 if (operand_equal_p (arg0, arg1, 0))
10581 return non_lvalue (fold_convert (type, arg0));
10583 /* ~X | X is -1. */
10584 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10585 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10587 t1 = fold_convert (type, integer_zero_node);
10588 t1 = fold_unary (BIT_NOT_EXPR, type, t1);
10589 return omit_one_operand (type, t1, arg1);
10592 /* X | ~X is -1. */
10593 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10594 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10596 t1 = fold_convert (type, integer_zero_node);
10597 t1 = fold_unary (BIT_NOT_EXPR, type, t1);
10598 return omit_one_operand (type, t1, arg0);
10601 /* Canonicalize (X & C1) | C2. */
10602 if (TREE_CODE (arg0) == BIT_AND_EXPR
10603 && TREE_CODE (arg1) == INTEGER_CST
10604 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10606 unsigned HOST_WIDE_INT hi1, lo1, hi2, lo2, hi3, lo3, mlo, mhi;
10607 int width = TYPE_PRECISION (type), w;
10608 hi1 = TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1));
10609 lo1 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
10610 hi2 = TREE_INT_CST_HIGH (arg1);
10611 lo2 = TREE_INT_CST_LOW (arg1);
10613 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
10614 if ((hi1 & hi2) == hi1 && (lo1 & lo2) == lo1)
10615 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
10617 if (width > HOST_BITS_PER_WIDE_INT)
10619 mhi = (unsigned HOST_WIDE_INT) -1
10620 >> (2 * HOST_BITS_PER_WIDE_INT - width);
10626 mlo = (unsigned HOST_WIDE_INT) -1
10627 >> (HOST_BITS_PER_WIDE_INT - width);
10630 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
10631 if ((~(hi1 | hi2) & mhi) == 0 && (~(lo1 | lo2) & mlo) == 0)
10632 return fold_build2 (BIT_IOR_EXPR, type,
10633 TREE_OPERAND (arg0, 0), arg1);
10635 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
10636 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
10637 mode which allows further optimizations. */
10644 for (w = BITS_PER_UNIT;
10645 w <= width && w <= HOST_BITS_PER_WIDE_INT;
10648 unsigned HOST_WIDE_INT mask
10649 = (unsigned HOST_WIDE_INT) -1 >> (HOST_BITS_PER_WIDE_INT - w);
10650 if (((lo1 | lo2) & mask) == mask
10651 && (lo1 & ~mask) == 0 && hi1 == 0)
10658 if (hi3 != hi1 || lo3 != lo1)
10659 return fold_build2 (BIT_IOR_EXPR, type,
10660 fold_build2 (BIT_AND_EXPR, type,
10661 TREE_OPERAND (arg0, 0),
10662 build_int_cst_wide (type,
10667 /* (X & Y) | Y is (X, Y). */
10668 if (TREE_CODE (arg0) == BIT_AND_EXPR
10669 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10670 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
10671 /* (X & Y) | X is (Y, X). */
10672 if (TREE_CODE (arg0) == BIT_AND_EXPR
10673 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10674 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10675 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 1));
10676 /* X | (X & Y) is (Y, X). */
10677 if (TREE_CODE (arg1) == BIT_AND_EXPR
10678 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
10679 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
10680 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 1));
10681 /* X | (Y & X) is (Y, X). */
10682 if (TREE_CODE (arg1) == BIT_AND_EXPR
10683 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10684 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10685 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 0));
10687 t1 = distribute_bit_expr (code, type, arg0, arg1);
10688 if (t1 != NULL_TREE)
10691 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
10693 This results in more efficient code for machines without a NAND
10694 instruction. Combine will canonicalize to the first form
10695 which will allow use of NAND instructions provided by the
10696 backend if they exist. */
10697 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10698 && TREE_CODE (arg1) == BIT_NOT_EXPR)
10700 return fold_build1 (BIT_NOT_EXPR, type,
10701 build2 (BIT_AND_EXPR, type,
10702 TREE_OPERAND (arg0, 0),
10703 TREE_OPERAND (arg1, 0)));
10706 /* See if this can be simplified into a rotate first. If that
10707 is unsuccessful continue in the association code. */
10711 if (integer_zerop (arg1))
10712 return non_lvalue (fold_convert (type, arg0));
10713 if (integer_all_onesp (arg1))
10714 return fold_build1 (BIT_NOT_EXPR, type, op0);
10715 if (operand_equal_p (arg0, arg1, 0))
10716 return omit_one_operand (type, integer_zero_node, arg0);
10718 /* ~X ^ X is -1. */
10719 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10720 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10722 t1 = fold_convert (type, integer_zero_node);
10723 t1 = fold_unary (BIT_NOT_EXPR, type, t1);
10724 return omit_one_operand (type, t1, arg1);
10727 /* X ^ ~X is -1. */
10728 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10729 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10731 t1 = fold_convert (type, integer_zero_node);
10732 t1 = fold_unary (BIT_NOT_EXPR, type, t1);
10733 return omit_one_operand (type, t1, arg0);
10736 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
10737 with a constant, and the two constants have no bits in common,
10738 we should treat this as a BIT_IOR_EXPR since this may produce more
10739 simplifications. */
10740 if (TREE_CODE (arg0) == BIT_AND_EXPR
10741 && TREE_CODE (arg1) == BIT_AND_EXPR
10742 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10743 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
10744 && integer_zerop (const_binop (BIT_AND_EXPR,
10745 TREE_OPERAND (arg0, 1),
10746 TREE_OPERAND (arg1, 1), 0)))
10748 code = BIT_IOR_EXPR;
10752 /* (X | Y) ^ X -> Y & ~ X*/
10753 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10754 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10756 tree t2 = TREE_OPERAND (arg0, 1);
10757 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
10759 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
10760 fold_convert (type, t1));
10764 /* (Y | X) ^ X -> Y & ~ X*/
10765 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10766 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10768 tree t2 = TREE_OPERAND (arg0, 0);
10769 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
10771 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
10772 fold_convert (type, t1));
10776 /* X ^ (X | Y) -> Y & ~ X*/
10777 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10778 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
10780 tree t2 = TREE_OPERAND (arg1, 1);
10781 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
10783 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
10784 fold_convert (type, t1));
10788 /* X ^ (Y | X) -> Y & ~ X*/
10789 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10790 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
10792 tree t2 = TREE_OPERAND (arg1, 0);
10793 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
10795 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
10796 fold_convert (type, t1));
10800 /* Convert ~X ^ ~Y to X ^ Y. */
10801 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10802 && TREE_CODE (arg1) == BIT_NOT_EXPR)
10803 return fold_build2 (code, type,
10804 fold_convert (type, TREE_OPERAND (arg0, 0)),
10805 fold_convert (type, TREE_OPERAND (arg1, 0)));
10807 /* Convert ~X ^ C to X ^ ~C. */
10808 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10809 && TREE_CODE (arg1) == INTEGER_CST)
10810 return fold_build2 (code, type,
10811 fold_convert (type, TREE_OPERAND (arg0, 0)),
10812 fold_build1 (BIT_NOT_EXPR, type, arg1));
10814 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
10815 if (TREE_CODE (arg0) == BIT_AND_EXPR
10816 && integer_onep (TREE_OPERAND (arg0, 1))
10817 && integer_onep (arg1))
10818 return fold_build2 (EQ_EXPR, type, arg0,
10819 build_int_cst (TREE_TYPE (arg0), 0));
10821 /* Fold (X & Y) ^ Y as ~X & Y. */
10822 if (TREE_CODE (arg0) == BIT_AND_EXPR
10823 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10825 tem = fold_convert (type, TREE_OPERAND (arg0, 0));
10826 return fold_build2 (BIT_AND_EXPR, type,
10827 fold_build1 (BIT_NOT_EXPR, type, tem),
10828 fold_convert (type, arg1));
10830 /* Fold (X & Y) ^ X as ~Y & X. */
10831 if (TREE_CODE (arg0) == BIT_AND_EXPR
10832 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10833 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10835 tem = fold_convert (type, TREE_OPERAND (arg0, 1));
10836 return fold_build2 (BIT_AND_EXPR, type,
10837 fold_build1 (BIT_NOT_EXPR, type, tem),
10838 fold_convert (type, arg1));
10840 /* Fold X ^ (X & Y) as X & ~Y. */
10841 if (TREE_CODE (arg1) == BIT_AND_EXPR
10842 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10844 tem = fold_convert (type, TREE_OPERAND (arg1, 1));
10845 return fold_build2 (BIT_AND_EXPR, type,
10846 fold_convert (type, arg0),
10847 fold_build1 (BIT_NOT_EXPR, type, tem));
10849 /* Fold X ^ (Y & X) as ~Y & X. */
10850 if (TREE_CODE (arg1) == BIT_AND_EXPR
10851 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10852 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10854 tem = fold_convert (type, TREE_OPERAND (arg1, 0));
10855 return fold_build2 (BIT_AND_EXPR, type,
10856 fold_build1 (BIT_NOT_EXPR, type, tem),
10857 fold_convert (type, arg0));
10860 /* See if this can be simplified into a rotate first. If that
10861 is unsuccessful continue in the association code. */
10865 if (integer_all_onesp (arg1))
10866 return non_lvalue (fold_convert (type, arg0));
10867 if (integer_zerop (arg1))
10868 return omit_one_operand (type, arg1, arg0);
10869 if (operand_equal_p (arg0, arg1, 0))
10870 return non_lvalue (fold_convert (type, arg0));
10872 /* ~X & X is always zero. */
10873 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10874 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10875 return omit_one_operand (type, integer_zero_node, arg1);
10877 /* X & ~X is always zero. */
10878 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10879 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10880 return omit_one_operand (type, integer_zero_node, arg0);
10882 /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2). */
10883 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10884 && TREE_CODE (arg1) == INTEGER_CST
10885 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10887 tree tmp1 = fold_convert (TREE_TYPE (arg0), arg1);
10888 tree tmp2 = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
10889 TREE_OPERAND (arg0, 0), tmp1);
10890 tree tmp3 = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
10891 TREE_OPERAND (arg0, 1), tmp1);
10892 return fold_convert (type,
10893 fold_build2 (BIT_IOR_EXPR, TREE_TYPE (arg0),
10897 /* (X | Y) & Y is (X, Y). */
10898 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10899 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10900 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
10901 /* (X | Y) & X is (Y, X). */
10902 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10903 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10904 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10905 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 1));
10906 /* X & (X | Y) is (Y, X). */
10907 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10908 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
10909 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
10910 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 1));
10911 /* X & (Y | X) is (Y, X). */
10912 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10913 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10914 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10915 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 0));
10917 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
10918 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10919 && integer_onep (TREE_OPERAND (arg0, 1))
10920 && integer_onep (arg1))
10922 tem = TREE_OPERAND (arg0, 0);
10923 return fold_build2 (EQ_EXPR, type,
10924 fold_build2 (BIT_AND_EXPR, TREE_TYPE (tem), tem,
10925 build_int_cst (TREE_TYPE (tem), 1)),
10926 build_int_cst (TREE_TYPE (tem), 0));
10928 /* Fold ~X & 1 as (X & 1) == 0. */
10929 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10930 && integer_onep (arg1))
10932 tem = TREE_OPERAND (arg0, 0);
10933 return fold_build2 (EQ_EXPR, type,
10934 fold_build2 (BIT_AND_EXPR, TREE_TYPE (tem), tem,
10935 build_int_cst (TREE_TYPE (tem), 1)),
10936 build_int_cst (TREE_TYPE (tem), 0));
10939 /* Fold (X ^ Y) & Y as ~X & Y. */
10940 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10941 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10943 tem = fold_convert (type, TREE_OPERAND (arg0, 0));
10944 return fold_build2 (BIT_AND_EXPR, type,
10945 fold_build1 (BIT_NOT_EXPR, type, tem),
10946 fold_convert (type, arg1));
10948 /* Fold (X ^ Y) & X as ~Y & X. */
10949 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10950 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10951 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10953 tem = fold_convert (type, TREE_OPERAND (arg0, 1));
10954 return fold_build2 (BIT_AND_EXPR, type,
10955 fold_build1 (BIT_NOT_EXPR, type, tem),
10956 fold_convert (type, arg1));
10958 /* Fold X & (X ^ Y) as X & ~Y. */
10959 if (TREE_CODE (arg1) == BIT_XOR_EXPR
10960 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10962 tem = fold_convert (type, TREE_OPERAND (arg1, 1));
10963 return fold_build2 (BIT_AND_EXPR, type,
10964 fold_convert (type, arg0),
10965 fold_build1 (BIT_NOT_EXPR, type, tem));
10967 /* Fold X & (Y ^ X) as ~Y & X. */
10968 if (TREE_CODE (arg1) == BIT_XOR_EXPR
10969 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10970 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10972 tem = fold_convert (type, TREE_OPERAND (arg1, 0));
10973 return fold_build2 (BIT_AND_EXPR, type,
10974 fold_build1 (BIT_NOT_EXPR, type, tem),
10975 fold_convert (type, arg0));
10978 t1 = distribute_bit_expr (code, type, arg0, arg1);
10979 if (t1 != NULL_TREE)
10981 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
10982 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
10983 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
10986 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
10988 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
10989 && (~TREE_INT_CST_LOW (arg1)
10990 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
10991 return fold_convert (type, TREE_OPERAND (arg0, 0));
10994 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
10996 This results in more efficient code for machines without a NOR
10997 instruction. Combine will canonicalize to the first form
10998 which will allow use of NOR instructions provided by the
10999 backend if they exist. */
11000 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11001 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11003 return fold_build1 (BIT_NOT_EXPR, type,
11004 build2 (BIT_IOR_EXPR, type,
11005 fold_convert (type,
11006 TREE_OPERAND (arg0, 0)),
11007 fold_convert (type,
11008 TREE_OPERAND (arg1, 0))));
11011 /* If arg0 is derived from the address of an object or function, we may
11012 be able to fold this expression using the object or function's
11014 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && host_integerp (arg1, 1))
11016 unsigned HOST_WIDE_INT modulus, residue;
11017 unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (arg1);
11019 modulus = get_pointer_modulus_and_residue (arg0, &residue);
11021 /* This works because modulus is a power of 2. If this weren't the
11022 case, we'd have to replace it by its greatest power-of-2
11023 divisor: modulus & -modulus. */
11025 return build_int_cst (type, residue & low);
11028 /* Fold (X << C1) & C2 into (X << C1) & (C2 | ((1 << C1) - 1))
11029 (X >> C1) & C2 into (X >> C1) & (C2 | ~((type) -1 >> C1))
11030 if the new mask might be further optimized. */
11031 if ((TREE_CODE (arg0) == LSHIFT_EXPR
11032 || TREE_CODE (arg0) == RSHIFT_EXPR)
11033 && host_integerp (TREE_OPERAND (arg0, 1), 1)
11034 && host_integerp (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)))
11035 && tree_low_cst (TREE_OPERAND (arg0, 1), 1)
11036 < TYPE_PRECISION (TREE_TYPE (arg0))
11037 && TYPE_PRECISION (TREE_TYPE (arg0)) <= HOST_BITS_PER_WIDE_INT
11038 && tree_low_cst (TREE_OPERAND (arg0, 1), 1) > 0)
11040 unsigned int shiftc = tree_low_cst (TREE_OPERAND (arg0, 1), 1);
11041 unsigned HOST_WIDE_INT mask
11042 = tree_low_cst (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)));
11043 unsigned HOST_WIDE_INT newmask, zerobits = 0;
11044 tree shift_type = TREE_TYPE (arg0);
11046 if (TREE_CODE (arg0) == LSHIFT_EXPR)
11047 zerobits = ((((unsigned HOST_WIDE_INT) 1) << shiftc) - 1);
11048 else if (TREE_CODE (arg0) == RSHIFT_EXPR
11049 && TYPE_PRECISION (TREE_TYPE (arg0))
11050 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg0))))
11052 unsigned int prec = TYPE_PRECISION (TREE_TYPE (arg0));
11053 tree arg00 = TREE_OPERAND (arg0, 0);
11054 /* See if more bits can be proven as zero because of
11056 if (TREE_CODE (arg00) == NOP_EXPR
11057 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg00, 0))))
11059 tree inner_type = TREE_TYPE (TREE_OPERAND (arg00, 0));
11060 if (TYPE_PRECISION (inner_type)
11061 == GET_MODE_BITSIZE (TYPE_MODE (inner_type))
11062 && TYPE_PRECISION (inner_type) < prec)
11064 prec = TYPE_PRECISION (inner_type);
11065 /* See if we can shorten the right shift. */
11067 shift_type = inner_type;
11070 zerobits = ~(unsigned HOST_WIDE_INT) 0;
11071 zerobits >>= HOST_BITS_PER_WIDE_INT - shiftc;
11072 zerobits <<= prec - shiftc;
11073 /* For arithmetic shift if sign bit could be set, zerobits
11074 can contain actually sign bits, so no transformation is
11075 possible, unless MASK masks them all away. In that
11076 case the shift needs to be converted into logical shift. */
11077 if (!TYPE_UNSIGNED (TREE_TYPE (arg0))
11078 && prec == TYPE_PRECISION (TREE_TYPE (arg0)))
11080 if ((mask & zerobits) == 0)
11081 shift_type = unsigned_type_for (TREE_TYPE (arg0));
11087 /* ((X << 16) & 0xff00) is (X, 0). */
11088 if ((mask & zerobits) == mask)
11089 return omit_one_operand (type, build_int_cst (type, 0), arg0);
11091 newmask = mask | zerobits;
11092 if (newmask != mask && (newmask & (newmask + 1)) == 0)
11096 /* Only do the transformation if NEWMASK is some integer
11098 for (prec = BITS_PER_UNIT;
11099 prec < HOST_BITS_PER_WIDE_INT; prec <<= 1)
11100 if (newmask == (((unsigned HOST_WIDE_INT) 1) << prec) - 1)
11102 if (prec < HOST_BITS_PER_WIDE_INT
11103 || newmask == ~(unsigned HOST_WIDE_INT) 0)
11105 if (shift_type != TREE_TYPE (arg0))
11107 tem = fold_build2 (TREE_CODE (arg0), shift_type,
11108 fold_convert (shift_type,
11109 TREE_OPERAND (arg0, 0)),
11110 TREE_OPERAND (arg0, 1));
11111 tem = fold_convert (type, tem);
11115 return fold_build2 (BIT_AND_EXPR, type, tem,
11116 build_int_cst_type (TREE_TYPE (op1),
11125 /* Don't touch a floating-point divide by zero unless the mode
11126 of the constant can represent infinity. */
11127 if (TREE_CODE (arg1) == REAL_CST
11128 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
11129 && real_zerop (arg1))
11132 /* Optimize A / A to 1.0 if we don't care about
11133 NaNs or Infinities. Skip the transformation
11134 for non-real operands. */
11135 if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
11136 && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
11137 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
11138 && operand_equal_p (arg0, arg1, 0))
11140 tree r = build_real (TREE_TYPE (arg0), dconst1);
11142 return omit_two_operands (type, r, arg0, arg1);
11145 /* The complex version of the above A / A optimization. */
11146 if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
11147 && operand_equal_p (arg0, arg1, 0))
11149 tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
11150 if (! HONOR_NANS (TYPE_MODE (elem_type))
11151 && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
11153 tree r = build_real (elem_type, dconst1);
11154 /* omit_two_operands will call fold_convert for us. */
11155 return omit_two_operands (type, r, arg0, arg1);
11159 /* (-A) / (-B) -> A / B */
11160 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
11161 return fold_build2 (RDIV_EXPR, type,
11162 TREE_OPERAND (arg0, 0),
11163 negate_expr (arg1));
11164 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
11165 return fold_build2 (RDIV_EXPR, type,
11166 negate_expr (arg0),
11167 TREE_OPERAND (arg1, 0));
11169 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
11170 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11171 && real_onep (arg1))
11172 return non_lvalue (fold_convert (type, arg0));
11174 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
11175 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11176 && real_minus_onep (arg1))
11177 return non_lvalue (fold_convert (type, negate_expr (arg0)));
11179 /* If ARG1 is a constant, we can convert this to a multiply by the
11180 reciprocal. This does not have the same rounding properties,
11181 so only do this if -freciprocal-math. We can actually
11182 always safely do it if ARG1 is a power of two, but it's hard to
11183 tell if it is or not in a portable manner. */
11184 if (TREE_CODE (arg1) == REAL_CST)
11186 if (flag_reciprocal_math
11187 && 0 != (tem = const_binop (code, build_real (type, dconst1),
11189 return fold_build2 (MULT_EXPR, type, arg0, tem);
11190 /* Find the reciprocal if optimizing and the result is exact. */
11194 r = TREE_REAL_CST (arg1);
11195 if (exact_real_inverse (TYPE_MODE(TREE_TYPE(arg0)), &r))
11197 tem = build_real (type, r);
11198 return fold_build2 (MULT_EXPR, type,
11199 fold_convert (type, arg0), tem);
11203 /* Convert A/B/C to A/(B*C). */
11204 if (flag_reciprocal_math
11205 && TREE_CODE (arg0) == RDIV_EXPR)
11206 return fold_build2 (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
11207 fold_build2 (MULT_EXPR, type,
11208 TREE_OPERAND (arg0, 1), arg1));
11210 /* Convert A/(B/C) to (A/B)*C. */
11211 if (flag_reciprocal_math
11212 && TREE_CODE (arg1) == RDIV_EXPR)
11213 return fold_build2 (MULT_EXPR, type,
11214 fold_build2 (RDIV_EXPR, type, arg0,
11215 TREE_OPERAND (arg1, 0)),
11216 TREE_OPERAND (arg1, 1));
11218 /* Convert C1/(X*C2) into (C1/C2)/X. */
11219 if (flag_reciprocal_math
11220 && TREE_CODE (arg1) == MULT_EXPR
11221 && TREE_CODE (arg0) == REAL_CST
11222 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
11224 tree tem = const_binop (RDIV_EXPR, arg0,
11225 TREE_OPERAND (arg1, 1), 0);
11227 return fold_build2 (RDIV_EXPR, type, tem,
11228 TREE_OPERAND (arg1, 0));
11231 if (flag_unsafe_math_optimizations)
11233 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
11234 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
11236 /* Optimize sin(x)/cos(x) as tan(x). */
11237 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
11238 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
11239 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
11240 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11241 CALL_EXPR_ARG (arg1, 0), 0))
11243 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11245 if (tanfn != NULL_TREE)
11246 return build_call_expr (tanfn, 1, CALL_EXPR_ARG (arg0, 0));
11249 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
11250 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
11251 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
11252 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
11253 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11254 CALL_EXPR_ARG (arg1, 0), 0))
11256 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11258 if (tanfn != NULL_TREE)
11260 tree tmp = build_call_expr (tanfn, 1, CALL_EXPR_ARG (arg0, 0));
11261 return fold_build2 (RDIV_EXPR, type,
11262 build_real (type, dconst1), tmp);
11266 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
11267 NaNs or Infinities. */
11268 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
11269 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
11270 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
11272 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11273 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11275 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
11276 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
11277 && operand_equal_p (arg00, arg01, 0))
11279 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11281 if (cosfn != NULL_TREE)
11282 return build_call_expr (cosfn, 1, arg00);
11286 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
11287 NaNs or Infinities. */
11288 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
11289 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
11290 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
11292 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11293 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11295 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
11296 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
11297 && operand_equal_p (arg00, arg01, 0))
11299 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11301 if (cosfn != NULL_TREE)
11303 tree tmp = build_call_expr (cosfn, 1, arg00);
11304 return fold_build2 (RDIV_EXPR, type,
11305 build_real (type, dconst1),
11311 /* Optimize pow(x,c)/x as pow(x,c-1). */
11312 if (fcode0 == BUILT_IN_POW
11313 || fcode0 == BUILT_IN_POWF
11314 || fcode0 == BUILT_IN_POWL)
11316 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11317 tree arg01 = CALL_EXPR_ARG (arg0, 1);
11318 if (TREE_CODE (arg01) == REAL_CST
11319 && !TREE_OVERFLOW (arg01)
11320 && operand_equal_p (arg1, arg00, 0))
11322 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11326 c = TREE_REAL_CST (arg01);
11327 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
11328 arg = build_real (type, c);
11329 return build_call_expr (powfn, 2, arg1, arg);
11333 /* Optimize a/root(b/c) into a*root(c/b). */
11334 if (BUILTIN_ROOT_P (fcode1))
11336 tree rootarg = CALL_EXPR_ARG (arg1, 0);
11338 if (TREE_CODE (rootarg) == RDIV_EXPR)
11340 tree rootfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11341 tree b = TREE_OPERAND (rootarg, 0);
11342 tree c = TREE_OPERAND (rootarg, 1);
11344 tree tmp = fold_build2 (RDIV_EXPR, type, c, b);
11346 tmp = build_call_expr (rootfn, 1, tmp);
11347 return fold_build2 (MULT_EXPR, type, arg0, tmp);
11351 /* Optimize x/expN(y) into x*expN(-y). */
11352 if (BUILTIN_EXPONENT_P (fcode1))
11354 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11355 tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
11356 arg1 = build_call_expr (expfn, 1, fold_convert (type, arg));
11357 return fold_build2 (MULT_EXPR, type, arg0, arg1);
11360 /* Optimize x/pow(y,z) into x*pow(y,-z). */
11361 if (fcode1 == BUILT_IN_POW
11362 || fcode1 == BUILT_IN_POWF
11363 || fcode1 == BUILT_IN_POWL)
11365 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11366 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11367 tree arg11 = CALL_EXPR_ARG (arg1, 1);
11368 tree neg11 = fold_convert (type, negate_expr (arg11));
11369 arg1 = build_call_expr (powfn, 2, arg10, neg11);
11370 return fold_build2 (MULT_EXPR, type, arg0, arg1);
11375 case TRUNC_DIV_EXPR:
11376 case FLOOR_DIV_EXPR:
11377 /* Simplify A / (B << N) where A and B are positive and B is
11378 a power of 2, to A >> (N + log2(B)). */
11379 strict_overflow_p = false;
11380 if (TREE_CODE (arg1) == LSHIFT_EXPR
11381 && (TYPE_UNSIGNED (type)
11382 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
11384 tree sval = TREE_OPERAND (arg1, 0);
11385 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
11387 tree sh_cnt = TREE_OPERAND (arg1, 1);
11388 unsigned long pow2 = exact_log2 (TREE_INT_CST_LOW (sval));
11390 if (strict_overflow_p)
11391 fold_overflow_warning (("assuming signed overflow does not "
11392 "occur when simplifying A / (B << N)"),
11393 WARN_STRICT_OVERFLOW_MISC);
11395 sh_cnt = fold_build2 (PLUS_EXPR, TREE_TYPE (sh_cnt),
11396 sh_cnt, build_int_cst (NULL_TREE, pow2));
11397 return fold_build2 (RSHIFT_EXPR, type,
11398 fold_convert (type, arg0), sh_cnt);
11402 /* For unsigned integral types, FLOOR_DIV_EXPR is the same as
11403 TRUNC_DIV_EXPR. Rewrite into the latter in this case. */
11404 if (INTEGRAL_TYPE_P (type)
11405 && TYPE_UNSIGNED (type)
11406 && code == FLOOR_DIV_EXPR)
11407 return fold_build2 (TRUNC_DIV_EXPR, type, op0, op1);
11411 case ROUND_DIV_EXPR:
11412 case CEIL_DIV_EXPR:
11413 case EXACT_DIV_EXPR:
11414 if (integer_onep (arg1))
11415 return non_lvalue (fold_convert (type, arg0));
11416 if (integer_zerop (arg1))
11418 /* X / -1 is -X. */
11419 if (!TYPE_UNSIGNED (type)
11420 && TREE_CODE (arg1) == INTEGER_CST
11421 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
11422 && TREE_INT_CST_HIGH (arg1) == -1)
11423 return fold_convert (type, negate_expr (arg0));
11425 /* Convert -A / -B to A / B when the type is signed and overflow is
11427 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11428 && TREE_CODE (arg0) == NEGATE_EXPR
11429 && negate_expr_p (arg1))
11431 if (INTEGRAL_TYPE_P (type))
11432 fold_overflow_warning (("assuming signed overflow does not occur "
11433 "when distributing negation across "
11435 WARN_STRICT_OVERFLOW_MISC);
11436 return fold_build2 (code, type,
11437 fold_convert (type, TREE_OPERAND (arg0, 0)),
11438 negate_expr (arg1));
11440 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11441 && TREE_CODE (arg1) == NEGATE_EXPR
11442 && negate_expr_p (arg0))
11444 if (INTEGRAL_TYPE_P (type))
11445 fold_overflow_warning (("assuming signed overflow does not occur "
11446 "when distributing negation across "
11448 WARN_STRICT_OVERFLOW_MISC);
11449 return fold_build2 (code, type, negate_expr (arg0),
11450 TREE_OPERAND (arg1, 0));
11453 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
11454 operation, EXACT_DIV_EXPR.
11456 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
11457 At one time others generated faster code, it's not clear if they do
11458 after the last round to changes to the DIV code in expmed.c. */
11459 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
11460 && multiple_of_p (type, arg0, arg1))
11461 return fold_build2 (EXACT_DIV_EXPR, type, arg0, arg1);
11463 strict_overflow_p = false;
11464 if (TREE_CODE (arg1) == INTEGER_CST
11465 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11466 &strict_overflow_p)))
11468 if (strict_overflow_p)
11469 fold_overflow_warning (("assuming signed overflow does not occur "
11470 "when simplifying division"),
11471 WARN_STRICT_OVERFLOW_MISC);
11472 return fold_convert (type, tem);
11477 case CEIL_MOD_EXPR:
11478 case FLOOR_MOD_EXPR:
11479 case ROUND_MOD_EXPR:
11480 case TRUNC_MOD_EXPR:
11481 /* X % 1 is always zero, but be sure to preserve any side
11483 if (integer_onep (arg1))
11484 return omit_one_operand (type, integer_zero_node, arg0);
11486 /* X % 0, return X % 0 unchanged so that we can get the
11487 proper warnings and errors. */
11488 if (integer_zerop (arg1))
11491 /* 0 % X is always zero, but be sure to preserve any side
11492 effects in X. Place this after checking for X == 0. */
11493 if (integer_zerop (arg0))
11494 return omit_one_operand (type, integer_zero_node, arg1);
11496 /* X % -1 is zero. */
11497 if (!TYPE_UNSIGNED (type)
11498 && TREE_CODE (arg1) == INTEGER_CST
11499 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
11500 && TREE_INT_CST_HIGH (arg1) == -1)
11501 return omit_one_operand (type, integer_zero_node, arg0);
11503 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
11504 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
11505 strict_overflow_p = false;
11506 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
11507 && (TYPE_UNSIGNED (type)
11508 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
11511 /* Also optimize A % (C << N) where C is a power of 2,
11512 to A & ((C << N) - 1). */
11513 if (TREE_CODE (arg1) == LSHIFT_EXPR)
11514 c = TREE_OPERAND (arg1, 0);
11516 if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
11518 tree mask = fold_build2 (MINUS_EXPR, TREE_TYPE (arg1), arg1,
11519 build_int_cst (TREE_TYPE (arg1), 1));
11520 if (strict_overflow_p)
11521 fold_overflow_warning (("assuming signed overflow does not "
11522 "occur when simplifying "
11523 "X % (power of two)"),
11524 WARN_STRICT_OVERFLOW_MISC);
11525 return fold_build2 (BIT_AND_EXPR, type,
11526 fold_convert (type, arg0),
11527 fold_convert (type, mask));
11531 /* X % -C is the same as X % C. */
11532 if (code == TRUNC_MOD_EXPR
11533 && !TYPE_UNSIGNED (type)
11534 && TREE_CODE (arg1) == INTEGER_CST
11535 && !TREE_OVERFLOW (arg1)
11536 && TREE_INT_CST_HIGH (arg1) < 0
11537 && !TYPE_OVERFLOW_TRAPS (type)
11538 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
11539 && !sign_bit_p (arg1, arg1))
11540 return fold_build2 (code, type, fold_convert (type, arg0),
11541 fold_convert (type, negate_expr (arg1)));
11543 /* X % -Y is the same as X % Y. */
11544 if (code == TRUNC_MOD_EXPR
11545 && !TYPE_UNSIGNED (type)
11546 && TREE_CODE (arg1) == NEGATE_EXPR
11547 && !TYPE_OVERFLOW_TRAPS (type))
11548 return fold_build2 (code, type, fold_convert (type, arg0),
11549 fold_convert (type, TREE_OPERAND (arg1, 0)));
11551 if (TREE_CODE (arg1) == INTEGER_CST
11552 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11553 &strict_overflow_p)))
11555 if (strict_overflow_p)
11556 fold_overflow_warning (("assuming signed overflow does not occur "
11557 "when simplifying modulos"),
11558 WARN_STRICT_OVERFLOW_MISC);
11559 return fold_convert (type, tem);
11566 if (integer_all_onesp (arg0))
11567 return omit_one_operand (type, arg0, arg1);
11571 /* Optimize -1 >> x for arithmetic right shifts. */
11572 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type))
11573 return omit_one_operand (type, arg0, arg1);
11574 /* ... fall through ... */
11578 if (integer_zerop (arg1))
11579 return non_lvalue (fold_convert (type, arg0));
11580 if (integer_zerop (arg0))
11581 return omit_one_operand (type, arg0, arg1);
11583 /* Since negative shift count is not well-defined,
11584 don't try to compute it in the compiler. */
11585 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
11588 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
11589 if (TREE_CODE (op0) == code && host_integerp (arg1, false)
11590 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
11591 && host_integerp (TREE_OPERAND (arg0, 1), false)
11592 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
11594 HOST_WIDE_INT low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
11595 + TREE_INT_CST_LOW (arg1));
11597 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
11598 being well defined. */
11599 if (low >= TYPE_PRECISION (type))
11601 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
11602 low = low % TYPE_PRECISION (type);
11603 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
11604 return build_int_cst (type, 0);
11606 low = TYPE_PRECISION (type) - 1;
11609 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
11610 build_int_cst (type, low));
11613 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
11614 into x & ((unsigned)-1 >> c) for unsigned types. */
11615 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
11616 || (TYPE_UNSIGNED (type)
11617 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
11618 && host_integerp (arg1, false)
11619 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
11620 && host_integerp (TREE_OPERAND (arg0, 1), false)
11621 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
11623 HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
11624 HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
11630 arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));
11632 lshift = build_int_cst (type, -1);
11633 lshift = int_const_binop (code, lshift, arg1, 0);
11635 return fold_build2 (BIT_AND_EXPR, type, arg00, lshift);
11639 /* Rewrite an LROTATE_EXPR by a constant into an
11640 RROTATE_EXPR by a new constant. */
11641 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
11643 tree tem = build_int_cst (TREE_TYPE (arg1),
11644 TYPE_PRECISION (type));
11645 tem = const_binop (MINUS_EXPR, tem, arg1, 0);
11646 return fold_build2 (RROTATE_EXPR, type, op0, tem);
11649 /* If we have a rotate of a bit operation with the rotate count and
11650 the second operand of the bit operation both constant,
11651 permute the two operations. */
11652 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
11653 && (TREE_CODE (arg0) == BIT_AND_EXPR
11654 || TREE_CODE (arg0) == BIT_IOR_EXPR
11655 || TREE_CODE (arg0) == BIT_XOR_EXPR)
11656 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11657 return fold_build2 (TREE_CODE (arg0), type,
11658 fold_build2 (code, type,
11659 TREE_OPERAND (arg0, 0), arg1),
11660 fold_build2 (code, type,
11661 TREE_OPERAND (arg0, 1), arg1));
11663 /* Two consecutive rotates adding up to the precision of the
11664 type can be ignored. */
11665 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
11666 && TREE_CODE (arg0) == RROTATE_EXPR
11667 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11668 && TREE_INT_CST_HIGH (arg1) == 0
11669 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
11670 && ((TREE_INT_CST_LOW (arg1)
11671 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
11672 == (unsigned int) TYPE_PRECISION (type)))
11673 return TREE_OPERAND (arg0, 0);
11675 /* Fold (X & C2) << C1 into (X << C1) & (C2 << C1)
11676 (X & C2) >> C1 into (X >> C1) & (C2 >> C1)
11677 if the latter can be further optimized. */
11678 if ((code == LSHIFT_EXPR || code == RSHIFT_EXPR)
11679 && TREE_CODE (arg0) == BIT_AND_EXPR
11680 && TREE_CODE (arg1) == INTEGER_CST
11681 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11683 tree mask = fold_build2 (code, type,
11684 fold_convert (type, TREE_OPERAND (arg0, 1)),
11686 tree shift = fold_build2 (code, type,
11687 fold_convert (type, TREE_OPERAND (arg0, 0)),
11689 tem = fold_binary (BIT_AND_EXPR, type, shift, mask);
11697 if (operand_equal_p (arg0, arg1, 0))
11698 return omit_one_operand (type, arg0, arg1);
11699 if (INTEGRAL_TYPE_P (type)
11700 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
11701 return omit_one_operand (type, arg1, arg0);
11702 tem = fold_minmax (MIN_EXPR, type, arg0, arg1);
11708 if (operand_equal_p (arg0, arg1, 0))
11709 return omit_one_operand (type, arg0, arg1);
11710 if (INTEGRAL_TYPE_P (type)
11711 && TYPE_MAX_VALUE (type)
11712 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
11713 return omit_one_operand (type, arg1, arg0);
11714 tem = fold_minmax (MAX_EXPR, type, arg0, arg1);
11719 case TRUTH_ANDIF_EXPR:
11720 /* Note that the operands of this must be ints
11721 and their values must be 0 or 1.
11722 ("true" is a fixed value perhaps depending on the language.) */
11723 /* If first arg is constant zero, return it. */
11724 if (integer_zerop (arg0))
11725 return fold_convert (type, arg0);
11726 case TRUTH_AND_EXPR:
11727 /* If either arg is constant true, drop it. */
11728 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
11729 return non_lvalue (fold_convert (type, arg1));
11730 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
11731 /* Preserve sequence points. */
11732 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
11733 return non_lvalue (fold_convert (type, arg0));
11734 /* If second arg is constant zero, result is zero, but first arg
11735 must be evaluated. */
11736 if (integer_zerop (arg1))
11737 return omit_one_operand (type, arg1, arg0);
11738 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
11739 case will be handled here. */
11740 if (integer_zerop (arg0))
11741 return omit_one_operand (type, arg0, arg1);
11743 /* !X && X is always false. */
11744 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11745 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11746 return omit_one_operand (type, integer_zero_node, arg1);
11747 /* X && !X is always false. */
11748 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
11749 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11750 return omit_one_operand (type, integer_zero_node, arg0);
11752 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
11753 means A >= Y && A != MAX, but in this case we know that
11756 if (!TREE_SIDE_EFFECTS (arg0)
11757 && !TREE_SIDE_EFFECTS (arg1))
11759 tem = fold_to_nonsharp_ineq_using_bound (arg0, arg1);
11760 if (tem && !operand_equal_p (tem, arg0, 0))
11761 return fold_build2 (code, type, tem, arg1);
11763 tem = fold_to_nonsharp_ineq_using_bound (arg1, arg0);
11764 if (tem && !operand_equal_p (tem, arg1, 0))
11765 return fold_build2 (code, type, arg0, tem);
11769 /* We only do these simplifications if we are optimizing. */
11773 /* Check for things like (A || B) && (A || C). We can convert this
11774 to A || (B && C). Note that either operator can be any of the four
11775 truth and/or operations and the transformation will still be
11776 valid. Also note that we only care about order for the
11777 ANDIF and ORIF operators. If B contains side effects, this
11778 might change the truth-value of A. */
11779 if (TREE_CODE (arg0) == TREE_CODE (arg1)
11780 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
11781 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
11782 || TREE_CODE (arg0) == TRUTH_AND_EXPR
11783 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
11784 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
11786 tree a00 = TREE_OPERAND (arg0, 0);
11787 tree a01 = TREE_OPERAND (arg0, 1);
11788 tree a10 = TREE_OPERAND (arg1, 0);
11789 tree a11 = TREE_OPERAND (arg1, 1);
11790 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
11791 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
11792 && (code == TRUTH_AND_EXPR
11793 || code == TRUTH_OR_EXPR));
11795 if (operand_equal_p (a00, a10, 0))
11796 return fold_build2 (TREE_CODE (arg0), type, a00,
11797 fold_build2 (code, type, a01, a11));
11798 else if (commutative && operand_equal_p (a00, a11, 0))
11799 return fold_build2 (TREE_CODE (arg0), type, a00,
11800 fold_build2 (code, type, a01, a10));
11801 else if (commutative && operand_equal_p (a01, a10, 0))
11802 return fold_build2 (TREE_CODE (arg0), type, a01,
11803 fold_build2 (code, type, a00, a11));
11805 /* This case if tricky because we must either have commutative
11806 operators or else A10 must not have side-effects. */
11808 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
11809 && operand_equal_p (a01, a11, 0))
11810 return fold_build2 (TREE_CODE (arg0), type,
11811 fold_build2 (code, type, a00, a10),
11815 /* See if we can build a range comparison. */
11816 if (0 != (tem = fold_range_test (code, type, op0, op1)))
11819 /* Check for the possibility of merging component references. If our
11820 lhs is another similar operation, try to merge its rhs with our
11821 rhs. Then try to merge our lhs and rhs. */
11822 if (TREE_CODE (arg0) == code
11823 && 0 != (tem = fold_truthop (code, type,
11824 TREE_OPERAND (arg0, 1), arg1)))
11825 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
11827 if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
11832 case TRUTH_ORIF_EXPR:
11833 /* Note that the operands of this must be ints
11834 and their values must be 0 or true.
11835 ("true" is a fixed value perhaps depending on the language.) */
11836 /* If first arg is constant true, return it. */
11837 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
11838 return fold_convert (type, arg0);
11839 case TRUTH_OR_EXPR:
11840 /* If either arg is constant zero, drop it. */
11841 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
11842 return non_lvalue (fold_convert (type, arg1));
11843 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
11844 /* Preserve sequence points. */
11845 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
11846 return non_lvalue (fold_convert (type, arg0));
11847 /* If second arg is constant true, result is true, but we must
11848 evaluate first arg. */
11849 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
11850 return omit_one_operand (type, arg1, arg0);
11851 /* Likewise for first arg, but note this only occurs here for
11853 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
11854 return omit_one_operand (type, arg0, arg1);
11856 /* !X || X is always true. */
11857 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11858 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11859 return omit_one_operand (type, integer_one_node, arg1);
11860 /* X || !X is always true. */
11861 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
11862 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11863 return omit_one_operand (type, integer_one_node, arg0);
11867 case TRUTH_XOR_EXPR:
11868 /* If the second arg is constant zero, drop it. */
11869 if (integer_zerop (arg1))
11870 return non_lvalue (fold_convert (type, arg0));
11871 /* If the second arg is constant true, this is a logical inversion. */
11872 if (integer_onep (arg1))
11874 /* Only call invert_truthvalue if operand is a truth value. */
11875 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
11876 tem = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg0), arg0);
11878 tem = invert_truthvalue (arg0);
11879 return non_lvalue (fold_convert (type, tem));
11881 /* Identical arguments cancel to zero. */
11882 if (operand_equal_p (arg0, arg1, 0))
11883 return omit_one_operand (type, integer_zero_node, arg0);
11885 /* !X ^ X is always true. */
11886 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11887 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11888 return omit_one_operand (type, integer_one_node, arg1);
11890 /* X ^ !X is always true. */
11891 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
11892 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11893 return omit_one_operand (type, integer_one_node, arg0);
11899 tem = fold_comparison (code, type, op0, op1);
11900 if (tem != NULL_TREE)
11903 /* bool_var != 0 becomes bool_var. */
11904 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
11905 && code == NE_EXPR)
11906 return non_lvalue (fold_convert (type, arg0));
11908 /* bool_var == 1 becomes bool_var. */
11909 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
11910 && code == EQ_EXPR)
11911 return non_lvalue (fold_convert (type, arg0));
11913 /* bool_var != 1 becomes !bool_var. */
11914 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
11915 && code == NE_EXPR)
11916 return fold_build1 (TRUTH_NOT_EXPR, type, fold_convert (type, arg0));
11918 /* bool_var == 0 becomes !bool_var. */
11919 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
11920 && code == EQ_EXPR)
11921 return fold_build1 (TRUTH_NOT_EXPR, type, fold_convert (type, arg0));
11923 /* If this is an equality comparison of the address of two non-weak,
11924 unaliased symbols neither of which are extern (since we do not
11925 have access to attributes for externs), then we know the result. */
11926 if (TREE_CODE (arg0) == ADDR_EXPR
11927 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
11928 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
11929 && ! lookup_attribute ("alias",
11930 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
11931 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
11932 && TREE_CODE (arg1) == ADDR_EXPR
11933 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
11934 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
11935 && ! lookup_attribute ("alias",
11936 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
11937 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
11939 /* We know that we're looking at the address of two
11940 non-weak, unaliased, static _DECL nodes.
11942 It is both wasteful and incorrect to call operand_equal_p
11943 to compare the two ADDR_EXPR nodes. It is wasteful in that
11944 all we need to do is test pointer equality for the arguments
11945 to the two ADDR_EXPR nodes. It is incorrect to use
11946 operand_equal_p as that function is NOT equivalent to a
11947 C equality test. It can in fact return false for two
11948 objects which would test as equal using the C equality
11950 bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
11951 return constant_boolean_node (equal
11952 ? code == EQ_EXPR : code != EQ_EXPR,
11956 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
11957 a MINUS_EXPR of a constant, we can convert it into a comparison with
11958 a revised constant as long as no overflow occurs. */
11959 if (TREE_CODE (arg1) == INTEGER_CST
11960 && (TREE_CODE (arg0) == PLUS_EXPR
11961 || TREE_CODE (arg0) == MINUS_EXPR)
11962 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11963 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
11964 ? MINUS_EXPR : PLUS_EXPR,
11965 fold_convert (TREE_TYPE (arg0), arg1),
11966 TREE_OPERAND (arg0, 1), 0))
11967 && !TREE_OVERFLOW (tem))
11968 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
11970 /* Similarly for a NEGATE_EXPR. */
11971 if (TREE_CODE (arg0) == NEGATE_EXPR
11972 && TREE_CODE (arg1) == INTEGER_CST
11973 && 0 != (tem = negate_expr (arg1))
11974 && TREE_CODE (tem) == INTEGER_CST
11975 && !TREE_OVERFLOW (tem))
11976 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
11978 /* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2). */
11979 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11980 && TREE_CODE (arg1) == INTEGER_CST
11981 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11982 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
11983 fold_build2 (BIT_XOR_EXPR, TREE_TYPE (arg0),
11984 fold_convert (TREE_TYPE (arg0), arg1),
11985 TREE_OPERAND (arg0, 1)));
11987 /* Transform comparisons of the form X +- C CMP X. */
11988 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
11989 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11990 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11991 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11992 || POINTER_TYPE_P (TREE_TYPE (arg0))))
11994 tree cst = TREE_OPERAND (arg0, 1);
11996 if (code == EQ_EXPR
11997 && !integer_zerop (cst))
11998 return omit_two_operands (type, boolean_false_node,
11999 TREE_OPERAND (arg0, 0), arg1);
12001 return omit_two_operands (type, boolean_true_node,
12002 TREE_OPERAND (arg0, 0), arg1);
12005 /* If we have X - Y == 0, we can convert that to X == Y and similarly
12006 for !=. Don't do this for ordered comparisons due to overflow. */
12007 if (TREE_CODE (arg0) == MINUS_EXPR
12008 && integer_zerop (arg1))
12009 return fold_build2 (code, type,
12010 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
12012 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
12013 if (TREE_CODE (arg0) == ABS_EXPR
12014 && (integer_zerop (arg1) || real_zerop (arg1)))
12015 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), arg1);
12017 /* If this is an EQ or NE comparison with zero and ARG0 is
12018 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
12019 two operations, but the latter can be done in one less insn
12020 on machines that have only two-operand insns or on which a
12021 constant cannot be the first operand. */
12022 if (TREE_CODE (arg0) == BIT_AND_EXPR
12023 && integer_zerop (arg1))
12025 tree arg00 = TREE_OPERAND (arg0, 0);
12026 tree arg01 = TREE_OPERAND (arg0, 1);
12027 if (TREE_CODE (arg00) == LSHIFT_EXPR
12028 && integer_onep (TREE_OPERAND (arg00, 0)))
12030 tree tem = fold_build2 (RSHIFT_EXPR, TREE_TYPE (arg00),
12031 arg01, TREE_OPERAND (arg00, 1));
12032 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12033 build_int_cst (TREE_TYPE (arg0), 1));
12034 return fold_build2 (code, type,
12035 fold_convert (TREE_TYPE (arg1), tem), arg1);
12037 else if (TREE_CODE (arg01) == LSHIFT_EXPR
12038 && integer_onep (TREE_OPERAND (arg01, 0)))
12040 tree tem = fold_build2 (RSHIFT_EXPR, TREE_TYPE (arg01),
12041 arg00, TREE_OPERAND (arg01, 1));
12042 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12043 build_int_cst (TREE_TYPE (arg0), 1));
12044 return fold_build2 (code, type,
12045 fold_convert (TREE_TYPE (arg1), tem), arg1);
12049 /* If this is an NE or EQ comparison of zero against the result of a
12050 signed MOD operation whose second operand is a power of 2, make
12051 the MOD operation unsigned since it is simpler and equivalent. */
12052 if (integer_zerop (arg1)
12053 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
12054 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
12055 || TREE_CODE (arg0) == CEIL_MOD_EXPR
12056 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
12057 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
12058 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12060 tree newtype = unsigned_type_for (TREE_TYPE (arg0));
12061 tree newmod = fold_build2 (TREE_CODE (arg0), newtype,
12062 fold_convert (newtype,
12063 TREE_OPERAND (arg0, 0)),
12064 fold_convert (newtype,
12065 TREE_OPERAND (arg0, 1)));
12067 return fold_build2 (code, type, newmod,
12068 fold_convert (newtype, arg1));
12071 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
12072 C1 is a valid shift constant, and C2 is a power of two, i.e.
12074 if (TREE_CODE (arg0) == BIT_AND_EXPR
12075 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
12076 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
12078 && integer_pow2p (TREE_OPERAND (arg0, 1))
12079 && integer_zerop (arg1))
12081 tree itype = TREE_TYPE (arg0);
12082 unsigned HOST_WIDE_INT prec = TYPE_PRECISION (itype);
12083 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
12085 /* Check for a valid shift count. */
12086 if (TREE_INT_CST_HIGH (arg001) == 0
12087 && TREE_INT_CST_LOW (arg001) < prec)
12089 tree arg01 = TREE_OPERAND (arg0, 1);
12090 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12091 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
12092 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
12093 can be rewritten as (X & (C2 << C1)) != 0. */
12094 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
12096 tem = fold_build2 (LSHIFT_EXPR, itype, arg01, arg001);
12097 tem = fold_build2 (BIT_AND_EXPR, itype, arg000, tem);
12098 return fold_build2 (code, type, tem, arg1);
12100 /* Otherwise, for signed (arithmetic) shifts,
12101 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
12102 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
12103 else if (!TYPE_UNSIGNED (itype))
12104 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
12105 arg000, build_int_cst (itype, 0));
12106 /* Otherwise, of unsigned (logical) shifts,
12107 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
12108 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
12110 return omit_one_operand (type,
12111 code == EQ_EXPR ? integer_one_node
12112 : integer_zero_node,
12117 /* If this is an NE comparison of zero with an AND of one, remove the
12118 comparison since the AND will give the correct value. */
12119 if (code == NE_EXPR
12120 && integer_zerop (arg1)
12121 && TREE_CODE (arg0) == BIT_AND_EXPR
12122 && integer_onep (TREE_OPERAND (arg0, 1)))
12123 return fold_convert (type, arg0);
12125 /* If we have (A & C) == C where C is a power of 2, convert this into
12126 (A & C) != 0. Similarly for NE_EXPR. */
12127 if (TREE_CODE (arg0) == BIT_AND_EXPR
12128 && integer_pow2p (TREE_OPERAND (arg0, 1))
12129 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12130 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12131 arg0, fold_convert (TREE_TYPE (arg0),
12132 integer_zero_node));
12134 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
12135 bit, then fold the expression into A < 0 or A >= 0. */
12136 tem = fold_single_bit_test_into_sign_test (code, arg0, arg1, type);
12140 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
12141 Similarly for NE_EXPR. */
12142 if (TREE_CODE (arg0) == BIT_AND_EXPR
12143 && TREE_CODE (arg1) == INTEGER_CST
12144 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12146 tree notc = fold_build1 (BIT_NOT_EXPR,
12147 TREE_TYPE (TREE_OPERAND (arg0, 1)),
12148 TREE_OPERAND (arg0, 1));
12149 tree dandnotc = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
12151 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12152 if (integer_nonzerop (dandnotc))
12153 return omit_one_operand (type, rslt, arg0);
12156 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
12157 Similarly for NE_EXPR. */
12158 if (TREE_CODE (arg0) == BIT_IOR_EXPR
12159 && TREE_CODE (arg1) == INTEGER_CST
12160 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12162 tree notd = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
12163 tree candnotd = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
12164 TREE_OPERAND (arg0, 1), notd);
12165 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12166 if (integer_nonzerop (candnotd))
12167 return omit_one_operand (type, rslt, arg0);
12170 /* If this is a comparison of a field, we may be able to simplify it. */
12171 if ((TREE_CODE (arg0) == COMPONENT_REF
12172 || TREE_CODE (arg0) == BIT_FIELD_REF)
12173 /* Handle the constant case even without -O
12174 to make sure the warnings are given. */
12175 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
12177 t1 = optimize_bit_field_compare (code, type, arg0, arg1);
12182 /* Optimize comparisons of strlen vs zero to a compare of the
12183 first character of the string vs zero. To wit,
12184 strlen(ptr) == 0 => *ptr == 0
12185 strlen(ptr) != 0 => *ptr != 0
12186 Other cases should reduce to one of these two (or a constant)
12187 due to the return value of strlen being unsigned. */
12188 if (TREE_CODE (arg0) == CALL_EXPR
12189 && integer_zerop (arg1))
12191 tree fndecl = get_callee_fndecl (arg0);
12194 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
12195 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
12196 && call_expr_nargs (arg0) == 1
12197 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
12199 tree iref = build_fold_indirect_ref (CALL_EXPR_ARG (arg0, 0));
12200 return fold_build2 (code, type, iref,
12201 build_int_cst (TREE_TYPE (iref), 0));
12205 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
12206 of X. Similarly fold (X >> C) == 0 into X >= 0. */
12207 if (TREE_CODE (arg0) == RSHIFT_EXPR
12208 && integer_zerop (arg1)
12209 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12211 tree arg00 = TREE_OPERAND (arg0, 0);
12212 tree arg01 = TREE_OPERAND (arg0, 1);
12213 tree itype = TREE_TYPE (arg00);
12214 if (TREE_INT_CST_HIGH (arg01) == 0
12215 && TREE_INT_CST_LOW (arg01)
12216 == (unsigned HOST_WIDE_INT) (TYPE_PRECISION (itype) - 1))
12218 if (TYPE_UNSIGNED (itype))
12220 itype = signed_type_for (itype);
12221 arg00 = fold_convert (itype, arg00);
12223 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
12224 type, arg00, build_int_cst (itype, 0));
12228 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
12229 if (integer_zerop (arg1)
12230 && TREE_CODE (arg0) == BIT_XOR_EXPR)
12231 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
12232 TREE_OPERAND (arg0, 1));
12234 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
12235 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12236 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12237 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
12238 build_int_cst (TREE_TYPE (arg1), 0));
12239 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
12240 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12241 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12242 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
12243 return fold_build2 (code, type, TREE_OPERAND (arg0, 1),
12244 build_int_cst (TREE_TYPE (arg1), 0));
12246 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
12247 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12248 && TREE_CODE (arg1) == INTEGER_CST
12249 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12250 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
12251 fold_build2 (BIT_XOR_EXPR, TREE_TYPE (arg1),
12252 TREE_OPERAND (arg0, 1), arg1));
12254 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
12255 (X & C) == 0 when C is a single bit. */
12256 if (TREE_CODE (arg0) == BIT_AND_EXPR
12257 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
12258 && integer_zerop (arg1)
12259 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12261 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
12262 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
12263 TREE_OPERAND (arg0, 1));
12264 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
12268 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
12269 constant C is a power of two, i.e. a single bit. */
12270 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12271 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
12272 && integer_zerop (arg1)
12273 && integer_pow2p (TREE_OPERAND (arg0, 1))
12274 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12275 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12277 tree arg00 = TREE_OPERAND (arg0, 0);
12278 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12279 arg00, build_int_cst (TREE_TYPE (arg00), 0));
12282 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
12283 when is C is a power of two, i.e. a single bit. */
12284 if (TREE_CODE (arg0) == BIT_AND_EXPR
12285 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
12286 && integer_zerop (arg1)
12287 && integer_pow2p (TREE_OPERAND (arg0, 1))
12288 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12289 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12291 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12292 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg000),
12293 arg000, TREE_OPERAND (arg0, 1));
12294 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12295 tem, build_int_cst (TREE_TYPE (tem), 0));
12298 if (integer_zerop (arg1)
12299 && tree_expr_nonzero_p (arg0))
12301 tree res = constant_boolean_node (code==NE_EXPR, type);
12302 return omit_one_operand (type, res, arg0);
12305 /* Fold -X op -Y as X op Y, where op is eq/ne. */
12306 if (TREE_CODE (arg0) == NEGATE_EXPR
12307 && TREE_CODE (arg1) == NEGATE_EXPR)
12308 return fold_build2 (code, type,
12309 TREE_OPERAND (arg0, 0),
12310 TREE_OPERAND (arg1, 0));
12312 /* Fold (X & C) op (Y & C) as (X ^ Y) & C op 0", and symmetries. */
12313 if (TREE_CODE (arg0) == BIT_AND_EXPR
12314 && TREE_CODE (arg1) == BIT_AND_EXPR)
12316 tree arg00 = TREE_OPERAND (arg0, 0);
12317 tree arg01 = TREE_OPERAND (arg0, 1);
12318 tree arg10 = TREE_OPERAND (arg1, 0);
12319 tree arg11 = TREE_OPERAND (arg1, 1);
12320 tree itype = TREE_TYPE (arg0);
12322 if (operand_equal_p (arg01, arg11, 0))
12323 return fold_build2 (code, type,
12324 fold_build2 (BIT_AND_EXPR, itype,
12325 fold_build2 (BIT_XOR_EXPR, itype,
12328 build_int_cst (itype, 0));
12330 if (operand_equal_p (arg01, arg10, 0))
12331 return fold_build2 (code, type,
12332 fold_build2 (BIT_AND_EXPR, itype,
12333 fold_build2 (BIT_XOR_EXPR, itype,
12336 build_int_cst (itype, 0));
12338 if (operand_equal_p (arg00, arg11, 0))
12339 return fold_build2 (code, type,
12340 fold_build2 (BIT_AND_EXPR, itype,
12341 fold_build2 (BIT_XOR_EXPR, itype,
12344 build_int_cst (itype, 0));
12346 if (operand_equal_p (arg00, arg10, 0))
12347 return fold_build2 (code, type,
12348 fold_build2 (BIT_AND_EXPR, itype,
12349 fold_build2 (BIT_XOR_EXPR, itype,
12352 build_int_cst (itype, 0));
12355 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12356 && TREE_CODE (arg1) == BIT_XOR_EXPR)
12358 tree arg00 = TREE_OPERAND (arg0, 0);
12359 tree arg01 = TREE_OPERAND (arg0, 1);
12360 tree arg10 = TREE_OPERAND (arg1, 0);
12361 tree arg11 = TREE_OPERAND (arg1, 1);
12362 tree itype = TREE_TYPE (arg0);
12364 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
12365 operand_equal_p guarantees no side-effects so we don't need
12366 to use omit_one_operand on Z. */
12367 if (operand_equal_p (arg01, arg11, 0))
12368 return fold_build2 (code, type, arg00, arg10);
12369 if (operand_equal_p (arg01, arg10, 0))
12370 return fold_build2 (code, type, arg00, arg11);
12371 if (operand_equal_p (arg00, arg11, 0))
12372 return fold_build2 (code, type, arg01, arg10);
12373 if (operand_equal_p (arg00, arg10, 0))
12374 return fold_build2 (code, type, arg01, arg11);
12376 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
12377 if (TREE_CODE (arg01) == INTEGER_CST
12378 && TREE_CODE (arg11) == INTEGER_CST)
12379 return fold_build2 (code, type,
12380 fold_build2 (BIT_XOR_EXPR, itype, arg00,
12381 fold_build2 (BIT_XOR_EXPR, itype,
12386 /* Attempt to simplify equality/inequality comparisons of complex
12387 values. Only lower the comparison if the result is known or
12388 can be simplified to a single scalar comparison. */
12389 if ((TREE_CODE (arg0) == COMPLEX_EXPR
12390 || TREE_CODE (arg0) == COMPLEX_CST)
12391 && (TREE_CODE (arg1) == COMPLEX_EXPR
12392 || TREE_CODE (arg1) == COMPLEX_CST))
12394 tree real0, imag0, real1, imag1;
12397 if (TREE_CODE (arg0) == COMPLEX_EXPR)
12399 real0 = TREE_OPERAND (arg0, 0);
12400 imag0 = TREE_OPERAND (arg0, 1);
12404 real0 = TREE_REALPART (arg0);
12405 imag0 = TREE_IMAGPART (arg0);
12408 if (TREE_CODE (arg1) == COMPLEX_EXPR)
12410 real1 = TREE_OPERAND (arg1, 0);
12411 imag1 = TREE_OPERAND (arg1, 1);
12415 real1 = TREE_REALPART (arg1);
12416 imag1 = TREE_IMAGPART (arg1);
12419 rcond = fold_binary (code, type, real0, real1);
12420 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
12422 if (integer_zerop (rcond))
12424 if (code == EQ_EXPR)
12425 return omit_two_operands (type, boolean_false_node,
12427 return fold_build2 (NE_EXPR, type, imag0, imag1);
12431 if (code == NE_EXPR)
12432 return omit_two_operands (type, boolean_true_node,
12434 return fold_build2 (EQ_EXPR, type, imag0, imag1);
12438 icond = fold_binary (code, type, imag0, imag1);
12439 if (icond && TREE_CODE (icond) == INTEGER_CST)
12441 if (integer_zerop (icond))
12443 if (code == EQ_EXPR)
12444 return omit_two_operands (type, boolean_false_node,
12446 return fold_build2 (NE_EXPR, type, real0, real1);
12450 if (code == NE_EXPR)
12451 return omit_two_operands (type, boolean_true_node,
12453 return fold_build2 (EQ_EXPR, type, real0, real1);
12464 tem = fold_comparison (code, type, op0, op1);
12465 if (tem != NULL_TREE)
12468 /* Transform comparisons of the form X +- C CMP X. */
12469 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
12470 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12471 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
12472 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
12473 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12474 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
12476 tree arg01 = TREE_OPERAND (arg0, 1);
12477 enum tree_code code0 = TREE_CODE (arg0);
12480 if (TREE_CODE (arg01) == REAL_CST)
12481 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
12483 is_positive = tree_int_cst_sgn (arg01);
12485 /* (X - c) > X becomes false. */
12486 if (code == GT_EXPR
12487 && ((code0 == MINUS_EXPR && is_positive >= 0)
12488 || (code0 == PLUS_EXPR && is_positive <= 0)))
12490 if (TREE_CODE (arg01) == INTEGER_CST
12491 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12492 fold_overflow_warning (("assuming signed overflow does not "
12493 "occur when assuming that (X - c) > X "
12494 "is always false"),
12495 WARN_STRICT_OVERFLOW_ALL);
12496 return constant_boolean_node (0, type);
12499 /* Likewise (X + c) < X becomes false. */
12500 if (code == LT_EXPR
12501 && ((code0 == PLUS_EXPR && is_positive >= 0)
12502 || (code0 == MINUS_EXPR && is_positive <= 0)))
12504 if (TREE_CODE (arg01) == INTEGER_CST
12505 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12506 fold_overflow_warning (("assuming signed overflow does not "
12507 "occur when assuming that "
12508 "(X + c) < X is always false"),
12509 WARN_STRICT_OVERFLOW_ALL);
12510 return constant_boolean_node (0, type);
12513 /* Convert (X - c) <= X to true. */
12514 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
12516 && ((code0 == MINUS_EXPR && is_positive >= 0)
12517 || (code0 == PLUS_EXPR && is_positive <= 0)))
12519 if (TREE_CODE (arg01) == INTEGER_CST
12520 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12521 fold_overflow_warning (("assuming signed overflow does not "
12522 "occur when assuming that "
12523 "(X - c) <= X is always true"),
12524 WARN_STRICT_OVERFLOW_ALL);
12525 return constant_boolean_node (1, type);
12528 /* Convert (X + c) >= X to true. */
12529 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
12531 && ((code0 == PLUS_EXPR && is_positive >= 0)
12532 || (code0 == MINUS_EXPR && is_positive <= 0)))
12534 if (TREE_CODE (arg01) == INTEGER_CST
12535 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12536 fold_overflow_warning (("assuming signed overflow does not "
12537 "occur when assuming that "
12538 "(X + c) >= X is always true"),
12539 WARN_STRICT_OVERFLOW_ALL);
12540 return constant_boolean_node (1, type);
12543 if (TREE_CODE (arg01) == INTEGER_CST)
12545 /* Convert X + c > X and X - c < X to true for integers. */
12546 if (code == GT_EXPR
12547 && ((code0 == PLUS_EXPR && is_positive > 0)
12548 || (code0 == MINUS_EXPR && is_positive < 0)))
12550 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12551 fold_overflow_warning (("assuming signed overflow does "
12552 "not occur when assuming that "
12553 "(X + c) > X is always true"),
12554 WARN_STRICT_OVERFLOW_ALL);
12555 return constant_boolean_node (1, type);
12558 if (code == LT_EXPR
12559 && ((code0 == MINUS_EXPR && is_positive > 0)
12560 || (code0 == PLUS_EXPR && is_positive < 0)))
12562 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12563 fold_overflow_warning (("assuming signed overflow does "
12564 "not occur when assuming that "
12565 "(X - c) < X is always true"),
12566 WARN_STRICT_OVERFLOW_ALL);
12567 return constant_boolean_node (1, type);
12570 /* Convert X + c <= X and X - c >= X to false for integers. */
12571 if (code == LE_EXPR
12572 && ((code0 == PLUS_EXPR && is_positive > 0)
12573 || (code0 == MINUS_EXPR && is_positive < 0)))
12575 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12576 fold_overflow_warning (("assuming signed overflow does "
12577 "not occur when assuming that "
12578 "(X + c) <= X is always false"),
12579 WARN_STRICT_OVERFLOW_ALL);
12580 return constant_boolean_node (0, type);
12583 if (code == GE_EXPR
12584 && ((code0 == MINUS_EXPR && is_positive > 0)
12585 || (code0 == PLUS_EXPR && is_positive < 0)))
12587 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12588 fold_overflow_warning (("assuming signed overflow does "
12589 "not occur when assuming that "
12590 "(X - c) >= X is always false"),
12591 WARN_STRICT_OVERFLOW_ALL);
12592 return constant_boolean_node (0, type);
12597 /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
12598 This transformation affects the cases which are handled in later
12599 optimizations involving comparisons with non-negative constants. */
12600 if (TREE_CODE (arg1) == INTEGER_CST
12601 && TREE_CODE (arg0) != INTEGER_CST
12602 && tree_int_cst_sgn (arg1) > 0)
12604 if (code == GE_EXPR)
12606 arg1 = const_binop (MINUS_EXPR, arg1,
12607 build_int_cst (TREE_TYPE (arg1), 1), 0);
12608 return fold_build2 (GT_EXPR, type, arg0,
12609 fold_convert (TREE_TYPE (arg0), arg1));
12611 if (code == LT_EXPR)
12613 arg1 = const_binop (MINUS_EXPR, arg1,
12614 build_int_cst (TREE_TYPE (arg1), 1), 0);
12615 return fold_build2 (LE_EXPR, type, arg0,
12616 fold_convert (TREE_TYPE (arg0), arg1));
12620 /* Comparisons with the highest or lowest possible integer of
12621 the specified precision will have known values. */
12623 tree arg1_type = TREE_TYPE (arg1);
12624 unsigned int width = TYPE_PRECISION (arg1_type);
12626 if (TREE_CODE (arg1) == INTEGER_CST
12627 && !TREE_OVERFLOW (arg1)
12628 && width <= 2 * HOST_BITS_PER_WIDE_INT
12629 && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
12631 HOST_WIDE_INT signed_max_hi;
12632 unsigned HOST_WIDE_INT signed_max_lo;
12633 unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;
12635 if (width <= HOST_BITS_PER_WIDE_INT)
12637 signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
12642 if (TYPE_UNSIGNED (arg1_type))
12644 max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
12650 max_lo = signed_max_lo;
12651 min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
12657 width -= HOST_BITS_PER_WIDE_INT;
12658 signed_max_lo = -1;
12659 signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
12664 if (TYPE_UNSIGNED (arg1_type))
12666 max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
12671 max_hi = signed_max_hi;
12672 min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
12676 if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
12677 && TREE_INT_CST_LOW (arg1) == max_lo)
12681 return omit_one_operand (type, integer_zero_node, arg0);
12684 return fold_build2 (EQ_EXPR, type, op0, op1);
12687 return omit_one_operand (type, integer_one_node, arg0);
12690 return fold_build2 (NE_EXPR, type, op0, op1);
12692 /* The GE_EXPR and LT_EXPR cases above are not normally
12693 reached because of previous transformations. */
12698 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
12700 && TREE_INT_CST_LOW (arg1) == max_lo - 1)
12704 arg1 = const_binop (PLUS_EXPR, arg1,
12705 build_int_cst (TREE_TYPE (arg1), 1), 0);
12706 return fold_build2 (EQ_EXPR, type,
12707 fold_convert (TREE_TYPE (arg1), arg0),
12710 arg1 = const_binop (PLUS_EXPR, arg1,
12711 build_int_cst (TREE_TYPE (arg1), 1), 0);
12712 return fold_build2 (NE_EXPR, type,
12713 fold_convert (TREE_TYPE (arg1), arg0),
12718 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
12720 && TREE_INT_CST_LOW (arg1) == min_lo)
12724 return omit_one_operand (type, integer_zero_node, arg0);
12727 return fold_build2 (EQ_EXPR, type, op0, op1);
12730 return omit_one_operand (type, integer_one_node, arg0);
12733 return fold_build2 (NE_EXPR, type, op0, op1);
12738 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
12740 && TREE_INT_CST_LOW (arg1) == min_lo + 1)
12744 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
12745 return fold_build2 (NE_EXPR, type,
12746 fold_convert (TREE_TYPE (arg1), arg0),
12749 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
12750 return fold_build2 (EQ_EXPR, type,
12751 fold_convert (TREE_TYPE (arg1), arg0),
12757 else if (TREE_INT_CST_HIGH (arg1) == signed_max_hi
12758 && TREE_INT_CST_LOW (arg1) == signed_max_lo
12759 && TYPE_UNSIGNED (arg1_type)
12760 /* We will flip the signedness of the comparison operator
12761 associated with the mode of arg1, so the sign bit is
12762 specified by this mode. Check that arg1 is the signed
12763 max associated with this sign bit. */
12764 && width == GET_MODE_BITSIZE (TYPE_MODE (arg1_type))
12765 /* signed_type does not work on pointer types. */
12766 && INTEGRAL_TYPE_P (arg1_type))
12768 /* The following case also applies to X < signed_max+1
12769 and X >= signed_max+1 because previous transformations. */
12770 if (code == LE_EXPR || code == GT_EXPR)
12773 st = signed_type_for (TREE_TYPE (arg1));
12774 return fold_build2 (code == LE_EXPR ? GE_EXPR : LT_EXPR,
12775 type, fold_convert (st, arg0),
12776 build_int_cst (st, 0));
12782 /* If we are comparing an ABS_EXPR with a constant, we can
12783 convert all the cases into explicit comparisons, but they may
12784 well not be faster than doing the ABS and one comparison.
12785 But ABS (X) <= C is a range comparison, which becomes a subtraction
12786 and a comparison, and is probably faster. */
12787 if (code == LE_EXPR
12788 && TREE_CODE (arg1) == INTEGER_CST
12789 && TREE_CODE (arg0) == ABS_EXPR
12790 && ! TREE_SIDE_EFFECTS (arg0)
12791 && (0 != (tem = negate_expr (arg1)))
12792 && TREE_CODE (tem) == INTEGER_CST
12793 && !TREE_OVERFLOW (tem))
12794 return fold_build2 (TRUTH_ANDIF_EXPR, type,
12795 build2 (GE_EXPR, type,
12796 TREE_OPERAND (arg0, 0), tem),
12797 build2 (LE_EXPR, type,
12798 TREE_OPERAND (arg0, 0), arg1));
12800 /* Convert ABS_EXPR<x> >= 0 to true. */
12801 strict_overflow_p = false;
12802 if (code == GE_EXPR
12803 && (integer_zerop (arg1)
12804 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
12805 && real_zerop (arg1)))
12806 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
12808 if (strict_overflow_p)
12809 fold_overflow_warning (("assuming signed overflow does not occur "
12810 "when simplifying comparison of "
12811 "absolute value and zero"),
12812 WARN_STRICT_OVERFLOW_CONDITIONAL);
12813 return omit_one_operand (type, integer_one_node, arg0);
12816 /* Convert ABS_EXPR<x> < 0 to false. */
12817 strict_overflow_p = false;
12818 if (code == LT_EXPR
12819 && (integer_zerop (arg1) || real_zerop (arg1))
12820 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
12822 if (strict_overflow_p)
12823 fold_overflow_warning (("assuming signed overflow does not occur "
12824 "when simplifying comparison of "
12825 "absolute value and zero"),
12826 WARN_STRICT_OVERFLOW_CONDITIONAL);
12827 return omit_one_operand (type, integer_zero_node, arg0);
12830 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
12831 and similarly for >= into !=. */
12832 if ((code == LT_EXPR || code == GE_EXPR)
12833 && TYPE_UNSIGNED (TREE_TYPE (arg0))
12834 && TREE_CODE (arg1) == LSHIFT_EXPR
12835 && integer_onep (TREE_OPERAND (arg1, 0)))
12836 return build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
12837 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
12838 TREE_OPERAND (arg1, 1)),
12839 build_int_cst (TREE_TYPE (arg0), 0));
12841 if ((code == LT_EXPR || code == GE_EXPR)
12842 && TYPE_UNSIGNED (TREE_TYPE (arg0))
12843 && (TREE_CODE (arg1) == NOP_EXPR
12844 || TREE_CODE (arg1) == CONVERT_EXPR)
12845 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
12846 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
12848 build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
12849 fold_convert (TREE_TYPE (arg0),
12850 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
12851 TREE_OPERAND (TREE_OPERAND (arg1, 0),
12853 build_int_cst (TREE_TYPE (arg0), 0));
12857 case UNORDERED_EXPR:
12865 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
12867 t1 = fold_relational_const (code, type, arg0, arg1);
12868 if (t1 != NULL_TREE)
12872 /* If the first operand is NaN, the result is constant. */
12873 if (TREE_CODE (arg0) == REAL_CST
12874 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
12875 && (code != LTGT_EXPR || ! flag_trapping_math))
12877 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
12878 ? integer_zero_node
12879 : integer_one_node;
12880 return omit_one_operand (type, t1, arg1);
12883 /* If the second operand is NaN, the result is constant. */
12884 if (TREE_CODE (arg1) == REAL_CST
12885 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
12886 && (code != LTGT_EXPR || ! flag_trapping_math))
12888 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
12889 ? integer_zero_node
12890 : integer_one_node;
12891 return omit_one_operand (type, t1, arg0);
12894 /* Simplify unordered comparison of something with itself. */
12895 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
12896 && operand_equal_p (arg0, arg1, 0))
12897 return constant_boolean_node (1, type);
12899 if (code == LTGT_EXPR
12900 && !flag_trapping_math
12901 && operand_equal_p (arg0, arg1, 0))
12902 return constant_boolean_node (0, type);
12904 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
12906 tree targ0 = strip_float_extensions (arg0);
12907 tree targ1 = strip_float_extensions (arg1);
12908 tree newtype = TREE_TYPE (targ0);
12910 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
12911 newtype = TREE_TYPE (targ1);
12913 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
12914 return fold_build2 (code, type, fold_convert (newtype, targ0),
12915 fold_convert (newtype, targ1));
12920 case COMPOUND_EXPR:
12921 /* When pedantic, a compound expression can be neither an lvalue
12922 nor an integer constant expression. */
12923 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
12925 /* Don't let (0, 0) be null pointer constant. */
12926 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
12927 : fold_convert (type, arg1);
12928 return pedantic_non_lvalue (tem);
12931 if ((TREE_CODE (arg0) == REAL_CST
12932 && TREE_CODE (arg1) == REAL_CST)
12933 || (TREE_CODE (arg0) == INTEGER_CST
12934 && TREE_CODE (arg1) == INTEGER_CST))
12935 return build_complex (type, arg0, arg1);
12939 /* An ASSERT_EXPR should never be passed to fold_binary. */
12940 gcc_unreachable ();
12944 } /* switch (code) */
12947 /* Callback for walk_tree, looking for LABEL_EXPR.
12948 Returns tree TP if it is LABEL_EXPR. Otherwise it returns NULL_TREE.
12949 Do not check the sub-tree of GOTO_EXPR. */
12952 contains_label_1 (tree *tp,
12953 int *walk_subtrees,
12954 void *data ATTRIBUTE_UNUSED)
12956 switch (TREE_CODE (*tp))
12961 *walk_subtrees = 0;
12968 /* Checks whether the sub-tree ST contains a label LABEL_EXPR which is
12969 accessible from outside the sub-tree. Returns NULL_TREE if no
12970 addressable label is found. */
12973 contains_label_p (tree st)
12975 return (walk_tree (&st, contains_label_1 , NULL, NULL) != NULL_TREE);
12978 /* Fold a ternary expression of code CODE and type TYPE with operands
12979 OP0, OP1, and OP2. Return the folded expression if folding is
12980 successful. Otherwise, return NULL_TREE. */
12983 fold_ternary (enum tree_code code, tree type, tree op0, tree op1, tree op2)
12986 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
12987 enum tree_code_class kind = TREE_CODE_CLASS (code);
12989 gcc_assert (IS_EXPR_CODE_CLASS (kind)
12990 && TREE_CODE_LENGTH (code) == 3);
12992 /* Strip any conversions that don't change the mode. This is safe
12993 for every expression, except for a comparison expression because
12994 its signedness is derived from its operands. So, in the latter
12995 case, only strip conversions that don't change the signedness.
12997 Note that this is done as an internal manipulation within the
12998 constant folder, in order to find the simplest representation of
12999 the arguments so that their form can be studied. In any cases,
13000 the appropriate type conversions should be put back in the tree
13001 that will get out of the constant folder. */
13016 case COMPONENT_REF:
13017 if (TREE_CODE (arg0) == CONSTRUCTOR
13018 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
13020 unsigned HOST_WIDE_INT idx;
13022 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
13029 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
13030 so all simple results must be passed through pedantic_non_lvalue. */
13031 if (TREE_CODE (arg0) == INTEGER_CST)
13033 tree unused_op = integer_zerop (arg0) ? op1 : op2;
13034 tem = integer_zerop (arg0) ? op2 : op1;
13035 /* Only optimize constant conditions when the selected branch
13036 has the same type as the COND_EXPR. This avoids optimizing
13037 away "c ? x : throw", where the throw has a void type.
13038 Avoid throwing away that operand which contains label. */
13039 if ((!TREE_SIDE_EFFECTS (unused_op)
13040 || !contains_label_p (unused_op))
13041 && (! VOID_TYPE_P (TREE_TYPE (tem))
13042 || VOID_TYPE_P (type)))
13043 return pedantic_non_lvalue (tem);
13046 if (operand_equal_p (arg1, op2, 0))
13047 return pedantic_omit_one_operand (type, arg1, arg0);
13049 /* If we have A op B ? A : C, we may be able to convert this to a
13050 simpler expression, depending on the operation and the values
13051 of B and C. Signed zeros prevent all of these transformations,
13052 for reasons given above each one.
13054 Also try swapping the arguments and inverting the conditional. */
13055 if (COMPARISON_CLASS_P (arg0)
13056 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13057 arg1, TREE_OPERAND (arg0, 1))
13058 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
13060 tem = fold_cond_expr_with_comparison (type, arg0, op1, op2);
13065 if (COMPARISON_CLASS_P (arg0)
13066 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13068 TREE_OPERAND (arg0, 1))
13069 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
13071 tem = fold_truth_not_expr (arg0);
13072 if (tem && COMPARISON_CLASS_P (tem))
13074 tem = fold_cond_expr_with_comparison (type, tem, op2, op1);
13080 /* If the second operand is simpler than the third, swap them
13081 since that produces better jump optimization results. */
13082 if (truth_value_p (TREE_CODE (arg0))
13083 && tree_swap_operands_p (op1, op2, false))
13085 /* See if this can be inverted. If it can't, possibly because
13086 it was a floating-point inequality comparison, don't do
13088 tem = fold_truth_not_expr (arg0);
13090 return fold_build3 (code, type, tem, op2, op1);
13093 /* Convert A ? 1 : 0 to simply A. */
13094 if (integer_onep (op1)
13095 && integer_zerop (op2)
13096 /* If we try to convert OP0 to our type, the
13097 call to fold will try to move the conversion inside
13098 a COND, which will recurse. In that case, the COND_EXPR
13099 is probably the best choice, so leave it alone. */
13100 && type == TREE_TYPE (arg0))
13101 return pedantic_non_lvalue (arg0);
13103 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
13104 over COND_EXPR in cases such as floating point comparisons. */
13105 if (integer_zerop (op1)
13106 && integer_onep (op2)
13107 && truth_value_p (TREE_CODE (arg0)))
13108 return pedantic_non_lvalue (fold_convert (type,
13109 invert_truthvalue (arg0)));
13111 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
13112 if (TREE_CODE (arg0) == LT_EXPR
13113 && integer_zerop (TREE_OPERAND (arg0, 1))
13114 && integer_zerop (op2)
13115 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
13117 /* sign_bit_p only checks ARG1 bits within A's precision.
13118 If <sign bit of A> has wider type than A, bits outside
13119 of A's precision in <sign bit of A> need to be checked.
13120 If they are all 0, this optimization needs to be done
13121 in unsigned A's type, if they are all 1 in signed A's type,
13122 otherwise this can't be done. */
13123 if (TYPE_PRECISION (TREE_TYPE (tem))
13124 < TYPE_PRECISION (TREE_TYPE (arg1))
13125 && TYPE_PRECISION (TREE_TYPE (tem))
13126 < TYPE_PRECISION (type))
13128 unsigned HOST_WIDE_INT mask_lo;
13129 HOST_WIDE_INT mask_hi;
13130 int inner_width, outer_width;
13133 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
13134 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
13135 if (outer_width > TYPE_PRECISION (type))
13136 outer_width = TYPE_PRECISION (type);
13138 if (outer_width > HOST_BITS_PER_WIDE_INT)
13140 mask_hi = ((unsigned HOST_WIDE_INT) -1
13141 >> (2 * HOST_BITS_PER_WIDE_INT - outer_width));
13147 mask_lo = ((unsigned HOST_WIDE_INT) -1
13148 >> (HOST_BITS_PER_WIDE_INT - outer_width));
13150 if (inner_width > HOST_BITS_PER_WIDE_INT)
13152 mask_hi &= ~((unsigned HOST_WIDE_INT) -1
13153 >> (HOST_BITS_PER_WIDE_INT - inner_width));
13157 mask_lo &= ~((unsigned HOST_WIDE_INT) -1
13158 >> (HOST_BITS_PER_WIDE_INT - inner_width));
13160 if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == mask_hi
13161 && (TREE_INT_CST_LOW (arg1) & mask_lo) == mask_lo)
13163 tem_type = signed_type_for (TREE_TYPE (tem));
13164 tem = fold_convert (tem_type, tem);
13166 else if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == 0
13167 && (TREE_INT_CST_LOW (arg1) & mask_lo) == 0)
13169 tem_type = unsigned_type_for (TREE_TYPE (tem));
13170 tem = fold_convert (tem_type, tem);
13177 return fold_convert (type,
13178 fold_build2 (BIT_AND_EXPR,
13179 TREE_TYPE (tem), tem,
13180 fold_convert (TREE_TYPE (tem),
13184 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
13185 already handled above. */
13186 if (TREE_CODE (arg0) == BIT_AND_EXPR
13187 && integer_onep (TREE_OPERAND (arg0, 1))
13188 && integer_zerop (op2)
13189 && integer_pow2p (arg1))
13191 tree tem = TREE_OPERAND (arg0, 0);
13193 if (TREE_CODE (tem) == RSHIFT_EXPR
13194 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
13195 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
13196 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
13197 return fold_build2 (BIT_AND_EXPR, type,
13198 TREE_OPERAND (tem, 0), arg1);
13201 /* A & N ? N : 0 is simply A & N if N is a power of two. This
13202 is probably obsolete because the first operand should be a
13203 truth value (that's why we have the two cases above), but let's
13204 leave it in until we can confirm this for all front-ends. */
13205 if (integer_zerop (op2)
13206 && TREE_CODE (arg0) == NE_EXPR
13207 && integer_zerop (TREE_OPERAND (arg0, 1))
13208 && integer_pow2p (arg1)
13209 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
13210 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13211 arg1, OEP_ONLY_CONST))
13212 return pedantic_non_lvalue (fold_convert (type,
13213 TREE_OPERAND (arg0, 0)));
13215 /* Convert A ? B : 0 into A && B if A and B are truth values. */
13216 if (integer_zerop (op2)
13217 && truth_value_p (TREE_CODE (arg0))
13218 && truth_value_p (TREE_CODE (arg1)))
13219 return fold_build2 (TRUTH_ANDIF_EXPR, type,
13220 fold_convert (type, arg0),
13223 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
13224 if (integer_onep (op2)
13225 && truth_value_p (TREE_CODE (arg0))
13226 && truth_value_p (TREE_CODE (arg1)))
13228 /* Only perform transformation if ARG0 is easily inverted. */
13229 tem = fold_truth_not_expr (arg0);
13231 return fold_build2 (TRUTH_ORIF_EXPR, type,
13232 fold_convert (type, tem),
13236 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
13237 if (integer_zerop (arg1)
13238 && truth_value_p (TREE_CODE (arg0))
13239 && truth_value_p (TREE_CODE (op2)))
13241 /* Only perform transformation if ARG0 is easily inverted. */
13242 tem = fold_truth_not_expr (arg0);
13244 return fold_build2 (TRUTH_ANDIF_EXPR, type,
13245 fold_convert (type, tem),
13249 /* Convert A ? 1 : B into A || B if A and B are truth values. */
13250 if (integer_onep (arg1)
13251 && truth_value_p (TREE_CODE (arg0))
13252 && truth_value_p (TREE_CODE (op2)))
13253 return fold_build2 (TRUTH_ORIF_EXPR, type,
13254 fold_convert (type, arg0),
13260 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
13261 of fold_ternary on them. */
13262 gcc_unreachable ();
13264 case BIT_FIELD_REF:
13265 if ((TREE_CODE (arg0) == VECTOR_CST
13266 || (TREE_CODE (arg0) == CONSTRUCTOR && TREE_CONSTANT (arg0)))
13267 && type == TREE_TYPE (TREE_TYPE (arg0))
13268 && host_integerp (arg1, 1)
13269 && host_integerp (op2, 1))
13271 unsigned HOST_WIDE_INT width = tree_low_cst (arg1, 1);
13272 unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);
13275 && simple_cst_equal (arg1, TYPE_SIZE (type)) == 1
13276 && (idx % width) == 0
13277 && (idx = idx / width)
13278 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
13280 tree elements = NULL_TREE;
13282 if (TREE_CODE (arg0) == VECTOR_CST)
13283 elements = TREE_VECTOR_CST_ELTS (arg0);
13286 unsigned HOST_WIDE_INT idx;
13289 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (arg0), idx, value)
13290 elements = tree_cons (NULL_TREE, value, elements);
13292 while (idx-- > 0 && elements)
13293 elements = TREE_CHAIN (elements);
13295 return TREE_VALUE (elements);
13297 return fold_convert (type, integer_zero_node);
13304 } /* switch (code) */
13307 /* Perform constant folding and related simplification of EXPR.
13308 The related simplifications include x*1 => x, x*0 => 0, etc.,
13309 and application of the associative law.
13310 NOP_EXPR conversions may be removed freely (as long as we
13311 are careful not to change the type of the overall expression).
13312 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
13313 but we can constant-fold them if they have constant operands. */
13315 #ifdef ENABLE_FOLD_CHECKING
13316 # define fold(x) fold_1 (x)
13317 static tree fold_1 (tree);
13323 const tree t = expr;
13324 enum tree_code code = TREE_CODE (t);
13325 enum tree_code_class kind = TREE_CODE_CLASS (code);
13328 /* Return right away if a constant. */
13329 if (kind == tcc_constant)
13332 /* CALL_EXPR-like objects with variable numbers of operands are
13333 treated specially. */
13334 if (kind == tcc_vl_exp)
13336 if (code == CALL_EXPR)
13338 tem = fold_call_expr (expr, false);
13339 return tem ? tem : expr;
13344 if (IS_EXPR_CODE_CLASS (kind)
13345 || IS_GIMPLE_STMT_CODE_CLASS (kind))
13347 tree type = TREE_TYPE (t);
13348 tree op0, op1, op2;
13350 switch (TREE_CODE_LENGTH (code))
13353 op0 = TREE_OPERAND (t, 0);
13354 tem = fold_unary (code, type, op0);
13355 return tem ? tem : expr;
13357 op0 = TREE_OPERAND (t, 0);
13358 op1 = TREE_OPERAND (t, 1);
13359 tem = fold_binary (code, type, op0, op1);
13360 return tem ? tem : expr;
13362 op0 = TREE_OPERAND (t, 0);
13363 op1 = TREE_OPERAND (t, 1);
13364 op2 = TREE_OPERAND (t, 2);
13365 tem = fold_ternary (code, type, op0, op1, op2);
13366 return tem ? tem : expr;
13375 return fold (DECL_INITIAL (t));
13379 } /* switch (code) */
13382 #ifdef ENABLE_FOLD_CHECKING
13385 static void fold_checksum_tree (const_tree, struct md5_ctx *, htab_t);
13386 static void fold_check_failed (const_tree, const_tree);
13387 void print_fold_checksum (const_tree);
13389 /* When --enable-checking=fold, compute a digest of expr before
13390 and after actual fold call to see if fold did not accidentally
13391 change original expr. */
13397 struct md5_ctx ctx;
13398 unsigned char checksum_before[16], checksum_after[16];
13401 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13402 md5_init_ctx (&ctx);
13403 fold_checksum_tree (expr, &ctx, ht);
13404 md5_finish_ctx (&ctx, checksum_before);
13407 ret = fold_1 (expr);
13409 md5_init_ctx (&ctx);
13410 fold_checksum_tree (expr, &ctx, ht);
13411 md5_finish_ctx (&ctx, checksum_after);
13414 if (memcmp (checksum_before, checksum_after, 16))
13415 fold_check_failed (expr, ret);
13421 print_fold_checksum (const_tree expr)
13423 struct md5_ctx ctx;
13424 unsigned char checksum[16], cnt;
13427 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13428 md5_init_ctx (&ctx);
13429 fold_checksum_tree (expr, &ctx, ht);
13430 md5_finish_ctx (&ctx, checksum);
13432 for (cnt = 0; cnt < 16; ++cnt)
13433 fprintf (stderr, "%02x", checksum[cnt]);
13434 putc ('\n', stderr);
13438 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
13440 internal_error ("fold check: original tree changed by fold");
13444 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx, htab_t ht)
13447 enum tree_code code;
13448 struct tree_function_decl buf;
13453 gcc_assert ((sizeof (struct tree_exp) + 5 * sizeof (tree)
13454 <= sizeof (struct tree_function_decl))
13455 && sizeof (struct tree_type) <= sizeof (struct tree_function_decl));
13458 slot = (const void **) htab_find_slot (ht, expr, INSERT);
13462 code = TREE_CODE (expr);
13463 if (TREE_CODE_CLASS (code) == tcc_declaration
13464 && DECL_ASSEMBLER_NAME_SET_P (expr))
13466 /* Allow DECL_ASSEMBLER_NAME to be modified. */
13467 memcpy ((char *) &buf, expr, tree_size (expr));
13468 SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
13469 expr = (tree) &buf;
13471 else if (TREE_CODE_CLASS (code) == tcc_type
13472 && (TYPE_POINTER_TO (expr) || TYPE_REFERENCE_TO (expr)
13473 || TYPE_CACHED_VALUES_P (expr)
13474 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)))
13476 /* Allow these fields to be modified. */
13478 memcpy ((char *) &buf, expr, tree_size (expr));
13479 expr = tmp = (tree) &buf;
13480 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
13481 TYPE_POINTER_TO (tmp) = NULL;
13482 TYPE_REFERENCE_TO (tmp) = NULL;
13483 if (TYPE_CACHED_VALUES_P (tmp))
13485 TYPE_CACHED_VALUES_P (tmp) = 0;
13486 TYPE_CACHED_VALUES (tmp) = NULL;
13489 md5_process_bytes (expr, tree_size (expr), ctx);
13490 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
13491 if (TREE_CODE_CLASS (code) != tcc_type
13492 && TREE_CODE_CLASS (code) != tcc_declaration
13493 && code != TREE_LIST
13494 && code != SSA_NAME)
13495 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
13496 switch (TREE_CODE_CLASS (code))
13502 md5_process_bytes (TREE_STRING_POINTER (expr),
13503 TREE_STRING_LENGTH (expr), ctx);
13506 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
13507 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
13510 fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
13516 case tcc_exceptional:
13520 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
13521 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
13522 expr = TREE_CHAIN (expr);
13523 goto recursive_label;
13526 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
13527 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
13533 case tcc_expression:
13534 case tcc_reference:
13535 case tcc_comparison:
13538 case tcc_statement:
13540 len = TREE_OPERAND_LENGTH (expr);
13541 for (i = 0; i < len; ++i)
13542 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
13544 case tcc_declaration:
13545 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
13546 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
13547 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
13549 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
13550 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
13551 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
13552 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
13553 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
13555 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
13556 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
13558 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
13560 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
13561 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
13562 fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
13566 if (TREE_CODE (expr) == ENUMERAL_TYPE)
13567 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
13568 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
13569 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
13570 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
13571 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
13572 if (INTEGRAL_TYPE_P (expr)
13573 || SCALAR_FLOAT_TYPE_P (expr))
13575 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
13576 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
13578 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
13579 if (TREE_CODE (expr) == RECORD_TYPE
13580 || TREE_CODE (expr) == UNION_TYPE
13581 || TREE_CODE (expr) == QUAL_UNION_TYPE)
13582 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
13583 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
13590 /* Helper function for outputting the checksum of a tree T. When
13591 debugging with gdb, you can "define mynext" to be "next" followed
13592 by "call debug_fold_checksum (op0)", then just trace down till the
13596 debug_fold_checksum (const_tree t)
13599 unsigned char checksum[16];
13600 struct md5_ctx ctx;
13601 htab_t ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13603 md5_init_ctx (&ctx);
13604 fold_checksum_tree (t, &ctx, ht);
13605 md5_finish_ctx (&ctx, checksum);
13608 for (i = 0; i < 16; i++)
13609 fprintf (stderr, "%d ", checksum[i]);
13611 fprintf (stderr, "\n");
13616 /* Fold a unary tree expression with code CODE of type TYPE with an
13617 operand OP0. Return a folded expression if successful. Otherwise,
13618 return a tree expression with code CODE of type TYPE with an
13622 fold_build1_stat (enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
13625 #ifdef ENABLE_FOLD_CHECKING
13626 unsigned char checksum_before[16], checksum_after[16];
13627 struct md5_ctx ctx;
13630 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13631 md5_init_ctx (&ctx);
13632 fold_checksum_tree (op0, &ctx, ht);
13633 md5_finish_ctx (&ctx, checksum_before);
13637 tem = fold_unary (code, type, op0);
13639 tem = build1_stat (code, type, op0 PASS_MEM_STAT);
13641 #ifdef ENABLE_FOLD_CHECKING
13642 md5_init_ctx (&ctx);
13643 fold_checksum_tree (op0, &ctx, ht);
13644 md5_finish_ctx (&ctx, checksum_after);
13647 if (memcmp (checksum_before, checksum_after, 16))
13648 fold_check_failed (op0, tem);
13653 /* Fold a binary tree expression with code CODE of type TYPE with
13654 operands OP0 and OP1. Return a folded expression if successful.
13655 Otherwise, return a tree expression with code CODE of type TYPE
13656 with operands OP0 and OP1. */
13659 fold_build2_stat (enum tree_code code, tree type, tree op0, tree op1
13663 #ifdef ENABLE_FOLD_CHECKING
13664 unsigned char checksum_before_op0[16],
13665 checksum_before_op1[16],
13666 checksum_after_op0[16],
13667 checksum_after_op1[16];
13668 struct md5_ctx ctx;
13671 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13672 md5_init_ctx (&ctx);
13673 fold_checksum_tree (op0, &ctx, ht);
13674 md5_finish_ctx (&ctx, checksum_before_op0);
13677 md5_init_ctx (&ctx);
13678 fold_checksum_tree (op1, &ctx, ht);
13679 md5_finish_ctx (&ctx, checksum_before_op1);
13683 tem = fold_binary (code, type, op0, op1);
13685 tem = build2_stat (code, type, op0, op1 PASS_MEM_STAT);
13687 #ifdef ENABLE_FOLD_CHECKING
13688 md5_init_ctx (&ctx);
13689 fold_checksum_tree (op0, &ctx, ht);
13690 md5_finish_ctx (&ctx, checksum_after_op0);
13693 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
13694 fold_check_failed (op0, tem);
13696 md5_init_ctx (&ctx);
13697 fold_checksum_tree (op1, &ctx, ht);
13698 md5_finish_ctx (&ctx, checksum_after_op1);
13701 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
13702 fold_check_failed (op1, tem);
13707 /* Fold a ternary tree expression with code CODE of type TYPE with
13708 operands OP0, OP1, and OP2. Return a folded expression if
13709 successful. Otherwise, return a tree expression with code CODE of
13710 type TYPE with operands OP0, OP1, and OP2. */
13713 fold_build3_stat (enum tree_code code, tree type, tree op0, tree op1, tree op2
13717 #ifdef ENABLE_FOLD_CHECKING
13718 unsigned char checksum_before_op0[16],
13719 checksum_before_op1[16],
13720 checksum_before_op2[16],
13721 checksum_after_op0[16],
13722 checksum_after_op1[16],
13723 checksum_after_op2[16];
13724 struct md5_ctx ctx;
13727 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13728 md5_init_ctx (&ctx);
13729 fold_checksum_tree (op0, &ctx, ht);
13730 md5_finish_ctx (&ctx, checksum_before_op0);
13733 md5_init_ctx (&ctx);
13734 fold_checksum_tree (op1, &ctx, ht);
13735 md5_finish_ctx (&ctx, checksum_before_op1);
13738 md5_init_ctx (&ctx);
13739 fold_checksum_tree (op2, &ctx, ht);
13740 md5_finish_ctx (&ctx, checksum_before_op2);
13744 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
13745 tem = fold_ternary (code, type, op0, op1, op2);
13747 tem = build3_stat (code, type, op0, op1, op2 PASS_MEM_STAT);
13749 #ifdef ENABLE_FOLD_CHECKING
13750 md5_init_ctx (&ctx);
13751 fold_checksum_tree (op0, &ctx, ht);
13752 md5_finish_ctx (&ctx, checksum_after_op0);
13755 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
13756 fold_check_failed (op0, tem);
13758 md5_init_ctx (&ctx);
13759 fold_checksum_tree (op1, &ctx, ht);
13760 md5_finish_ctx (&ctx, checksum_after_op1);
13763 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
13764 fold_check_failed (op1, tem);
13766 md5_init_ctx (&ctx);
13767 fold_checksum_tree (op2, &ctx, ht);
13768 md5_finish_ctx (&ctx, checksum_after_op2);
13771 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
13772 fold_check_failed (op2, tem);
13777 /* Fold a CALL_EXPR expression of type TYPE with operands FN and NARGS
13778 arguments in ARGARRAY, and a null static chain.
13779 Return a folded expression if successful. Otherwise, return a CALL_EXPR
13780 of type TYPE from the given operands as constructed by build_call_array. */
13783 fold_build_call_array (tree type, tree fn, int nargs, tree *argarray)
13786 #ifdef ENABLE_FOLD_CHECKING
13787 unsigned char checksum_before_fn[16],
13788 checksum_before_arglist[16],
13789 checksum_after_fn[16],
13790 checksum_after_arglist[16];
13791 struct md5_ctx ctx;
13795 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13796 md5_init_ctx (&ctx);
13797 fold_checksum_tree (fn, &ctx, ht);
13798 md5_finish_ctx (&ctx, checksum_before_fn);
13801 md5_init_ctx (&ctx);
13802 for (i = 0; i < nargs; i++)
13803 fold_checksum_tree (argarray[i], &ctx, ht);
13804 md5_finish_ctx (&ctx, checksum_before_arglist);
13808 tem = fold_builtin_call_array (type, fn, nargs, argarray);
13810 #ifdef ENABLE_FOLD_CHECKING
13811 md5_init_ctx (&ctx);
13812 fold_checksum_tree (fn, &ctx, ht);
13813 md5_finish_ctx (&ctx, checksum_after_fn);
13816 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
13817 fold_check_failed (fn, tem);
13819 md5_init_ctx (&ctx);
13820 for (i = 0; i < nargs; i++)
13821 fold_checksum_tree (argarray[i], &ctx, ht);
13822 md5_finish_ctx (&ctx, checksum_after_arglist);
13825 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
13826 fold_check_failed (NULL_TREE, tem);
13831 /* Perform constant folding and related simplification of initializer
13832 expression EXPR. These behave identically to "fold_buildN" but ignore
13833 potential run-time traps and exceptions that fold must preserve. */
13835 #define START_FOLD_INIT \
13836 int saved_signaling_nans = flag_signaling_nans;\
13837 int saved_trapping_math = flag_trapping_math;\
13838 int saved_rounding_math = flag_rounding_math;\
13839 int saved_trapv = flag_trapv;\
13840 int saved_folding_initializer = folding_initializer;\
13841 flag_signaling_nans = 0;\
13842 flag_trapping_math = 0;\
13843 flag_rounding_math = 0;\
13845 folding_initializer = 1;
13847 #define END_FOLD_INIT \
13848 flag_signaling_nans = saved_signaling_nans;\
13849 flag_trapping_math = saved_trapping_math;\
13850 flag_rounding_math = saved_rounding_math;\
13851 flag_trapv = saved_trapv;\
13852 folding_initializer = saved_folding_initializer;
13855 fold_build1_initializer (enum tree_code code, tree type, tree op)
13860 result = fold_build1 (code, type, op);
13867 fold_build2_initializer (enum tree_code code, tree type, tree op0, tree op1)
13872 result = fold_build2 (code, type, op0, op1);
13879 fold_build3_initializer (enum tree_code code, tree type, tree op0, tree op1,
13885 result = fold_build3 (code, type, op0, op1, op2);
13892 fold_build_call_array_initializer (tree type, tree fn,
13893 int nargs, tree *argarray)
13898 result = fold_build_call_array (type, fn, nargs, argarray);
13904 #undef START_FOLD_INIT
13905 #undef END_FOLD_INIT
13907 /* Determine if first argument is a multiple of second argument. Return 0 if
13908 it is not, or we cannot easily determined it to be.
13910 An example of the sort of thing we care about (at this point; this routine
13911 could surely be made more general, and expanded to do what the *_DIV_EXPR's
13912 fold cases do now) is discovering that
13914 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
13920 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
13922 This code also handles discovering that
13924 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
13926 is a multiple of 8 so we don't have to worry about dealing with a
13927 possible remainder.
13929 Note that we *look* inside a SAVE_EXPR only to determine how it was
13930 calculated; it is not safe for fold to do much of anything else with the
13931 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
13932 at run time. For example, the latter example above *cannot* be implemented
13933 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
13934 evaluation time of the original SAVE_EXPR is not necessarily the same at
13935 the time the new expression is evaluated. The only optimization of this
13936 sort that would be valid is changing
13938 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
13942 SAVE_EXPR (I) * SAVE_EXPR (J)
13944 (where the same SAVE_EXPR (J) is used in the original and the
13945 transformed version). */
13948 multiple_of_p (tree type, const_tree top, const_tree bottom)
13950 if (operand_equal_p (top, bottom, 0))
13953 if (TREE_CODE (type) != INTEGER_TYPE)
13956 switch (TREE_CODE (top))
13959 /* Bitwise and provides a power of two multiple. If the mask is
13960 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
13961 if (!integer_pow2p (bottom))
13966 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
13967 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
13971 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
13972 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
13975 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
13979 op1 = TREE_OPERAND (top, 1);
13980 /* const_binop may not detect overflow correctly,
13981 so check for it explicitly here. */
13982 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
13983 > TREE_INT_CST_LOW (op1)
13984 && TREE_INT_CST_HIGH (op1) == 0
13985 && 0 != (t1 = fold_convert (type,
13986 const_binop (LSHIFT_EXPR,
13989 && !TREE_OVERFLOW (t1))
13990 return multiple_of_p (type, t1, bottom);
13995 /* Can't handle conversions from non-integral or wider integral type. */
13996 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
13997 || (TYPE_PRECISION (type)
13998 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
14001 /* .. fall through ... */
14004 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
14007 if (TREE_CODE (bottom) != INTEGER_CST
14008 || integer_zerop (bottom)
14009 || (TYPE_UNSIGNED (type)
14010 && (tree_int_cst_sgn (top) < 0
14011 || tree_int_cst_sgn (bottom) < 0)))
14013 return integer_zerop (int_const_binop (TRUNC_MOD_EXPR,
14021 /* Return true if `t' is known to be non-negative. If the return
14022 value is based on the assumption that signed overflow is undefined,
14023 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14024 *STRICT_OVERFLOW_P. */
14027 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
14029 if (t == error_mark_node)
14032 if (TYPE_UNSIGNED (TREE_TYPE (t)))
14035 switch (TREE_CODE (t))
14038 /* Query VRP to see if it has recorded any information about
14039 the range of this object. */
14040 return ssa_name_nonnegative_p (t);
14043 /* We can't return 1 if flag_wrapv is set because
14044 ABS_EXPR<INT_MIN> = INT_MIN. */
14045 if (!INTEGRAL_TYPE_P (TREE_TYPE (t)))
14047 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
14049 *strict_overflow_p = true;
14055 return tree_int_cst_sgn (t) >= 0;
14058 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
14061 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
14063 case POINTER_PLUS_EXPR:
14065 if (FLOAT_TYPE_P (TREE_TYPE (t)))
14066 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
14068 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
14069 strict_overflow_p));
14071 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
14072 both unsigned and at least 2 bits shorter than the result. */
14073 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
14074 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
14075 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
14077 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
14078 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
14079 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
14080 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
14082 unsigned int prec = MAX (TYPE_PRECISION (inner1),
14083 TYPE_PRECISION (inner2)) + 1;
14084 return prec < TYPE_PRECISION (TREE_TYPE (t));
14090 if (FLOAT_TYPE_P (TREE_TYPE (t)))
14092 /* x * x for floating point x is always non-negative. */
14093 if (operand_equal_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1), 0))
14095 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
14097 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
14098 strict_overflow_p));
14101 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
14102 both unsigned and their total bits is shorter than the result. */
14103 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
14104 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
14105 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
14107 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
14108 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
14109 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
14110 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
14111 return TYPE_PRECISION (inner1) + TYPE_PRECISION (inner2)
14112 < TYPE_PRECISION (TREE_TYPE (t));
14118 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
14120 || tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
14121 strict_overflow_p));
14127 case TRUNC_DIV_EXPR:
14128 case CEIL_DIV_EXPR:
14129 case FLOOR_DIV_EXPR:
14130 case ROUND_DIV_EXPR:
14131 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
14133 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
14134 strict_overflow_p));
14136 case TRUNC_MOD_EXPR:
14137 case CEIL_MOD_EXPR:
14138 case FLOOR_MOD_EXPR:
14139 case ROUND_MOD_EXPR:
14141 case NON_LVALUE_EXPR:
14143 case FIX_TRUNC_EXPR:
14144 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
14145 strict_overflow_p);
14147 case COMPOUND_EXPR:
14149 case GIMPLE_MODIFY_STMT:
14150 return tree_expr_nonnegative_warnv_p (GENERIC_TREE_OPERAND (t, 1),
14151 strict_overflow_p);
14154 return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
14155 strict_overflow_p);
14158 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
14160 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
14161 strict_overflow_p));
14165 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
14166 tree outer_type = TREE_TYPE (t);
14168 if (TREE_CODE (outer_type) == REAL_TYPE)
14170 if (TREE_CODE (inner_type) == REAL_TYPE)
14171 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
14172 strict_overflow_p);
14173 if (TREE_CODE (inner_type) == INTEGER_TYPE)
14175 if (TYPE_UNSIGNED (inner_type))
14177 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
14178 strict_overflow_p);
14181 else if (TREE_CODE (outer_type) == INTEGER_TYPE)
14183 if (TREE_CODE (inner_type) == REAL_TYPE)
14184 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t,0),
14185 strict_overflow_p);
14186 if (TREE_CODE (inner_type) == INTEGER_TYPE)
14187 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
14188 && TYPE_UNSIGNED (inner_type);
14195 tree temp = TARGET_EXPR_SLOT (t);
14196 t = TARGET_EXPR_INITIAL (t);
14198 /* If the initializer is non-void, then it's a normal expression
14199 that will be assigned to the slot. */
14200 if (!VOID_TYPE_P (t))
14201 return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);
14203 /* Otherwise, the initializer sets the slot in some way. One common
14204 way is an assignment statement at the end of the initializer. */
14207 if (TREE_CODE (t) == BIND_EXPR)
14208 t = expr_last (BIND_EXPR_BODY (t));
14209 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
14210 || TREE_CODE (t) == TRY_CATCH_EXPR)
14211 t = expr_last (TREE_OPERAND (t, 0));
14212 else if (TREE_CODE (t) == STATEMENT_LIST)
14217 if ((TREE_CODE (t) == MODIFY_EXPR
14218 || TREE_CODE (t) == GIMPLE_MODIFY_STMT)
14219 && GENERIC_TREE_OPERAND (t, 0) == temp)
14220 return tree_expr_nonnegative_warnv_p (GENERIC_TREE_OPERAND (t, 1),
14221 strict_overflow_p);
14228 tree fndecl = get_callee_fndecl (t);
14229 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
14230 switch (DECL_FUNCTION_CODE (fndecl))
14232 CASE_FLT_FN (BUILT_IN_ACOS):
14233 CASE_FLT_FN (BUILT_IN_ACOSH):
14234 CASE_FLT_FN (BUILT_IN_CABS):
14235 CASE_FLT_FN (BUILT_IN_COSH):
14236 CASE_FLT_FN (BUILT_IN_ERFC):
14237 CASE_FLT_FN (BUILT_IN_EXP):
14238 CASE_FLT_FN (BUILT_IN_EXP10):
14239 CASE_FLT_FN (BUILT_IN_EXP2):
14240 CASE_FLT_FN (BUILT_IN_FABS):
14241 CASE_FLT_FN (BUILT_IN_FDIM):
14242 CASE_FLT_FN (BUILT_IN_HYPOT):
14243 CASE_FLT_FN (BUILT_IN_POW10):
14244 CASE_INT_FN (BUILT_IN_FFS):
14245 CASE_INT_FN (BUILT_IN_PARITY):
14246 CASE_INT_FN (BUILT_IN_POPCOUNT):
14247 case BUILT_IN_BSWAP32:
14248 case BUILT_IN_BSWAP64:
14252 CASE_FLT_FN (BUILT_IN_SQRT):
14253 /* sqrt(-0.0) is -0.0. */
14254 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (t))))
14256 return tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 0),
14257 strict_overflow_p);
14259 CASE_FLT_FN (BUILT_IN_ASINH):
14260 CASE_FLT_FN (BUILT_IN_ATAN):
14261 CASE_FLT_FN (BUILT_IN_ATANH):
14262 CASE_FLT_FN (BUILT_IN_CBRT):
14263 CASE_FLT_FN (BUILT_IN_CEIL):
14264 CASE_FLT_FN (BUILT_IN_ERF):
14265 CASE_FLT_FN (BUILT_IN_EXPM1):
14266 CASE_FLT_FN (BUILT_IN_FLOOR):
14267 CASE_FLT_FN (BUILT_IN_FMOD):
14268 CASE_FLT_FN (BUILT_IN_FREXP):
14269 CASE_FLT_FN (BUILT_IN_LCEIL):
14270 CASE_FLT_FN (BUILT_IN_LDEXP):
14271 CASE_FLT_FN (BUILT_IN_LFLOOR):
14272 CASE_FLT_FN (BUILT_IN_LLCEIL):
14273 CASE_FLT_FN (BUILT_IN_LLFLOOR):
14274 CASE_FLT_FN (BUILT_IN_LLRINT):
14275 CASE_FLT_FN (BUILT_IN_LLROUND):
14276 CASE_FLT_FN (BUILT_IN_LRINT):
14277 CASE_FLT_FN (BUILT_IN_LROUND):
14278 CASE_FLT_FN (BUILT_IN_MODF):
14279 CASE_FLT_FN (BUILT_IN_NEARBYINT):
14280 CASE_FLT_FN (BUILT_IN_RINT):
14281 CASE_FLT_FN (BUILT_IN_ROUND):
14282 CASE_FLT_FN (BUILT_IN_SCALB):
14283 CASE_FLT_FN (BUILT_IN_SCALBLN):
14284 CASE_FLT_FN (BUILT_IN_SCALBN):
14285 CASE_FLT_FN (BUILT_IN_SIGNBIT):
14286 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
14287 CASE_FLT_FN (BUILT_IN_SINH):
14288 CASE_FLT_FN (BUILT_IN_TANH):
14289 CASE_FLT_FN (BUILT_IN_TRUNC):
14290 /* True if the 1st argument is nonnegative. */
14291 return tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 0),
14292 strict_overflow_p);
14294 CASE_FLT_FN (BUILT_IN_FMAX):
14295 /* True if the 1st OR 2nd arguments are nonnegative. */
14296 return (tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 0),
14298 || (tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 1),
14299 strict_overflow_p)));
14301 CASE_FLT_FN (BUILT_IN_FMIN):
14302 /* True if the 1st AND 2nd arguments are nonnegative. */
14303 return (tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 0),
14305 && (tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 1),
14306 strict_overflow_p)));
14308 CASE_FLT_FN (BUILT_IN_COPYSIGN):
14309 /* True if the 2nd argument is nonnegative. */
14310 return tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 1),
14311 strict_overflow_p);
14313 CASE_FLT_FN (BUILT_IN_POWI):
14314 /* True if the 1st argument is nonnegative or the second
14315 argument is an even integer. */
14316 if (TREE_CODE (CALL_EXPR_ARG (t, 1)) == INTEGER_CST)
14318 tree arg1 = CALL_EXPR_ARG (t, 1);
14319 if ((TREE_INT_CST_LOW (arg1) & 1) == 0)
14322 return tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 0),
14323 strict_overflow_p);
14325 CASE_FLT_FN (BUILT_IN_POW):
14326 /* True if the 1st argument is nonnegative or the second
14327 argument is an even integer valued real. */
14328 if (TREE_CODE (CALL_EXPR_ARG (t, 1)) == REAL_CST)
14333 c = TREE_REAL_CST (CALL_EXPR_ARG (t, 1));
14334 n = real_to_integer (&c);
14337 REAL_VALUE_TYPE cint;
14338 real_from_integer (&cint, VOIDmode, n,
14339 n < 0 ? -1 : 0, 0);
14340 if (real_identical (&c, &cint))
14344 return tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 0),
14345 strict_overflow_p);
14352 /* ... fall through ... */
14356 tree type = TREE_TYPE (t);
14357 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
14358 && truth_value_p (TREE_CODE (t)))
14359 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
14360 have a signed:1 type (where the value is -1 and 0). */
14365 /* We don't know sign of `t', so be conservative and return false. */
14369 /* Return true if `t' is known to be non-negative. Handle warnings
14370 about undefined signed overflow. */
14373 tree_expr_nonnegative_p (tree t)
14375 bool ret, strict_overflow_p;
14377 strict_overflow_p = false;
14378 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
14379 if (strict_overflow_p)
14380 fold_overflow_warning (("assuming signed overflow does not occur when "
14381 "determining that expression is always "
14383 WARN_STRICT_OVERFLOW_MISC);
14387 /* Return true when T is an address and is known to be nonzero.
14388 For floating point we further ensure that T is not denormal.
14389 Similar logic is present in nonzero_address in rtlanal.h.
14391 If the return value is based on the assumption that signed overflow
14392 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
14393 change *STRICT_OVERFLOW_P. */
14396 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
14398 tree type = TREE_TYPE (t);
14399 bool sub_strict_overflow_p;
14401 /* Doing something useful for floating point would need more work. */
14402 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
14405 switch (TREE_CODE (t))
14408 /* Query VRP to see if it has recorded any information about
14409 the range of this object. */
14410 return ssa_name_nonzero_p (t);
14413 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
14414 strict_overflow_p);
14417 return !integer_zerop (t);
14419 case POINTER_PLUS_EXPR:
14421 if (TYPE_OVERFLOW_UNDEFINED (type))
14423 /* With the presence of negative values it is hard
14424 to say something. */
14425 sub_strict_overflow_p = false;
14426 if (!tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
14427 &sub_strict_overflow_p)
14428 || !tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
14429 &sub_strict_overflow_p))
14431 /* One of operands must be positive and the other non-negative. */
14432 /* We don't set *STRICT_OVERFLOW_P here: even if this value
14433 overflows, on a twos-complement machine the sum of two
14434 nonnegative numbers can never be zero. */
14435 return (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
14437 || tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
14438 strict_overflow_p));
14443 if (TYPE_OVERFLOW_UNDEFINED (type))
14445 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
14447 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
14448 strict_overflow_p))
14450 *strict_overflow_p = true;
14458 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
14459 tree outer_type = TREE_TYPE (t);
14461 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
14462 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
14463 strict_overflow_p));
14469 tree base = get_base_address (TREE_OPERAND (t, 0));
14474 /* Weak declarations may link to NULL. */
14475 if (VAR_OR_FUNCTION_DECL_P (base))
14476 return !DECL_WEAK (base);
14478 /* Constants are never weak. */
14479 if (CONSTANT_CLASS_P (base))
14486 sub_strict_overflow_p = false;
14487 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
14488 &sub_strict_overflow_p)
14489 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
14490 &sub_strict_overflow_p))
14492 if (sub_strict_overflow_p)
14493 *strict_overflow_p = true;
14499 sub_strict_overflow_p = false;
14500 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
14501 &sub_strict_overflow_p)
14502 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
14503 &sub_strict_overflow_p))
14505 if (sub_strict_overflow_p)
14506 *strict_overflow_p = true;
14511 sub_strict_overflow_p = false;
14512 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
14513 &sub_strict_overflow_p))
14515 if (sub_strict_overflow_p)
14516 *strict_overflow_p = true;
14518 /* When both operands are nonzero, then MAX must be too. */
14519 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
14520 strict_overflow_p))
14523 /* MAX where operand 0 is positive is positive. */
14524 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
14525 strict_overflow_p);
14527 /* MAX where operand 1 is positive is positive. */
14528 else if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
14529 &sub_strict_overflow_p)
14530 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
14531 &sub_strict_overflow_p))
14533 if (sub_strict_overflow_p)
14534 *strict_overflow_p = true;
14539 case COMPOUND_EXPR:
14541 case GIMPLE_MODIFY_STMT:
14543 return tree_expr_nonzero_warnv_p (GENERIC_TREE_OPERAND (t, 1),
14544 strict_overflow_p);
14547 case NON_LVALUE_EXPR:
14548 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
14549 strict_overflow_p);
14552 return (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
14554 || tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
14555 strict_overflow_p));
14558 return alloca_call_p (t);
14566 /* Return true when T is an address and is known to be nonzero.
14567 Handle warnings about undefined signed overflow. */
14570 tree_expr_nonzero_p (tree t)
14572 bool ret, strict_overflow_p;
14574 strict_overflow_p = false;
14575 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
14576 if (strict_overflow_p)
14577 fold_overflow_warning (("assuming signed overflow does not occur when "
14578 "determining that expression is always "
14580 WARN_STRICT_OVERFLOW_MISC);
14584 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
14585 attempt to fold the expression to a constant without modifying TYPE,
14588 If the expression could be simplified to a constant, then return
14589 the constant. If the expression would not be simplified to a
14590 constant, then return NULL_TREE. */
14593 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
14595 tree tem = fold_binary (code, type, op0, op1);
14596 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
14599 /* Given the components of a unary expression CODE, TYPE and OP0,
14600 attempt to fold the expression to a constant without modifying
14603 If the expression could be simplified to a constant, then return
14604 the constant. If the expression would not be simplified to a
14605 constant, then return NULL_TREE. */
14608 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
14610 tree tem = fold_unary (code, type, op0);
14611 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
14614 /* If EXP represents referencing an element in a constant string
14615 (either via pointer arithmetic or array indexing), return the
14616 tree representing the value accessed, otherwise return NULL. */
14619 fold_read_from_constant_string (tree exp)
14621 if ((TREE_CODE (exp) == INDIRECT_REF
14622 || TREE_CODE (exp) == ARRAY_REF)
14623 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
14625 tree exp1 = TREE_OPERAND (exp, 0);
14629 if (TREE_CODE (exp) == INDIRECT_REF)
14630 string = string_constant (exp1, &index);
14633 tree low_bound = array_ref_low_bound (exp);
14634 index = fold_convert (sizetype, TREE_OPERAND (exp, 1));
14636 /* Optimize the special-case of a zero lower bound.
14638 We convert the low_bound to sizetype to avoid some problems
14639 with constant folding. (E.g. suppose the lower bound is 1,
14640 and its mode is QI. Without the conversion,l (ARRAY
14641 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
14642 +INDEX), which becomes (ARRAY+255+INDEX). Opps!) */
14643 if (! integer_zerop (low_bound))
14644 index = size_diffop (index, fold_convert (sizetype, low_bound));
14650 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
14651 && TREE_CODE (string) == STRING_CST
14652 && TREE_CODE (index) == INTEGER_CST
14653 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
14654 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
14656 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
14657 return build_int_cst_type (TREE_TYPE (exp),
14658 (TREE_STRING_POINTER (string)
14659 [TREE_INT_CST_LOW (index)]));
14664 /* Return the tree for neg (ARG0) when ARG0 is known to be either
14665 an integer constant, real, or fixed-point constant.
14667 TYPE is the type of the result. */
14670 fold_negate_const (tree arg0, tree type)
14672 tree t = NULL_TREE;
14674 switch (TREE_CODE (arg0))
14678 unsigned HOST_WIDE_INT low;
14679 HOST_WIDE_INT high;
14680 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
14681 TREE_INT_CST_HIGH (arg0),
14683 t = force_fit_type_double (type, low, high, 1,
14684 (overflow | TREE_OVERFLOW (arg0))
14685 && !TYPE_UNSIGNED (type));
14690 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
14695 FIXED_VALUE_TYPE f;
14696 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
14697 &(TREE_FIXED_CST (arg0)), NULL,
14698 TYPE_SATURATING (type));
14699 t = build_fixed (type, f);
14700 /* Propagate overflow flags. */
14701 if (overflow_p | TREE_OVERFLOW (arg0))
14703 TREE_OVERFLOW (t) = 1;
14704 TREE_CONSTANT_OVERFLOW (t) = 1;
14706 else if (TREE_CONSTANT_OVERFLOW (arg0))
14707 TREE_CONSTANT_OVERFLOW (t) = 1;
14712 gcc_unreachable ();
14718 /* Return the tree for abs (ARG0) when ARG0 is known to be either
14719 an integer constant or real constant.
14721 TYPE is the type of the result. */
14724 fold_abs_const (tree arg0, tree type)
14726 tree t = NULL_TREE;
14728 switch (TREE_CODE (arg0))
14731 /* If the value is unsigned, then the absolute value is
14732 the same as the ordinary value. */
14733 if (TYPE_UNSIGNED (type))
14735 /* Similarly, if the value is non-negative. */
14736 else if (INT_CST_LT (integer_minus_one_node, arg0))
14738 /* If the value is negative, then the absolute value is
14742 unsigned HOST_WIDE_INT low;
14743 HOST_WIDE_INT high;
14744 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
14745 TREE_INT_CST_HIGH (arg0),
14747 t = force_fit_type_double (type, low, high, -1,
14748 overflow | TREE_OVERFLOW (arg0));
14753 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
14754 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
14760 gcc_unreachable ();
14766 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
14767 constant. TYPE is the type of the result. */
14770 fold_not_const (tree arg0, tree type)
14772 tree t = NULL_TREE;
14774 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
14776 t = force_fit_type_double (type, ~TREE_INT_CST_LOW (arg0),
14777 ~TREE_INT_CST_HIGH (arg0), 0,
14778 TREE_OVERFLOW (arg0));
14783 /* Given CODE, a relational operator, the target type, TYPE and two
14784 constant operands OP0 and OP1, return the result of the
14785 relational operation. If the result is not a compile time
14786 constant, then return NULL_TREE. */
14789 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
14791 int result, invert;
14793 /* From here on, the only cases we handle are when the result is
14794 known to be a constant. */
14796 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
14798 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
14799 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
14801 /* Handle the cases where either operand is a NaN. */
14802 if (real_isnan (c0) || real_isnan (c1))
14812 case UNORDERED_EXPR:
14826 if (flag_trapping_math)
14832 gcc_unreachable ();
14835 return constant_boolean_node (result, type);
14838 return constant_boolean_node (real_compare (code, c0, c1), type);
14841 if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
14843 const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
14844 const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
14845 return constant_boolean_node (fixed_compare (code, c0, c1), type);
14848 /* Handle equality/inequality of complex constants. */
14849 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
14851 tree rcond = fold_relational_const (code, type,
14852 TREE_REALPART (op0),
14853 TREE_REALPART (op1));
14854 tree icond = fold_relational_const (code, type,
14855 TREE_IMAGPART (op0),
14856 TREE_IMAGPART (op1));
14857 if (code == EQ_EXPR)
14858 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
14859 else if (code == NE_EXPR)
14860 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
14865 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
14867 To compute GT, swap the arguments and do LT.
14868 To compute GE, do LT and invert the result.
14869 To compute LE, swap the arguments, do LT and invert the result.
14870 To compute NE, do EQ and invert the result.
14872 Therefore, the code below must handle only EQ and LT. */
14874 if (code == LE_EXPR || code == GT_EXPR)
14879 code = swap_tree_comparison (code);
14882 /* Note that it is safe to invert for real values here because we
14883 have already handled the one case that it matters. */
14886 if (code == NE_EXPR || code == GE_EXPR)
14889 code = invert_tree_comparison (code, false);
14892 /* Compute a result for LT or EQ if args permit;
14893 Otherwise return T. */
14894 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
14896 if (code == EQ_EXPR)
14897 result = tree_int_cst_equal (op0, op1);
14898 else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
14899 result = INT_CST_LT_UNSIGNED (op0, op1);
14901 result = INT_CST_LT (op0, op1);
14908 return constant_boolean_node (result, type);
14911 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
14912 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
14916 fold_build_cleanup_point_expr (tree type, tree expr)
14918 /* If the expression does not have side effects then we don't have to wrap
14919 it with a cleanup point expression. */
14920 if (!TREE_SIDE_EFFECTS (expr))
14923 /* If the expression is a return, check to see if the expression inside the
14924 return has no side effects or the right hand side of the modify expression
14925 inside the return. If either don't have side effects set we don't need to
14926 wrap the expression in a cleanup point expression. Note we don't check the
14927 left hand side of the modify because it should always be a return decl. */
14928 if (TREE_CODE (expr) == RETURN_EXPR)
14930 tree op = TREE_OPERAND (expr, 0);
14931 if (!op || !TREE_SIDE_EFFECTS (op))
14933 op = TREE_OPERAND (op, 1);
14934 if (!TREE_SIDE_EFFECTS (op))
14938 return build1 (CLEANUP_POINT_EXPR, type, expr);
14941 /* Given a pointer value OP0 and a type TYPE, return a simplified version
14942 of an indirection through OP0, or NULL_TREE if no simplification is
14946 fold_indirect_ref_1 (tree type, tree op0)
14952 subtype = TREE_TYPE (sub);
14953 if (!POINTER_TYPE_P (subtype))
14956 if (TREE_CODE (sub) == ADDR_EXPR)
14958 tree op = TREE_OPERAND (sub, 0);
14959 tree optype = TREE_TYPE (op);
14960 /* *&CONST_DECL -> to the value of the const decl. */
14961 if (TREE_CODE (op) == CONST_DECL)
14962 return DECL_INITIAL (op);
14963 /* *&p => p; make sure to handle *&"str"[cst] here. */
14964 if (type == optype)
14966 tree fop = fold_read_from_constant_string (op);
14972 /* *(foo *)&fooarray => fooarray[0] */
14973 else if (TREE_CODE (optype) == ARRAY_TYPE
14974 && type == TREE_TYPE (optype))
14976 tree type_domain = TYPE_DOMAIN (optype);
14977 tree min_val = size_zero_node;
14978 if (type_domain && TYPE_MIN_VALUE (type_domain))
14979 min_val = TYPE_MIN_VALUE (type_domain);
14980 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
14982 /* *(foo *)&complexfoo => __real__ complexfoo */
14983 else if (TREE_CODE (optype) == COMPLEX_TYPE
14984 && type == TREE_TYPE (optype))
14985 return fold_build1 (REALPART_EXPR, type, op);
14986 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
14987 else if (TREE_CODE (optype) == VECTOR_TYPE
14988 && type == TREE_TYPE (optype))
14990 tree part_width = TYPE_SIZE (type);
14991 tree index = bitsize_int (0);
14992 return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
14996 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
14997 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
14998 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
15000 tree op00 = TREE_OPERAND (sub, 0);
15001 tree op01 = TREE_OPERAND (sub, 1);
15005 op00type = TREE_TYPE (op00);
15006 if (TREE_CODE (op00) == ADDR_EXPR
15007 && TREE_CODE (TREE_TYPE (op00type)) == COMPLEX_TYPE
15008 && type == TREE_TYPE (TREE_TYPE (op00type)))
15010 tree size = TYPE_SIZE_UNIT (type);
15011 if (tree_int_cst_equal (size, op01))
15012 return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (op00, 0));
15016 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
15017 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
15018 && type == TREE_TYPE (TREE_TYPE (subtype)))
15021 tree min_val = size_zero_node;
15022 sub = build_fold_indirect_ref (sub);
15023 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
15024 if (type_domain && TYPE_MIN_VALUE (type_domain))
15025 min_val = TYPE_MIN_VALUE (type_domain);
15026 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
15032 /* Builds an expression for an indirection through T, simplifying some
15036 build_fold_indirect_ref (tree t)
15038 tree type = TREE_TYPE (TREE_TYPE (t));
15039 tree sub = fold_indirect_ref_1 (type, t);
15044 return build1 (INDIRECT_REF, type, t);
15047 /* Given an INDIRECT_REF T, return either T or a simplified version. */
15050 fold_indirect_ref (tree t)
15052 tree sub = fold_indirect_ref_1 (TREE_TYPE (t), TREE_OPERAND (t, 0));
15060 /* Strip non-trapping, non-side-effecting tree nodes from an expression
15061 whose result is ignored. The type of the returned tree need not be
15062 the same as the original expression. */
15065 fold_ignored_result (tree t)
15067 if (!TREE_SIDE_EFFECTS (t))
15068 return integer_zero_node;
15071 switch (TREE_CODE_CLASS (TREE_CODE (t)))
15074 t = TREE_OPERAND (t, 0);
15078 case tcc_comparison:
15079 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
15080 t = TREE_OPERAND (t, 0);
15081 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
15082 t = TREE_OPERAND (t, 1);
15087 case tcc_expression:
15088 switch (TREE_CODE (t))
15090 case COMPOUND_EXPR:
15091 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
15093 t = TREE_OPERAND (t, 0);
15097 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
15098 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
15100 t = TREE_OPERAND (t, 0);
15113 /* Return the value of VALUE, rounded up to a multiple of DIVISOR.
15114 This can only be applied to objects of a sizetype. */
15117 round_up (tree value, int divisor)
15119 tree div = NULL_TREE;
15121 gcc_assert (divisor > 0);
15125 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
15126 have to do anything. Only do this when we are not given a const,
15127 because in that case, this check is more expensive than just
15129 if (TREE_CODE (value) != INTEGER_CST)
15131 div = build_int_cst (TREE_TYPE (value), divisor);
15133 if (multiple_of_p (TREE_TYPE (value), value, div))
15137 /* If divisor is a power of two, simplify this to bit manipulation. */
15138 if (divisor == (divisor & -divisor))
15140 if (TREE_CODE (value) == INTEGER_CST)
15142 unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (value);
15143 unsigned HOST_WIDE_INT high;
15146 if ((low & (divisor - 1)) == 0)
15149 overflow_p = TREE_OVERFLOW (value);
15150 high = TREE_INT_CST_HIGH (value);
15151 low &= ~(divisor - 1);
15160 return force_fit_type_double (TREE_TYPE (value), low, high,
15167 t = build_int_cst (TREE_TYPE (value), divisor - 1);
15168 value = size_binop (PLUS_EXPR, value, t);
15169 t = build_int_cst (TREE_TYPE (value), -divisor);
15170 value = size_binop (BIT_AND_EXPR, value, t);
15176 div = build_int_cst (TREE_TYPE (value), divisor);
15177 value = size_binop (CEIL_DIV_EXPR, value, div);
15178 value = size_binop (MULT_EXPR, value, div);
15184 /* Likewise, but round down. */
15187 round_down (tree value, int divisor)
15189 tree div = NULL_TREE;
15191 gcc_assert (divisor > 0);
15195 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
15196 have to do anything. Only do this when we are not given a const,
15197 because in that case, this check is more expensive than just
15199 if (TREE_CODE (value) != INTEGER_CST)
15201 div = build_int_cst (TREE_TYPE (value), divisor);
15203 if (multiple_of_p (TREE_TYPE (value), value, div))
15207 /* If divisor is a power of two, simplify this to bit manipulation. */
15208 if (divisor == (divisor & -divisor))
15212 t = build_int_cst (TREE_TYPE (value), -divisor);
15213 value = size_binop (BIT_AND_EXPR, value, t);
15218 div = build_int_cst (TREE_TYPE (value), divisor);
15219 value = size_binop (FLOOR_DIV_EXPR, value, div);
15220 value = size_binop (MULT_EXPR, value, div);
15226 /* Returns the pointer to the base of the object addressed by EXP and
15227 extracts the information about the offset of the access, storing it
15228 to PBITPOS and POFFSET. */
15231 split_address_to_core_and_offset (tree exp,
15232 HOST_WIDE_INT *pbitpos, tree *poffset)
15235 enum machine_mode mode;
15236 int unsignedp, volatilep;
15237 HOST_WIDE_INT bitsize;
15239 if (TREE_CODE (exp) == ADDR_EXPR)
15241 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
15242 poffset, &mode, &unsignedp, &volatilep,
15244 core = fold_addr_expr (core);
15250 *poffset = NULL_TREE;
15256 /* Returns true if addresses of E1 and E2 differ by a constant, false
15257 otherwise. If they do, E1 - E2 is stored in *DIFF. */
15260 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
15263 HOST_WIDE_INT bitpos1, bitpos2;
15264 tree toffset1, toffset2, tdiff, type;
15266 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
15267 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
15269 if (bitpos1 % BITS_PER_UNIT != 0
15270 || bitpos2 % BITS_PER_UNIT != 0
15271 || !operand_equal_p (core1, core2, 0))
15274 if (toffset1 && toffset2)
15276 type = TREE_TYPE (toffset1);
15277 if (type != TREE_TYPE (toffset2))
15278 toffset2 = fold_convert (type, toffset2);
15280 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
15281 if (!cst_and_fits_in_hwi (tdiff))
15284 *diff = int_cst_value (tdiff);
15286 else if (toffset1 || toffset2)
15288 /* If only one of the offsets is non-constant, the difference cannot
15295 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
15299 /* Simplify the floating point expression EXP when the sign of the
15300 result is not significant. Return NULL_TREE if no simplification
15304 fold_strip_sign_ops (tree exp)
15308 switch (TREE_CODE (exp))
15312 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
15313 return arg0 ? arg0 : TREE_OPERAND (exp, 0);
15317 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
15319 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
15320 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
15321 if (arg0 != NULL_TREE || arg1 != NULL_TREE)
15322 return fold_build2 (TREE_CODE (exp), TREE_TYPE (exp),
15323 arg0 ? arg0 : TREE_OPERAND (exp, 0),
15324 arg1 ? arg1 : TREE_OPERAND (exp, 1));
15327 case COMPOUND_EXPR:
15328 arg0 = TREE_OPERAND (exp, 0);
15329 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
15331 return fold_build2 (COMPOUND_EXPR, TREE_TYPE (exp), arg0, arg1);
15335 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
15336 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
15338 return fold_build3 (COND_EXPR, TREE_TYPE (exp), TREE_OPERAND (exp, 0),
15339 arg0 ? arg0 : TREE_OPERAND (exp, 1),
15340 arg1 ? arg1 : TREE_OPERAND (exp, 2));
15345 const enum built_in_function fcode = builtin_mathfn_code (exp);
15348 CASE_FLT_FN (BUILT_IN_COPYSIGN):
15349 /* Strip copysign function call, return the 1st argument. */
15350 arg0 = CALL_EXPR_ARG (exp, 0);
15351 arg1 = CALL_EXPR_ARG (exp, 1);
15352 return omit_one_operand (TREE_TYPE (exp), arg0, arg1);
15355 /* Strip sign ops from the argument of "odd" math functions. */
15356 if (negate_mathfn_p (fcode))
15358 arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
15360 return build_call_expr (get_callee_fndecl (exp), 1, arg0);