1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 /*@@ This file should be rewritten to use an arbitrary precision
23 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
24 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
25 @@ The routines that translate from the ap rep should
26 @@ warn if precision et al. is lost.
27 @@ This would also make life easier when this technology is used
28 @@ for cross-compilers. */
30 /* The entry points in this file are fold, size_int_wide, size_binop
31 and force_fit_type_double.
33 fold takes a tree as argument and returns a simplified tree.
35 size_binop takes a tree code for an arithmetic operation
36 and two operands that are trees, and produces a tree for the
37 result, assuming the type comes from `sizetype'.
39 size_int takes an integer value, and creates a tree constant
40 with type from `sizetype'.
42 force_fit_type_double takes a constant, an overflowable flag and a
43 prior overflow indicator. It forces the value to fit the type and
46 Note: Since the folders get called on non-gimple code as well as
47 gimple code, we need to handle GIMPLE tuples as well as their
48 corresponding tree equivalents. */
52 #include "coretypes.h"
57 #include "fixed-value.h"
66 #include "langhooks.h"
70 /* Nonzero if we are folding constants inside an initializer; zero
72 int folding_initializer = 0;
74 /* The following constants represent a bit based encoding of GCC's
75 comparison operators. This encoding simplifies transformations
76 on relational comparison operators, such as AND and OR. */
77 enum comparison_code {
96 static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
97 static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *);
98 static bool negate_mathfn_p (enum built_in_function);
99 static bool negate_expr_p (tree);
100 static tree negate_expr (tree);
101 static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
102 static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
103 static tree const_binop (enum tree_code, tree, tree, int);
104 static enum comparison_code comparison_to_compcode (enum tree_code);
105 static enum tree_code compcode_to_comparison (enum comparison_code);
106 static int operand_equal_for_comparison_p (tree, tree, tree);
107 static int twoval_comparison_p (tree, tree *, tree *, int *);
108 static tree eval_subst (location_t, tree, tree, tree, tree, tree);
109 static tree pedantic_omit_one_operand_loc (location_t, tree, tree, tree);
110 static tree distribute_bit_expr (location_t, enum tree_code, tree, tree, tree);
111 static tree make_bit_field_ref (location_t, tree, tree,
112 HOST_WIDE_INT, HOST_WIDE_INT, int);
113 static tree optimize_bit_field_compare (location_t, enum tree_code,
115 static tree decode_field_reference (location_t, tree, HOST_WIDE_INT *,
117 enum machine_mode *, int *, int *,
119 static int all_ones_mask_p (const_tree, int);
120 static tree sign_bit_p (tree, const_tree);
121 static int simple_operand_p (const_tree);
122 static tree range_binop (enum tree_code, tree, tree, int, tree, int);
123 static tree range_predecessor (tree);
124 static tree range_successor (tree);
125 extern tree make_range (tree, int *, tree *, tree *, bool *);
126 extern bool merge_ranges (int *, tree *, tree *, int, tree, tree, int,
128 static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
129 static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
130 static tree unextend (tree, int, int, tree);
131 static tree fold_truthop (location_t, enum tree_code, tree, tree, tree);
132 static tree optimize_minmax_comparison (location_t, enum tree_code,
134 static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
135 static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
136 static tree fold_binary_op_with_conditional_arg (location_t,
137 enum tree_code, tree,
140 static tree fold_mathfn_compare (location_t,
141 enum built_in_function, enum tree_code,
143 static tree fold_inf_compare (location_t, enum tree_code, tree, tree, tree);
144 static tree fold_div_compare (location_t, enum tree_code, tree, tree, tree);
145 static bool reorder_operands_p (const_tree, const_tree);
146 static tree fold_negate_const (tree, tree);
147 static tree fold_not_const (tree, tree);
148 static tree fold_relational_const (enum tree_code, tree, tree, tree);
149 static tree fold_convert_const (enum tree_code, tree, tree);
152 /* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
153 overflow. Suppose A, B and SUM have the same respective signs as A1, B1,
154 and SUM1. Then this yields nonzero if overflow occurred during the
157 Overflow occurs if A and B have the same sign, but A and SUM differ in
158 sign. Use `^' to test whether signs differ, and `< 0' to isolate the
160 #define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
162 /* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
163 We do that by representing the two-word integer in 4 words, with only
164 HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
165 number. The value of the word is LOWPART + HIGHPART * BASE. */
168 ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
169 #define HIGHPART(x) \
170 ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
171 #define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)
173 /* Unpack a two-word integer into 4 words.
174 LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
175 WORDS points to the array of HOST_WIDE_INTs. */
/* Split the two-word integer LOW/HI into the 4-element array WORDS,
   one half-word (HOST_BITS_PER_WIDE_INT / 2 bits) per element, using
   the LOWPART/HIGHPART macros defined above.
   NOTE(review): this listing is elided (original line numbers jump);
   the return-type line and braces of this definition are not visible.  */
178 encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
180 words[0] = LOWPART (low);
181 words[1] = HIGHPART (low);
182 words[2] = LOWPART (hi);
183 words[3] = HIGHPART (hi);
186 /* Pack an array of 4 words into a two-word integer.
187 WORDS points to the array of words.
188 The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces. */
/* Inverse of encode: pack the 4 half-words in WORDS back into the
   two-word integer *LOW/*HI (value of each pair = low + high * BASE).
   NOTE(review): elided listing -- the second parameter line and the
   braces of this definition are not visible here.  */
191 decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
194 *low = words[0] + words[1] * BASE;
195 *hi = words[2] + words[3] * BASE;
198 /* Force the double-word integer L1, H1 to be within the range of the
199 integer type TYPE. Stores the properly truncated and sign-extended
200 double-word integer in *LV, *HV. Returns true if the operation
201 overflows, that is, argument and result are different. */
/* Truncate and sign/zero extend the double-word integer L1:H1 to the
   precision of TYPE, storing the result in *LV/*HV.  Returns nonzero
   when the value did not fit (argument and result differ) -- overflow.
   NOTE(review): elided listing; braces and several statements (e.g. the
   stores to *lv/*hv) are not visible in this fragment.  */
204 fit_double_type (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
205 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, const_tree type)
207 unsigned HOST_WIDE_INT low0 = l1;
208 HOST_WIDE_INT high0 = h1;
209 unsigned int prec = TYPE_PRECISION (type);
210 int sign_extended_type;
212 /* Size types *are* sign extended. */
213 sign_extended_type = (!TYPE_UNSIGNED (type)
214 || (TREE_CODE (type) == INTEGER_TYPE
215 && TYPE_IS_SIZETYPE (type)));
217 /* First clear all bits that are beyond the type's precision. */
218 if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
220 else if (prec > HOST_BITS_PER_WIDE_INT)
221 h1 &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
225 if (prec < HOST_BITS_PER_WIDE_INT)
226 l1 &= ~((HOST_WIDE_INT) (-1) << prec);
229 /* Then do sign extension if necessary. */
230 if (!sign_extended_type)
231 /* No sign extension */;
232 else if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
233 /* Correct width already. */;
234 else if (prec > HOST_BITS_PER_WIDE_INT)
236 /* Sign extend top half? */
237 if (h1 & ((unsigned HOST_WIDE_INT)1
238 << (prec - HOST_BITS_PER_WIDE_INT - 1)))
239 h1 |= (HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT);
241 else if (prec == HOST_BITS_PER_WIDE_INT)
243 if ((HOST_WIDE_INT)l1 < 0)
248 /* Sign extend bottom half? */
249 if (l1 & ((unsigned HOST_WIDE_INT)1 << (prec - 1)))
252 l1 |= (HOST_WIDE_INT)(-1) << prec;
259 /* If the value didn't fit, signal overflow. */
260 return l1 != low0 || h1 != high0;
263 /* We force the double-int HIGH:LOW to the range of the type TYPE by
264 sign or zero extending it.
265 OVERFLOWABLE indicates if we are interested
266 in overflow of the value, when >0 we are only interested in signed
267 overflow, for <0 we are interested in any overflow. OVERFLOWED
268 indicates whether overflow has already occurred. CONST_OVERFLOWED
269 indicates whether constant overflow has already occurred. We force
270 T's value to be within range of T's type (by setting to 0 or 1 all
271 the bits outside the type's range). We set TREE_OVERFLOWED if,
272 OVERFLOWED is nonzero,
273 or OVERFLOWABLE is >0 and signed overflow occurs
274 or OVERFLOWABLE is <0 and any overflow occurs
275 We return a new tree node for the extended double-int. The node
276 is shared if no overflow flags are set. */
/* Build an INTEGER_CST of TYPE for LOW/HIGH forced into TYPE's range
   via fit_double_type.  Per the comment above: when overflow must be
   flagged (OVERFLOWED already set, or overflow just occurred and the
   OVERFLOWABLE conditions are met) an unshared node with TREE_OVERFLOW
   set is returned; otherwise a shared node is built.
   NOTE(review): elided listing; parts of the parameter list, the
   declaration of `overflow`/`overflowed`, and braces are not visible.  */
279 force_fit_type_double (tree type, unsigned HOST_WIDE_INT low,
280 HOST_WIDE_INT high, int overflowable,
283 int sign_extended_type;
286 /* Size types *are* sign extended. */
287 sign_extended_type = (!TYPE_UNSIGNED (type)
288 || (TREE_CODE (type) == INTEGER_TYPE
289 && TYPE_IS_SIZETYPE (type)));
291 overflow = fit_double_type (low, high, &low, &high, type);
293 /* If we need to set overflow flags, return a new unshared node. */
294 if (overflowed || overflow)
298 || (overflowable > 0 && sign_extended_type))
300 tree t = make_node (INTEGER_CST);
301 TREE_INT_CST_LOW (t) = low;
302 TREE_INT_CST_HIGH (t) = high;
303 TREE_TYPE (t) = type;
304 TREE_OVERFLOW (t) = 1;
309 /* Else build a shared node. */
310 return build_int_cst_wide (type, low, high);
313 /* Add two doubleword integers with doubleword result.
314 Return nonzero if the operation overflows according to UNSIGNED_P.
315 Each argument is given as two `HOST_WIDE_INT' pieces.
316 One argument is L1 and H1; the other, L2 and H2.
317 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
/* Add the doubleword integers L1:H1 and L2:H2 into *LV/*HV and report
   overflow: for the unsigned case, carry-out is detected by comparing
   the high sum against an addend; for the signed case the
   OVERFLOW_SUM_SIGN macro is used.
   NOTE(review): elided listing; the unsigned_p parameter, the low-word
   addition/carry and braces are not visible in this fragment.  */
320 add_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
321 unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
322 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
325 unsigned HOST_WIDE_INT l;
329 h = (HOST_WIDE_INT) ((unsigned HOST_WIDE_INT) h1
330 + (unsigned HOST_WIDE_INT) h2
337 return ((unsigned HOST_WIDE_INT) h < (unsigned HOST_WIDE_INT) h1
341 return OVERFLOW_SUM_SIGN (h1, h2, h);
344 /* Negate a doubleword integer with doubleword result.
345 Return nonzero if the operation overflows, assuming it's signed.
346 The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
347 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
/* Negate the doubleword integer L1:H1 into *LV/*HV; the return value
   signals signed overflow (negating the minimum value).
   NOTE(review): elided listing; only the signature and one return
   statement of this definition are visible here.  */
350 neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
351 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
357 return (*hv & h1) < 0;
367 /* Multiply two doubleword integers with doubleword result.
368 Return nonzero if the operation overflows according to UNSIGNED_P.
369 Each argument is given as two `HOST_WIDE_INT' pieces.
370 One argument is L1 and H1; the other, L2 and H2.
371 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
/* Multiply the doubleword integers L1:H1 and L2:H2, producing the low
   double word in *LV/*HV via half-word schoolbook multiplication into
   prod[8], and report overflow by inspecting the top double word
   (toplow:tophigh), adjusted for operand signs in the signed case.
   NOTE(review): elided listing; the unsigned_p parameter, loop indices
   i/j/k, several guards and all braces are not visible here.  */
374 mul_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
375 unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
376 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
379 HOST_WIDE_INT arg1[4];
380 HOST_WIDE_INT arg2[4];
381 HOST_WIDE_INT prod[4 * 2];
382 unsigned HOST_WIDE_INT carry;
384 unsigned HOST_WIDE_INT toplow, neglow;
385 HOST_WIDE_INT tophigh, neghigh;
387 encode (arg1, l1, h1);
388 encode (arg2, l2, h2);
390 memset (prod, 0, sizeof prod);
392 for (i = 0; i < 4; i++)
395 for (j = 0; j < 4; j++)
398 /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000. */
399 carry += arg1[i] * arg2[j];
400 /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF. */
402 prod[k] = LOWPART (carry);
403 carry = HIGHPART (carry);
408 decode (prod, lv, hv);
409 decode (prod + 4, &toplow, &tophigh);
411 /* Unsigned overflow is immediate. */
413 return (toplow | tophigh) != 0;
415 /* Check for signed overflow by calculating the signed representation of the
416 top half of the result; it should agree with the low half's sign bit. */
419 neg_double (l2, h2, &neglow, &neghigh);
420 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
424 neg_double (l1, h1, &neglow, &neghigh);
425 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
427 return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
430 /* Shift the doubleword integer in L1, H1 left by COUNT places
431 keeping only PREC bits of result.
432 Shift right if COUNT is negative.
433 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
434 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
/* Shift L1:H1 left by COUNT bits, keeping PREC bits of result in
   *LV/*HV; a negative COUNT delegates to rshift_double.  ARITH selects
   arithmetic (sign-extending) behavior for the final extension step.
   NOTE(review): elided listing; the SHIFT_COUNT_TRUNCATED branch body,
   several *lv assignments and all braces are not visible here.  */
437 lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
438 HOST_WIDE_INT count, unsigned int prec,
439 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
441 unsigned HOST_WIDE_INT signmask;
445 rshift_double (l1, h1, -count, prec, lv, hv, arith);
449 if (SHIFT_COUNT_TRUNCATED)
452 if (count >= 2 * HOST_BITS_PER_WIDE_INT)
454 /* Shifting by the host word size is undefined according to the
455 ANSI standard, so we must handle this as a special case. */
459 else if (count >= HOST_BITS_PER_WIDE_INT)
461 *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
466 *hv = (((unsigned HOST_WIDE_INT) h1 << count)
467 | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
471 /* Sign extend all bits that are beyond the precision. */
473 signmask = -((prec > HOST_BITS_PER_WIDE_INT
474 ? ((unsigned HOST_WIDE_INT) *hv
475 >> (prec - HOST_BITS_PER_WIDE_INT - 1))
476 : (*lv >> (prec - 1))) & 1);
478 if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
480 else if (prec >= HOST_BITS_PER_WIDE_INT)
482 *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
483 *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
488 *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
489 *lv |= signmask << prec;
493 /* Shift the doubleword integer in L1, H1 right by COUNT places
494 keeping only PREC bits of result. COUNT must be positive.
495 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
496 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
/* Shift L1:H1 right by COUNT (must be positive) keeping PREC bits in
   *LV/*HV; signmask (from H1's sign bit when shifting arithmetically)
   fills the vacated bits in the extension step at the bottom.
   NOTE(review): elided listing; the arith parameter line, signmask's
   full initializer, several assignments and braces are not visible.  */
499 rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
500 HOST_WIDE_INT count, unsigned int prec,
501 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
504 unsigned HOST_WIDE_INT signmask;
507 ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
510 if (SHIFT_COUNT_TRUNCATED)
513 if (count >= 2 * HOST_BITS_PER_WIDE_INT)
515 /* Shifting by the host word size is undefined according to the
516 ANSI standard, so we must handle this as a special case. */
520 else if (count >= HOST_BITS_PER_WIDE_INT)
523 *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
527 *hv = (unsigned HOST_WIDE_INT) h1 >> count;
529 | ((unsigned HOST_WIDE_INT) h1 << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
532 /* Zero / sign extend all bits that are beyond the precision. */
534 if (count >= (HOST_WIDE_INT)prec)
539 else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
541 else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
543 *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
544 *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
549 *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
550 *lv |= signmask << (prec - count);
554 /* Rotate the doubleword integer in L1, H1 left by COUNT places
555 keeping only PREC bits of result.
556 Rotate right if COUNT is negative.
557 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
/* Rotate L1:H1 left by COUNT within PREC bits: a left logical shift by
   COUNT OR'd with a right logical shift by PREC - COUNT.
   NOTE(review): elided listing; the COUNT normalization and the final
   combine of s1/s2 into *LV/*HV are not visible in this fragment.  */
560 lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
561 HOST_WIDE_INT count, unsigned int prec,
562 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
564 unsigned HOST_WIDE_INT s1l, s2l;
565 HOST_WIDE_INT s1h, s2h;
571 lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
572 rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
577 /* Rotate the doubleword integer in L1, H1 left by COUNT places
578 keeping only PREC bits of result. COUNT must be positive.
579 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
/* Rotate L1:H1 right by COUNT within PREC bits: mirror image of
   lrotate_double (right shift by COUNT, left shift by PREC - COUNT).
   NOTE(review): elided listing; the COUNT normalization and the final
   combine of s1/s2 into *LV/*HV are not visible in this fragment.  */
582 rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
583 HOST_WIDE_INT count, unsigned int prec,
584 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
586 unsigned HOST_WIDE_INT s1l, s2l;
587 HOST_WIDE_INT s1h, s2h;
593 rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
594 lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
599 /* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
600 for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
601 CODE is a tree code for a kind of division, one of
602 TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
604 It controls how the quotient is rounded to an integer.
605 Return nonzero if the operation overflows.
606 UNS nonzero says do unsigned division. */
/* Divide the doubleword LNUM_ORIG:HNUM_ORIG by LDEN_ORIG:HDEN_ORIG,
   producing quotient *LQUO/*HQUO and remainder *LREM/*HREM, rounding
   per CODE (TRUNC/FLOOR/CEIL/ROUND variants); UNS selects unsigned
   division.  The multiword core follows Knuth's Algorithm D
   ("Seminumerical Algorithms"), working in half-word digits.
   FIX(review): the mul_double call computing 2*|rem| passed the
   garbled token `<wice`; restored to `&ltwice` (ltwice is declared
   above and compared below -- the call must write through its address,
   parallel to &htwice).
   NOTE(review): elided listing; braces, some guards and assignments of
   this definition are not visible in this fragment.  */
609 div_and_round_double (enum tree_code code, int uns,
610 unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
611 HOST_WIDE_INT hnum_orig,
612 unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
613 HOST_WIDE_INT hden_orig,
614 unsigned HOST_WIDE_INT *lquo,
615 HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
619 HOST_WIDE_INT num[4 + 1]; /* extra element for scaling. */
620 HOST_WIDE_INT den[4], quo[4];
622 unsigned HOST_WIDE_INT work;
623 unsigned HOST_WIDE_INT carry = 0;
624 unsigned HOST_WIDE_INT lnum = lnum_orig;
625 HOST_WIDE_INT hnum = hnum_orig;
626 unsigned HOST_WIDE_INT lden = lden_orig;
627 HOST_WIDE_INT hden = hden_orig;
630 if (hden == 0 && lden == 0)
631 overflow = 1, lden = 1;
633 /* Calculate quotient sign and convert operands to unsigned. */
639 /* (minimum integer) / (-1) is the only overflow case. */
640 if (neg_double (lnum, hnum, &lnum, &hnum)
641 && ((HOST_WIDE_INT) lden & hden) == -1)
647 neg_double (lden, hden, &lden, &hden);
651 if (hnum == 0 && hden == 0)
652 { /* single precision */
654 /* This unsigned division rounds toward zero. */
660 { /* trivial case: dividend < divisor */
661 /* hden != 0 already checked. */
668 memset (quo, 0, sizeof quo);
670 memset (num, 0, sizeof num); /* to zero 9th element */
671 memset (den, 0, sizeof den);
673 encode (num, lnum, hnum);
674 encode (den, lden, hden);
676 /* Special code for when the divisor < BASE. */
677 if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
679 /* hnum != 0 already checked. */
680 for (i = 4 - 1; i >= 0; i--)
682 work = num[i] + carry * BASE;
683 quo[i] = work / lden;
689 /* Full double precision division,
690 with thanks to Don Knuth's "Seminumerical Algorithms". */
691 int num_hi_sig, den_hi_sig;
692 unsigned HOST_WIDE_INT quo_est, scale;
694 /* Find the highest nonzero divisor digit. */
695 for (i = 4 - 1;; i--)
702 /* Insure that the first digit of the divisor is at least BASE/2.
703 This is required by the quotient digit estimation algorithm. */
705 scale = BASE / (den[den_hi_sig] + 1);
707 { /* scale divisor and dividend */
709 for (i = 0; i <= 4 - 1; i++)
711 work = (num[i] * scale) + carry;
712 num[i] = LOWPART (work);
713 carry = HIGHPART (work);
718 for (i = 0; i <= 4 - 1; i++)
720 work = (den[i] * scale) + carry;
721 den[i] = LOWPART (work);
722 carry = HIGHPART (work);
723 if (den[i] != 0) den_hi_sig = i;
730 for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
732 /* Guess the next quotient digit, quo_est, by dividing the first
733 two remaining dividend digits by the high order quotient digit.
734 quo_est is never low and is at most 2 high. */
735 unsigned HOST_WIDE_INT tmp;
737 num_hi_sig = i + den_hi_sig + 1;
738 work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
739 if (num[num_hi_sig] != den[den_hi_sig])
740 quo_est = work / den[den_hi_sig];
744 /* Refine quo_est so it's usually correct, and at most one high. */
745 tmp = work - quo_est * den[den_hi_sig];
747 && (den[den_hi_sig - 1] * quo_est
748 > (tmp * BASE + num[num_hi_sig - 2])))
751 /* Try QUO_EST as the quotient digit, by multiplying the
752 divisor by QUO_EST and subtracting from the remaining dividend.
753 Keep in mind that QUO_EST is the I - 1st digit. */
756 for (j = 0; j <= den_hi_sig; j++)
758 work = quo_est * den[j] + carry;
759 carry = HIGHPART (work);
760 work = num[i + j] - LOWPART (work);
761 num[i + j] = LOWPART (work);
762 carry += HIGHPART (work) != 0;
765 /* If quo_est was high by one, then num[i] went negative and
766 we need to correct things. */
767 if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
770 carry = 0; /* add divisor back in */
771 for (j = 0; j <= den_hi_sig; j++)
773 work = num[i + j] + den[j] + carry;
774 carry = HIGHPART (work);
775 num[i + j] = LOWPART (work);
778 num [num_hi_sig] += carry;
781 /* Store the quotient digit. */
786 decode (quo, lquo, hquo);
789 /* If result is negative, make it so. */
791 neg_double (*lquo, *hquo, lquo, hquo);
793 /* Compute trial remainder: rem = num - (quo * den) */
794 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
795 neg_double (*lrem, *hrem, lrem, hrem);
796 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
801 case TRUNC_MOD_EXPR: /* round toward zero */
802 case EXACT_DIV_EXPR: /* for this one, it shouldn't matter */
806 case FLOOR_MOD_EXPR: /* round toward negative infinity */
807 if (quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio < 0 && rem != 0 */
810 add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
818 case CEIL_MOD_EXPR: /* round toward positive infinity */
819 if (!quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio > 0 && rem != 0 */
821 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
829 case ROUND_MOD_EXPR: /* round to closest integer */
831 unsigned HOST_WIDE_INT labs_rem = *lrem;
832 HOST_WIDE_INT habs_rem = *hrem;
833 unsigned HOST_WIDE_INT labs_den = lden, ltwice;
834 HOST_WIDE_INT habs_den = hden, htwice;
836 /* Get absolute values. */
838 neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
840 neg_double (lden, hden, &labs_den, &habs_den);
842 /* If (2 * abs (lrem) >= abs (lden)), adjust the quotient. */
843 mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
844 labs_rem, habs_rem, &ltwice, &htwice);
846 if (((unsigned HOST_WIDE_INT) habs_den
847 < (unsigned HOST_WIDE_INT) htwice)
848 || (((unsigned HOST_WIDE_INT) habs_den
849 == (unsigned HOST_WIDE_INT) htwice)
850 && (labs_den <= ltwice)))
854 add_double (*lquo, *hquo,
855 (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
858 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
870 /* Compute true remainder: rem = num - (quo * den) */
871 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
872 neg_double (*lrem, *hrem, lrem, hrem);
873 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
877 /* If ARG2 divides ARG1 with zero remainder, carries out the division
878 of type CODE and returns the quotient.
879 Otherwise returns NULL_TREE. */
/* Divide constant ARG1 by constant ARG2 with division kind CODE and
   return the quotient as an INTEGER_CST of ARG1's type; signedness is
   taken from ARG2's type (see comment below).  When the remainder is
   nonzero, NULL_TREE is returned (per the comment above the function).
   NOTE(review): elided listing; the `uns` declaration, the sizetype
   branch body, the NULL_TREE return and braces are not visible.  */
882 div_if_zero_remainder (enum tree_code code, const_tree arg1, const_tree arg2)
884 unsigned HOST_WIDE_INT int1l, int2l;
885 HOST_WIDE_INT int1h, int2h;
886 unsigned HOST_WIDE_INT quol, reml;
887 HOST_WIDE_INT quoh, remh;
890 /* The sign of the division is according to operand two, that
891 does the correct thing for POINTER_PLUS_EXPR where we want
892 a signed division. */
893 uns = TYPE_UNSIGNED (TREE_TYPE (arg2));
894 if (TREE_CODE (TREE_TYPE (arg2)) == INTEGER_TYPE
895 && TYPE_IS_SIZETYPE (TREE_TYPE (arg2)))
898 int1l = TREE_INT_CST_LOW (arg1);
899 int1h = TREE_INT_CST_HIGH (arg1);
900 int2l = TREE_INT_CST_LOW (arg2);
901 int2h = TREE_INT_CST_HIGH (arg2);
903 div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
904 &quol, &quoh, &reml, &remh);
905 if (remh != 0 || reml != 0)
908 return build_int_cst_wide (TREE_TYPE (arg1), quol, quoh);
911 /* This is nonzero if we should defer warnings about undefined
912 overflow. This facility exists because these warnings are a
913 special case. The code to estimate loop iterations does not want
914 to issue any warnings, since it works with expressions which do not
915 occur in user code. Various bits of cleanup code call fold(), but
916 only use the result if it has certain characteristics (e.g., is a
917 constant); that code only wants to issue a warning if the result is
920 static int fold_deferring_overflow_warnings;
922 /* If a warning about undefined overflow is deferred, this is the
923 warning. Note that this may cause us to turn two warnings into
924 one, but that is fine since it is sufficient to only give one
925 warning per expression. */
927 static const char* fold_deferred_overflow_warning;
929 /* If a warning about undefined overflow is deferred, this is the
930 level at which the warning should be emitted. */
932 static enum warn_strict_overflow_code fold_deferred_overflow_code;
934 /* Start deferring overflow warnings. We could use a stack here to
935 permit nested calls, but at present it is not necessary. */
/* Begin deferring overflow warnings by bumping the depth counter read
   by fold_undefer_overflow_warnings and fold_overflow_warning.  */
938 fold_defer_overflow_warnings (void)
940 ++fold_deferring_overflow_warnings;
943 /* Stop deferring overflow warnings. If there is a pending warning,
944 and ISSUE is true, then issue the warning if appropriate. STMT is
945 the statement with which the warning should be associated (used for
946 location information); STMT may be NULL. CODE is the level of the
947 warning--a warn_strict_overflow_code value. This function will use
948 the smaller of CODE and the deferred code when deciding whether to
949 issue the warning. CODE may be zero to mean to always use the
/* Pop one level of warning deferral.  If this was the outermost level,
   take ownership of any pending warning message and, when ISSUE is
   true, the statement doesn't suppress warnings, and the (smaller of
   CODE and the deferred) level passes issue_strict_overflow_warning,
   emit it at STMT's location (or input_location when STMT is absent).
   NOTE(review): elided listing; early returns, the `warnmsg`/`locus`
   declarations, the stmt!=NULL guards and braces are not visible.  */
953 fold_undefer_overflow_warnings (bool issue, const_gimple stmt, int code)
958 gcc_assert (fold_deferring_overflow_warnings > 0);
959 --fold_deferring_overflow_warnings;
960 if (fold_deferring_overflow_warnings > 0)
962 if (fold_deferred_overflow_warning != NULL
964 && code < (int) fold_deferred_overflow_code)
965 fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
969 warnmsg = fold_deferred_overflow_warning;
970 fold_deferred_overflow_warning = NULL;
972 if (!issue || warnmsg == NULL)
975 if (gimple_no_warning_p (stmt))
978 /* Use the smallest code level when deciding to issue the
980 if (code == 0 || code > (int) fold_deferred_overflow_code)
981 code = fold_deferred_overflow_code;
983 if (!issue_strict_overflow_warning (code))
987 locus = input_location;
989 locus = gimple_location (stmt);
990 warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
993 /* Stop deferring overflow warnings, ignoring any deferred
/* Convenience wrapper: stop deferring and drop any pending warning
   (issue=false, no statement, code 0).  */
997 fold_undefer_and_ignore_overflow_warnings (void)
999 fold_undefer_overflow_warnings (false, NULL, 0);
1002 /* Whether we are deferring overflow warnings. */
/* Return whether at least one level of overflow-warning deferral is
   currently active.  */
1005 fold_deferring_overflow_warnings_p (void)
1007 return fold_deferring_overflow_warnings > 0;
1010 /* This is called when we fold something based on the fact that signed
1011 overflow is undefined. */
/* Record or emit a signed-overflow warning GMSGID at level WC: while
   deferring, keep only the lowest-level (most important) pending
   message; otherwise warn immediately if WC passes
   issue_strict_overflow_warning.
   NOTE(review): elided listing; braces of the two branches are not
   visible in this fragment.  */
1014 fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
1016 if (fold_deferring_overflow_warnings > 0)
1018 if (fold_deferred_overflow_warning == NULL
1019 || wc < fold_deferred_overflow_code)
1021 fold_deferred_overflow_warning = gmsgid;
1022 fold_deferred_overflow_code = wc;
1025 else if (issue_strict_overflow_warning (wc))
1026 warning (OPT_Wstrict_overflow, gmsgid);
1029 /* Return true if the built-in mathematical function specified by CODE
1030 is odd, i.e. -f(x) == f(-x). */
/* Per the comment above: return true when built-in CODE names an odd
   math function, i.e. -f(x) == f(-x).  The rounding built-ins (llrint,
   lrint, nearbyint, rint) only qualify when -frounding-math is off.
   NOTE(review): elided listing; the switch keyword, the `return true`
   for the first case group, the default case and braces are not
   visible in this fragment.  */
1033 negate_mathfn_p (enum built_in_function code)
1037 CASE_FLT_FN (BUILT_IN_ASIN):
1038 CASE_FLT_FN (BUILT_IN_ASINH):
1039 CASE_FLT_FN (BUILT_IN_ATAN):
1040 CASE_FLT_FN (BUILT_IN_ATANH):
1041 CASE_FLT_FN (BUILT_IN_CASIN):
1042 CASE_FLT_FN (BUILT_IN_CASINH):
1043 CASE_FLT_FN (BUILT_IN_CATAN):
1044 CASE_FLT_FN (BUILT_IN_CATANH):
1045 CASE_FLT_FN (BUILT_IN_CBRT):
1046 CASE_FLT_FN (BUILT_IN_CPROJ):
1047 CASE_FLT_FN (BUILT_IN_CSIN):
1048 CASE_FLT_FN (BUILT_IN_CSINH):
1049 CASE_FLT_FN (BUILT_IN_CTAN):
1050 CASE_FLT_FN (BUILT_IN_CTANH):
1051 CASE_FLT_FN (BUILT_IN_ERF):
1052 CASE_FLT_FN (BUILT_IN_LLROUND):
1053 CASE_FLT_FN (BUILT_IN_LROUND):
1054 CASE_FLT_FN (BUILT_IN_ROUND):
1055 CASE_FLT_FN (BUILT_IN_SIN):
1056 CASE_FLT_FN (BUILT_IN_SINH):
1057 CASE_FLT_FN (BUILT_IN_TAN):
1058 CASE_FLT_FN (BUILT_IN_TANH):
1059 CASE_FLT_FN (BUILT_IN_TRUNC):
1062 CASE_FLT_FN (BUILT_IN_LLRINT):
1063 CASE_FLT_FN (BUILT_IN_LRINT):
1064 CASE_FLT_FN (BUILT_IN_NEARBYINT):
1065 CASE_FLT_FN (BUILT_IN_RINT):
1066 return !flag_rounding_math;
1074 /* Check whether we may negate an integer constant T without causing
/* Return whether the INTEGER_CST T can be negated without overflow,
   i.e. whether its magnitude bits differ from the single sign-bit
   pattern (the most negative value of a PREC-bit signed type).
   NOTE(review): elided listing; the `type`/`prec` declarations, the
   early returns for unsigned types and the low!=0 case, and braces
   are not visible in this fragment.  */
1078 may_negate_without_overflow_p (const_tree t)
1080 unsigned HOST_WIDE_INT val;
1084 gcc_assert (TREE_CODE (t) == INTEGER_CST);
1086 type = TREE_TYPE (t);
1087 if (TYPE_UNSIGNED (type))
1090 prec = TYPE_PRECISION (type);
1091 if (prec > HOST_BITS_PER_WIDE_INT)
1093 if (TREE_INT_CST_LOW (t) != 0)
1095 prec -= HOST_BITS_PER_WIDE_INT;
1096 val = TREE_INT_CST_HIGH (t);
1099 val = TREE_INT_CST_LOW (t);
1100 if (prec < HOST_BITS_PER_WIDE_INT)
1101 val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
1102 return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
1105 /* Determine whether an expression T can be cheaply negated using
1106 the function negate_expr without introducing undefined overflow. */
/* Predicate for negate_expr: return whether T can be cheaply negated
   without introducing undefined overflow, dispatching on T's tree code
   (constants, complex, +/-, *, divisions, conversions, math builtins,
   and the (int)x >> (prec-1) pattern).
   NOTE(review): elided listing; most `case` labels, several returns
   and all braces are not visible, so each visible test below belongs
   to a case whose label is missing from this fragment.  */
1109 negate_expr_p (tree t)
1116 type = TREE_TYPE (t);
1118 STRIP_SIGN_NOPS (t);
1119 switch (TREE_CODE (t))
1122 if (TYPE_OVERFLOW_WRAPS (type))
1125 /* Check that -CST will not overflow type. */
1126 return may_negate_without_overflow_p (t);
1128 return (INTEGRAL_TYPE_P (type)
1129 && TYPE_OVERFLOW_WRAPS (type));
1136 /* We want to canonicalize to positive real constants. Pretend
1137 that only negative ones can be easily negated. */
1138 return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
1141 return negate_expr_p (TREE_REALPART (t))
1142 && negate_expr_p (TREE_IMAGPART (t));
1145 return negate_expr_p (TREE_OPERAND (t, 0))
1146 && negate_expr_p (TREE_OPERAND (t, 1));
1149 return negate_expr_p (TREE_OPERAND (t, 0));
1152 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
1153 || HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
1155 /* -(A + B) -> (-B) - A. */
1156 if (negate_expr_p (TREE_OPERAND (t, 1))
1157 && reorder_operands_p (TREE_OPERAND (t, 0),
1158 TREE_OPERAND (t, 1)))
1160 /* -(A + B) -> (-A) - B. */
1161 return negate_expr_p (TREE_OPERAND (t, 0));
1164 /* We can't turn -(A-B) into B-A when we honor signed zeros. */
1165 return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
1166 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
1167 && reorder_operands_p (TREE_OPERAND (t, 0),
1168 TREE_OPERAND (t, 1));
1171 if (TYPE_UNSIGNED (TREE_TYPE (t)))
1177 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
1178 return negate_expr_p (TREE_OPERAND (t, 1))
1179 || negate_expr_p (TREE_OPERAND (t, 0));
1182 case TRUNC_DIV_EXPR:
1183 case ROUND_DIV_EXPR:
1184 case FLOOR_DIV_EXPR:
1186 case EXACT_DIV_EXPR:
1187 /* In general we can't negate A / B, because if A is INT_MIN and
1188 B is 1, we may turn this into INT_MIN / -1 which is undefined
1189 and actually traps on some architectures. But if overflow is
1190 undefined, we can negate, because - (INT_MIN / 1) is an
1192 if (INTEGRAL_TYPE_P (TREE_TYPE (t))
1193 && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
1195 return negate_expr_p (TREE_OPERAND (t, 1))
1196 || negate_expr_p (TREE_OPERAND (t, 0));
1199 /* Negate -((double)float) as (double)(-float). */
1200 if (TREE_CODE (type) == REAL_TYPE)
1202 tree tem = strip_float_extensions (t);
1204 return negate_expr_p (tem);
1209 /* Negate -f(x) as f(-x). */
1210 if (negate_mathfn_p (builtin_mathfn_code (t)))
1211 return negate_expr_p (CALL_EXPR_ARG (t, 0));
1215 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
1216 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
1218 tree op1 = TREE_OPERAND (t, 1);
1219 if (TREE_INT_CST_HIGH (op1) == 0
1220 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
1221 == TREE_INT_CST_LOW (op1))
1232 /* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
1233 simplification is possible.
1234 If negate_expr_p would return true for T, NULL_TREE will never be
/* NOTE(review): this is a line-numbered excerpt with elided source lines
   (case labels, braces, declarations are missing); code tokens below are
   kept byte-identical to the dump.  The function dispatches on TREE_CODE (t)
   and builds the negated tree for each constant/expression kind.  */
1238 fold_negate_expr (location_t loc, tree t)
1240 tree type = TREE_TYPE (t);
1243 switch (TREE_CODE (t))
1245 /* Convert - (~A) to A + 1. */
1247 if (INTEGRAL_TYPE_P (type))
1248 return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
1249 build_int_cst (type, 1));
/* Integer constant: fold only when negation did not newly set the
   overflow flag, unless overflow does not trap for this type.  */
1253 tem = fold_negate_const (t, type);
1254 if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
1255 || !TYPE_OVERFLOW_TRAPS (type))
1260 tem = fold_negate_const (t, type);
1261 /* Two's complement FP formats, such as c4x, may overflow. */
1262 if (!TREE_OVERFLOW (tem) || !flag_trapping_math)
1267 tem = fold_negate_const (t, type);
/* Complex constant: negate both parts; only rebuild when each part
   folded to a constant (REAL_CST or INTEGER_CST).  */
1272 tree rpart = negate_expr (TREE_REALPART (t));
1273 tree ipart = negate_expr (TREE_IMAGPART (t));
1275 if ((TREE_CODE (rpart) == REAL_CST
1276 && TREE_CODE (ipart) == REAL_CST)
1277 || (TREE_CODE (rpart) == INTEGER_CST
1278 && TREE_CODE (ipart) == INTEGER_CST))
1279 return build_complex (type, rpart, ipart);
1284 if (negate_expr_p (t))
1285 return fold_build2_loc (loc, COMPLEX_EXPR, type,
1286 fold_negate_expr (loc, TREE_OPERAND (t, 0)),
1287 fold_negate_expr (loc, TREE_OPERAND (t, 1)));
1291 if (negate_expr_p (t))
1292 return fold_build1_loc (loc, CONJ_EXPR, type,
1293 fold_negate_expr (loc, TREE_OPERAND (t, 0)));
/* - (-A) -> A.  */
1297 return TREE_OPERAND (t, 0);
1300 if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
1301 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
1303 /* -(A + B) -> (-B) - A. */
1304 if (negate_expr_p (TREE_OPERAND (t, 1))
1305 && reorder_operands_p (TREE_OPERAND (t, 0),
1306 TREE_OPERAND (t, 1)))
1308 tem = negate_expr (TREE_OPERAND (t, 1));
1309 return fold_build2_loc (loc, MINUS_EXPR, type,
1310 tem, TREE_OPERAND (t, 0));
1313 /* -(A + B) -> (-A) - B. */
1314 if (negate_expr_p (TREE_OPERAND (t, 0)))
1316 tem = negate_expr (TREE_OPERAND (t, 0));
1317 return fold_build2_loc (loc, MINUS_EXPR, type,
1318 tem, TREE_OPERAND (t, 1));
1324 /* - (A - B) -> B - A */
1325 if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
1326 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
1327 && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
1328 return fold_build2_loc (loc, MINUS_EXPR, type,
1329 TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
/* Presumably a MULT/DIV case follows: unsigned types are skipped
   because negation there wraps — TODO confirm against full source.  */
1333 if (TYPE_UNSIGNED (type))
1339 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
1341 tem = TREE_OPERAND (t, 1);
1342 if (negate_expr_p (tem))
1343 return fold_build2_loc (loc, TREE_CODE (t), type,
1344 TREE_OPERAND (t, 0), negate_expr (tem));
1345 tem = TREE_OPERAND (t, 0);
1346 if (negate_expr_p (tem))
1347 return fold_build2_loc (loc, TREE_CODE (t), type,
1348 negate_expr (tem), TREE_OPERAND (t, 1));
1352 case TRUNC_DIV_EXPR:
1353 case ROUND_DIV_EXPR:
1354 case FLOOR_DIV_EXPR:
1356 case EXACT_DIV_EXPR:
1357 /* In general we can't negate A / B, because if A is INT_MIN and
1358 B is 1, we may turn this into INT_MIN / -1 which is undefined
1359 and actually traps on some architectures. But if overflow is
1360 undefined, we can negate, because - (INT_MIN / 1) is an
1362 if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
1364 const char * const warnmsg = G_("assuming signed overflow does not "
1365 "occur when negating a division");
1366 tem = TREE_OPERAND (t, 1);
1367 if (negate_expr_p (tem))
1369 if (INTEGRAL_TYPE_P (type)
1370 && (TREE_CODE (tem) != INTEGER_CST
1371 || integer_onep (tem)))
1372 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
1373 return fold_build2_loc (loc, TREE_CODE (t), type,
1374 TREE_OPERAND (t, 0), negate_expr (tem));
1376 tem = TREE_OPERAND (t, 0);
1377 if (negate_expr_p (tem))
1379 if (INTEGRAL_TYPE_P (type)
1380 && (TREE_CODE (tem) != INTEGER_CST
1381 || tree_int_cst_equal (tem, TYPE_MIN_VALUE (type))))
1382 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
1383 return fold_build2_loc (loc, TREE_CODE (t), type,
1384 negate_expr (tem), TREE_OPERAND (t, 1));
1390 /* Convert -((double)float) into (double)(-float). */
1391 if (TREE_CODE (type) == REAL_TYPE)
1393 tem = strip_float_extensions (t);
1394 if (tem != t && negate_expr_p (tem))
1395 return fold_convert_loc (loc, type, negate_expr (tem));
1400 /* Negate -f(x) as f(-x). */
1401 if (negate_mathfn_p (builtin_mathfn_code (t))
1402 && negate_expr_p (CALL_EXPR_ARG (t, 0)))
1406 fndecl = get_callee_fndecl (t);
1407 arg = negate_expr (CALL_EXPR_ARG (t, 0));
1408 return build_call_expr_loc (loc, fndecl, 1, arg);
1413 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
1414 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
1416 tree op1 = TREE_OPERAND (t, 1);
/* Only when the shift count equals precision-1, i.e. the result is
   0 or -1 (0 or 1 unsigned), so flipping signedness negates it.  */
1417 if (TREE_INT_CST_HIGH (op1) == 0
1418 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
1419 == TREE_INT_CST_LOW (op1))
1421 tree ntype = TYPE_UNSIGNED (type)
1422 ? signed_type_for (type)
1423 : unsigned_type_for (type);
1424 tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
1425 temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
1426 return fold_convert_loc (loc, type, temp);
1438 /* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T can not be
1439 negated in a simpler way. Also allow for T to be NULL_TREE, in which case
1440 return NULL_TREE. */
/* NOTE(review): line-numbered excerpt with elided lines (return type,
   declarations, braces missing); code tokens kept byte-identical.  */
1443 negate_expr (tree t)
1451 loc = EXPR_LOCATION (t);
1452 type = TREE_TYPE (t);
/* STRIP_SIGN_NOPS may change T's type; the original TYPE is saved above
   so the result can be converted back at the end.  */
1453 STRIP_SIGN_NOPS (t);
1455 tem = fold_negate_expr (loc, t);
/* If folding failed (presumably TEM is NULL here — confirm against full
   source), build an explicit NEGATE_EXPR instead.  */
1458 tem = build1 (NEGATE_EXPR, TREE_TYPE (t), t);
1459 SET_EXPR_LOCATION (tem, loc);
1461 return fold_convert_loc (loc, type, tem);
1464 /* Split a tree IN into a constant, literal and variable parts that could be
1465 combined with CODE to make IN. "constant" means an expression with
1466 TREE_CONSTANT but that isn't an actual constant. CODE must be a
1467 commutative arithmetic operation. Store the constant part into *CONP,
1468 the literal in *LITP and return the variable part. If a part isn't
1469 present, set it to null. If the tree does not decompose in this way,
1470 return the entire tree as the variable part and the other parts as null.
1472 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR. In that
1473 case, we negate an operand that was subtracted. Except if it is a
1474 literal for which we use *MINUS_LITP instead.
1476 If NEGATE_P is true, we are negating all of IN, again except a literal
1477 for which we use *MINUS_LITP instead.
1479 If IN is itself a literal or constant, return it as appropriate.
1481 Note that we do not guarantee that any of the three values will be the
1482 same type as IN, but they will have the same signedness and mode. */
/* NOTE(review): line-numbered excerpt with elided lines; code tokens
   kept byte-identical.  */
1485 split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
1486 tree *minus_litp, int negate_p)
1494 /* Strip any conversions that don't change the machine mode or signedness. */
1495 STRIP_SIGN_NOPS (in);
/* IN itself a literal: presumably stored to *LITP in the elided line.  */
1497 if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
1498 || TREE_CODE (in) == FIXED_CST)
1500 else if (TREE_CODE (in) == code
1501 || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
1502 && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
1503 /* We can associate addition and subtraction together (even
1504 though the C standard doesn't say so) for integers because
1505 the value is not affected. For reals, the value might be
1506 affected, so we can't. */
1507 && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
1508 || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
1510 tree op0 = TREE_OPERAND (in, 0);
1511 tree op1 = TREE_OPERAND (in, 1);
/* neg1_p: operand 1 was subtracted, so whichever part it lands in
   must record a pending negation.  */
1512 int neg1_p = TREE_CODE (in) == MINUS_EXPR;
1513 int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;
1515 /* First see if either of the operands is a literal, then a constant. */
1516 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
1517 || TREE_CODE (op0) == FIXED_CST)
1518 *litp = op0, op0 = 0;
1519 else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
1520 || TREE_CODE (op1) == FIXED_CST)
1521 *litp = op1, neg_litp_p = neg1_p, op1 = 0;
1523 if (op0 != 0 && TREE_CONSTANT (op0))
1524 *conp = op0, op0 = 0;
1525 else if (op1 != 0 && TREE_CONSTANT (op1))
1526 *conp = op1, neg_conp_p = neg1_p, op1 = 0;
1528 /* If we haven't dealt with either operand, this is not a case we can
1529 decompose. Otherwise, VAR is either of the ones remaining, if any. */
1530 if (op0 != 0 && op1 != 0)
1535 var = op1, neg_var_p = neg1_p;
1537 /* Now do any needed negations. */
1539 *minus_litp = *litp, *litp = 0;
1541 *conp = negate_expr (*conp);
1543 var = negate_expr (var);
1545 else if (TREE_CONSTANT (in))
/* NEGATE_P handling: a literal swaps between *LITP and *MINUS_LITP
   instead of being wrapped in a negation.  */
1553 *minus_litp = *litp, *litp = 0;
1554 else if (*minus_litp)
1555 *litp = *minus_litp, *minus_litp = 0;
1556 *conp = negate_expr (*conp);
1557 var = negate_expr (var);
1563 /* Re-associate trees split by the above function. T1 and T2 are
1564 either expressions to associate or null. Return the new
1565 expression, if any. LOC is the location of the new expression. If
1566 we build an operation, do it in TYPE and with CODE. */
/* NOTE(review): line-numbered excerpt with elided lines (null checks and
   braces missing); code tokens kept byte-identical.  */
1569 associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
1578 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
1579 try to fold this since we will have infinite recursion. But do
1580 deal with any NEGATE_EXPRs. */
1581 if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
1582 || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
1584 if (code == PLUS_EXPR)
/* X + (-Y) is rebuilt as X - Y (and symmetrically), using bare
   build2 to avoid recursing into fold.  */
1586 if (TREE_CODE (t1) == NEGATE_EXPR)
1587 tem = build2 (MINUS_EXPR, type, fold_convert_loc (loc, type, t2),
1588 fold_convert_loc (loc, type, TREE_OPERAND (t1, 0)));
1589 else if (TREE_CODE (t2) == NEGATE_EXPR)
1590 tem = build2 (MINUS_EXPR, type, fold_convert_loc (loc, type, t1),
1591 fold_convert_loc (loc, type, TREE_OPERAND (t2, 0)));
1592 else if (integer_zerop (t2))
1593 return fold_convert_loc (loc, type, t1);
1595 else if (code == MINUS_EXPR)
1597 if (integer_zerop (t2))
1598 return fold_convert_loc (loc, type, t1);
1601 tem = build2 (code, type, fold_convert_loc (loc, type, t1),
1602 fold_convert_loc (loc, type, t2));
1603 goto associate_trees_exit;
/* General case: safe to fold, no recursion hazard.  */
1606 return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
1607 fold_convert_loc (loc, type, t2));
1608 associate_trees_exit:
1609 protected_set_expr_location (tem, loc);
1613 /* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
1614 for use in int_const_binop, size_binop and size_diffop. */
/* NOTE(review): line-numbered excerpt; early-return bodies for the two
   guard conditions are elided.  Code tokens kept byte-identical.  */
1617 int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
1619 if (TREE_CODE (type1) != INTEGER_TYPE && !POINTER_TYPE_P (type1))
1621 if (TREE_CODE (type2) != INTEGER_TYPE && !POINTER_TYPE_P (type2))
/* Equivalent means same signedness, precision and machine mode.  */
1636 return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
1637 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
1638 && TYPE_MODE (type1) == TYPE_MODE (type2);
1642 /* Combine two integer constants ARG1 and ARG2 under operation CODE
1643 to produce a new constant. Return NULL_TREE if we don't know how
1644 to evaluate CODE at compile-time.
1646 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
/* NOTE(review): line-numbered excerpt with elided lines (case labels,
   breaks, braces missing); code tokens kept byte-identical.  Constants are
   held as double-word (low/high) pairs and combined with the
   add_double/mul_double/div_and_round_double helpers.  */
1649 int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2, int notrunc)
1651 unsigned HOST_WIDE_INT int1l, int2l;
1652 HOST_WIDE_INT int1h, int2h;
1653 unsigned HOST_WIDE_INT low;
1655 unsigned HOST_WIDE_INT garbagel;
1656 HOST_WIDE_INT garbageh;
1658 tree type = TREE_TYPE (arg1);
1659 int uns = TYPE_UNSIGNED (type);
1661 = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
1664 int1l = TREE_INT_CST_LOW (arg1);
1665 int1h = TREE_INT_CST_HIGH (arg1);
1666 int2l = TREE_INT_CST_LOW (arg2);
1667 int2h = TREE_INT_CST_HIGH (arg2);
/* Bitwise cases: word-wise OR/XOR/AND on the two halves.  */
1672 low = int1l | int2l, hi = int1h | int2h;
1676 low = int1l ^ int2l, hi = int1h ^ int2h;
1680 low = int1l & int2l, hi = int1h & int2h;
1686 /* It's unclear from the C standard whether shifts can overflow.
1687 The following code ignores overflow; perhaps a C standard
1688 interpretation ruling is needed. */
1689 lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
1696 lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
1701 overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
/* MINUS is implemented as add of the negation; overflow is rederived
   from the sign pattern of the operands and result.  */
1705 neg_double (int2l, int2h, &low, &hi);
1706 add_double (int1l, int1h, low, hi, &low, &hi);
1707 overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
1711 overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
1714 case TRUNC_DIV_EXPR:
1715 case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
1716 case EXACT_DIV_EXPR:
1717 /* This is a shortcut for a common special case. */
1718 if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1719 && !TREE_OVERFLOW (arg1)
1720 && !TREE_OVERFLOW (arg2)
1721 && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1723 if (code == CEIL_DIV_EXPR)
1726 low = int1l / int2l, hi = 0;
1730 /* ... fall through ... */
1732 case ROUND_DIV_EXPR:
/* Division by zero: presumably returns NULL_TREE in the elided body
   — confirm against full source.  */
1733 if (int2h == 0 && int2l == 0)
1735 if (int2h == 0 && int2l == 1)
1737 low = int1l, hi = int1h;
1740 if (int1l == int2l && int1h == int2h
1741 && ! (int1l == 0 && int1h == 0))
1746 overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
1747 &low, &hi, &garbagel, &garbageh);
1750 case TRUNC_MOD_EXPR:
1751 case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
1752 /* This is a shortcut for a common special case. */
1753 if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1754 && !TREE_OVERFLOW (arg1)
1755 && !TREE_OVERFLOW (arg2)
1756 && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1758 if (code == CEIL_MOD_EXPR)
1760 low = int1l % int2l, hi = 0;
1764 /* ... fall through ... */
1766 case ROUND_MOD_EXPR:
1767 if (int2h == 0 && int2l == 0)
/* For MOD the quotient halves are the "garbage" outputs.  */
1769 overflow = div_and_round_double (code, uns,
1770 int1l, int1h, int2l, int2h,
1771 &garbagel, &garbageh, &low, &hi);
/* MIN/MAX: LOW temporarily holds the arg1 < arg2 comparison result,
   unsigned or signed per UNS.  */
1777 low = (((unsigned HOST_WIDE_INT) int1h
1778 < (unsigned HOST_WIDE_INT) int2h)
1779 || (((unsigned HOST_WIDE_INT) int1h
1780 == (unsigned HOST_WIDE_INT) int2h)
1783 low = (int1h < int2h
1784 || (int1h == int2h && int1l < int2l));
1786 if (low == (code == MIN_EXPR))
1787 low = int1l, hi = int1h;
1789 low = int2l, hi = int2h;
1798 t = build_int_cst_wide (TREE_TYPE (arg1), low, hi);
1800 /* Propagate overflow flags ourselves. */
1801 if (((!uns || is_sizetype) && overflow)
1802 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
1805 TREE_OVERFLOW (t) = 1;
/* !NOTRUNC path: let force_fit_type_double truncate/sign-extend and
   set the overflow flag in one step.  */
1809 t = force_fit_type_double (TREE_TYPE (arg1), low, hi, 1,
1810 ((!uns || is_sizetype) && overflow)
1811 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
1816 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1817 constant. We assume ARG1 and ARG2 have the same data type, or at least
1818 are the same kind of constant and the same machine mode. Return zero if
1819 combining the constants is not allowed in the current operating mode.
1821 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
/* NOTE(review): line-numbered excerpt with elided lines (case labels,
   braces, declarations missing); code tokens kept byte-identical.
   Dispatches on the constant kind of ARG1: INTEGER_CST, REAL_CST,
   FIXED_CST, COMPLEX_CST, VECTOR_CST.  */
1824 const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
1826 /* Sanity check for the recursive cases. */
1833 if (TREE_CODE (arg1) == INTEGER_CST)
1834 return int_const_binop (code, arg1, arg2, notrunc);
1836 if (TREE_CODE (arg1) == REAL_CST)
1838 enum machine_mode mode;
1841 REAL_VALUE_TYPE value;
1842 REAL_VALUE_TYPE result;
1846 /* The following codes are handled by real_arithmetic. */
1861 d1 = TREE_REAL_CST (arg1);
1862 d2 = TREE_REAL_CST (arg2);
1864 type = TREE_TYPE (arg1);
1865 mode = TYPE_MODE (type);
1867 /* Don't perform operation if we honor signaling NaNs and
1868 either operand is a NaN. */
1869 if (HONOR_SNANS (mode)
1870 && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
1873 /* Don't perform operation if it would raise a division
1874 by zero exception. */
1875 if (code == RDIV_EXPR
1876 && REAL_VALUES_EQUAL (d2, dconst0)
1877 && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
1880 /* If either operand is a NaN, just return it. Otherwise, set up
1881 for floating-point trap; we return an overflow. */
1882 if (REAL_VALUE_ISNAN (d1))
1884 else if (REAL_VALUE_ISNAN (d2))
1887 inexact = real_arithmetic (&value, code, &d1, &d2);
1888 real_convert (&result, mode, &value);
1890 /* Don't constant fold this floating point operation if
1891 the result has overflowed and flag_trapping_math. */
1892 if (flag_trapping_math
1893 && MODE_HAS_INFINITIES (mode)
1894 && REAL_VALUE_ISINF (result)
1895 && !REAL_VALUE_ISINF (d1)
1896 && !REAL_VALUE_ISINF (d2))
1899 /* Don't constant fold this floating point operation if the
1900 result may dependent upon the run-time rounding mode and
1901 flag_rounding_math is set, or if GCC's software emulation
1902 is unable to accurately represent the result. */
1903 if ((flag_rounding_math
1904 || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
1905 && (inexact || !real_identical (&result, &value)))
1908 t = build_real (type, result);
1910 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
1914 if (TREE_CODE (arg1) == FIXED_CST)
1916 FIXED_VALUE_TYPE f1;
1917 FIXED_VALUE_TYPE f2;
1918 FIXED_VALUE_TYPE result;
1923 /* The following codes are handled by fixed_arithmetic. */
1929 case TRUNC_DIV_EXPR:
1930 f2 = TREE_FIXED_CST (arg2);
/* Presumably a shift case: the count comes from an INTEGER_CST
   operand, widened into a FIXED_VALUE — confirm against full source.  */
1935 f2.data.high = TREE_INT_CST_HIGH (arg2);
1936 f2.data.low = TREE_INT_CST_LOW (arg2);
1944 f1 = TREE_FIXED_CST (arg1);
1945 type = TREE_TYPE (arg1);
1946 sat_p = TYPE_SATURATING (type);
1947 overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
1948 t = build_fixed (type, result);
1949 /* Propagate overflow flags. */
1950 if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
1951 TREE_OVERFLOW (t) = 1;
1955 if (TREE_CODE (arg1) == COMPLEX_CST)
1957 tree type = TREE_TYPE (arg1);
1958 tree r1 = TREE_REALPART (arg1);
1959 tree i1 = TREE_IMAGPART (arg1);
1960 tree r2 = TREE_REALPART (arg2);
1961 tree i2 = TREE_IMAGPART (arg2);
/* Component-wise case (presumably PLUS/MINUS).  */
1968 real = const_binop (code, r1, r2, notrunc);
1969 imag = const_binop (code, i1, i2, notrunc);
/* Complex multiply: floating complex goes through MPC for correct
   nonfinite semantics; otherwise textbook (ac-bd, ad+bc).  */
1973 if (COMPLEX_FLOAT_TYPE_P (type))
1974 return do_mpc_arg2 (arg1, arg2, type,
1975 /* do_nonfinite= */ folding_initializer,
1978 real = const_binop (MINUS_EXPR,
1979 const_binop (MULT_EXPR, r1, r2, notrunc),
1980 const_binop (MULT_EXPR, i1, i2, notrunc),
1982 imag = const_binop (PLUS_EXPR,
1983 const_binop (MULT_EXPR, r1, i2, notrunc),
1984 const_binop (MULT_EXPR, i1, r2, notrunc),
1989 if (COMPLEX_FLOAT_TYPE_P (type))
1990 return do_mpc_arg2 (arg1, arg2, type,
1991 /* do_nonfinite= */ folding_initializer,
1994 case TRUNC_DIV_EXPR:
1996 case FLOOR_DIV_EXPR:
1997 case ROUND_DIV_EXPR:
1998 if (flag_complex_method == 0)
2000 /* Keep this algorithm in sync with
2001 tree-complex.c:expand_complex_div_straight().
2003 Expand complex division to scalars, straightforward algorithm.
2004 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
2008 = const_binop (PLUS_EXPR,
2009 const_binop (MULT_EXPR, r2, r2, notrunc),
2010 const_binop (MULT_EXPR, i2, i2, notrunc),
2013 = const_binop (PLUS_EXPR,
2014 const_binop (MULT_EXPR, r1, r2, notrunc),
2015 const_binop (MULT_EXPR, i1, i2, notrunc),
2018 = const_binop (MINUS_EXPR,
2019 const_binop (MULT_EXPR, i1, r2, notrunc),
2020 const_binop (MULT_EXPR, r1, i2, notrunc),
2023 real = const_binop (code, t1, magsquared, notrunc);
2024 imag = const_binop (code, t2, magsquared, notrunc);
2028 /* Keep this algorithm in sync with
2029 tree-complex.c:expand_complex_div_wide().
2031 Expand complex division to scalars, modified algorithm to minimize
2032 overflow with wide input ranges. */
2033 tree compare = fold_build2 (LT_EXPR, boolean_type_node,
2034 fold_abs_const (r2, TREE_TYPE (type)),
2035 fold_abs_const (i2, TREE_TYPE (type)));
2037 if (integer_nonzerop (compare))
2039 /* In the TRUE branch, we compute
2041 div = (br * ratio) + bi;
2042 tr = (ar * ratio) + ai;
2043 ti = (ai * ratio) - ar;
2046 tree ratio = const_binop (code, r2, i2, notrunc);
2047 tree div = const_binop (PLUS_EXPR, i2,
2048 const_binop (MULT_EXPR, r2, ratio,
2051 real = const_binop (MULT_EXPR, r1, ratio, notrunc);
2052 real = const_binop (PLUS_EXPR, real, i1, notrunc);
2053 real = const_binop (code, real, div, notrunc);
2055 imag = const_binop (MULT_EXPR, i1, ratio, notrunc);
2056 imag = const_binop (MINUS_EXPR, imag, r1, notrunc);
2057 imag = const_binop (code, imag, div, notrunc);
2061 /* In the FALSE branch, we compute
2063 divisor = (d * ratio) + c;
2064 tr = (b * ratio) + a;
2065 ti = b - (a * ratio);
2068 tree ratio = const_binop (code, i2, r2, notrunc);
2069 tree div = const_binop (PLUS_EXPR, r2,
2070 const_binop (MULT_EXPR, i2, ratio,
2074 real = const_binop (MULT_EXPR, i1, ratio, notrunc);
2075 real = const_binop (PLUS_EXPR, real, r1, notrunc);
2076 real = const_binop (code, real, div, notrunc);
2078 imag = const_binop (MULT_EXPR, r1, ratio, notrunc);
2079 imag = const_binop (MINUS_EXPR, i1, imag, notrunc);
2080 imag = const_binop (code, imag, div, notrunc);
2090 return build_complex (type, real, imag);
2093 if (TREE_CODE (arg1) == VECTOR_CST)
2095 tree type = TREE_TYPE(arg1);
2096 int count = TYPE_VECTOR_SUBPARTS (type), i;
2097 tree elements1, elements2, list = NULL_TREE;
2099 if(TREE_CODE(arg2) != VECTOR_CST)
2102 elements1 = TREE_VECTOR_CST_ELTS (arg1);
2103 elements2 = TREE_VECTOR_CST_ELTS (arg2);
/* Fold element-wise, consing results onto LIST in reverse.  */
2105 for (i = 0; i < count; i++)
2107 tree elem1, elem2, elem;
2109 /* The trailing elements can be empty and should be treated as 0 */
2111 elem1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2114 elem1 = TREE_VALUE(elements1);
2115 elements1 = TREE_CHAIN (elements1);
2119 elem2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2122 elem2 = TREE_VALUE(elements2);
2123 elements2 = TREE_CHAIN (elements2);
2126 elem = const_binop (code, elem1, elem2, notrunc);
2128 /* It is possible that const_binop cannot handle the given
2129 code and return NULL_TREE */
2130 if(elem == NULL_TREE)
2133 list = tree_cons (NULL_TREE, elem, list);
2135 return build_vector(type, nreverse(list));
2140 /* Create a size type INT_CST node with NUMBER sign extended. KIND
2141 indicates which particular sizetype to create. */
/* NOTE(review): excerpt; only the signature and return statement of this
   one-line wrapper over build_int_cst are visible.  */
2144 size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
2146 return build_int_cst (sizetype_tab[(int) kind], number);
2149 /* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE
2150 is a tree code. The type of the result is taken from the operands.
2151 Both must be equivalent integer types, ala int_binop_types_match_p.
2152 If the operands are constant, so is the result. */
/* NOTE(review): line-numbered excerpt with elided lines (the fast-path
   return statements are missing); code tokens kept byte-identical.  */
2155 size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
2157 tree type = TREE_TYPE (arg0);
2159 if (arg0 == error_mark_node || arg1 == error_mark_node)
2160 return error_mark_node;
2162 gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
2165 /* Handle the special case of two integer constants faster. */
2166 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2168 /* And some specific cases even faster than that. */
/* Identity shortcuts (0 + x, x + 0, x - 0, 1 * x); each guard also
   requires the dropped operand carry no overflow flag.  */
2169 if (code == PLUS_EXPR)
2171 if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
2173 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
2176 else if (code == MINUS_EXPR)
2178 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
2181 else if (code == MULT_EXPR)
2183 if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
2187 /* Handle general case of two integer constants. */
2188 return int_const_binop (code, arg0, arg1, 0);
2191 return fold_build2_loc (loc, code, type, arg0, arg1);
2194 /* Given two values, either both of sizetype or both of bitsizetype,
2195 compute the difference between the two values. Return the value
2196 in signed type corresponding to the type of the operands. */
/* NOTE(review): line-numbered excerpt with elided lines; code tokens
   kept byte-identical.  */
2199 size_diffop_loc (location_t loc, tree arg0, tree arg1)
2201 tree type = TREE_TYPE (arg0);
2204 gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
2207 /* If the type is already signed, just do the simple thing. */
2208 if (!TYPE_UNSIGNED (type))
2209 return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);
/* Pick the matching signed type: ssizetype/sbitsizetype for the two
   well-known size types, otherwise derive one.  */
2211 if (type == sizetype)
2213 else if (type == bitsizetype)
2214 ctype = sbitsizetype;
2216 ctype = signed_type_for (type);
2218 /* If either operand is not a constant, do the conversions to the signed
2219 type and subtract. The hardware will do the right thing with any
2220 overflow in the subtraction. */
2221 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
2222 return size_binop_loc (loc, MINUS_EXPR,
2223 fold_convert_loc (loc, ctype, arg0),
2224 fold_convert_loc (loc, ctype, arg1));
2226 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
2227 Otherwise, subtract the other way, convert to CTYPE (we know that can't
2228 overflow) and negate (which can't either). Special-case a result
2229 of zero while we're here. */
2230 if (tree_int_cst_equal (arg0, arg1))
2231 return build_int_cst (ctype, 0);
2232 else if (tree_int_cst_lt (arg1, arg0))
2233 return fold_convert_loc (loc, ctype,
2234 size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
2236 return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
2237 fold_convert_loc (loc, ctype,
2238 size_binop_loc (loc,
2243 /* A subroutine of fold_convert_const handling conversions of an
2244 INTEGER_CST to another integer type. */
/* NOTE(review): excerpt with elided lines (return type, declarations,
   "return t" missing); code tokens kept byte-identical.  */
2247 fold_convert_const_int_from_int (tree type, const_tree arg1)
2251 /* Given an integer constant, make new constant with new type,
2252 appropriately sign-extended or truncated. */
/* Overflow is flagged when a negative value converts to a strictly
   "more unsigned" type, or the source already overflowed.  */
2253 t = force_fit_type_double (type, TREE_INT_CST_LOW (arg1),
2254 TREE_INT_CST_HIGH (arg1),
2255 !POINTER_TYPE_P (TREE_TYPE (arg1)),
2256 (TREE_INT_CST_HIGH (arg1) < 0
2257 && (TYPE_UNSIGNED (type)
2258 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
2259 | TREE_OVERFLOW (arg1));
2264 /* A subroutine of fold_convert_const handling conversions a REAL_CST
2265 to an integer type. */
/* NOTE(review): line-numbered excerpt with elided lines (other rounding
   cases, braces, overflow assignments missing); code tokens kept
   byte-identical.  */
2268 fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
2273 /* The following code implements the floating point to integer
2274 conversion rules required by the Java Language Specification,
2275 that IEEE NaNs are mapped to zero and values that overflow
2276 the target precision saturate, i.e. values greater than
2277 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
2278 are mapped to INT_MIN. These semantics are allowed by the
2279 C and C++ standards that simply state that the behavior of
2280 FP-to-integer conversion is unspecified upon overflow. */
2282 HOST_WIDE_INT high, low;
2284 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
2288 case FIX_TRUNC_EXPR:
2289 real_trunc (&r, VOIDmode, &x);
2296 /* If R is NaN, return zero and show we have an overflow. */
2297 if (REAL_VALUE_ISNAN (r))
2304 /* See if R is less than the lower bound or greater than the
/* Below-minimum saturates to TYPE_MIN_VALUE (and presumably sets
   overflow in the elided lines — confirm against full source).  */
2309 tree lt = TYPE_MIN_VALUE (type);
2310 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
2311 if (REAL_VALUES_LESS (r, l))
2314 high = TREE_INT_CST_HIGH (lt);
2315 low = TREE_INT_CST_LOW (lt);
2321 tree ut = TYPE_MAX_VALUE (type);
2324 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
2325 if (REAL_VALUES_LESS (u, r))
2328 high = TREE_INT_CST_HIGH (ut);
2329 low = TREE_INT_CST_LOW (ut);
2335 REAL_VALUE_TO_INT (&low, &high, r);
2337 t = force_fit_type_double (type, low, high, -1,
2338 overflow | TREE_OVERFLOW (arg1));
2342 /* A subroutine of fold_convert_const handling conversions of a
2343 FIXED_CST to an integer type. */
/* NOTE(review): line-numbered excerpt with elided lines; code tokens
   kept byte-identical.  Strategy: shift the fixed-point payload right by
   FBIT to get the integer part, then round toward zero for negatives.  */
2346 fold_convert_const_int_from_fixed (tree type, const_tree arg1)
2349 double_int temp, temp_trunc;
2352 /* Right shift FIXED_CST to temp by fbit. */
2353 temp = TREE_FIXED_CST (arg1).data;
2354 mode = TREE_FIXED_CST (arg1).mode;
2355 if (GET_MODE_FBIT (mode) < 2 * HOST_BITS_PER_WIDE_INT)
/* lshift_double with a negative count performs the right shift.  */
2357 lshift_double (temp.low, temp.high,
2358 - GET_MODE_FBIT (mode), 2 * HOST_BITS_PER_WIDE_INT,
2359 &temp.low, &temp.high, SIGNED_FIXED_POINT_MODE_P (mode));
2361 /* Left shift temp to temp_trunc by fbit. */
2362 lshift_double (temp.low, temp.high,
2363 GET_MODE_FBIT (mode), 2 * HOST_BITS_PER_WIDE_INT,
2364 &temp_trunc.low, &temp_trunc.high,
2365 SIGNED_FIXED_POINT_MODE_P (mode));
2372 temp_trunc.high = 0;
2375 /* If FIXED_CST is negative, we need to round the value toward 0.
2376 By checking if the fractional bits are not zero to add 1 to temp. */
2377 if (SIGNED_FIXED_POINT_MODE_P (mode) && temp_trunc.high < 0
2378 && !double_int_equal_p (TREE_FIXED_CST (arg1).data, temp_trunc))
2383 temp = double_int_add (temp, one);
2386 /* Given a fixed-point constant, make new constant with new type,
2387 appropriately sign-extended or truncated. */
2388 t = force_fit_type_double (type, temp.low, temp.high, -1,
2390 && (TYPE_UNSIGNED (type)
2391 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
2392 | TREE_OVERFLOW (arg1));
2397 /* A subroutine of fold_convert_const handling conversions a REAL_CST
2398 to another floating point type. */
/* NOTE(review): excerpt with elided lines (return type, "return t",
   braces missing); code tokens kept byte-identical.  */
2401 fold_convert_const_real_from_real (tree type, const_tree arg1)
2403 REAL_VALUE_TYPE value;
2406 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
2407 t = build_real (type, value);
2409 /* If converting an infinity or NAN to a representation that doesn't
2410 have one, set the overflow bit so that we can produce some kind of
2411 error message at the appropriate point if necessary. It's not the
2412 most user-friendly message, but it's better than nothing. */
2413 if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
2414 && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
2415 TREE_OVERFLOW (t) = 1;
2416 else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
2417 && !MODE_HAS_NANS (TYPE_MODE (type)))
2418 TREE_OVERFLOW (t) = 1;
2419 /* Regular overflow, conversion produced an infinity in a mode that
2420 can't represent them. */
2421 else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
2422 && REAL_VALUE_ISINF (value)
2423 && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
2424 TREE_OVERFLOW (t) = 1;
/* Otherwise the overflow flag is simply inherited from the source.  */
2426 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2430 /* A subroutine of fold_convert_const handling conversions a FIXED_CST
2431 to a floating point type. */
/* NOTE(review): excerpt; conversion is delegated entirely to
   real_convert_from_fixed, and the source overflow flag is copied.  */
2434 fold_convert_const_real_from_fixed (tree type, const_tree arg1)
2436 REAL_VALUE_TYPE value;
2439 real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
2440 t = build_real (type, value);
2442 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2446 /* A subroutine of fold_convert_const handling conversions a FIXED_CST
2447 to another fixed-point type. */
/* NOTE(review): excerpt; fixed_convert honors the target type's
   saturating flag, and overflow from either conversion or source is
   propagated onto the result.  */
2450 fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
2452 FIXED_VALUE_TYPE value;
2456 overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
2457 TYPE_SATURATING (type));
2458 t = build_fixed (type, value);
2460 /* Propagate overflow flags. */
2461 if (overflow_p | TREE_OVERFLOW (arg1))
2462 TREE_OVERFLOW (t) = 1;
2466 /* A subroutine of fold_convert_const handling conversions an INTEGER_CST
2467 to a fixed-point type. */
/* NOTE(review): excerpt; same propagation pattern as the fixed-from-fixed
   case, using fixed_convert_from_int with the source's signedness.  */
2470 fold_convert_const_fixed_from_int (tree type, const_tree arg1)
2472 FIXED_VALUE_TYPE value;
2476 overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type),
2477 TREE_INT_CST (arg1),
2478 TYPE_UNSIGNED (TREE_TYPE (arg1)),
2479 TYPE_SATURATING (type));
2480 t = build_fixed (type, value);
2482 /* Propagate overflow flags. */
2483 if (overflow_p | TREE_OVERFLOW (arg1))
2484 TREE_OVERFLOW (t) = 1;
2488 /* A subroutine of fold_convert_const handling conversions a REAL_CST
2489 to a fixed-point type. */
/* NOTE(review): excerpt; same propagation pattern, delegating to
   fixed_convert_from_real.  */
2492 fold_convert_const_fixed_from_real (tree type, const_tree arg1)
2494 FIXED_VALUE_TYPE value;
2498 overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
2499 &TREE_REAL_CST (arg1),
2500 TYPE_SATURATING (type));
2501 t = build_fixed (type, value);
2503 /* Propagate overflow flags. */
2504 if (overflow_p | TREE_OVERFLOW (arg1))
2505 TREE_OVERFLOW (t) = 1;
2509 /* Attempt to fold type conversion operation CODE of expression ARG1 to
2510 type TYPE. If no simplification can be done return NULL_TREE. */
/* NOTE(review): excerpt with elided lines (final "return NULL_TREE"
   missing); code tokens kept byte-identical.  Pure dispatch table on
   (target type kind x constant kind) to the subroutines above.  */
2513 fold_convert_const (enum tree_code code, tree type, tree arg1)
2515 if (TREE_TYPE (arg1) == type)
2518 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
2519 || TREE_CODE (type) == OFFSET_TYPE)
2521 if (TREE_CODE (arg1) == INTEGER_CST)
2522 return fold_convert_const_int_from_int (type, arg1);
2523 else if (TREE_CODE (arg1) == REAL_CST)
2524 return fold_convert_const_int_from_real (code, type, arg1);
2525 else if (TREE_CODE (arg1) == FIXED_CST)
2526 return fold_convert_const_int_from_fixed (type, arg1);
2528 else if (TREE_CODE (type) == REAL_TYPE)
2530 if (TREE_CODE (arg1) == INTEGER_CST)
2531 return build_real_from_int_cst (type, arg1);
2532 else if (TREE_CODE (arg1) == REAL_CST)
2533 return fold_convert_const_real_from_real (type, arg1);
2534 else if (TREE_CODE (arg1) == FIXED_CST)
2535 return fold_convert_const_real_from_fixed (type, arg1);
2537 else if (TREE_CODE (type) == FIXED_POINT_TYPE)
2539 if (TREE_CODE (arg1) == FIXED_CST)
2540 return fold_convert_const_fixed_from_fixed (type, arg1);
2541 else if (TREE_CODE (arg1) == INTEGER_CST)
2542 return fold_convert_const_fixed_from_int (type, arg1);
2543 else if (TREE_CODE (arg1) == REAL_CST)
2544 return fold_convert_const_fixed_from_real (type, arg1);
2549 /* Construct a vector of zero elements of vector type TYPE. */
/* NOTE(review): elided excerpt -- return type, braces and the
   declarations of ELEM, LIST, UNITS and I are not visible here.  */
2552 build_zero_vector (tree type)
/* Build one zero constant of the element type, then replicate it once
   per vector lane into a TREE_LIST consumed by build_vector.  */
2557 elem = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2558 units = TYPE_VECTOR_SUBPARTS (type);
2561 for (i = 0; i < units; i++)
2562 list = tree_cons (NULL_TREE, elem, list);
2563 return build_vector (type, list);
2566 /* Returns true, if ARG is convertible to TYPE using a NOP_EXPR. */
/* NOTE(review): elided excerpt -- return type, braces, several case
   labels and return statements are not visible here.  */
2569 fold_convertible_p (const_tree type, const_tree arg)
2571 tree orig = TREE_TYPE (arg);
/* Error trees are never convertible.  */
2576 if (TREE_CODE (arg) == ERROR_MARK
2577 || TREE_CODE (type) == ERROR_MARK
2578 || TREE_CODE (orig) == ERROR_MARK)
/* Same main variant: trivially convertible (the elided body presumably
   returns true here -- confirm against the full source).  */
2581 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2584 switch (TREE_CODE (type))
2586 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2587 case POINTER_TYPE: case REFERENCE_TYPE:
2589 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2590 || TREE_CODE (orig) == OFFSET_TYPE)
/* A vector source is OK only when the bit sizes match exactly.  */
2592 return (TREE_CODE (orig) == VECTOR_TYPE
2593 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2596 case FIXED_POINT_TYPE:
2600 return TREE_CODE (type) == TREE_CODE (orig);
2607 /* Convert expression ARG to type TYPE. Used by the middle-end for
2608 simple conversions in preference to calling the front-end's convert. */
/* NOTE(review): elided excerpt -- the return type, braces, several case
   labels (e.g. POINTER_TYPE, REAL_TYPE, COMPLEX_TYPE, VECTOR_TYPE,
   VOID_TYPE appear to be missing), gcc_unreachable calls and the
   fold_convert_exit label itself are not visible here.  Read against the
   full source before changing anything.  */
2611 fold_convert_loc (location_t loc, tree type, tree arg)
2613 tree orig = TREE_TYPE (arg);
/* Bail out early on error trees.  */
2619 if (TREE_CODE (arg) == ERROR_MARK
2620 || TREE_CODE (type) == ERROR_MARK
2621 || TREE_CODE (orig) == ERROR_MARK)
2622 return error_mark_node;
/* Same main variant: a plain NOP conversion suffices.  */
2624 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2625 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2627 switch (TREE_CODE (type))
2630 case REFERENCE_TYPE:
2631 /* Handle conversions between pointers to different address spaces. */
2632 if (POINTER_TYPE_P (orig)
2633 && (TYPE_ADDR_SPACE (TREE_TYPE (type))
2634 != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
2635 return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
2638 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
/* Integral destination: try a constant fold first.  */
2640 if (TREE_CODE (arg) == INTEGER_CST)
2642 tem = fold_convert_const (NOP_EXPR, type, arg);
2643 if (tem != NULL_TREE)
2646 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2647 || TREE_CODE (orig) == OFFSET_TYPE)
2648 return fold_build1_loc (loc, NOP_EXPR, type, arg);
/* Complex source: convert only its real part.  */
2649 if (TREE_CODE (orig) == COMPLEX_TYPE)
2650 return fold_convert_loc (loc, type,
2651 fold_build1_loc (loc, REALPART_EXPR,
2652 TREE_TYPE (orig), arg));
2653 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
2654 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2655 return fold_build1_loc (loc, NOP_EXPR, type, arg);
/* (Presumably the REAL_TYPE case from here -- label elided.)  */
2658 if (TREE_CODE (arg) == INTEGER_CST)
2660 tem = fold_convert_const (FLOAT_EXPR, type, arg);
2661 if (tem != NULL_TREE)
2664 else if (TREE_CODE (arg) == REAL_CST)
2666 tem = fold_convert_const (NOP_EXPR, type, arg);
2667 if (tem != NULL_TREE)
2670 else if (TREE_CODE (arg) == FIXED_CST)
2672 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2673 if (tem != NULL_TREE)
/* Non-constant operand: choose the conversion opcode by source kind.  */
2677 switch (TREE_CODE (orig))
2680 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2681 case POINTER_TYPE: case REFERENCE_TYPE:
2682 return fold_build1_loc (loc, FLOAT_EXPR, type, arg);
2685 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2687 case FIXED_POINT_TYPE:
2688 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2691 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2692 return fold_convert_loc (loc, type, tem);
2698 case FIXED_POINT_TYPE:
2699 if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
2700 || TREE_CODE (arg) == REAL_CST)
2702 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2703 if (tem != NULL_TREE)
2704 goto fold_convert_exit;
2707 switch (TREE_CODE (orig))
2709 case FIXED_POINT_TYPE:
2714 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2717 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2718 return fold_convert_loc (loc, type, tem);
/* (Presumably the COMPLEX_TYPE case from here -- label elided.)  */
2725 switch (TREE_CODE (orig))
2728 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2729 case POINTER_TYPE: case REFERENCE_TYPE:
2731 case FIXED_POINT_TYPE:
/* Scalar -> complex: pair the converted value with a zero imaginary
   part.  */
2732 return fold_build2_loc (loc, COMPLEX_EXPR, type,
2733 fold_convert_loc (loc, TREE_TYPE (type), arg),
2734 fold_convert_loc (loc, TREE_TYPE (type),
2735 integer_zero_node));
/* Complex -> complex: convert parts individually; reuse the operands
   directly when ARG is already a COMPLEX_EXPR.  */
2740 if (TREE_CODE (arg) == COMPLEX_EXPR)
2742 rpart = fold_convert_loc (loc, TREE_TYPE (type),
2743 TREE_OPERAND (arg, 0));
2744 ipart = fold_convert_loc (loc, TREE_TYPE (type),
2745 TREE_OPERAND (arg, 1));
2746 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
/* Otherwise wrap ARG in a SAVE_EXPR so it is evaluated once even though
   both the real and imaginary parts reference it.  */
2749 arg = save_expr (arg);
2750 rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2751 ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
2752 rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
2753 ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
2754 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
/* (Presumably the VECTOR_TYPE case from here -- label elided.)  */
2762 if (integer_zerop (arg))
2763 return build_zero_vector (type);
2764 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2765 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2766 || TREE_CODE (orig) == VECTOR_TYPE);
2767 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
/* (Presumably the VOID_TYPE case from here -- label elided.)  */
2770 tem = fold_ignored_result (arg);
2771 if (TREE_CODE (tem) == MODIFY_EXPR)
2772 goto fold_convert_exit;
2773 return fold_build1_loc (loc, NOP_EXPR, type, tem);
2779 protected_set_expr_location (tem, loc);
2783 /* Return false if expr can be assumed not to be an lvalue, true
/* NOTE(review): elided excerpt -- most case labels of the switch and the
   return statements are not visible here.  */
2787 maybe_lvalue_p (const_tree x)
2789 /* We only need to wrap lvalue tree codes. */
2790 switch (TREE_CODE (x))
2801 case ALIGN_INDIRECT_REF:
2802 case MISALIGNED_INDIRECT_REF:
2804 case ARRAY_RANGE_REF:
2810 case PREINCREMENT_EXPR:
2811 case PREDECREMENT_EXPR:
2813 case TRY_CATCH_EXPR:
2814 case WITH_CLEANUP_EXPR:
2823 /* Assume the worst for front-end tree codes. */
2824 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2832 /* Return an expr equal to X but certainly not valid as an lvalue. */
/* NOTE(review): elided excerpt -- return type, braces, the in-GIMPLE
   early return and the final `return x;' are not visible here.  */
2835 non_lvalue_loc (location_t loc, tree x)
2837 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
/* Only wrap codes that could actually be lvalues; everything else is
   already safe to return as-is.  */
2842 if (! maybe_lvalue_p (x))
2844 x = build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
2845 SET_EXPR_LOCATION (x, loc);
2849 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2850 Zero means allow extended lvalues. */
/* Global mode flag consulted by pedantic_non_lvalue_loc below.  */
2852 int pedantic_lvalues;
2854 /* When pedantic, return an expr equal to X but certainly not valid as a
2855 pedantic lvalue. Otherwise, return X. */
/* NOTE(review): elided excerpt -- return type, braces and the final
   `return x;' are not visible here.  */
2858 pedantic_non_lvalue_loc (location_t loc, tree x)
2860 if (pedantic_lvalues)
2861 return non_lvalue_loc (loc, x);
/* Non-pedantic mode: keep X but pin the requested location on it.  */
2862 protected_set_expr_location (x, loc);
2866 /* Given a tree comparison code, return the code that is the logical inverse
2867 of the given code. It is not safe to do this for floating-point
2868 comparisons, except for NE_EXPR and EQ_EXPR, so we receive a machine mode
2869 as well: if reversing the comparison is unsafe, return ERROR_MARK. */
/* NOTE(review): elided excerpt -- the switch statement, most case labels
   and the trapping-math bailout body are not visible here.  */
2872 invert_tree_comparison (enum tree_code code, bool honor_nans)
/* With trapping math, inverting may change trap behavior (presumably the
   elided body returns ERROR_MARK except for EQ/NE -- confirm against the
   full source).  */
2874 if (honor_nans && flag_trapping_math)
/* Ordered comparisons invert to their unordered counterparts when NaNs
   must be honored (e.g. !(a > b) is a UNLE b, not a <= b).  */
2884 return honor_nans ? UNLE_EXPR : LE_EXPR;
2886 return honor_nans ? UNLT_EXPR : LT_EXPR;
2888 return honor_nans ? UNGE_EXPR : GE_EXPR;
2890 return honor_nans ? UNGT_EXPR : GT_EXPR;
2904 return UNORDERED_EXPR;
2905 case UNORDERED_EXPR:
2906 return ORDERED_EXPR;
2912 /* Similar, but return the comparison that results if the operands are
2913 swapped. This is safe for floating-point. */
/* NOTE(review): elided excerpt -- return type, braces, the switch and
   nearly all case labels / returns are not visible here.  */
2916 swap_tree_comparison (enum tree_code code)
2923 case UNORDERED_EXPR:
2949 /* Convert a comparison tree code from an enum tree_code representation
2950 into a compcode bit-based encoding. This function is the inverse of
2951 compcode_to_comparison. */
/* NOTE(review): elided excerpt -- the switch statement, the ordered-
   comparison cases and several case labels are not visible here.  */
2953 static enum comparison_code
2954 comparison_to_compcode (enum tree_code code)
2971 return COMPCODE_ORD;
2972 case UNORDERED_EXPR:
2973 return COMPCODE_UNORD;
2975 return COMPCODE_UNLT;
2977 return COMPCODE_UNEQ;
2979 return COMPCODE_UNLE;
2981 return COMPCODE_UNGT;
2983 return COMPCODE_LTGT;
2985 return COMPCODE_UNGE;
2991 /* Convert a compcode bit-based encoding of a comparison operator back
2992 to GCC's enum tree_code representation. This function is the
2993 inverse of comparison_to_compcode. */
/* NOTE(review): elided excerpt -- the switch statement and most case
   labels are not visible here.  */
2995 static enum tree_code
2996 compcode_to_comparison (enum comparison_code code)
3013 return ORDERED_EXPR;
3014 case COMPCODE_UNORD:
3015 return UNORDERED_EXPR;
3033 /* Return a tree for the comparison which is the combination of
3034 doing the AND or OR (depending on CODE) of the two operations LCODE
3035 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
3036 the possibility of trapping if the mode has NaNs, and return NULL_TREE
3037 if this makes the transformation invalid. */
/* NOTE(review): elided excerpt -- return type, braces, the switch on
   CODE's default case, some bailout `return NULL_TREE;' statements and
   the declaration of COMPCODE are not visible here.  */
3040 combine_comparisons (location_t loc,
3041 enum tree_code code, enum tree_code lcode,
3042 enum tree_code rcode, tree truth_type,
3043 tree ll_arg, tree lr_arg)
3045 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
3046 enum comparison_code lcompcode = comparison_to_compcode (lcode);
3047 enum comparison_code rcompcode = comparison_to_compcode (rcode);
/* In the bit-based encoding, AND/OR of the comparisons is exactly
   bitwise AND/OR of their compcodes.  */
3052 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
3053 compcode = lcompcode & rcompcode;
3056 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
3057 compcode = lcompcode | rcompcode;
3066 /* Eliminate unordered comparisons, as well as LTGT and ORD
3067 which are not used unless the mode has NaNs. */
3068 compcode &= ~COMPCODE_UNORD;
3069 if (compcode == COMPCODE_LTGT)
3070 compcode = COMPCODE_NE;
3071 else if (compcode == COMPCODE_ORD)
3072 compcode = COMPCODE_TRUE;
3074 else if (flag_trapping_math)
3076 /* Check that the original operation and the optimized ones will trap
3077 under the same condition. */
3078 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
3079 && (lcompcode != COMPCODE_EQ)
3080 && (lcompcode != COMPCODE_ORD);
3081 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
3082 && (rcompcode != COMPCODE_EQ)
3083 && (rcompcode != COMPCODE_ORD);
3084 bool trap = (compcode & COMPCODE_UNORD) == 0
3085 && (compcode != COMPCODE_EQ)
3086 && (compcode != COMPCODE_ORD);
3088 /* In a short-circuited boolean expression the LHS might be
3089 such that the RHS, if evaluated, will never trap. For
3090 example, in ORD (x, y) && (x < y), we evaluate the RHS only
3091 if neither x nor y is NaN. (This is a mixed blessing: for
3092 example, the expression above will never trap, hence
3093 optimizing it to x < y would be invalid). */
3094 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
3095 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
3098 /* If the comparison was short-circuited, and only the RHS
3099 trapped, we may now generate a spurious trap. */
3101 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
3104 /* If we changed the conditions that cause a trap, we lose. */
3105 if ((ltrap || rtrap) != trap)
/* Degenerate results fold to boolean constants.  */
3109 if (compcode == COMPCODE_TRUE)
3110 return constant_boolean_node (true, truth_type);
3111 else if (compcode == COMPCODE_FALSE)
3112 return constant_boolean_node (false, truth_type);
/* Otherwise map the combined compcode back to a tree code and build
   the single comparison that replaces both.  */
3115 enum tree_code tcode;
3117 tcode = compcode_to_comparison ((enum comparison_code) compcode);
3118 return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
3122 /* Return nonzero if two operands (typically of the same tree node)
3123 are necessarily equal. If either argument has side-effects this
3124 function returns zero. FLAGS modifies behavior as follows:
3126 If OEP_ONLY_CONST is set, only return nonzero for constants.
3127 This function tests whether the operands are indistinguishable;
3128 it does not test whether they are equal using C's == operation.
3129 The distinction is important for IEEE floating point, because
3130 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
3131 (2) two NaNs may be indistinguishable, but NaN!=NaN.
3133 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
3134 even though it may hold multiple values during a function.
3135 This is because a GCC tree node guarantees that nothing else is
3136 executed between the evaluation of its "operands" (which may often
3137 be evaluated in arbitrary order). Hence if the operands themselves
3138 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
3139 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
3140 unset means assuming isochronic (or instantaneous) tree equivalence.
3141 Unless comparing arbitrary expression trees, such as from different
3142 statements, this flag can usually be left unset.
3144 If OEP_PURE_SAME is set, then pure functions with identical arguments
3145 are considered the same. It is used when the caller has other ways
3146 to ensure that global memory is unchanged in between. */
/* NOTE(review): elided excerpt -- return type, braces, STRIP_NOPS calls,
   many case labels, `return 0;' statements and some recursion bodies are
   not visible here.  Do not modify without consulting the full source.  */
3149 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
3151 /* If either is ERROR_MARK, they aren't equal. */
3152 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
3153 || TREE_TYPE (arg0) == error_mark_node
3154 || TREE_TYPE (arg1) == error_mark_node)
3157 /* Similar, if either does not have a type (like a released SSA name),
3158 they aren't equal. */
3159 if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
3162 /* Check equality of integer constants before bailing out due to
3163 precision differences. */
3164 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
3165 return tree_int_cst_equal (arg0, arg1);
3167 /* If both types don't have the same signedness, then we can't consider
3168 them equal. We must check this before the STRIP_NOPS calls
3169 because they may change the signedness of the arguments. As pointers
3170 strictly don't have a signedness, require either two pointers or
3171 two non-pointers as well. */
3172 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
3173 || POINTER_TYPE_P (TREE_TYPE (arg0)) != POINTER_TYPE_P (TREE_TYPE (arg1)))
3176 /* We cannot consider pointers to different address space equal. */
3177 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && POINTER_TYPE_P (TREE_TYPE (arg1))
3178 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
3179 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
3182 /* If both types don't have the same precision, then it is not safe
3184 if (TYPE_PRECISION (TREE_TYPE (arg0)) != TYPE_PRECISION (TREE_TYPE (arg1)))
3190 /* In case both args are comparisons but with different comparison
3191 code, try to swap the comparison operands of one arg to produce
3192 a match and compare that variant. */
3193 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3194 && COMPARISON_CLASS_P (arg0)
3195 && COMPARISON_CLASS_P (arg1))
3197 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
3199 if (TREE_CODE (arg0) == swap_code)
3200 return operand_equal_p (TREE_OPERAND (arg0, 0),
3201 TREE_OPERAND (arg1, 1), flags)
3202 && operand_equal_p (TREE_OPERAND (arg0, 1),
3203 TREE_OPERAND (arg1, 0), flags);
3206 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3207 /* This is needed for conversions and for COMPONENT_REF.
3208 Might as well play it safe and always test this. */
3209 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
3210 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
3211 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
3214 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
3215 We don't care about side effects in that case because the SAVE_EXPR
3216 takes care of that for us. In all other cases, two expressions are
3217 equal if they have no side effects. If we have two identical
3218 expressions with side effects that should be treated the same due
3219 to the only side effects being identical SAVE_EXPR's, that will
3220 be detected in the recursive calls below. */
3221 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
3222 && (TREE_CODE (arg0) == SAVE_EXPR
3223 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
3226 /* Next handle constant cases, those for which we can return 1 even
3227 if ONLY_CONST is set. */
3228 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
3229 switch (TREE_CODE (arg0))
3232 return tree_int_cst_equal (arg0, arg1);
3235 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
3236 TREE_FIXED_CST (arg1));
3239 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
3240 TREE_REAL_CST (arg1)))
3244 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
3246 /* If we do not distinguish between signed and unsigned zero,
3247 consider them equal. */
3248 if (real_zerop (arg0) && real_zerop (arg1))
/* Vectors: walk the element lists pairwise.  */
3257 v1 = TREE_VECTOR_CST_ELTS (arg0);
3258 v2 = TREE_VECTOR_CST_ELTS (arg1);
3261 if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
3264 v1 = TREE_CHAIN (v1);
3265 v2 = TREE_CHAIN (v2);
/* Complex constants: compare real and imaginary parts.  */
3272 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
3274 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
/* String constants: same length and identical bytes.  */
3278 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
3279 && ! memcmp (TREE_STRING_POINTER (arg0),
3280 TREE_STRING_POINTER (arg1),
3281 TREE_STRING_LENGTH (arg0)));
3284 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
3290 if (flags & OEP_ONLY_CONST)
3293 /* Define macros to test an operand from arg0 and arg1 for equality and a
3294 variant that allows null and views null as being different from any
3295 non-null value. In the latter case, if either is null, the both
3296 must be; otherwise, do the normal comparison. */
3297 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
3298 TREE_OPERAND (arg1, N), flags)
3300 #define OP_SAME_WITH_NULL(N) \
3301 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
3302 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
3304 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
3307 /* Two conversions are equal only if signedness and modes match. */
3308 switch (TREE_CODE (arg0))
3311 case FIX_TRUNC_EXPR:
3312 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
3313 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
3323 case tcc_comparison:
3325 if (OP_SAME (0) && OP_SAME (1))
3328 /* For commutative ops, allow the other order. */
3329 return (commutative_tree_code (TREE_CODE (arg0))
3330 && operand_equal_p (TREE_OPERAND (arg0, 0),
3331 TREE_OPERAND (arg1, 1), flags)
3332 && operand_equal_p (TREE_OPERAND (arg0, 1),
3333 TREE_OPERAND (arg1, 0), flags));
3336 /* If either of the pointer (or reference) expressions we are
3337 dereferencing contain a side effect, these cannot be equal. */
3338 if (TREE_SIDE_EFFECTS (arg0)
3339 || TREE_SIDE_EFFECTS (arg1))
3342 switch (TREE_CODE (arg0))
3345 case ALIGN_INDIRECT_REF:
3346 case MISALIGNED_INDIRECT_REF:
3352 case ARRAY_RANGE_REF:
3353 /* Operands 2 and 3 may be null.
3354 Compare the array index by value if it is constant first as we
3355 may have different types but same value here. */
3357 && (tree_int_cst_equal (TREE_OPERAND (arg0, 1),
3358 TREE_OPERAND (arg1, 1))
3360 && OP_SAME_WITH_NULL (2)
3361 && OP_SAME_WITH_NULL (3));
3364 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
3365 may be NULL when we're called to compare MEM_EXPRs. */
3366 return OP_SAME_WITH_NULL (0)
3368 && OP_SAME_WITH_NULL (2);
3371 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
3377 case tcc_expression:
3378 switch (TREE_CODE (arg0))
3381 case TRUTH_NOT_EXPR:
3384 case TRUTH_ANDIF_EXPR:
3385 case TRUTH_ORIF_EXPR:
/* Short-circuit forms are NOT commutative for equality purposes:
   operand order affects evaluation.  */
3386 return OP_SAME (0) && OP_SAME (1);
3388 case TRUTH_AND_EXPR:
3390 case TRUTH_XOR_EXPR:
3391 if (OP_SAME (0) && OP_SAME (1))
3394 /* Otherwise take into account this is a commutative operation. */
3395 return (operand_equal_p (TREE_OPERAND (arg0, 0),
3396 TREE_OPERAND (arg1, 1), flags)
3397 && operand_equal_p (TREE_OPERAND (arg0, 1),
3398 TREE_OPERAND (arg1, 0), flags));
3401 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
3408 switch (TREE_CODE (arg0))
3411 /* If the CALL_EXPRs call different functions, then they
3412 clearly can not be equal. */
3413 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
/* Calls compare equal only if their side-effect class allows it; with
   OEP_PURE_SAME, const/pure calls with equal arguments qualify.  */
3418 unsigned int cef = call_expr_flags (arg0);
3419 if (flags & OEP_PURE_SAME)
3420 cef &= ECF_CONST | ECF_PURE;
3427 /* Now see if all the arguments are the same. */
3429 const_call_expr_arg_iterator iter0, iter1;
3431 for (a0 = first_const_call_expr_arg (arg0, &iter0),
3432 a1 = first_const_call_expr_arg (arg1, &iter1);
3434 a0 = next_const_call_expr_arg (&iter0),
3435 a1 = next_const_call_expr_arg (&iter1))
3436 if (! operand_equal_p (a0, a1, flags))
3439 /* If we get here and both argument lists are exhausted
3440 then the CALL_EXPRs are equal. */
3441 return ! (a0 || a1);
3447 case tcc_declaration:
3448 /* Consider __builtin_sqrt equal to sqrt. */
3449 return (TREE_CODE (arg0) == FUNCTION_DECL
3450 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
3451 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
3452 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
3459 #undef OP_SAME_WITH_NULL
3462 /* Similar to operand_equal_p, but see if ARG0 might have been made by
3463 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
3465 When in doubt, return 0. */
/* NOTE(review): elided excerpt -- return type, braces and several
   `return 1;' / `return 0;' statements are not visible here.  */
3468 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
3470 int unsignedp1, unsignedpo;
3471 tree primarg0, primarg1, primother;
3472 unsigned int correct_width;
/* Trivially equal operands need no shorten_compare reasoning.  */
3474 if (operand_equal_p (arg0, arg1, 0))
/* The shortening logic below only applies to integral operands.  */
3477 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
3478 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
3481 /* Discard any conversions that don't change the modes of ARG0 and ARG1
3482 and see if the inner values are the same. This removes any
3483 signedness comparison, which doesn't matter here. */
3484 primarg0 = arg0, primarg1 = arg1;
3485 STRIP_NOPS (primarg0);
3486 STRIP_NOPS (primarg1);
3487 if (operand_equal_p (primarg0, primarg1, 0))
3490 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
3491 actual comparison operand, ARG0.
3493 First throw away any conversions to wider types
3494 already present in the operands. */
3496 primarg1 = get_narrower (arg1, &unsignedp1);
3497 primother = get_narrower (other, &unsignedpo);
3499 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
3500 if (unsignedp1 == unsignedpo
3501 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
3502 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
3504 tree type = TREE_TYPE (arg0);
3506 /* Make sure shorter operand is extended the right way
3507 to match the longer operand. */
3508 primarg1 = fold_convert (signed_or_unsigned_type_for
3509 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
3511 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
3518 /* See if ARG is an expression that is either a comparison or is performing
3519 arithmetic on comparisons. The comparisons must only be comparing
3520 two different values, which will be stored in *CVAL1 and *CVAL2; if
3521 they are nonzero it means that some operands have already been found.
3522 No variables may be used anywhere else in the expression except in the
3523 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
3524 the expression and save_expr needs to be called with CVAL1 and CVAL2.
3526 If this is true, return 1. Otherwise, return zero. */
/* NOTE(review): elided excerpt -- return type, braces, the tcc_constant
   case, `*save_p = 1;' and various `return 0/1;' statements are not
   visible here.  */
3529 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
3531 enum tree_code code = TREE_CODE (arg);
3532 enum tree_code_class tclass = TREE_CODE_CLASS (code);
3534 /* We can handle some of the tcc_expression cases here. */
3535 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
3537 else if (tclass == tcc_expression
3538 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
3539 || code == COMPOUND_EXPR))
3540 tclass = tcc_binary;
3542 else if (tclass == tcc_expression && code == SAVE_EXPR
3543 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
3545 /* If we've already found a CVAL1 or CVAL2, this expression is
3546 two complex to handle. */
3547 if (*cval1 || *cval2)
/* Unary: recurse into the single operand.  */
3557 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
/* Binary: both operands must satisfy the property.  */
3560 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
3561 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3562 cval1, cval2, save_p));
3567 case tcc_expression:
3568 if (code == COND_EXPR)
3569 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
3570 cval1, cval2, save_p)
3571 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3572 cval1, cval2, save_p)
3573 && twoval_comparison_p (TREE_OPERAND (arg, 2),
3574 cval1, cval2, save_p));
3577 case tcc_comparison:
3578 /* First see if we can handle the first operand, then the second. For
3579 the second operand, we know *CVAL1 can't be zero. It must be that
3580 one side of the comparison is each of the values; test for the
3581 case where this isn't true by failing if the two operands
/* Reject degenerate comparisons of a value against itself.  */
3584 if (operand_equal_p (TREE_OPERAND (arg, 0),
3585 TREE_OPERAND (arg, 1), 0))
/* Record/match operand 0 against the two tracked values.  */
3589 *cval1 = TREE_OPERAND (arg, 0);
3590 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
3592 else if (*cval2 == 0)
3593 *cval2 = TREE_OPERAND (arg, 0);
3594 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
/* Likewise for operand 1.  */
3599 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
3601 else if (*cval2 == 0)
3602 *cval2 = TREE_OPERAND (arg, 1);
3603 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
3615 /* ARG is a tree that is known to contain just arithmetic operations and
3616 comparisons. Evaluate the operations in the tree substituting NEW0 for
3617 any occurrence of OLD0 as an operand of a comparison and likewise for
/* NOTE(review): elided excerpt -- return type, braces, the switch on
   TCLASS, the COND_EXPR condition codes and the default `return arg;'
   are not visible here.  */
3621 eval_subst (location_t loc, tree arg, tree old0, tree new0,
3622 tree old1, tree new1)
3624 tree type = TREE_TYPE (arg);
3625 enum tree_code code = TREE_CODE (arg);
3626 enum tree_code_class tclass = TREE_CODE_CLASS (code);
3628 /* We can handle some of the tcc_expression cases here. */
3629 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
3631 else if (tclass == tcc_expression
3632 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
3633 tclass = tcc_binary;
/* Unary: rebuild with the substituted operand.  */
3638 return fold_build1_loc (loc, code, type,
3639 eval_subst (loc, TREE_OPERAND (arg, 0),
3640 old0, new0, old1, new1));
/* Binary: substitute into both operands.  */
3643 return fold_build2_loc (loc, code, type,
3644 eval_subst (loc, TREE_OPERAND (arg, 0),
3645 old0, new0, old1, new1),
3646 eval_subst (loc, TREE_OPERAND (arg, 1),
3647 old0, new0, old1, new1));
3649 case tcc_expression:
3653 return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
3657 return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
/* Presumably the COND_EXPR case: substitute into all three operands.  */
3661 return fold_build3_loc (loc, code, type,
3662 eval_subst (loc, TREE_OPERAND (arg, 0),
3663 old0, new0, old1, new1),
3664 eval_subst (loc, TREE_OPERAND (arg, 1),
3665 old0, new0, old1, new1),
3666 eval_subst (loc, TREE_OPERAND (arg, 2),
3667 old0, new0, old1, new1));
3671 /* Fall through - ??? */
3673 case tcc_comparison:
3675 tree arg0 = TREE_OPERAND (arg, 0);
3676 tree arg1 = TREE_OPERAND (arg, 1);
3678 /* We need to check both for exact equality and tree equality. The
3679 former will be true if the operand has a side-effect. In that
3680 case, we know the operand occurred exactly once. */
3682 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
3684 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
3687 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
3689 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
3692 return fold_build2_loc (loc, code, type, arg0, arg1);
3700 /* Return a tree for the case when the result of an expression is RESULT
3701 converted to TYPE and OMITTED was previously an operand of the expression
3702 but is now not needed (e.g., we folded OMITTED * 0).
3704 If OMITTED has side effects, we must evaluate it. Otherwise, just do
3705 the conversion of RESULT to TYPE. */
/* NOTE(review): elided excerpt -- return type, braces and the final
   `return t;' after the exit label are not visible here.  */
3708 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
3710 tree t = fold_convert_loc (loc, type, result);
3712 /* If the resulting operand is an empty statement, just return the omitted
3713 statement casted to void. */
3714 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3716 t = build1 (NOP_EXPR, void_type_node, fold_ignored_result (omitted));
3717 goto omit_one_operand_exit;
/* OMITTED must still be evaluated for its side effects; sequence it
   before the result with a COMPOUND_EXPR.  */
3720 if (TREE_SIDE_EFFECTS (omitted))
3722 t = build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
3723 goto omit_one_operand_exit;
3726 return non_lvalue_loc (loc, t);
3728 omit_one_operand_exit:
3729 protected_set_expr_location (t, loc);
3733 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
/* NOTE(review): elided excerpt -- return type, braces, the OMITTED
   parameter line and the final `return t;' are not visible here.
   Mirrors omit_one_operand_loc above except for the non-lvalue call.  */
3736 pedantic_omit_one_operand_loc (location_t loc, tree type, tree result,
3739 tree t = fold_convert_loc (loc, type, result);
3741 /* If the resulting operand is an empty statement, just return the omitted
3742 statement casted to void. */
3743 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3745 t = build1 (NOP_EXPR, void_type_node, fold_ignored_result (omitted));
3746 goto pedantic_omit_one_operand_exit;
3749 if (TREE_SIDE_EFFECTS (omitted))
3751 t = build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
3752 goto pedantic_omit_one_operand_exit;
3755 return pedantic_non_lvalue_loc (loc, t);
3757 pedantic_omit_one_operand_exit:
3758 protected_set_expr_location (t, loc);
3762 /* Return a tree for the case when the result of an expression is RESULT
3763 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3764 of the expression but are now not needed.
3766 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3767 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3768 evaluated before OMITTED2. Otherwise, if neither has side effects,
3769 just do the conversion of RESULT to TYPE. */
/* NOTE(review): elided excerpt -- return type and braces are not visible
   here.  */
3772 omit_two_operands_loc (location_t loc, tree type, tree result,
3773 tree omitted1, tree omitted2)
3775 tree t = fold_convert_loc (loc, type, result);
/* Build the COMPOUND_EXPRs innermost-last: wrapping OMITTED2 first and
   OMITTED1 second yields evaluation order OMITTED1, OMITTED2, T.  */
3777 if (TREE_SIDE_EFFECTS (omitted2))
3779 t = build2 (COMPOUND_EXPR, type, omitted2, t);
3780 SET_EXPR_LOCATION (t, loc);
3782 if (TREE_SIDE_EFFECTS (omitted1))
3784 t = build2 (COMPOUND_EXPR, type, omitted1, t);
3785 SET_EXPR_LOCATION (t, loc);
/* Only strip lvalue-ness when nothing had to be sequenced.  */
3788 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
3792 /* Return a simplified tree node for the truth-negation of ARG. This
3793 never alters ARG itself. We assume that ARG is an operation that
3794 returns a truth value (0 or 1).
3796 FIXME: one would think we would fold the result, but it causes
3797 problems with the dominator optimizer. */
3800 fold_truth_not_expr (location_t loc, tree arg)
3802 tree t, type = TREE_TYPE (arg);
3803 enum tree_code code = TREE_CODE (arg);
3804 location_t loc1, loc2;
3806 /* If this is a comparison, we can simply invert it, except for
3807 floating-point non-equality comparisons, in which case we just
3808 enclose a TRUTH_NOT_EXPR around what we have. */
3810 if (TREE_CODE_CLASS (code) == tcc_comparison)
3812 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3813 if (FLOAT_TYPE_P (op_type)
3814 && flag_trapping_math
3815 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3816 && code != NE_EXPR && code != EQ_EXPR)
3819 code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (op_type)));
3820 if (code == ERROR_MARK)
3823 t = build2 (code, type, TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
3824 SET_EXPR_LOCATION (t, loc);
3831 return constant_boolean_node (integer_zerop (arg), type);
3833 case TRUTH_AND_EXPR:
3834 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
3835 loc2 = EXPR_LOCATION (TREE_OPERAND (arg, 1));
3836 if (loc1 == UNKNOWN_LOCATION)
3838 if (loc2 == UNKNOWN_LOCATION)
3840 t = build2 (TRUTH_OR_EXPR, type,
3841 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3842 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3846 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
3847 loc2 = EXPR_LOCATION (TREE_OPERAND (arg, 1));
3848 if (loc1 == UNKNOWN_LOCATION)
3850 if (loc2 == UNKNOWN_LOCATION)
3852 t = build2 (TRUTH_AND_EXPR, type,
3853 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3854 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3857 case TRUTH_XOR_EXPR:
3858 /* Here we can invert either operand. We invert the first operand
3859 unless the second operand is a TRUTH_NOT_EXPR in which case our
3860 result is the XOR of the first operand with the inside of the
3861 negation of the second operand. */
3863 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3864 t = build2 (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3865 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3867 t = build2 (TRUTH_XOR_EXPR, type,
3868 invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
3869 TREE_OPERAND (arg, 1));
3872 case TRUTH_ANDIF_EXPR:
3873 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
3874 loc2 = EXPR_LOCATION (TREE_OPERAND (arg, 1));
3875 if (loc1 == UNKNOWN_LOCATION)
3877 if (loc2 == UNKNOWN_LOCATION)
3879 t = build2 (TRUTH_ORIF_EXPR, type,
3880 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3881 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3884 case TRUTH_ORIF_EXPR:
3885 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
3886 loc2 = EXPR_LOCATION (TREE_OPERAND (arg, 1));
3887 if (loc1 == UNKNOWN_LOCATION)
3889 if (loc2 == UNKNOWN_LOCATION)
3891 t = build2 (TRUTH_ANDIF_EXPR, type,
3892 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3893 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3896 case TRUTH_NOT_EXPR:
3897 return TREE_OPERAND (arg, 0);
3901 tree arg1 = TREE_OPERAND (arg, 1);
3902 tree arg2 = TREE_OPERAND (arg, 2);
3904 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 1));
3905 loc2 = EXPR_LOCATION (TREE_OPERAND (arg, 2));
3906 if (loc1 == UNKNOWN_LOCATION)
3908 if (loc2 == UNKNOWN_LOCATION)
3911 /* A COND_EXPR may have a throw as one operand, which
3912 then has void type. Just leave void operands
3914 t = build3 (COND_EXPR, type, TREE_OPERAND (arg, 0),
3915 VOID_TYPE_P (TREE_TYPE (arg1))
3916 ? arg1 : invert_truthvalue_loc (loc1, arg1),
3917 VOID_TYPE_P (TREE_TYPE (arg2))
3918 ? arg2 : invert_truthvalue_loc (loc2, arg2));
3923 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 1));
3924 if (loc1 == UNKNOWN_LOCATION)
3926 t = build2 (COMPOUND_EXPR, type,
3927 TREE_OPERAND (arg, 0),
3928 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
3931 case NON_LVALUE_EXPR:
3932 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
3933 if (loc1 == UNKNOWN_LOCATION)
3935 return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
3938 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3940 t = build1 (TRUTH_NOT_EXPR, type, arg);
3944 /* ... fall through ... */
3947 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
3948 if (loc1 == UNKNOWN_LOCATION)
3950 t = build1 (TREE_CODE (arg), type,
3951 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3955 if (!integer_onep (TREE_OPERAND (arg, 1)))
3957 t = build2 (EQ_EXPR, type, arg, build_int_cst (type, 0));
3961 t = build1 (TRUTH_NOT_EXPR, type, arg);
3964 case CLEANUP_POINT_EXPR:
3965 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
3966 if (loc1 == UNKNOWN_LOCATION)
3968 t = build1 (CLEANUP_POINT_EXPR, type,
3969 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3978 SET_EXPR_LOCATION (t, loc);
3983 /* Return a simplified tree node for the truth-negation of ARG. This
3984 never alters ARG itself. We assume that ARG is an operation that
3985 returns a truth value (0 or 1).
3987 FIXME: one would think we would fold the result, but it causes
3988 problems with the dominator optimizer. */
3991 invert_truthvalue_loc (location_t loc, tree arg)
3995 if (TREE_CODE (arg) == ERROR_MARK)
3998 tem = fold_truth_not_expr (loc, arg);
4001 tem = build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg), arg);
4002 SET_EXPR_LOCATION (tem, loc);
4008 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
4009 operands are another bit-wise operation with a common input. If so,
4010 distribute the bit operations to save an operation and possibly two if
4011 constants are involved. For example, convert
4012 (A | B) & (A | C) into A | (B & C)
4013 Further simplification will occur if B and C are constants.
4015 If this optimization cannot be done, 0 will be returned. */
4018 distribute_bit_expr (location_t loc, enum tree_code code, tree type,
4019 tree arg0, tree arg1)
4024 if (TREE_CODE (arg0) != TREE_CODE (arg1)
4025 || TREE_CODE (arg0) == code
4026 || (TREE_CODE (arg0) != BIT_AND_EXPR
4027 && TREE_CODE (arg0) != BIT_IOR_EXPR))
4030 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
4032 common = TREE_OPERAND (arg0, 0);
4033 left = TREE_OPERAND (arg0, 1);
4034 right = TREE_OPERAND (arg1, 1);
4036 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
4038 common = TREE_OPERAND (arg0, 0);
4039 left = TREE_OPERAND (arg0, 1);
4040 right = TREE_OPERAND (arg1, 0);
4042 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
4044 common = TREE_OPERAND (arg0, 1);
4045 left = TREE_OPERAND (arg0, 0);
4046 right = TREE_OPERAND (arg1, 1);
4048 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
4050 common = TREE_OPERAND (arg0, 1);
4051 left = TREE_OPERAND (arg0, 0);
4052 right = TREE_OPERAND (arg1, 0);
4057 common = fold_convert_loc (loc, type, common);
4058 left = fold_convert_loc (loc, type, left);
4059 right = fold_convert_loc (loc, type, right);
4060 return fold_build2_loc (loc, TREE_CODE (arg0), type, common,
4061 fold_build2_loc (loc, code, type, left, right));
4064 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
4065 with code CODE. This optimization is unsafe. */
4067 distribute_real_division (location_t loc, enum tree_code code, tree type,
4068 tree arg0, tree arg1)
4070 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
4071 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
4073 /* (A / C) +- (B / C) -> (A +- B) / C. */
4075 && operand_equal_p (TREE_OPERAND (arg0, 1),
4076 TREE_OPERAND (arg1, 1), 0))
4077 return fold_build2_loc (loc, mul0 ? MULT_EXPR : RDIV_EXPR, type,
4078 fold_build2_loc (loc, code, type,
4079 TREE_OPERAND (arg0, 0),
4080 TREE_OPERAND (arg1, 0)),
4081 TREE_OPERAND (arg0, 1));
4083 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
4084 if (operand_equal_p (TREE_OPERAND (arg0, 0),
4085 TREE_OPERAND (arg1, 0), 0)
4086 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
4087 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
4089 REAL_VALUE_TYPE r0, r1;
4090 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
4091 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
4093 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
4095 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
4096 real_arithmetic (&r0, code, &r0, &r1);
4097 return fold_build2_loc (loc, MULT_EXPR, type,
4098 TREE_OPERAND (arg0, 0),
4099 build_real (type, r0));
4105 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
4106 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
4109 make_bit_field_ref (location_t loc, tree inner, tree type,
4110 HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos, int unsignedp)
4112 tree result, bftype;
4116 tree size = TYPE_SIZE (TREE_TYPE (inner));
4117 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
4118 || POINTER_TYPE_P (TREE_TYPE (inner)))
4119 && host_integerp (size, 0)
4120 && tree_low_cst (size, 0) == bitsize)
4121 return fold_convert_loc (loc, type, inner);
4125 if (TYPE_PRECISION (bftype) != bitsize
4126 || TYPE_UNSIGNED (bftype) == !unsignedp)
4127 bftype = build_nonstandard_integer_type (bitsize, 0);
4129 result = build3 (BIT_FIELD_REF, bftype, inner,
4130 size_int (bitsize), bitsize_int (bitpos));
4131 SET_EXPR_LOCATION (result, loc);
4134 result = fold_convert_loc (loc, type, result);
4139 /* Optimize a bit-field compare.
4141 There are two cases: First is a compare against a constant and the
4142 second is a comparison of two items where the fields are at the same
4143 bit position relative to the start of a chunk (byte, halfword, word)
4144 large enough to contain it. In these cases we can avoid the shift
4145 implicit in bitfield extractions.
4147 For constants, we emit a compare of the shifted constant with the
4148 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
4149 compared. For two fields at the same position, we do the ANDs with the
4150 similar mask and compare the result of the ANDs.
4152 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
4153 COMPARE_TYPE is the type of the comparison, and LHS and RHS
4154 are the left and right operands of the comparison, respectively.
4156 If the optimization described above can be done, we return the resulting
4157 tree. Otherwise we return zero. */
4160 optimize_bit_field_compare (location_t loc, enum tree_code code,
4161 tree compare_type, tree lhs, tree rhs)
4163 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
4164 tree type = TREE_TYPE (lhs);
4165 tree signed_type, unsigned_type;
4166 int const_p = TREE_CODE (rhs) == INTEGER_CST;
4167 enum machine_mode lmode, rmode, nmode;
4168 int lunsignedp, runsignedp;
4169 int lvolatilep = 0, rvolatilep = 0;
4170 tree linner, rinner = NULL_TREE;
4174 /* Get all the information about the extractions being done. If the bit size
4175 if the same as the size of the underlying object, we aren't doing an
4176 extraction at all and so can do nothing. We also don't want to
4177 do anything if the inner expression is a PLACEHOLDER_EXPR since we
4178 then will no longer be able to replace it. */
4179 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
4180 &lunsignedp, &lvolatilep, false);
4181 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
4182 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
4187 /* If this is not a constant, we can only do something if bit positions,
4188 sizes, and signedness are the same. */
4189 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
4190 &runsignedp, &rvolatilep, false);
4192 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
4193 || lunsignedp != runsignedp || offset != 0
4194 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
4198 /* See if we can find a mode to refer to this field. We should be able to,
4199 but fail if we can't. */
4200 nmode = get_best_mode (lbitsize, lbitpos,
4201 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
4202 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
4203 TYPE_ALIGN (TREE_TYPE (rinner))),
4204 word_mode, lvolatilep || rvolatilep);
4205 if (nmode == VOIDmode)
4208 /* Set signed and unsigned types of the precision of this mode for the
4210 signed_type = lang_hooks.types.type_for_mode (nmode, 0);
4211 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
4213 /* Compute the bit position and size for the new reference and our offset
4214 within it. If the new reference is the same size as the original, we
4215 won't optimize anything, so return zero. */
4216 nbitsize = GET_MODE_BITSIZE (nmode);
4217 nbitpos = lbitpos & ~ (nbitsize - 1);
4219 if (nbitsize == lbitsize)
4222 if (BYTES_BIG_ENDIAN)
4223 lbitpos = nbitsize - lbitsize - lbitpos;
4225 /* Make the mask to be used against the extracted field. */
4226 mask = build_int_cst_type (unsigned_type, -1);
4227 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
4228 mask = const_binop (RSHIFT_EXPR, mask,
4229 size_int (nbitsize - lbitsize - lbitpos), 0);
4232 /* If not comparing with constant, just rework the comparison
4234 return fold_build2_loc (loc, code, compare_type,
4235 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
4236 make_bit_field_ref (loc, linner,
4241 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
4242 make_bit_field_ref (loc, rinner,
4248 /* Otherwise, we are handling the constant case. See if the constant is too
4249 big for the field. Warn and return a tree of for 0 (false) if so. We do
4250 this not only for its own sake, but to avoid having to test for this
4251 error case below. If we didn't, we might generate wrong code.
4253 For unsigned fields, the constant shifted right by the field length should
4254 be all zero. For signed fields, the high-order bits should agree with
4259 if (! integer_zerop (const_binop (RSHIFT_EXPR,
4260 fold_convert_loc (loc,
4261 unsigned_type, rhs),
4262 size_int (lbitsize), 0)))
4264 warning (0, "comparison is always %d due to width of bit-field",
4266 return constant_boolean_node (code == NE_EXPR, compare_type);
4271 tree tem = const_binop (RSHIFT_EXPR,
4272 fold_convert_loc (loc, signed_type, rhs),
4273 size_int (lbitsize - 1), 0);
4274 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
4276 warning (0, "comparison is always %d due to width of bit-field",
4278 return constant_boolean_node (code == NE_EXPR, compare_type);
4282 /* Single-bit compares should always be against zero. */
4283 if (lbitsize == 1 && ! integer_zerop (rhs))
4285 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
4286 rhs = build_int_cst (type, 0);
4289 /* Make a new bitfield reference, shift the constant over the
4290 appropriate number of bits and mask it with the computed mask
4291 (in case this was a signed field). If we changed it, make a new one. */
4292 lhs = make_bit_field_ref (loc, linner, unsigned_type, nbitsize, nbitpos, 1);
4295 TREE_SIDE_EFFECTS (lhs) = 1;
4296 TREE_THIS_VOLATILE (lhs) = 1;
4299 rhs = const_binop (BIT_AND_EXPR,
4300 const_binop (LSHIFT_EXPR,
4301 fold_convert_loc (loc, unsigned_type, rhs),
4302 size_int (lbitpos), 0),
4305 lhs = build2 (code, compare_type,
4306 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask),
4308 SET_EXPR_LOCATION (lhs, loc);
4312 /* Subroutine for fold_truthop: decode a field reference.
4314 If EXP is a comparison reference, we return the innermost reference.
4316 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
4317 set to the starting bit number.
4319 If the innermost field can be completely contained in a mode-sized
4320 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
4322 *PVOLATILEP is set to 1 if any expression encountered is volatile;
4323 otherwise it is not changed.
4325 *PUNSIGNEDP is set to the signedness of the field.
4327 *PMASK is set to the mask used. This is either contained in a
4328 BIT_AND_EXPR or derived from the width of the field.
4330 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
4332 Return 0 if this is not a component reference or is one that we can't
4333 do anything with. */
4336 decode_field_reference (location_t loc, tree exp, HOST_WIDE_INT *pbitsize,
4337 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
4338 int *punsignedp, int *pvolatilep,
4339 tree *pmask, tree *pand_mask)
4341 tree outer_type = 0;
4343 tree mask, inner, offset;
4345 unsigned int precision;
4347 /* All the optimizations using this function assume integer fields.
4348 There are problems with FP fields since the type_for_size call
4349 below can fail for, e.g., XFmode. */
4350 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
4353 /* We are interested in the bare arrangement of bits, so strip everything
4354 that doesn't affect the machine mode. However, record the type of the
4355 outermost expression if it may matter below. */
4356 if (CONVERT_EXPR_P (exp)
4357 || TREE_CODE (exp) == NON_LVALUE_EXPR)
4358 outer_type = TREE_TYPE (exp);
4361 if (TREE_CODE (exp) == BIT_AND_EXPR)
4363 and_mask = TREE_OPERAND (exp, 1);
4364 exp = TREE_OPERAND (exp, 0);
4365 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
4366 if (TREE_CODE (and_mask) != INTEGER_CST)
4370 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
4371 punsignedp, pvolatilep, false);
4372 if ((inner == exp && and_mask == 0)
4373 || *pbitsize < 0 || offset != 0
4374 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
4377 /* If the number of bits in the reference is the same as the bitsize of
4378 the outer type, then the outer type gives the signedness. Otherwise
4379 (in case of a small bitfield) the signedness is unchanged. */
4380 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
4381 *punsignedp = TYPE_UNSIGNED (outer_type);
4383 /* Compute the mask to access the bitfield. */
4384 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
4385 precision = TYPE_PRECISION (unsigned_type);
4387 mask = build_int_cst_type (unsigned_type, -1);
4389 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
4390 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
4392 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
4394 mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
4395 fold_convert_loc (loc, unsigned_type, and_mask), mask);
4398 *pand_mask = and_mask;
4402 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
4406 all_ones_mask_p (const_tree mask, int size)
4408 tree type = TREE_TYPE (mask);
4409 unsigned int precision = TYPE_PRECISION (type);
4412 tmask = build_int_cst_type (signed_type_for (type), -1);
4415 tree_int_cst_equal (mask,
4416 const_binop (RSHIFT_EXPR,
4417 const_binop (LSHIFT_EXPR, tmask,
4418 size_int (precision - size),
4420 size_int (precision - size), 0));
4423 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
4424 represents the sign bit of EXP's type. If EXP represents a sign
4425 or zero extension, also test VAL against the unextended type.
4426 The return value is the (sub)expression whose sign bit is VAL,
4427 or NULL_TREE otherwise. */
4430 sign_bit_p (tree exp, const_tree val)
4432 unsigned HOST_WIDE_INT mask_lo, lo;
4433 HOST_WIDE_INT mask_hi, hi;
4437 /* Tree EXP must have an integral type. */
4438 t = TREE_TYPE (exp);
4439 if (! INTEGRAL_TYPE_P (t))
4442 /* Tree VAL must be an integer constant. */
4443 if (TREE_CODE (val) != INTEGER_CST
4444 || TREE_OVERFLOW (val))
4447 width = TYPE_PRECISION (t);
4448 if (width > HOST_BITS_PER_WIDE_INT)
4450 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
4453 mask_hi = ((unsigned HOST_WIDE_INT) -1
4454 >> (2 * HOST_BITS_PER_WIDE_INT - width));
4460 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
4463 mask_lo = ((unsigned HOST_WIDE_INT) -1
4464 >> (HOST_BITS_PER_WIDE_INT - width));
4467 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
4468 treat VAL as if it were unsigned. */
4469 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
4470 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
4473 /* Handle extension from a narrower type. */
4474 if (TREE_CODE (exp) == NOP_EXPR
4475 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
4476 return sign_bit_p (TREE_OPERAND (exp, 0), val);
4481 /* Subroutine for fold_truthop: determine if an operand is simple enough
4482 to be evaluated unconditionally. */
4485 simple_operand_p (const_tree exp)
4487 /* Strip any conversions that don't change the machine mode. */
4490 return (CONSTANT_CLASS_P (exp)
4491 || TREE_CODE (exp) == SSA_NAME
4493 && ! TREE_ADDRESSABLE (exp)
4494 && ! TREE_THIS_VOLATILE (exp)
4495 && ! DECL_NONLOCAL (exp)
4496 /* Don't regard global variables as simple. They may be
4497 allocated in ways unknown to the compiler (shared memory,
4498 #pragma weak, etc). */
4499 && ! TREE_PUBLIC (exp)
4500 && ! DECL_EXTERNAL (exp)
4501 /* Loading a static variable is unduly expensive, but global
4502 registers aren't expensive. */
4503 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
4506 /* The following functions are subroutines to fold_range_test and allow it to
4507 try to change a logical combination of comparisons into a range test.
4510 X == 2 || X == 3 || X == 4 || X == 5
4514 (unsigned) (X - 2) <= 3
4516 We describe each set of comparisons as being either inside or outside
4517 a range, using a variable named like IN_P, and then describe the
4518 range with a lower and upper bound. If one of the bounds is omitted,
4519 it represents either the highest or lowest value of the type.
4521 In the comments below, we represent a range by two numbers in brackets
4522 preceded by a "+" to designate being inside that range, or a "-" to
4523 designate being outside that range, so the condition can be inverted by
4524 flipping the prefix. An omitted bound is represented by a "-". For
4525 example, "- [-, 10]" means being outside the range starting at the lowest
4526 possible value and ending at 10, in other words, being greater than 10.
4527 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
4530 We set up things so that the missing bounds are handled in a consistent
4531 manner so neither a missing bound nor "true" and "false" need to be
4532 handled using a special case. */
4534 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
4535 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
4536 and UPPER1_P are nonzero if the respective argument is an upper bound
4537 and zero for a lower. TYPE, if nonzero, is the type of the result; it
4538 must be specified for a comparison. ARG1 will be converted to ARG0's
4539 type if both are specified. */
4542 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
4543 tree arg1, int upper1_p)
4549 /* If neither arg represents infinity, do the normal operation.
4550 Else, if not a comparison, return infinity. Else handle the special
4551 comparison rules. Note that most of the cases below won't occur, but
4552 are handled for consistency. */
4554 if (arg0 != 0 && arg1 != 0)
4556 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
4557 arg0, fold_convert (TREE_TYPE (arg0), arg1));
4559 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
4562 if (TREE_CODE_CLASS (code) != tcc_comparison)
4565 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
4566 for neither. In real maths, we cannot assume open ended ranges are
4567 the same. But, this is computer arithmetic, where numbers are finite.
4568 We can therefore make the transformation of any unbounded range with
4569 the value Z, Z being greater than any representable number. This permits
4570 us to treat unbounded ranges as equal. */
4571 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
4572 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
4576 result = sgn0 == sgn1;
4579 result = sgn0 != sgn1;
4582 result = sgn0 < sgn1;
4585 result = sgn0 <= sgn1;
4588 result = sgn0 > sgn1;
4591 result = sgn0 >= sgn1;
4597 return constant_boolean_node (result, type);
4600 /* Given EXP, a logical expression, set the range it is testing into
4601 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
4602 actually being tested. *PLOW and *PHIGH will be made of the same
4603 type as the returned expression. If EXP is not a comparison, we
4604 will most likely not be returning a useful value and range. Set
4605 *STRICT_OVERFLOW_P to true if the return value is only valid
4606 because signed overflow is undefined; otherwise, do not change
4607 *STRICT_OVERFLOW_P. */
4610 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
4611 bool *strict_overflow_p)
4613 enum tree_code code;
4614 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
4615 tree exp_type = NULL_TREE, arg0_type = NULL_TREE;
4617 tree low, high, n_low, n_high;
4618 location_t loc = EXPR_LOCATION (exp);
4620 /* Start with simply saying "EXP != 0" and then look at the code of EXP
4621 and see if we can refine the range. Some of the cases below may not
4622 happen, but it doesn't seem worth worrying about this. We "continue"
4623 the outer loop when we've changed something; otherwise we "break"
4624 the switch, which will "break" the while. */
4627 low = high = build_int_cst (TREE_TYPE (exp), 0);
4631 code = TREE_CODE (exp);
4632 exp_type = TREE_TYPE (exp);
4634 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
4636 if (TREE_OPERAND_LENGTH (exp) > 0)
4637 arg0 = TREE_OPERAND (exp, 0);
4638 if (TREE_CODE_CLASS (code) == tcc_comparison
4639 || TREE_CODE_CLASS (code) == tcc_unary
4640 || TREE_CODE_CLASS (code) == tcc_binary)
4641 arg0_type = TREE_TYPE (arg0);
4642 if (TREE_CODE_CLASS (code) == tcc_binary
4643 || TREE_CODE_CLASS (code) == tcc_comparison
4644 || (TREE_CODE_CLASS (code) == tcc_expression
4645 && TREE_OPERAND_LENGTH (exp) > 1))
4646 arg1 = TREE_OPERAND (exp, 1);
4651 case TRUTH_NOT_EXPR:
4652 in_p = ! in_p, exp = arg0;
4655 case EQ_EXPR: case NE_EXPR:
4656 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
4657 /* We can only do something if the range is testing for zero
4658 and if the second operand is an integer constant. Note that
4659 saying something is "in" the range we make is done by
4660 complementing IN_P since it will set in the initial case of
4661 being not equal to zero; "out" is leaving it alone. */
4662 if (low == 0 || high == 0
4663 || ! integer_zerop (low) || ! integer_zerop (high)
4664 || TREE_CODE (arg1) != INTEGER_CST)
4669 case NE_EXPR: /* - [c, c] */
4672 case EQ_EXPR: /* + [c, c] */
4673 in_p = ! in_p, low = high = arg1;
4675 case GT_EXPR: /* - [-, c] */
4676 low = 0, high = arg1;
4678 case GE_EXPR: /* + [c, -] */
4679 in_p = ! in_p, low = arg1, high = 0;
4681 case LT_EXPR: /* - [c, -] */
4682 low = arg1, high = 0;
4684 case LE_EXPR: /* + [-, c] */
4685 in_p = ! in_p, low = 0, high = arg1;
4691 /* If this is an unsigned comparison, we also know that EXP is
4692 greater than or equal to zero. We base the range tests we make
4693 on that fact, so we record it here so we can parse existing
4694 range tests. We test arg0_type since often the return type
4695 of, e.g. EQ_EXPR, is boolean. */
4696 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
4698 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4700 build_int_cst (arg0_type, 0),
4704 in_p = n_in_p, low = n_low, high = n_high;
4706 /* If the high bound is missing, but we have a nonzero low
4707 bound, reverse the range so it goes from zero to the low bound
4709 if (high == 0 && low && ! integer_zerop (low))
4712 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
4713 integer_one_node, 0);
4714 low = build_int_cst (arg0_type, 0);
4722 /* (-x) IN [a,b] -> x in [-b, -a] */
4723 n_low = range_binop (MINUS_EXPR, exp_type,
4724 build_int_cst (exp_type, 0),
4726 n_high = range_binop (MINUS_EXPR, exp_type,
4727 build_int_cst (exp_type, 0),
4729 low = n_low, high = n_high;
4735 exp = build2 (MINUS_EXPR, exp_type, negate_expr (arg0),
4736 build_int_cst (exp_type, 1));
4737 SET_EXPR_LOCATION (exp, loc);
4740 case PLUS_EXPR: case MINUS_EXPR:
4741 if (TREE_CODE (arg1) != INTEGER_CST)
4744 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
4745 move a constant to the other side. */
4746 if (!TYPE_UNSIGNED (arg0_type)
4747 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4750 /* If EXP is signed, any overflow in the computation is undefined,
4751 so we don't worry about it so long as our computations on
4752 the bounds don't overflow. For unsigned, overflow is defined
4753 and this is exactly the right thing. */
4754 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4755 arg0_type, low, 0, arg1, 0);
4756 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4757 arg0_type, high, 1, arg1, 0);
4758 if ((n_low != 0 && TREE_OVERFLOW (n_low))
4759 || (n_high != 0 && TREE_OVERFLOW (n_high)))
4762 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
4763 *strict_overflow_p = true;
4765 /* Check for an unsigned range which has wrapped around the maximum
4766 value thus making n_high < n_low, and normalize it. */
4767 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
4769 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
4770 integer_one_node, 0);
4771 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
4772 integer_one_node, 0);
4774 /* If the range is of the form +/- [ x+1, x ], we won't
4775 be able to normalize it. But then, it represents the
4776 whole range or the empty set, so make it
4778 if (tree_int_cst_equal (n_low, low)
4779 && tree_int_cst_equal (n_high, high))
4785 low = n_low, high = n_high;
4790 CASE_CONVERT: case NON_LVALUE_EXPR:
4791 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4794 if (! INTEGRAL_TYPE_P (arg0_type)
4795 || (low != 0 && ! int_fits_type_p (low, arg0_type))
4796 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4799 n_low = low, n_high = high;
4802 n_low = fold_convert_loc (loc, arg0_type, n_low);
4805 n_high = fold_convert_loc (loc, arg0_type, n_high);
4808 /* If we're converting arg0 from an unsigned type, to exp,
4809 a signed type, we will be doing the comparison as unsigned.
4810 The tests above have already verified that LOW and HIGH
4813 So we have to ensure that we will handle large unsigned
4814 values the same way that the current signed bounds treat
4817 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4821 /* For fixed-point modes, we need to pass the saturating flag
4822 as the 2nd parameter. */
4823 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
4824 equiv_type = lang_hooks.types.type_for_mode
4825 (TYPE_MODE (arg0_type),
4826 TYPE_SATURATING (arg0_type));
4828 equiv_type = lang_hooks.types.type_for_mode
4829 (TYPE_MODE (arg0_type), 1);
4831 /* A range without an upper bound is, naturally, unbounded.
4832 Since convert would have cropped a very large value, use
4833 the max value for the destination type. */
4835 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4836 : TYPE_MAX_VALUE (arg0_type);
4838 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4839 high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
4840 fold_convert_loc (loc, arg0_type,
4842 build_int_cst (arg0_type, 1));
4844 /* If the low bound is specified, "and" the range with the
4845 range for which the original unsigned value will be
4849 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4850 1, n_low, n_high, 1,
4851 fold_convert_loc (loc, arg0_type,
4856 in_p = (n_in_p == in_p);
4860 /* Otherwise, "or" the range with the range of the input
4861 that will be interpreted as negative. */
4862 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4863 0, n_low, n_high, 1,
4864 fold_convert_loc (loc, arg0_type,
4869 in_p = (in_p != n_in_p);
4874 low = n_low, high = n_high;
4884 /* If EXP is a constant, we can evaluate whether this is true or false. */
4885 if (TREE_CODE (exp) == INTEGER_CST)
4887 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4889 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4895 *pin_p = in_p, *plow = low, *phigh = high;
4899 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4900 type, TYPE, return an expression to test if EXP is in (or out of, depending
4901 on IN_P) the range. Return 0 if the test couldn't be created. */
4904 build_range_check (location_t loc, tree type, tree exp, int in_p,
4905 tree low, tree high)
4907 tree etype = TREE_TYPE (exp), value;
4909 #ifdef HAVE_canonicalize_funcptr_for_compare
4910 /* Disable this optimization for function pointer expressions
4911 on targets that require function pointer canonicalization. */
4912 if (HAVE_canonicalize_funcptr_for_compare
4913 && TREE_CODE (etype) == POINTER_TYPE
4914 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
/* An "out of range" test (IN_P == 0) is built by constructing the
   "in range" test and inverting the result.  */
4920 value = build_range_check (loc, type, exp, 1, low, high);
4922 return invert_truthvalue_loc (loc, value);
/* No bound on either side: the range covers everything, so the
   test is vacuously true.  */
4927 if (low == 0 && high == 0)
4928 return build_int_cst (type, 1);
/* One-sided ranges reduce to a single comparison: EXP <= HIGH or
   EXP >= LOW.  NOTE(review): the guards checking that the opposite
   bound is absent are not visible here -- confirm in full source.  */
4931 return fold_build2_loc (loc, LE_EXPR, type, exp,
4932 fold_convert_loc (loc, etype, high));
4935 return fold_build2_loc (loc, GE_EXPR, type, exp,
4936 fold_convert_loc (loc, etype, low));
/* Degenerate range [X, X]: a single equality test.  */
4938 if (operand_equal_p (low, high, 0))
4939 return fold_build2_loc (loc, EQ_EXPR, type, exp,
4940 fold_convert_loc (loc, etype, low));
/* Range starting at zero on a signed type: redo the check in the
   unsigned variant of the type, where [0, HIGH] is one comparison.  */
4942 if (integer_zerop (low))
4944 if (! TYPE_UNSIGNED (etype))
4946 etype = unsigned_type_for (etype);
4947 high = fold_convert_loc (loc, etype, high);
4948 exp = fold_convert_loc (loc, etype, exp);
4950 return build_range_check (loc, type, exp, 1, 0, high);
4953 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4954 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4956 unsigned HOST_WIDE_INT lo;
4960 prec = TYPE_PRECISION (etype);
4961 if (prec <= HOST_BITS_PER_WIDE_INT)
/* HI/LO form the bit pattern of the signed maximum (2^(prec-1) - 1)
   for the precision, split across the two HOST_WIDE_INT halves.  */
4964 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
4968 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
4969 lo = (unsigned HOST_WIDE_INT) -1;
4972 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
4974 if (TYPE_UNSIGNED (etype))
4976 tree signed_etype = signed_type_for (etype);
/* Guard against a signed counterpart of different precision
   (e.g. for oddly-sized bit-field types): build an exact-width
   signed type instead.  */
4977 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
4979 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
4981 etype = signed_etype;
4982 exp = fold_convert_loc (loc, etype, exp);
4984 return fold_build2_loc (loc, GT_EXPR, type, exp,
4985 build_int_cst (etype, 0));
4989 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4990 This requires wrap-around arithmetics for the type of the expression.
4991 First make sure that arithmetics in this type is valid, then make sure
4992 that it wraps around. */
4993 if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
4994 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4995 TYPE_UNSIGNED (etype));
4997 if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
4999 tree utype, minv, maxv;
5001 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
5002 for the type in question, as we rely on this here. */
5003 utype = unsigned_type_for (etype);
5004 maxv = fold_convert_loc (loc, utype, TYPE_MAX_VALUE (etype));
5005 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
5006 integer_one_node, 1);
5007 minv = fold_convert_loc (loc, utype, TYPE_MIN_VALUE (etype));
5009 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
/* Shift the range to start at zero: test EXP - LOW in [0, HIGH - LOW],
   relying on the wrap-around arithmetic established above.  */
5016 high = fold_convert_loc (loc, etype, high);
5017 low = fold_convert_loc (loc, etype, low);
5018 exp = fold_convert_loc (loc, etype, exp);
5020 value = const_binop (MINUS_EXPR, high, low, 0);
/* Pointers cannot use MINUS_EXPR directly; subtract LOW via
   POINTER_PLUS_EXPR with a negated sizetype offset.  */
5023 if (POINTER_TYPE_P (etype))
5025 if (value != 0 && !TREE_OVERFLOW (value))
5027 low = fold_convert_loc (loc, sizetype, low);
5028 low = fold_build1_loc (loc, NEGATE_EXPR, sizetype, low);
5029 return build_range_check (loc, type,
5030 fold_build2_loc (loc, POINTER_PLUS_EXPR,
5032 1, build_int_cst (etype, 0), value);
5037 if (value != 0 && !TREE_OVERFLOW (value))
5038 return build_range_check (loc, type,
5039 fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
5040 1, build_int_cst (etype, 0), value);
5045 /* Return the predecessor of VAL in its type, handling the infinite case. */
5048 range_predecessor (tree val)
5050 tree type = TREE_TYPE (val);
/* The type's minimum has no predecessor -- presumably this case
   yields 0 ("minus infinity"); NOTE(review): confirm against full
   source, the early-return branch is not visible here.  */
5052 if (INTEGRAL_TYPE_P (type)
5053 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
5056 return range_binop (MINUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
5059 /* Return the successor of VAL in its type, handling the infinite case. */
5062 range_successor (tree val)
5064 tree type = TREE_TYPE (val);
/* The type's maximum has no successor -- presumably this case yields
   0 ("plus infinity"); NOTE(review): confirm against full source, the
   early-return branch is not visible here.  */
5066 if (INTEGRAL_TYPE_P (type)
5067 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
5070 return range_binop (PLUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
5073 /* Given two ranges, see if we can merge them into one. Return 1 if we
5074 can, 0 if we can't. Set the output range into the specified parameters. */
5077 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
5078 tree high0, int in1_p, tree low1, tree high1)
/* A NULL bound means "unbounded" on that side; two missing bounds
   compare equal.  */
5086 int lowequal = ((low0 == 0 && low1 == 0)
5087 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
5088 low0, 0, low1, 0)));
5089 int highequal = ((high0 == 0 && high1 == 0)
5090 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
5091 high0, 1, high1, 1)));
5093 /* Make range 0 be the range that starts first, or ends last if they
5094 start at the same value. Swap them if it isn't. */
5095 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
5098 && integer_onep (range_binop (GT_EXPR, integer_type_node,
5099 high1, 1, high0, 1))))
5101 temp = in0_p, in0_p = in1_p, in1_p = temp;
5102 tem = low0, low0 = low1, low1 = tem;
5103 tem = high0, high0 = high1, high1 = tem;
5106 /* Now flag two cases, whether the ranges are disjoint or whether the
5107 second range is totally subsumed in the first. Note that the tests
5108 below are simplified by the ones above. */
5109 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
5110 high0, 1, low1, 0));
5111 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
5112 high1, 1, high0, 1));
5114 /* We now have four cases, depending on whether we are including or
5115 excluding the two ranges. */
5118 /* If they don't overlap, the result is false. If the second range
5119 is a subset it is the result. Otherwise, the range is from the start
5120 of the second to the end of the first. */
/* The three assignments below correspond, in order, to the three
   cases in the comment above (disjoint / subset / overlap).  */
5122 in_p = 0, low = high = 0;
5124 in_p = 1, low = low1, high = high1;
5126 in_p = 1, low = low1, high = high0;
5129 else if (in0_p && ! in1_p)
5131 /* If they don't overlap, the result is the first range. If they are
5132 equal, the result is false. If the second range is a subset of the
5133 first, and the ranges begin at the same place, we go from just after
5134 the end of the second range to the end of the first. If the second
5135 range is not a subset of the first, or if it is a subset and both
5136 ranges end at the same place, the range starts at the start of the
5137 first range and ends just before the second range.
5138 Otherwise, we can't describe this as a single range. */
5140 in_p = 1, low = low0, high = high0;
5141 else if (lowequal && highequal)
5142 in_p = 0, low = high = 0;
5143 else if (subset && lowequal)
5145 low = range_successor (high1);
5150 /* We are in the weird situation where high0 > high1 but
5151 high1 has no successor. Punt. */
5155 else if (! subset || highequal)
5158 high = range_predecessor (low1);
5162 /* low0 < low1 but low1 has no predecessor. Punt. */
5170 else if (! in0_p && in1_p)
5172 /* If they don't overlap, the result is the second range. If the second
5173 is a subset of the first, the result is false. Otherwise,
5174 the range starts just after the first range and ends at the
5175 end of the second. */
5177 in_p = 1, low = low1, high = high1;
5178 else if (subset || highequal)
5179 in_p = 0, low = high = 0;
5182 low = range_successor (high0);
5187 /* high1 > high0 but high0 has no successor. Punt. */
5195 /* The case where we are excluding both ranges. Here the complex case
5196 is if they don't overlap. In that case, the only time we have a
5197 range is if they are adjacent. If the second is a subset of the
5198 first, the result is the first. Otherwise, the range to exclude
5199 starts at the beginning of the first range and ends at the end of the
/* Adjacency test: HIGH0 + 1 == LOW1 means the two excluded ranges
   abut and can be merged into one excluded span.  */
5203 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
5204 range_successor (high0),
5206 in_p = 0, low = low0, high = high1;
5209 /* Canonicalize - [min, x] into - [-, x]. */
5210 if (low0 && TREE_CODE (low0) == INTEGER_CST)
5211 switch (TREE_CODE (TREE_TYPE (low0)))
/* Skip types whose precision is narrower than the mode; the mode
   minimum would not match the type minimum there.  */
5214 if (TYPE_PRECISION (TREE_TYPE (low0))
5215 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
5219 if (tree_int_cst_equal (low0,
5220 TYPE_MIN_VALUE (TREE_TYPE (low0))))
5224 if (TYPE_UNSIGNED (TREE_TYPE (low0))
5225 && integer_zerop (low0))
5232 /* Canonicalize - [x, max] into - [x, -]. */
5233 if (high1 && TREE_CODE (high1) == INTEGER_CST)
5234 switch (TREE_CODE (TREE_TYPE (high1)))
5237 if (TYPE_PRECISION (TREE_TYPE (high1))
5238 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
5242 if (tree_int_cst_equal (high1,
5243 TYPE_MAX_VALUE (TREE_TYPE (high1))))
5247 if (TYPE_UNSIGNED (TREE_TYPE (high1))
5248 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
5250 integer_one_node, 1)))
5257 /* The ranges might be also adjacent between the maximum and
5258 minimum values of the given type. For
5259 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
5260 return + [x + 1, y - 1]. */
5261 if (low0 == 0 && high1 == 0)
5263 low = range_successor (high0);
5264 high = range_predecessor (low1);
5265 if (low == 0 || high == 0)
5275 in_p = 0, low = low0, high = high0;
5277 in_p = 0, low = low0, high = high1;
/* Write the merged range back through the output parameters.  */
5280 *pin_p = in_p, *plow = low, *phigh = high;
5285 /* Subroutine of fold, looking inside expressions of the form
5286 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
5287 of the COND_EXPR. This function is being used also to optimize
5288 A op B ? C : A, by reversing the comparison first.
5290 Return a folded expression whose code is not a COND_EXPR
5291 anymore, or NULL_TREE if no folding opportunity is found. */
5294 fold_cond_expr_with_comparison (location_t loc, tree type,
5295 tree arg0, tree arg1, tree arg2)
5297 enum tree_code comp_code = TREE_CODE (arg0);
5298 tree arg00 = TREE_OPERAND (arg0, 0);
5299 tree arg01 = TREE_OPERAND (arg0, 1);
5300 tree arg1_type = TREE_TYPE (arg1);
5306 /* If we have A op 0 ? A : -A, consider applying the following
5309 A == 0? A : -A same as -A
5310 A != 0? A : -A same as A
5311 A >= 0? A : -A same as abs (A)
5312 A > 0? A : -A same as abs (A)
5313 A <= 0? A : -A same as -abs (A)
5314 A < 0? A : -A same as -abs (A)
5316 None of these transformations work for modes with signed
5317 zeros. If A is +/-0, the first two transformations will
5318 change the sign of the result (from +0 to -0, or vice
5319 versa). The last four will fix the sign of the result,
5320 even though the original expressions could be positive or
5321 negative, depending on the sign of A.
5323 Note that all these transformations are correct if A is
5324 NaN, since the two alternatives (A and -A) are also NaNs. */
5325 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
5326 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
5327 ? real_zerop (arg01)
5328 : integer_zerop (arg01))
5329 && ((TREE_CODE (arg2) == NEGATE_EXPR
5330 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
5331 /* In the case that A is of the form X-Y, '-A' (arg2) may
5332 have already been folded to Y-X, check for that. */
5333 || (TREE_CODE (arg1) == MINUS_EXPR
5334 && TREE_CODE (arg2) == MINUS_EXPR
5335 && operand_equal_p (TREE_OPERAND (arg1, 0),
5336 TREE_OPERAND (arg2, 1), 0)
5337 && operand_equal_p (TREE_OPERAND (arg1, 1),
5338 TREE_OPERAND (arg2, 0), 0))))
/* Per the table above: fold to -A.  NOTE(review): the comp_code
   switch labels are not visible here -- confirm which case this is.  */
5343 tem = fold_convert_loc (loc, arg1_type, arg1);
5344 return pedantic_non_lvalue_loc (loc,
5345 fold_convert_loc (loc, type,
5346 negate_expr (tem)));
5349 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
/* abs (A): ABS_EXPR needs a signed operand, so strip unsignedness
   first.  Skipped when -ftrapping-math could change behavior.  */
5352 if (flag_trapping_math)
5357 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
5358 arg1 = fold_convert_loc (loc, signed_type_for
5359 (TREE_TYPE (arg1)), arg1);
5360 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
5361 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
/* -abs (A): same signedness handling as above, then negate.  */
5364 if (flag_trapping_math)
5368 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
5369 arg1 = fold_convert_loc (loc, signed_type_for
5370 (TREE_TYPE (arg1)), arg1);
5371 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
5372 return negate_expr (fold_convert_loc (loc, type, tem));
5374 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5378 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
5379 A == 0 ? A : 0 is always 0 unless A is -0. Note that
5380 both transformations are correct when A is NaN: A != 0
5381 is then true, and A == 0 is false. */
5383 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
5384 && integer_zerop (arg01) && integer_zerop (arg2))
5386 if (comp_code == NE_EXPR)
5387 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
5388 else if (comp_code == EQ_EXPR)
5389 return build_int_cst (type, 0);
5392 /* Try some transformations of A op B ? A : B.
5394 A == B? A : B same as B
5395 A != B? A : B same as A
5396 A >= B? A : B same as max (A, B)
5397 A > B? A : B same as max (B, A)
5398 A <= B? A : B same as min (A, B)
5399 A < B? A : B same as min (B, A)
5401 As above, these transformations don't work in the presence
5402 of signed zeros. For example, if A and B are zeros of
5403 opposite sign, the first two transformations will change
5404 the sign of the result. In the last four, the original
5405 expressions give different results for (A=+0, B=-0) and
5406 (A=-0, B=+0), but the transformed expressions do not.
5408 The first two transformations are correct if either A or B
5409 is a NaN. In the first transformation, the condition will
5410 be false, and B will indeed be chosen. In the case of the
5411 second transformation, the condition A != B will be true,
5412 and A will be chosen.
5414 The conversions to max() and min() are not correct if B is
5415 a number and A is not. The conditions in the original
5416 expressions will be false, so all four give B. The min()
5417 and max() versions would give a NaN instead. */
5418 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
5419 && operand_equal_for_comparison_p (arg01, arg2, arg00)
5420 /* Avoid these transformations if the COND_EXPR may be used
5421 as an lvalue in the C++ front-end. PR c++/19199. */
5423 || (strcmp (lang_hooks.name, "GNU C++") != 0
5424 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
5425 || ! maybe_lvalue_p (arg1)
5426 || ! maybe_lvalue_p (arg2)))
5428 tree comp_op0 = arg00;
5429 tree comp_op1 = arg01;
5430 tree comp_type = TREE_TYPE (comp_op0);
5432 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
5433 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
5443 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg2));
5445 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
5450 /* In C++ a ?: expression can be an lvalue, so put the
5451 operand which will be used if they are equal first
5452 so that we can convert this back to the
5453 corresponding COND_EXPR. */
/* MIN_EXPR path (LE/LT and unordered variants); only valid when
   NaNs need not be honored for the operand type.  */
5454 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
5456 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
5457 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
5458 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
5459 ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
5460 : fold_build2_loc (loc, MIN_EXPR, comp_type,
5461 comp_op1, comp_op0);
5462 return pedantic_non_lvalue_loc (loc,
5463 fold_convert_loc (loc, type, tem));
/* MAX_EXPR path (GE/GT and unordered variants), symmetric to the
   MIN_EXPR case above.  */
5470 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
5472 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
5473 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
5474 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
5475 ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
5476 : fold_build2_loc (loc, MAX_EXPR, comp_type,
5477 comp_op1, comp_op0);
5478 return pedantic_non_lvalue_loc (loc,
5479 fold_convert_loc (loc, type, tem));
5483 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
5484 return pedantic_non_lvalue_loc (loc,
5485 fold_convert_loc (loc, type, arg2));
5488 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
5489 return pedantic_non_lvalue_loc (loc,
5490 fold_convert_loc (loc, type, arg1));
5493 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5498 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
5499 we might still be able to simplify this. For example,
5500 if C1 is one less or one more than C2, this might have started
5501 out as a MIN or MAX and been transformed by this function.
5502 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
5504 if (INTEGRAL_TYPE_P (type)
5505 && TREE_CODE (arg01) == INTEGER_CST
5506 && TREE_CODE (arg2) == INTEGER_CST)
5510 if (TREE_CODE (arg1) == INTEGER_CST)
5512 /* We can replace A with C1 in this case. */
5513 arg1 = fold_convert_loc (loc, type, arg01);
5514 return fold_build3_loc (loc, COND_EXPR, type, arg0, arg1, arg2);
5517 /* If C1 is C2 + 1, this is min(A, C2), but use ARG00's type for
5518 MIN_EXPR, to preserve the signedness of the comparison. */
5519 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
5521 && operand_equal_p (arg01,
5522 const_binop (PLUS_EXPR, arg2,
5523 build_int_cst (type, 1), 0),
5526 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
5527 fold_convert_loc (loc, TREE_TYPE (arg00),
5529 return pedantic_non_lvalue_loc (loc,
5530 fold_convert_loc (loc, type, tem));
5535 /* If C1 is C2 - 1, this is min(A, C2), with the same care
5537 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
5539 && operand_equal_p (arg01,
5540 const_binop (MINUS_EXPR, arg2,
5541 build_int_cst (type, 1), 0),
5544 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
5545 fold_convert_loc (loc, TREE_TYPE (arg00),
5547 return pedantic_non_lvalue_loc (loc,
5548 fold_convert_loc (loc, type, tem));
5553 /* If C1 is C2 - 1, this is max(A, C2), but use ARG00's type for
5554 MAX_EXPR, to preserve the signedness of the comparison. */
5555 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
5557 && operand_equal_p (arg01,
5558 const_binop (MINUS_EXPR, arg2,
5559 build_int_cst (type, 1), 0),
5562 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
5563 fold_convert_loc (loc, TREE_TYPE (arg00),
5565 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
5570 /* If C1 is C2 + 1, this is max(A, C2), with the same care as above. */
5571 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
5573 && operand_equal_p (arg01,
5574 const_binop (PLUS_EXPR, arg2,
5575 build_int_cst (type, 1), 0),
5578 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
5579 fold_convert_loc (loc, TREE_TYPE (arg00),
5581 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
/* Heuristic: nonzero when it is profitable to evaluate both operands
   of a short-circuit logical operation unconditionally -- presumably
   keyed off BRANCH_COST; confirm the full (continued) definition.  */
5595 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
5596 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
5597 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
5601 /* EXP is some logical combination of boolean tests. See if we can
5602 merge it into some range test. Return the new tree if so. */
5605 fold_range_test (location_t loc, enum tree_code code, tree type,
5608 int or_op = (code == TRUTH_ORIF_EXPR
5609 || code == TRUTH_OR_EXPR)
5610 int in0_p, in1_p, in_p;
5611 tree low0, low1, low, high0, high1, high;
5612 bool strict_overflow_p = false;
/* Decompose each operand into a range test: "EXP in [LOW, HIGH]"
   (or its complement, per the IN_P flag).  */
5613 tree lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
5614 tree rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
5616 const char * const warnmsg = G_("assuming signed overflow does not occur "
5617 "when simplifying range test");
5619 /* If this is an OR operation, invert both sides; we will invert
5620 again at the end. */
5622 in0_p = ! in0_p, in1_p = ! in1_p;
5624 /* If both expressions are the same, if we can merge the ranges, and we
5625 can build the range test, return it or it inverted. If one of the
5626 ranges is always true or always false, consider it to be the same
5627 expression as the other. */
5628 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
5629 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
5631 && 0 != (tem = (build_range_check (UNKNOWN_LOCATION, type,
5633 : rhs != 0 ? rhs : integer_zero_node,
5636 if (strict_overflow_p)
5637 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
5638 return or_op ? invert_truthvalue_loc (loc, tem) : tem;
5641 /* On machines where the branch cost is expensive, if this is a
5642 short-circuited branch and the underlying object on both sides
5643 is the same, make a non-short-circuit operation. */
5644 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
5645 && lhs != 0 && rhs != 0
5646 && (code == TRUTH_ANDIF_EXPR
5647 || code == TRUTH_ORIF_EXPR)
5648 && operand_equal_p (lhs, rhs, 0))
5650 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
5651 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
5652 which cases we can't do this. */
5653 if (simple_operand_p (lhs))
5655 tem = build2 (code == TRUTH_ANDIF_EXPR
5656 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5658 SET_EXPR_LOCATION (tem, loc);
5662 else if (lang_hooks.decls.global_bindings_p () == 0
5663 && ! CONTAINS_PLACEHOLDER_P (lhs))
/* Wrap the common operand in a SAVE_EXPR so it is evaluated only
   once when both range checks reference it.  */
5665 tree common = save_expr (lhs);
5667 if (0 != (lhs = build_range_check (loc, type, common,
5668 or_op ? ! in0_p : in0_p,
5670 && (0 != (rhs = build_range_check (loc, type, common,
5671 or_op ? ! in1_p : in1_p,
5674 if (strict_overflow_p)
5675 fold_overflow_warning (warnmsg,
5676 WARN_STRICT_OVERFLOW_COMPARISON);
5677 tem = build2 (code == TRUTH_ANDIF_EXPR
5678 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5680 SET_EXPR_LOCATION (tem, loc);
5689 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
5690 bit value. Arrange things so the extra bits will be set to zero if and
5691 only if C is signed-extended to its full width. If MASK is nonzero,
5692 it is an INTEGER_CST that should be AND'ed with the extra bits. */
5695 unextend (tree c, int p, int unsignedp, tree mask)
5697 tree type = TREE_TYPE (c);
5698 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
/* Full width already, or zero-extension requested: nothing to undo
   -- presumably returns C unchanged (the return is not visible
   here; confirm in full source).  */
5701 if (p == modesize || unsignedp)
5704 /* We work by getting just the sign bit into the low-order bit, then
5705 into the high-order bit, then sign-extend. We then XOR that value
5707 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
5708 temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
5710 /* We must use a signed type in order to get an arithmetic right shift.
5711 However, we must also avoid introducing accidental overflows, so that
5712 a subsequent call to integer_zerop will work. Hence we must
5713 do the type conversion here. At this point, the constant is either
5714 zero or one, and the conversion to a signed type can never overflow.
5715 We could get an overflow if this conversion is done anywhere else. */
5716 if (TYPE_UNSIGNED (type))
5717 temp = fold_convert (signed_type_for (type), temp);
5719 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
5720 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
/* Restrict the extension bits to those selected by MASK, if given.  */
5722 temp = const_binop (BIT_AND_EXPR, temp,
5723 fold_convert (TREE_TYPE (c), mask),
5725 /* If necessary, convert the type back to match the type of C. */
5726 if (TYPE_UNSIGNED (type))
5727 temp = fold_convert (type, temp);
5729 return fold_convert (type,
5730 const_binop (BIT_XOR_EXPR, c, temp, 0));
5733 /* Find ways of folding logical expressions of LHS and RHS:
5734 Try to merge two comparisons to the same innermost item.
5735 Look for range tests like "ch >= '0' && ch <= '9'".
5736 Look for combinations of simple terms on machines with expensive branches
5737 and evaluate the RHS unconditionally.
5739 For example, if we have p->a == 2 && p->b == 4 and we can make an
5740 object large enough to span both A and B, we can do this with a comparison
5741 against the object ANDed with the a mask.
5743 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5744 operations to do this with one comparison.
5746 We check for both normal comparisons and the BIT_AND_EXPRs made this by
5747 function and the one above.
5749 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5750 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5752 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5755 We return the simplified tree or 0 if no optimization is possible. */
5758 fold_truthop (location_t loc, enum tree_code code, tree truth_type,
5761 /* If this is the "or" of two comparisons, we can do something if
5762 the comparisons are NE_EXPR. If this is the "and", we can do something
5763 if the comparisons are EQ_EXPR. I.e.,
5764 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5766 WANTED_CODE is this operation code. For single bit fields, we can
5767 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5768 comparison for one-bit fields. */
5770 enum tree_code wanted_code;
5771 enum tree_code lcode, rcode;
5772 tree ll_arg, lr_arg, rl_arg, rr_arg;
5773 tree ll_inner, lr_inner, rl_inner, rr_inner;
5774 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5775 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5776 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5777 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5778 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5779 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5780 enum machine_mode lnmode, rnmode;
5781 tree ll_mask, lr_mask, rl_mask, rr_mask;
5782 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5783 tree l_const, r_const;
5784 tree lntype, rntype, result;
5785 HOST_WIDE_INT first_bit, end_bit;
5787 tree orig_lhs = lhs, orig_rhs = rhs;
5788 enum tree_code orig_code = code;
5790 /* Start by getting the comparison codes. Fail if anything is volatile.
5791 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5792 it were surrounded with a NE_EXPR. */
5794 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5797 lcode = TREE_CODE (lhs);
5798 rcode = TREE_CODE (rhs);
5800 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5802 lhs = build2 (NE_EXPR, truth_type, lhs,
5803 build_int_cst (TREE_TYPE (lhs), 0));
5807 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5809 rhs = build2 (NE_EXPR, truth_type, rhs,
5810 build_int_cst (TREE_TYPE (rhs), 0));
5814 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5815 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5818 ll_arg = TREE_OPERAND (lhs, 0);
5819 lr_arg = TREE_OPERAND (lhs, 1);
5820 rl_arg = TREE_OPERAND (rhs, 0);
5821 rr_arg = TREE_OPERAND (rhs, 1);
5823 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5824 if (simple_operand_p (ll_arg)
5825 && simple_operand_p (lr_arg))
5828 if (operand_equal_p (ll_arg, rl_arg, 0)
5829 && operand_equal_p (lr_arg, rr_arg, 0))
5831 result = combine_comparisons (loc, code, lcode, rcode,
5832 truth_type, ll_arg, lr_arg);
5836 else if (operand_equal_p (ll_arg, rr_arg, 0)
5837 && operand_equal_p (lr_arg, rl_arg, 0))
5839 result = combine_comparisons (loc, code, lcode,
5840 swap_tree_comparison (rcode),
5841 truth_type, ll_arg, lr_arg);
5847 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5848 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5850 /* If the RHS can be evaluated unconditionally and its operands are
5851 simple, it wins to evaluate the RHS unconditionally on machines
5852 with expensive branches. In this case, this isn't a comparison
5853 that can be merged. Avoid doing this if the RHS is a floating-point
5854 comparison since those can trap. */
5856 if (BRANCH_COST (optimize_function_for_speed_p (cfun),
5858 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5859 && simple_operand_p (rl_arg)
5860 && simple_operand_p (rr_arg))
5862 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5863 if (code == TRUTH_OR_EXPR
5864 && lcode == NE_EXPR && integer_zerop (lr_arg)
5865 && rcode == NE_EXPR && integer_zerop (rr_arg)
5866 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5867 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5869 result = build2 (NE_EXPR, truth_type,
5870 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5872 build_int_cst (TREE_TYPE (ll_arg), 0));
5873 goto fold_truthop_exit;
5876 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5877 if (code == TRUTH_AND_EXPR
5878 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5879 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5880 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5881 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5883 result = build2 (EQ_EXPR, truth_type,
5884 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5886 build_int_cst (TREE_TYPE (ll_arg), 0));
5887 goto fold_truthop_exit;
5890 if (LOGICAL_OP_NON_SHORT_CIRCUIT)
5892 if (code != orig_code || lhs != orig_lhs || rhs != orig_rhs)
5894 result = build2 (code, truth_type, lhs, rhs);
5895 goto fold_truthop_exit;
5901 /* See if the comparisons can be merged. Then get all the parameters for
5904 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5905 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5909 ll_inner = decode_field_reference (loc, ll_arg,
5910 &ll_bitsize, &ll_bitpos, &ll_mode,
5911 &ll_unsignedp, &volatilep, &ll_mask,
5913 lr_inner = decode_field_reference (loc, lr_arg,
5914 &lr_bitsize, &lr_bitpos, &lr_mode,
5915 &lr_unsignedp, &volatilep, &lr_mask,
5917 rl_inner = decode_field_reference (loc, rl_arg,
5918 &rl_bitsize, &rl_bitpos, &rl_mode,
5919 &rl_unsignedp, &volatilep, &rl_mask,
5921 rr_inner = decode_field_reference (loc, rr_arg,
5922 &rr_bitsize, &rr_bitpos, &rr_mode,
5923 &rr_unsignedp, &volatilep, &rr_mask,
5926 /* It must be true that the inner operation on the lhs of each
5927 comparison must be the same if we are to be able to do anything.
5928 Then see if we have constants. If not, the same must be true for
5930 if (volatilep || ll_inner == 0 || rl_inner == 0
5931 || ! operand_equal_p (ll_inner, rl_inner, 0))
5934 if (TREE_CODE (lr_arg) == INTEGER_CST
5935 && TREE_CODE (rr_arg) == INTEGER_CST)
5936 l_const = lr_arg, r_const = rr_arg;
5937 else if (lr_inner == 0 || rr_inner == 0
5938 || ! operand_equal_p (lr_inner, rr_inner, 0))
5941 l_const = r_const = 0;
5943 /* If either comparison code is not correct for our logical operation,
5944 fail. However, we can convert a one-bit comparison against zero into
5945 the opposite comparison against that bit being set in the field. */
5947 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5948 if (lcode != wanted_code)
5950 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5952 /* Make the left operand unsigned, since we are only interested
5953 in the value of one bit. Otherwise we are doing the wrong
5962 /* This is analogous to the code for l_const above. */
5963 if (rcode != wanted_code)
5965 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5974 /* See if we can find a mode that contains both fields being compared on
5975 the left. If we can't, fail. Otherwise, update all constants and masks
5976 to be relative to a field of that size. */
5977 first_bit = MIN (ll_bitpos, rl_bitpos);
5978 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5979 lnmode = get_best_mode (end_bit - first_bit, first_bit,
5980 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5982 if (lnmode == VOIDmode)
5985 lnbitsize = GET_MODE_BITSIZE (lnmode);
5986 lnbitpos = first_bit & ~ (lnbitsize - 1);
5987 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5988 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5990 if (BYTES_BIG_ENDIAN)
5992 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5993 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5996 ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
5997 size_int (xll_bitpos), 0);
5998 rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
5999 size_int (xrl_bitpos), 0);
6003 l_const = fold_convert_loc (loc, lntype, l_const);
6004 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
6005 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
6006 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
6007 fold_build1_loc (loc, BIT_NOT_EXPR,
6011 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
6013 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
6018 r_const = fold_convert_loc (loc, lntype, r_const);
6019 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
6020 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
6021 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
6022 fold_build1_loc (loc, BIT_NOT_EXPR,
6026 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
6028 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
6032 /* If the right sides are not constant, do the same for it. Also,
6033 disallow this optimization if a size or signedness mismatch occurs
6034 between the left and right sides. */
6037 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
6038 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
6039 /* Make sure the two fields on the right
6040 correspond to the left without being swapped. */
6041 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
6044 first_bit = MIN (lr_bitpos, rr_bitpos);
6045 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
6046 rnmode = get_best_mode (end_bit - first_bit, first_bit,
6047 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
6049 if (rnmode == VOIDmode)
6052 rnbitsize = GET_MODE_BITSIZE (rnmode);
6053 rnbitpos = first_bit & ~ (rnbitsize - 1);
6054 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
6055 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
6057 if (BYTES_BIG_ENDIAN)
6059 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
6060 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
6063 lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
6065 size_int (xlr_bitpos), 0);
6066 rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
6068 size_int (xrr_bitpos), 0);
6070 /* Make a mask that corresponds to both fields being compared.
6071 Do this for both items being compared. If the operands are the
6072 same size and the bits being compared are in the same position
6073 then we can do this by masking both and comparing the masked
6075 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
6076 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
6077 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
6079 lhs = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
6080 ll_unsignedp || rl_unsignedp);
6081 if (! all_ones_mask_p (ll_mask, lnbitsize))
6082 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
6084 rhs = make_bit_field_ref (loc, lr_inner, rntype, rnbitsize, rnbitpos,
6085 lr_unsignedp || rr_unsignedp);
6086 if (! all_ones_mask_p (lr_mask, rnbitsize))
6087 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
6089 result = build2 (wanted_code, truth_type, lhs, rhs);
6090 goto fold_truthop_exit;
6093 /* There is still another way we can do something: If both pairs of
6094 fields being compared are adjacent, we may be able to make a wider
6095 field containing them both.
6097 Note that we still must mask the lhs/rhs expressions. Furthermore,
6098 the mask must be shifted to account for the shift done by
6099 make_bit_field_ref. */
6100 if ((ll_bitsize + ll_bitpos == rl_bitpos
6101 && lr_bitsize + lr_bitpos == rr_bitpos)
6102 || (ll_bitpos == rl_bitpos + rl_bitsize
6103 && lr_bitpos == rr_bitpos + rr_bitsize))
6107 lhs = make_bit_field_ref (loc, ll_inner, lntype,
6108 ll_bitsize + rl_bitsize,
6109 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
6110 rhs = make_bit_field_ref (loc, lr_inner, rntype,
6111 lr_bitsize + rr_bitsize,
6112 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
6114 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
6115 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
6116 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
6117 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);
6119 /* Convert to the smaller type before masking out unwanted bits. */
6121 if (lntype != rntype)
6123 if (lnbitsize > rnbitsize)
6125 lhs = fold_convert_loc (loc, rntype, lhs);
6126 ll_mask = fold_convert_loc (loc, rntype, ll_mask);
6129 else if (lnbitsize < rnbitsize)
6131 rhs = fold_convert_loc (loc, lntype, rhs);
6132 lr_mask = fold_convert_loc (loc, lntype, lr_mask);
6137 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
6138 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
6140 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
6141 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
6143 result = build2 (wanted_code, truth_type, lhs, rhs);
6144 goto fold_truthop_exit;
6150 /* Handle the case of comparisons with constants. If there is something in
6151 common between the masks, those bits of the constants must be the same.
6152 If not, the condition is always false. Test for this to avoid generating
6153 incorrect code below. */
6154 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
6155 if (! integer_zerop (result)
6156 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
6157 const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
6159 if (wanted_code == NE_EXPR)
6161 warning (0, "%<or%> of unmatched not-equal tests is always 1");
6162 return constant_boolean_node (true, truth_type);
6166 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
6167 return constant_boolean_node (false, truth_type);
6171 /* Construct the expression we will return. First get the component
6172 reference we will make. Unless the mask is all ones the width of
6173 that field, perform the mask operation. Then compare with the
6175 result = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
6176 ll_unsignedp || rl_unsignedp);
6178 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
6179 if (! all_ones_mask_p (ll_mask, lnbitsize))
6181 result = build2 (BIT_AND_EXPR, lntype, result, ll_mask);
6182 SET_EXPR_LOCATION (result, loc);
6185 result = build2 (wanted_code, truth_type, result,
6186 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
6189 SET_EXPR_LOCATION (result, loc);
6193 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
/* NOTE(review): this is a numbered listing with elided lines -- the embedded
   original line numbers jump (6193 -> 6197, 6221 -> 6224, ...), so the
   return type, braces, some 'case' labels and the switch header are missing
   from view.  Code kept byte-identical; comments only added.  */
6197 optimize_minmax_comparison (location_t loc, enum tree_code code, tree type,
6201 enum tree_code op_code;
6204 int consts_equal, consts_lt;
6207 STRIP_SIGN_NOPS (arg0);
/* Decompose ARG0 as OP_CODE (inner, minmax_const) and convert the
   comparison operand OP1 to ARG0's type before comparing constants.  */
6209 op_code = TREE_CODE (arg0);
6210 minmax_const = TREE_OPERAND (arg0, 1);
6211 comp_const = fold_convert_loc (loc, TREE_TYPE (arg0), op1);
6212 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
6213 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
6214 inner = TREE_OPERAND (arg0, 0);
6216 /* If something does not permit us to optimize, return the original tree. */
6217 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
6218 || TREE_CODE (comp_const) != INTEGER_CST
6219 || TREE_OVERFLOW (comp_const)
6220 || TREE_CODE (minmax_const) != INTEGER_CST
6221 || TREE_OVERFLOW (minmax_const))
6224 /* Now handle all the various comparison codes. We only handle EQ_EXPR
6225 and GT_EXPR, doing the rest with recursive calls using logical
6229 case NE_EXPR: case LT_EXPR: case LE_EXPR:
/* NE/LT/LE: recurse on the inverted comparison, then invert the result.  */
6232 = optimize_minmax_comparison (loc,
6233 invert_tree_comparison (code, false),
6236 return invert_truthvalue_loc (loc, tem);
/* Presumably the GE case (elided above): handled as EQ || GT below --
   TODO confirm against the unelided source.  */
6242 fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
6243 optimize_minmax_comparison
6244 (loc, EQ_EXPR, type, arg0, comp_const),
6245 optimize_minmax_comparison
6246 (loc, GT_EXPR, type, arg0, comp_const));
6249 if (op_code == MAX_EXPR && consts_equal)
6250 /* MAX (X, 0) == 0 -> X <= 0 */
6251 return fold_build2_loc (loc, LE_EXPR, type, inner, comp_const);
6253 else if (op_code == MAX_EXPR && consts_lt)
6254 /* MAX (X, 0) == 5 -> X == 5 */
6255 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
6257 else if (op_code == MAX_EXPR)
6258 /* MAX (X, 0) == -1 -> false */
6259 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
6261 else if (consts_equal)
6262 /* MIN (X, 0) == 0 -> X >= 0 */
6263 return fold_build2_loc (loc, GE_EXPR, type, inner, comp_const);
6266 /* MIN (X, 0) == 5 -> false */
6267 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
6270 /* MIN (X, 0) == -1 -> X == -1 */
6271 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
/* GT_EXPR case (its 'case' label is elided above).  */
6274 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
6275 /* MAX (X, 0) > 0 -> X > 0
6276 MAX (X, 0) > 5 -> X > 5 */
6277 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
6279 else if (op_code == MAX_EXPR)
6280 /* MAX (X, 0) > -1 -> true */
6281 return omit_one_operand_loc (loc, type, integer_one_node, inner);
6283 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
6284 /* MIN (X, 0) > 0 -> false
6285 MIN (X, 0) > 5 -> false */
6286 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
6289 /* MIN (X, 0) > -1 -> X > -1 */
6290 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
6297 /* T is an integer expression that is being multiplied, divided, or taken a
6298 modulus (CODE says which and what kind of divide or modulus) by a
6299 constant C. See if we can eliminate that operation by folding it with
6300 other operations already in T. WIDE_TYPE, if non-null, is a type that
6301 should be used for the computation if wider than our type.
6303 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
6304 (X * 2) + (Y * 4). We must, however, be assured that either the original
6305 expression would not overflow or that overflow is undefined for the type
6306 in the language in question.
6308 If we return a non-null expression, it is an equivalent form of the
6309 original computation, but need not be in the original type.
6311 We set *STRICT_OVERFLOW_P to true if the return values depends on
6312 signed overflow being undefined. Otherwise we do not change
6313 *STRICT_OVERFLOW_P. */
/* NOTE(review): elided listing -- original lines 6323-6330 and 6332 ff. are
   missing here; presumably they hold the depth counter bookkeeping described
   by the comment below and the return of RET.  TODO confirm against the
   unelided source.  Code kept byte-identical; comments only added.  */
6316 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
6317 bool *strict_overflow_p)
6319 /* To avoid exponential search depth, refuse to allow recursion past
6320 three levels. Beyond that (1) it's highly unlikely that we'll find
6321 something interesting and (2) we've probably processed it before
6322 when we built the inner expression. */
/* The real work happens in extract_muldiv_1; this wrapper only bounds
   recursion depth.  */
6331 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
/* Worker for extract_muldiv (see its comment above for the contract).
   NOTE(review): elided listing -- switch header, braces, several 'case'
   labels (e.g. INTEGER_CST, ABS_EXPR, NEGATE_EXPR) and some statements are
   missing between the numbered lines.  Code kept byte-identical.  */
6338 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
6339 bool *strict_overflow_p)
6341 tree type = TREE_TYPE (t);
6342 enum tree_code tcode = TREE_CODE (t);
/* CTYPE is WIDE_TYPE when it is strictly wider than T's type, else T's
   own type; all rebuilt expressions are converted to CTYPE.  */
6343 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
6344 > GET_MODE_SIZE (TYPE_MODE (type)))
6345 ? wide_type : type);
6347 int same_p = tcode == code;
6348 tree op0 = NULL_TREE, op1 = NULL_TREE;
6349 bool sub_strict_overflow_p;
6351 /* Don't deal with constants of zero here; they confuse the code below. */
6352 if (integer_zerop (c))
6355 if (TREE_CODE_CLASS (tcode) == tcc_unary)
6356 op0 = TREE_OPERAND (t, 0);
6358 if (TREE_CODE_CLASS (tcode) == tcc_binary)
6359 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
6361 /* Note that we need not handle conditional operations here since fold
6362 already handles those cases. So just do arithmetic here. */
/* Constant T (case label elided): fold the operation directly when it is
   a multiply, or when T is an exact multiple of C.  */
6366 /* For a constant, we can always simplify if we are a multiply
6367 or (for divide and modulus) if it is a multiple of our constant. */
6368 if (code == MULT_EXPR
6369 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
6370 return const_binop (code, fold_convert (ctype, t),
6371 fold_convert (ctype, c), 0);
6374 CASE_CONVERT: case NON_LVALUE_EXPR:
6375 /* If op0 is an expression ... */
6376 if ((COMPARISON_CLASS_P (op0)
6377 || UNARY_CLASS_P (op0)
6378 || BINARY_CLASS_P (op0)
6379 || VL_EXP_CLASS_P (op0)
6380 || EXPRESSION_CLASS_P (op0))
6381 /* ... and has wrapping overflow, and its type is smaller
6382 than ctype, then we cannot pass through as widening. */
6383 && ((TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0))
6384 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
6385 && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
6386 && (TYPE_PRECISION (ctype)
6387 > TYPE_PRECISION (TREE_TYPE (op0))))
6388 /* ... or this is a truncation (t is narrower than op0),
6389 then we cannot pass through this narrowing. */
6390 || (TYPE_PRECISION (type)
6391 < TYPE_PRECISION (TREE_TYPE (op0)))
6392 /* ... or signedness changes for division or modulus,
6393 then we cannot pass through this conversion. */
6394 || (code != MULT_EXPR
6395 && (TYPE_UNSIGNED (ctype)
6396 != TYPE_UNSIGNED (TREE_TYPE (op0))))
6397 /* ... or has undefined overflow while the converted to
6398 type has not, we cannot do the operation in the inner type
6399 as that would introduce undefined overflow. */
6400 || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
6401 && !TYPE_OVERFLOW_UNDEFINED (type))))
6404 /* Pass the constant down and see if we can make a simplification. If
6405 we can, replace this expression with the inner simplification for
6406 possible later conversion to our or some other type. */
6407 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
6408 && TREE_CODE (t2) == INTEGER_CST
6409 && !TREE_OVERFLOW (t2)
6410 && (0 != (t1 = extract_muldiv (op0, t2, code,
6412 ? ctype : NULL_TREE,
6413 strict_overflow_p))))
/* ABS_EXPR case (label elided above) -- TODO confirm.  */
6418 /* If widening the type changes it from signed to unsigned, then we
6419 must avoid building ABS_EXPR itself as unsigned. */
6420 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
6422 tree cstype = (*signed_type_for) (ctype);
6423 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
6426 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
6427 return fold_convert (ctype, t1);
6431 /* If the constant is negative, we cannot simplify this. */
6432 if (tree_int_cst_sgn (c) == -1)
/* Fall-through / NEGATE_EXPR handling (label elided): recurse into the
   single operand and rebuild the unary node.  */
6436 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
6438 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
6441 case MIN_EXPR: case MAX_EXPR:
6442 /* If widening the type changes the signedness, then we can't perform
6443 this optimization as that changes the result. */
6444 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
6447 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
6448 sub_strict_overflow_p = false;
6449 if ((t1 = extract_muldiv (op0, c, code, wide_type,
6450 &sub_strict_overflow_p)) != 0
6451 && (t2 = extract_muldiv (op1, c, code, wide_type,
6452 &sub_strict_overflow_p)) != 0)
/* A negative C reverses the ordering, so MIN and MAX swap.  */
6454 if (tree_int_cst_sgn (c) < 0)
6455 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
6456 if (sub_strict_overflow_p)
6457 *strict_overflow_p = true;
6458 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6459 fold_convert (ctype, t2));
6463 case LSHIFT_EXPR: case RSHIFT_EXPR:
6464 /* If the second operand is constant, this is a multiplication
6465 or floor division, by a power of two, so we can treat it that
6466 way unless the multiplier or divisor overflows. Signed
6467 left-shift overflow is implementation-defined rather than
6468 undefined in C90, so do not convert signed left shift into
6470 if (TREE_CODE (op1) == INTEGER_CST
6471 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
6472 /* const_binop may not detect overflow correctly,
6473 so check for it explicitly here. */
6474 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
6475 && TREE_INT_CST_HIGH (op1) == 0
6476 && 0 != (t1 = fold_convert (ctype,
6477 const_binop (LSHIFT_EXPR,
6480 && !TREE_OVERFLOW (t1))
/* Rewrite the shift as an explicit multiply/floor-divide by 2**op1 and
   retry the extraction on that form.  */
6481 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
6482 ? MULT_EXPR : FLOOR_DIV_EXPR,
6484 fold_convert (ctype, op0),
6486 c, code, wide_type, strict_overflow_p);
6489 case PLUS_EXPR: case MINUS_EXPR:
6490 /* See if we can eliminate the operation on both sides. If we can, we
6491 can return a new PLUS or MINUS. If we can't, the only remaining
6492 cases where we can do anything are if the second operand is a
6494 sub_strict_overflow_p = false;
6495 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
6496 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
6497 if (t1 != 0 && t2 != 0
6498 && (code == MULT_EXPR
6499 /* If not multiplication, we can only do this if both operands
6500 are divisible by c. */
6501 || (multiple_of_p (ctype, op0, c)
6502 && multiple_of_p (ctype, op1, c))))
6504 if (sub_strict_overflow_p)
6505 *strict_overflow_p = true;
6506 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6507 fold_convert (ctype, t2));
6510 /* If this was a subtraction, negate OP1 and set it to be an addition.
6511 This simplifies the logic below. */
6512 if (tcode == MINUS_EXPR)
6514 tcode = PLUS_EXPR, op1 = negate_expr (op1);
6515 /* If OP1 was not easily negatable, the constant may be OP0. */
6516 if (TREE_CODE (op0) == INTEGER_CST)
6527 if (TREE_CODE (op1) != INTEGER_CST)
6530 /* If either OP1 or C are negative, this optimization is not safe for
6531 some of the division and remainder types while for others we need
6532 to change the code. */
6533 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
6535 if (code == CEIL_DIV_EXPR)
6536 code = FLOOR_DIV_EXPR;
6537 else if (code == FLOOR_DIV_EXPR)
6538 code = CEIL_DIV_EXPR;
6539 else if (code != MULT_EXPR
6540 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
6544 /* If it's a multiply or a division/modulus operation of a multiple
6545 of our constant, do the operation and verify it doesn't overflow. */
6546 if (code == MULT_EXPR
6547 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
6549 op1 = const_binop (code, fold_convert (ctype, op1),
6550 fold_convert (ctype, c), 0);
6551 /* We allow the constant to overflow with wrapping semantics. */
6553 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
6559 /* If we have an unsigned type is not a sizetype, we cannot widen
6560 the operation since it will change the result if the original
6561 computation overflowed. */
6562 if (TYPE_UNSIGNED (ctype)
6563 && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
6567 /* If we were able to eliminate our operation from the first side,
6568 apply our operation to the second side and reform the PLUS. */
6569 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
6570 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
6572 /* The last case is if we are a multiply. In that case, we can
6573 apply the distributive law to commute the multiply and addition
6574 if the multiplication of the constants doesn't overflow. */
6575 if (code == MULT_EXPR)
6576 return fold_build2 (tcode, ctype,
6577 fold_build2 (code, ctype,
6578 fold_convert (ctype, op0),
6579 fold_convert (ctype, c)),
/* MULT_EXPR case (label elided above) -- TODO confirm.  */
6585 /* We have a special case here if we are doing something like
6586 (C * 8) % 4 since we know that's zero. */
6587 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
6588 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
6589 /* If the multiplication can overflow we cannot optimize this.
6590 ??? Until we can properly mark individual operations as
6591 not overflowing we need to treat sizetype special here as
6592 stor-layout relies on this opimization to make
6593 DECL_FIELD_BIT_OFFSET always a constant. */
6594 && (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
6595 || (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
6596 && TYPE_IS_SIZETYPE (TREE_TYPE (t))))
6597 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
6598 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
6600 *strict_overflow_p = true;
6601 return omit_one_operand (type, integer_zero_node, op0);
6604 /* ... fall through ... */
6606 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
6607 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
6608 /* If we can extract our operation from the LHS, do so and return a
6609 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
6610 do something only if the second operand is a constant. */
6612 && (t1 = extract_muldiv (op0, c, code, wide_type,
6613 strict_overflow_p)) != 0)
6614 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6615 fold_convert (ctype, op1));
6616 else if (tcode == MULT_EXPR && code == MULT_EXPR
6617 && (t1 = extract_muldiv (op1, c, code, wide_type,
6618 strict_overflow_p)) != 0)
6619 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6620 fold_convert (ctype, t1));
6621 else if (TREE_CODE (op1) != INTEGER_CST)
6624 /* If these are the same operation types, we can associate them
6625 assuming no overflow. */
/* Multiply the two constants (OP1 * C) and refit into CTYPE; bail if the
   product overflows.  */
6627 && 0 != (t1 = int_const_binop (MULT_EXPR,
6628 fold_convert (ctype, op1),
6629 fold_convert (ctype, c), 1))
6630 && 0 != (t1 = force_fit_type_double (ctype, TREE_INT_CST_LOW (t1),
6631 TREE_INT_CST_HIGH (t1),
6632 (TYPE_UNSIGNED (ctype)
6633 && tcode != MULT_EXPR) ? -1 : 1,
6634 TREE_OVERFLOW (t1)))
6635 && !TREE_OVERFLOW (t1))
6636 return fold_build2 (tcode, ctype, fold_convert (ctype, op0), t1);
6638 /* If these operations "cancel" each other, we have the main
6639 optimizations of this pass, which occur when either constant is a
6640 multiple of the other, in which case we replace this with either an
6641 operation or CODE or TCODE.
6643 If we have an unsigned type that is not a sizetype, we cannot do
6644 this since it will change the result if the original computation
6646 if ((TYPE_OVERFLOW_UNDEFINED (ctype)
6647 || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
6648 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
6649 || (tcode == MULT_EXPR
6650 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
6651 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
6652 && code != MULT_EXPR)))
6654 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
6656 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6657 *strict_overflow_p = true;
6658 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6659 fold_convert (ctype,
6660 const_binop (TRUNC_DIV_EXPR,
6663 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
6665 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6666 *strict_overflow_p = true;
6667 return fold_build2 (code, ctype, fold_convert (ctype, op0),
6668 fold_convert (ctype,
6669 const_binop (TRUNC_DIV_EXPR,
6682 /* Return a node which has the indicated constant VALUE (either 0 or
6683 1), and is of the indicated TYPE. */
/* NOTE(review): elided listing -- return-type line, braces and the final
   'else' (original lines 6684-6687, 6692, 6694) are missing from view.
   Code kept byte-identical; comments only added.  */
6686 constant_boolean_node (int value, tree type)
/* Reuse the shared singleton nodes for the two common types.  */
6688 if (type == integer_type_node)
6689 return value ? integer_one_node : integer_zero_node;
6690 else if (type == boolean_type_node)
6691 return value ? boolean_true_node : boolean_false_node;
/* Any other type: build a fresh INTEGER_CST of TYPE with that value.  */
6693 return build_int_cst (type, value);
6697 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
6698 Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
6699 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
6700 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
6701 COND is the first argument to CODE; otherwise (as in the example
6702 given here), it is the second argument. TYPE is the type of the
6703 original expression. Return NULL_TREE if no simplification is
/* NOTE(review): elided listing -- return type, braces, the 'else' arm that
   treats a bare comparison COND as (cond ? true : false), and several
   NULL_TREE returns are missing between the numbered lines.  Code kept
   byte-identical; comments only added.  */
6707 fold_binary_op_with_conditional_arg (location_t loc,
6708 enum tree_code code,
6709 tree type, tree op0, tree op1,
6710 tree cond, tree arg, int cond_first_p)
6712 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
6713 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
6714 tree test, true_value, false_value;
6715 tree lhs = NULL_TREE;
6716 tree rhs = NULL_TREE;
6718 if (TREE_CODE (cond) == COND_EXPR)
6720 test = TREE_OPERAND (cond, 0);
6721 true_value = TREE_OPERAND (cond, 1);
6722 false_value = TREE_OPERAND (cond, 2);
6723 /* If this operand throws an expression, then it does not make
6724 sense to try to perform a logical or arithmetic operation
6726 if (VOID_TYPE_P (TREE_TYPE (true_value)))
6728 if (VOID_TYPE_P (TREE_TYPE (false_value)))
/* COND is not a COND_EXPR (presumably a comparison; the branch header is
   elided): synthesize boolean true/false arms of COND's type.  */
6733 tree testtype = TREE_TYPE (cond);
6735 true_value = constant_boolean_node (true, testtype);
6736 false_value = constant_boolean_node (false, testtype);
6739 /* This transformation is only worthwhile if we don't have to wrap ARG
6740 in a SAVE_EXPR and the operation can be simplified on at least one
6741 of the branches once its pushed inside the COND_EXPR. */
6742 if (!TREE_CONSTANT (arg)
6743 && (TREE_SIDE_EFFECTS (arg)
6744 || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
6747 arg = fold_convert_loc (loc, arg_type, arg);
/* Build CODE applied to each branch, honoring COND_FIRST_P operand order
   (the if/else headers for the order choice are elided).  */
6750 true_value = fold_convert_loc (loc, cond_type, true_value);
6752 lhs = fold_build2_loc (loc, code, type, true_value, arg);
6754 lhs = fold_build2_loc (loc, code, type, arg, true_value);
6758 false_value = fold_convert_loc (loc, cond_type, false_value);
6760 rhs = fold_build2_loc (loc, code, type, false_value, arg);
6762 rhs = fold_build2_loc (loc, code, type, arg, false_value);
6765 /* Check that we have simplified at least one of the branches. */
6766 if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
6769 return fold_build3_loc (loc, COND_EXPR, type, test, lhs, rhs);
6773 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6775 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6776 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6777 ADDEND is the same as X.
6779 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6780 and finite. The problematic cases are when X is zero, and its mode
6781 has signed zeros. In the case of rounding towards -infinity,
6782 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6783 modes, X + 0 is not the same as X because -0 + 0 is 0. */
/* NOTE(review): elided listing -- return type, braces and the early
   'return false'/'return true' statements after each guard are missing
   from view.  Code kept byte-identical; comments only added.  */
6786 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
/* Only a literal +/-0.0 addend can ever qualify.  */
6788 if (!real_zerop (addend))
6791 /* Don't allow the fold with -fsignaling-nans. */
6792 if (HONOR_SNANS (TYPE_MODE (type)))
6795 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6796 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
6799 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6800 if (TREE_CODE (addend) == REAL_CST
6801 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6804 /* The mode has signed zeros, and we have to honor their sign.
6805 In this situation, there is only one case we can return true for.
6806 X - 0 is the same as X unless rounding towards -infinity is
6808 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
6811 /* Subroutine of fold() that checks comparisons of built-in math
6812 functions against real constants.
6814 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
6815 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
6816 is the type of the result and ARG0 and ARG1 are the operands of the
6817 comparison. ARG1 must be a TREE_REAL_CST.
6819 The function returns the constant folded tree if a simplification
6820 can be made, and NULL_TREE otherwise. */
/* NOTE(review): elided listing -- return type, braces, local declarations
   (e.g. the REAL_VALUE_TYPE locals) and the final NULL_TREE return are
   missing between the numbered lines.  Code kept byte-identical.  */
6823 fold_mathfn_compare (location_t loc,
6824 enum built_in_function fcode, enum tree_code code,
6825 tree type, tree arg0, tree arg1)
/* Only sqrt-family builtins are handled here.  */
6829 if (BUILTIN_SQRT_P (fcode))
6831 tree arg = CALL_EXPR_ARG (arg0, 0);
6832 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
6834 c = TREE_REAL_CST (arg1);
6835 if (REAL_VALUE_NEGATIVE (c))
6837 /* sqrt(x) < y is always false, if y is negative. */
6838 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
6839 return omit_one_operand_loc (loc, type, integer_zero_node, arg)
6841 /* sqrt(x) > y is always true, if y is negative and we
6842 don't care about NaNs, i.e. negative values of x. */
6843 if (code == NE_EXPR || !HONOR_NANS (mode))
6844 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6846 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
6847 return fold_build2_loc (loc, GE_EXPR, type, arg,
6848 build_real (TREE_TYPE (arg), dconst0));
6850 else if (code == GT_EXPR || code == GE_EXPR)
/* c2 = c*c rounded to ARG0's mode: compare x against c squared.  */
6854 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6855 real_convert (&c2, mode, &c2);
6857 if (REAL_VALUE_ISINF (c2))
6859 /* sqrt(x) > y is x == +Inf, when y is very large. */
6860 if (HONOR_INFINITIES (mode))
6861 return fold_build2_loc (loc, EQ_EXPR, type, arg,
6862 build_real (TREE_TYPE (arg), c2));
6864 /* sqrt(x) > y is always false, when y is very large
6865 and we don't care about infinities. */
6866 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6869 /* sqrt(x) > c is the same as x > c*c. */
6870 return fold_build2_loc (loc, code, type, arg,
6871 build_real (TREE_TYPE (arg), c2));
6873 else if (code == LT_EXPR || code == LE_EXPR)
6877 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6878 real_convert (&c2, mode, &c2);
6880 if (REAL_VALUE_ISINF (c2))
6882 /* sqrt(x) < y is always true, when y is a very large
6883 value and we don't care about NaNs or Infinities. */
6884 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
6885 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6887 /* sqrt(x) < y is x != +Inf when y is very large and we
6888 don't care about NaNs. */
6889 if (! HONOR_NANS (mode))
6890 return fold_build2_loc (loc, NE_EXPR, type, arg,
6891 build_real (TREE_TYPE (arg), c2));
6893 /* sqrt(x) < y is x >= 0 when y is very large and we
6894 don't care about Infinities. */
6895 if (! HONOR_INFINITIES (mode))
6896 return fold_build2_loc (loc, GE_EXPR, type, arg,
6897 build_real (TREE_TYPE (arg), dconst0));
6899 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
6900 if (lang_hooks.decls.global_bindings_p () != 0
6901 || CONTAINS_PLACEHOLDER_P (arg))
/* Safe to wrap ARG in a SAVE_EXPR and test it twice.  */
6904 arg = save_expr (arg);
6905 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6906 fold_build2_loc (loc, GE_EXPR, type, arg,
6907 build_real (TREE_TYPE (arg),
6909 fold_build2_loc (loc, NE_EXPR, type, arg,
6910 build_real (TREE_TYPE (arg),
6914 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
6915 if (! HONOR_NANS (mode))
6916 return fold_build2_loc (loc, code, type, arg,
6917 build_real (TREE_TYPE (arg), c2));
6919 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
6920 if (lang_hooks.decls.global_bindings_p () == 0
6921 && ! CONTAINS_PLACEHOLDER_P (arg))
6923 arg = save_expr (arg);
6924 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6925 fold_build2_loc (loc, GE_EXPR, type, arg,
6926 build_real (TREE_TYPE (arg),
6928 fold_build2_loc (loc, code, type, arg,
6929 build_real (TREE_TYPE (arg),
6938 /* Subroutine of fold() that optimizes comparisons against Infinities,
6939 either +Inf or -Inf.
6941 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6942 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6943 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6945 The function returns the constant folded tree if a simplification
6946 can be made, and NULL_TREE otherwise. */
/* NOTE(review): elided listing -- return type, braces, the switch-on-CODE
   header, its case labels and the default/NULL_TREE return are missing
   between the numbered lines.  Code kept byte-identical; comments only.  */
6949 fold_inf_compare (location_t loc, enum tree_code code, tree type,
6950 tree arg0, tree arg1)
6952 enum machine_mode mode;
6953 REAL_VALUE_TYPE max;
6957 mode = TYPE_MODE (TREE_TYPE (arg0));
6959 /* For negative infinity swap the sense of the comparison. */
6960 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
6962 code = swap_tree_comparison (code);
/* GT case (label elided): presumably the switch on CODE starts here --
   TODO confirm against the unelided source.  */
6967 /* x > +Inf is always false, if with ignore sNANs. */
6968 if (HONOR_SNANS (mode))
6970 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
/* LE case (label elided).  */
6973 /* x <= +Inf is always true, if we don't case about NaNs. */
6974 if (! HONOR_NANS (mode))
6975 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6977 /* x <= +Inf is the same as x == x, i.e. isfinite(x). */
6978 if (lang_hooks.decls.global_bindings_p () == 0
6979 && ! CONTAINS_PLACEHOLDER_P (arg0))
6981 arg0 = save_expr (arg0);
6982 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg0);
6988 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6989 real_maxval (&max, neg, mode);
6990 return fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6991 arg0, build_real (TREE_TYPE (arg0), max));
6994 /* x < +Inf is always equal to x <= DBL_MAX. */
6995 real_maxval (&max, neg, mode);
6996 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6997 arg0, build_real (TREE_TYPE (arg0), max));
7000 /* x != +Inf is always equal to !(x > DBL_MAX). */
7001 real_maxval (&max, neg, mode);
7002 if (! HONOR_NANS (mode))
7003 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
7004 arg0, build_real (TREE_TYPE (arg0), max));
/* With NaNs honored, build the comparison and negate it explicitly.  */
7006 temp = fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
7007 arg0, build_real (TREE_TYPE (arg0), max));
7008 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type, temp);
/* NOTE(review): the extraction of this chunk dropped physical lines (the
   embedded original line numbers skip), so braces, the return type, and
   the `switch (code)` case labels are missing below.  Only comments are
   added here; the remaining code is untouched.  */
7017 /* Subroutine of fold() that optimizes comparisons of a division by
7018 a nonzero integer constant against an integer constant, i.e.
7021 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
7022 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
/* NOTE(review): the original comment said "ARG1 must be a TREE_REAL_CST",
   but the code below reads ARG1 with TREE_INT_CST_LOW/HIGH, so it is in
   fact an INTEGER_CST.  */
7023 are the operands of the comparison. ARG1 must be an INTEGER_CST.
7025 The function returns the constant folded tree if a simplification
7026 can be made, and NULL_TREE otherwise. */
7029 fold_div_compare (location_t loc,
7030 enum tree_code code, tree type, tree arg0, tree arg1)
7032 tree prod, tmp, hi, lo;
7033 tree arg00 = TREE_OPERAND (arg0, 0);
7034 tree arg01 = TREE_OPERAND (arg0, 1);
7035 unsigned HOST_WIDE_INT lpart;
7036 HOST_WIDE_INT hpart;
7037 bool unsigned_p = TYPE_UNSIGNED (TREE_TYPE (arg0));
7041 /* We have to do this the hard way to detect unsigned overflow.
7042 prod = int_const_binop (MULT_EXPR, arg01, arg1, 0); */
7043 overflow = mul_double_with_sign (TREE_INT_CST_LOW (arg01),
7044 TREE_INT_CST_HIGH (arg01),
7045 TREE_INT_CST_LOW (arg1),
7046 TREE_INT_CST_HIGH (arg1),
7047 &lpart, &hpart, unsigned_p);
7048 prod = force_fit_type_double (TREE_TYPE (arg00), lpart, hpart,
7050 neg_overflow = false;
/* Unsigned division: the matching range is [prod, prod + (arg01 - 1)].  */
7054 tmp = int_const_binop (MINUS_EXPR, arg01,
7055 build_int_cst (TREE_TYPE (arg01), 1), 0);
7058 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0). */
7059 overflow = add_double_with_sign (TREE_INT_CST_LOW (prod),
7060 TREE_INT_CST_HIGH (prod),
7061 TREE_INT_CST_LOW (tmp),
7062 TREE_INT_CST_HIGH (tmp),
7063 &lpart, &hpart, unsigned_p);
7064 hi = force_fit_type_double (TREE_TYPE (arg00), lpart, hpart,
7065 -1, overflow | TREE_OVERFLOW (prod));
/* Signed division with a positive divisor: range depends on the sign
   of ARG1 (the switch below is over tree_int_cst_sgn (arg1)).  */
7067 else if (tree_int_cst_sgn (arg01) >= 0)
7069 tmp = int_const_binop (MINUS_EXPR, arg01,
7070 build_int_cst (TREE_TYPE (arg01), 1), 0);
7071 switch (tree_int_cst_sgn (arg1))
7074 neg_overflow = true;
7075 lo = int_const_binop (MINUS_EXPR, prod, tmp, 0);
7080 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
7085 hi = int_const_binop (PLUS_EXPR, prod, tmp, 0);
7095 /* A negative divisor reverses the relational operators. */
7096 code = swap_tree_comparison (code);
7098 tmp = int_const_binop (PLUS_EXPR, arg01,
7099 build_int_cst (TREE_TYPE (arg01), 1), 0);
7100 switch (tree_int_cst_sgn (arg1))
7103 hi = int_const_binop (MINUS_EXPR, prod, tmp, 0);
7108 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
7113 neg_overflow = true;
7114 lo = int_const_binop (PLUS_EXPR, prod, tmp, 0);
/* The return groups below correspond to the cases of a switch over CODE
   whose case labels were dropped by the extraction: in order they handle
   EQ_EXPR, NE_EXPR, LT_EXPR, LE_EXPR, GT_EXPR and GE_EXPR, turning
   X/C1 op C2 into a range check of X against [lo, hi].  */
7126 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
7127 return omit_one_operand_loc (loc, type, integer_zero_node, arg00);
7128 if (TREE_OVERFLOW (hi))
7129 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
7130 if (TREE_OVERFLOW (lo))
7131 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
7132 return build_range_check (loc, type, arg00, 1, lo, hi);
7135 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
7136 return omit_one_operand_loc (loc, type, integer_one_node, arg00);
7137 if (TREE_OVERFLOW (hi))
7138 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
7139 if (TREE_OVERFLOW (lo))
7140 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
7141 return build_range_check (loc, type, arg00, 0, lo, hi);
7144 if (TREE_OVERFLOW (lo))
7146 tmp = neg_overflow ? integer_zero_node : integer_one_node;
7147 return omit_one_operand_loc (loc, type, tmp, arg00);
7149 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
7152 if (TREE_OVERFLOW (hi))
7154 tmp = neg_overflow ? integer_zero_node : integer_one_node;
7155 return omit_one_operand_loc (loc, type, tmp, arg00);
7157 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
7160 if (TREE_OVERFLOW (hi))
7162 tmp = neg_overflow ? integer_one_node : integer_zero_node;
7163 return omit_one_operand_loc (loc, type, tmp, arg00);
7165 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
7168 if (TREE_OVERFLOW (lo))
7170 tmp = neg_overflow ? integer_one_node : integer_zero_node;
7171 return omit_one_operand_loc (loc, type, tmp, arg00);
7173 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
/* NOTE(review): lines are missing from this chunk (embedded line numbers
   skip); in particular the trailing parameter, return type and braces of
   this function are not visible.  Comments only are added.  */
7183 /* If CODE with arguments ARG0 and ARG1 represents a single bit
7184 equality/inequality test, then return a simplified form of the test
7185 using a sign testing. Otherwise return NULL. TYPE is the desired
7189 fold_single_bit_test_into_sign_test (location_t loc,
7190 enum tree_code code, tree arg0, tree arg1,
7193 /* If this is testing a single bit, we can optimize the test. */
7194 if ((code == NE_EXPR || code == EQ_EXPR)
7195 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
7196 && integer_pow2p (TREE_OPERAND (arg0, 1)))
7198 /* If we have (A & C) != 0 where C is the sign bit of A, convert
7199 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
7200 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
7202 if (arg00 != NULL_TREE
7203 /* This is only a win if casting to a signed type is cheap,
7204 i.e. when arg00's type is not a partial mode. */
7205 && TYPE_PRECISION (TREE_TYPE (arg00))
7206 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
/* Build the signed comparison: (A & signbit) == 0  -->  (signed) A >= 0,
   (A & signbit) != 0  -->  (signed) A < 0.  */
7208 tree stype = signed_type_for (TREE_TYPE (arg00));
7209 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
7211 fold_convert_loc (loc, stype, arg00),
7212 build_int_cst (stype, 0));
/* NOTE(review): extraction dropped lines here (embedded line numbers skip):
   the return type, several declarations (tem, ops_unsigned, one), braces
   and the #else branch of the LOAD_EXTEND_OP conditional are missing.
   Comments only are added.  */
7219 /* If CODE with arguments ARG0 and ARG1 represents a single bit
7220 equality/inequality test, then return a simplified form of
7221 the test using shifts and logical operations. Otherwise return
7222 NULL. TYPE is the desired result type. */
7225 fold_single_bit_test (location_t loc, enum tree_code code,
7226 tree arg0, tree arg1, tree result_type)
7228 /* If this is testing a single bit, we can optimize the test. */
7229 if ((code == NE_EXPR || code == EQ_EXPR)
7230 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
7231 && integer_pow2p (TREE_OPERAND (arg0, 1)))
7233 tree inner = TREE_OPERAND (arg0, 0);
7234 tree type = TREE_TYPE (arg0);
7235 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
7236 enum machine_mode operand_mode = TYPE_MODE (type);
7238 tree signed_type, unsigned_type, intermediate_type;
7241 /* First, see if we can fold the single bit test into a sign-bit
7243 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
7248 /* Otherwise we have (A & C) != 0 where C is a single bit,
7249 convert that into ((A >> C2) & 1). Where C2 = log2(C).
7250 Similarly for (A & C) == 0. */
7252 /* If INNER is a right shift of a constant and it plus BITNUM does
7253 not overflow, adjust BITNUM and INNER. */
7254 if (TREE_CODE (inner) == RSHIFT_EXPR
7255 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
7256 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
7257 && bitnum < TYPE_PRECISION (type)
7258 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
7259 bitnum - TYPE_PRECISION (type)))
7261 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
7262 inner = TREE_OPERAND (inner, 0);
7265 /* If we are going to be able to omit the AND below, we must do our
7266 operations as unsigned. If we must use the AND, we have a choice.
7267 Normally unsigned is faster, but for some machines signed is. */
7268 #ifdef LOAD_EXTEND_OP
7269 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
7270 && !flag_syntax_only) ? 0 : 1;
7275 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
7276 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
7277 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
7278 inner = fold_convert_loc (loc, intermediate_type, inner);
/* Shift the tested bit down to bit position 0.  */
7281 inner = build2 (RSHIFT_EXPR, intermediate_type,
7282 inner, size_int (bitnum));
7284 one = build_int_cst (intermediate_type, 1);
/* For == 0 the result is the inverted bit, hence the XOR with 1.  */
7286 if (code == EQ_EXPR)
7287 inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);
7289 /* Put the AND last so it can combine with more things. */
7290 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
7292 /* Make sure to return the proper type. */
7293 inner = fold_convert_loc (loc, result_type, inner);
7300 /* Check whether we are allowed to reorder operands arg0 and arg1,
7301 such that the evaluation of arg1 occurs before arg0. */
7304 reorder_operands_p (const_tree arg0, const_tree arg1)
/* NOTE(review): the bodies of the two guard `if`s below (presumably
   early `return` statements) were dropped by the extraction; only the
   conditions survive.  With -fno-evaluation-order or a constant operand,
   reordering would be unconditionally allowed.  */
7306 if (! flag_evaluation_order)
7308 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
/* Otherwise reordering is safe only when neither operand has side
   effects.  */
7310 return ! TREE_SIDE_EFFECTS (arg0)
7311 && ! TREE_SIDE_EFFECTS (arg1);
7314 /* Test whether it is preferable to swap two operands, ARG0 and
7315 ARG1, for example because ARG0 is an integer constant and ARG1
7316 isn't. If REORDER is true, only recommend swapping if we can
7317 evaluate the operands in reverse order. */
7320 tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
7322 STRIP_SIGN_NOPS (arg0);
7323 STRIP_SIGN_NOPS (arg1);
/* NOTE(review): the extraction dropped the return statements between the
   paired `if`s below; each pair canonicalizes constants of one kind
   (INTEGER/REAL/FIXED/COMPLEX, then any TREE_CONSTANT) toward the second
   operand position.  */
7325 if (TREE_CODE (arg1) == INTEGER_CST)
7327 if (TREE_CODE (arg0) == INTEGER_CST)
7330 if (TREE_CODE (arg1) == REAL_CST)
7332 if (TREE_CODE (arg0) == REAL_CST)
7335 if (TREE_CODE (arg1) == FIXED_CST)
7337 if (TREE_CODE (arg0) == FIXED_CST)
7340 if (TREE_CODE (arg1) == COMPLEX_CST)
7342 if (TREE_CODE (arg0) == COMPLEX_CST)
7345 if (TREE_CONSTANT (arg1))
7347 if (TREE_CONSTANT (arg0))
7350 if (optimize_function_for_size_p (cfun))
7353 if (reorder && flag_evaluation_order
7354 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
7357 /* It is preferable to swap two SSA_NAME to ensure a canonical form
7358 for commutative and comparison operators. Ensuring a canonical
7359 form allows the optimizers to find additional redundancies without
7360 having to explicitly check for both orderings. */
7361 if (TREE_CODE (arg0) == SSA_NAME
7362 && TREE_CODE (arg1) == SSA_NAME
7363 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
7366 /* Put SSA_NAMEs last. */
7367 if (TREE_CODE (arg1) == SSA_NAME)
7369 if (TREE_CODE (arg0) == SSA_NAME)
7372 /* Put variables last. */
/* NOTE(review): extraction dropped lines here (embedded line numbers skip):
   the return type, declarations of arg1_unw/min/max/above/below, braces,
   early `return NULL_TREE`s and the `switch (code)` labels near the end
   are missing.  Comments only are added.  */
7381 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
7382 ARG0 is extended to a wider type. */
7385 fold_widened_comparison (location_t loc, enum tree_code code,
7386 tree type, tree arg0, tree arg1)
7388 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
7390 tree shorter_type, outer_type;
7394 if (arg0_unw == arg0)
7396 shorter_type = TREE_TYPE (arg0_unw);
7398 #ifdef HAVE_canonicalize_funcptr_for_compare
7399 /* Disable this optimization if we're casting a function pointer
7400 type on targets that require function pointer canonicalization. */
7401 if (HAVE_canonicalize_funcptr_for_compare
7402 && TREE_CODE (shorter_type) == POINTER_TYPE
7403 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
7407 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
7410 arg1_unw = get_unwidened (arg1, NULL_TREE);
7412 /* If possible, express the comparison in the shorter mode. */
7413 if ((code == EQ_EXPR || code == NE_EXPR
7414 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
7415 && (TREE_TYPE (arg1_unw) == shorter_type
7416 || ((TYPE_PRECISION (shorter_type)
7417 >= TYPE_PRECISION (TREE_TYPE (arg1_unw)))
7418 && (TYPE_UNSIGNED (shorter_type)
7419 == TYPE_UNSIGNED (TREE_TYPE (arg1_unw))))
7420 || (TREE_CODE (arg1_unw) == INTEGER_CST
7421 && (TREE_CODE (shorter_type) == INTEGER_TYPE
7422 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
7423 && int_fits_type_p (arg1_unw, shorter_type))))
7424 return fold_build2_loc (loc, code, type, arg0_unw,
7425 fold_convert_loc (loc, shorter_type, arg1_unw));
7427 if (TREE_CODE (arg1_unw) != INTEGER_CST
7428 || TREE_CODE (shorter_type) != INTEGER_TYPE
7429 || !int_fits_type_p (arg1_unw, shorter_type))
7432 /* If we are comparing with the integer that does not fit into the range
7433 of the shorter type, the result is known. */
7434 outer_type = TREE_TYPE (arg1_unw);
7435 min = lower_bound_in_type (outer_type, shorter_type);
7436 max = upper_bound_in_type (outer_type, shorter_type);
7438 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
7440 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
/* The six returns below fold the comparison to constant 0 or 1 depending
   on CODE and whether ARG1 lies above or below the shorter type's range;
   the intervening switch/case structure was dropped by the extraction.  */
7447 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
7452 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
7458 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
7460 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
7465 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
7467 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
/* NOTE(review): lines are missing from this chunk (embedded line numbers
   skip): the return type, the declaration of arg0_inner, braces, early
   `return NULL_TREE`s and part of the condition around line 7511 are not
   visible.  Comments only are added.  */
7476 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
7477 ARG0 just the signedness is changed. */
7480 fold_sign_changed_comparison (location_t loc, enum tree_code code, tree type,
7481 tree arg0, tree arg1)
7484 tree inner_type, outer_type;
7486 if (!CONVERT_EXPR_P (arg0))
7489 outer_type = TREE_TYPE (arg0);
7490 arg0_inner = TREE_OPERAND (arg0, 0);
7491 inner_type = TREE_TYPE (arg0_inner);
7493 #ifdef HAVE_canonicalize_funcptr_for_compare
7494 /* Disable this optimization if we're casting a function pointer
7495 type on targets that require function pointer canonicalization. */
7496 if (HAVE_canonicalize_funcptr_for_compare
7497 && TREE_CODE (inner_type) == POINTER_TYPE
7498 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
/* The conversion must not change the precision, only the signedness.  */
7502 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
7505 if (TREE_CODE (arg1) != INTEGER_CST
7506 && !(CONVERT_EXPR_P (arg1)
7507 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
7510 if ((TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
7511 || POINTER_TYPE_P (inner_type) != POINTER_TYPE_P (outer_type))
/* Re-express a constant ARG1 in the inner type, preserving any
   overflow flag it already carries.  */
7516 if (TREE_CODE (arg1) == INTEGER_CST)
7517 arg1 = force_fit_type_double (inner_type, TREE_INT_CST_LOW (arg1),
7518 TREE_INT_CST_HIGH (arg1), 0,
7519 TREE_OVERFLOW (arg1));
7521 arg1 = fold_convert_loc (loc, inner_type, arg1);
7523 return fold_build2_loc (loc, code, type, arg0_inner, arg1);
/* NOTE(review): extraction dropped lines here (embedded line numbers skip):
   the return type, declarations (ret, pos, itype, mdim, domain), braces,
   `break`/`continue` statements and several assignments inside the
   canonicalization branches are missing.  Comments only are added.  */
7526 /* Tries to replace &a[idx] p+ s * delta with &a[idx + delta], if s is
7527 step of the array. Reconstructs s and delta in the case of s *
7528 delta being an integer constant (and thus already folded). ADDR is
7529 the address. MULT is the multiplicative expression. If the
7530 function succeeds, the new address expression is returned.
7531 Otherwise NULL_TREE is returned. LOC is the location of the
7532 resulting expression. */
7535 try_move_mult_to_index (location_t loc, tree addr, tree op1)
7537 tree s, delta, step;
7538 tree ref = TREE_OPERAND (addr, 0), pref;
7543 /* Strip the nops that might be added when converting op1 to sizetype. */
7546 /* Canonicalize op1 into a possibly non-constant delta
7547 and an INTEGER_CST s. */
7548 if (TREE_CODE (op1) == MULT_EXPR)
7550 tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);
7555 if (TREE_CODE (arg0) == INTEGER_CST)
7560 else if (TREE_CODE (arg1) == INTEGER_CST)
7568 else if (TREE_CODE (op1) == INTEGER_CST)
7575 /* Simulate we are delta * 1. */
7577 s = integer_one_node;
/* Walk down the handled components of REF looking for an ARRAY_REF
   whose element size matches S (or divides DELTA evenly).  */
7580 for (;; ref = TREE_OPERAND (ref, 0))
7582 if (TREE_CODE (ref) == ARRAY_REF)
7586 /* Remember if this was a multi-dimensional array. */
7587 if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
7590 domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
7593 itype = TREE_TYPE (domain);
7595 step = array_ref_element_size (ref);
7596 if (TREE_CODE (step) != INTEGER_CST)
7601 if (! tree_int_cst_equal (step, s))
7606 /* Try if delta is a multiple of step. */
7607 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, op1, step);
7613 /* Only fold here if we can verify we do not overflow one
7614 dimension of a multi-dimensional array. */
7619 if (TREE_CODE (TREE_OPERAND (ref, 1)) != INTEGER_CST
7620 || !TYPE_MAX_VALUE (domain)
7621 || TREE_CODE (TYPE_MAX_VALUE (domain)) != INTEGER_CST)
7624 tmp = fold_binary_loc (loc, PLUS_EXPR, itype,
7625 fold_convert_loc (loc, itype,
7626 TREE_OPERAND (ref, 1)),
7627 fold_convert_loc (loc, itype, delta));
7629 || TREE_CODE (tmp) != INTEGER_CST
7630 || tree_int_cst_lt (TYPE_MAX_VALUE (domain), tmp))
7639 if (!handled_component_p (ref))
7643 /* We found the suitable array reference. So copy everything up to it,
7644 and replace the index. */
7646 pref = TREE_OPERAND (addr, 0);
7647 ret = copy_node (pref);
7648 SET_EXPR_LOCATION (ret, loc);
7653 pref = TREE_OPERAND (pref, 0);
7654 TREE_OPERAND (pos, 0) = copy_node (pref);
7655 pos = TREE_OPERAND (pos, 0);
/* Replace the index of the found ARRAY_REF with index + delta.  */
7658 TREE_OPERAND (pos, 1) = fold_build2_loc (loc, PLUS_EXPR, itype,
7659 fold_convert_loc (loc, itype,
7660 TREE_OPERAND (pos, 1)),
7661 fold_convert_loc (loc, itype, delta));
7663 return fold_build1_loc (loc, ADDR_EXPR, TREE_TYPE (addr), ret);
/* NOTE(review): the return type, braces and early `return NULL_TREE`
   bodies of this function were dropped by the extraction; comments only
   are added.  */
7667 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
7668 means A >= Y && A != MAX, but in this case we know that
7669 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
7672 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
7674 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
/* Extract A from BOUND, which must be A < X or X > A.  */
7676 if (TREE_CODE (bound) == LT_EXPR)
7677 a = TREE_OPERAND (bound, 0);
7678 else if (TREE_CODE (bound) == GT_EXPR)
7679 a = TREE_OPERAND (bound, 1);
7683 typea = TREE_TYPE (a);
7684 if (!INTEGRAL_TYPE_P (typea)
7685 && !POINTER_TYPE_P (typea))
/* Extract A1 and Y from INEQ, which must be Y < A1 or A1 > Y.  */
7688 if (TREE_CODE (ineq) == LT_EXPR)
7690 a1 = TREE_OPERAND (ineq, 1);
7691 y = TREE_OPERAND (ineq, 0);
7693 else if (TREE_CODE (ineq) == GT_EXPR)
7695 a1 = TREE_OPERAND (ineq, 0);
7696 y = TREE_OPERAND (ineq, 1);
7701 if (TREE_TYPE (a1) != typea)
7704 if (POINTER_TYPE_P (typea))
7706 /* Convert the pointer types into integer before taking the difference. */
7707 tree ta = fold_convert_loc (loc, ssizetype, a);
7708 tree ta1 = fold_convert_loc (loc, ssizetype, a1);
7709 diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
7712 diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
/* The transformation is only valid when A1 is exactly A + 1.  */
7714 if (!diff || !integer_onep (diff))
7717 return fold_build2_loc (loc, GE_EXPR, type, a, y);
/* NOTE(review): extraction dropped lines here (embedded line numbers skip):
   the return type, braces, some assignments (e.g. arg01/arg11 in the
   INTEGER_CST branches), the `same = NULL_TREE` initialization paths and
   the final `if (same)` / `return NULL_TREE` structure are missing.
   Comments only are added.  */
7720 /* Fold a sum or difference of at least one multiplication.
7721 Returns the folded tree or NULL if no simplification could be made. */
7724 fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
7725 tree arg0, tree arg1)
7727 tree arg00, arg01, arg10, arg11;
7728 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
7730 /* (A * C) +- (B * C) -> (A+-B) * C.
7731 (A * C) +- A -> A * (C+-1).
7732 We are most concerned about the case where C is a constant,
7733 but other combinations show up during loop reduction. Since
7734 it is not difficult, try all four possibilities. */
7736 if (TREE_CODE (arg0) == MULT_EXPR)
7738 arg00 = TREE_OPERAND (arg0, 0);
7739 arg01 = TREE_OPERAND (arg0, 1);
7741 else if (TREE_CODE (arg0) == INTEGER_CST)
7743 arg00 = build_one_cst (type);
7748 /* We cannot generate constant 1 for fract. */
7749 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7752 arg01 = build_one_cst (type);
7754 if (TREE_CODE (arg1) == MULT_EXPR)
7756 arg10 = TREE_OPERAND (arg1, 0);
7757 arg11 = TREE_OPERAND (arg1, 1);
7759 else if (TREE_CODE (arg1) == INTEGER_CST)
7761 arg10 = build_one_cst (type);
7762 /* As we canonicalize A - 2 to A + -2 get rid of that sign for
7763 the purpose of this canonicalization. */
7764 if (TREE_INT_CST_HIGH (arg1) == -1
7765 && negate_expr_p (arg1)
7766 && code == PLUS_EXPR)
7768 arg11 = negate_expr (arg1);
7776 /* We cannot generate constant 1 for fract. */
7777 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7780 arg11 = build_one_cst (type);
/* Look for a multiplicand shared between the two products.  */
7784 if (operand_equal_p (arg01, arg11, 0))
7785 same = arg01, alt0 = arg00, alt1 = arg10;
7786 else if (operand_equal_p (arg00, arg10, 0))
7787 same = arg00, alt0 = arg01, alt1 = arg11;
7788 else if (operand_equal_p (arg00, arg11, 0))
7789 same = arg00, alt0 = arg01, alt1 = arg10;
7790 else if (operand_equal_p (arg01, arg10, 0))
7791 same = arg01, alt0 = arg00, alt1 = arg11;
7793 /* No identical multiplicands; see if we can find a common
7794 power-of-two factor in non-power-of-two multiplies. This
7795 can help in multi-dimensional array access. */
7796 else if (host_integerp (arg01, 0)
7797 && host_integerp (arg11, 0))
7799 HOST_WIDE_INT int01, int11, tmp;
7802 int01 = TREE_INT_CST_LOW (arg01);
7803 int11 = TREE_INT_CST_LOW (arg11);
7805 /* Move min of absolute values to int11. */
7806 if ((int01 >= 0 ? int01 : -int01)
7807 < (int11 >= 0 ? int11 : -int11))
7809 tmp = int01, int01 = int11, int11 = tmp;
7810 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7817 if (exact_log2 (abs (int11)) > 0 && int01 % int11 == 0
7818 /* The remainder should not be a constant, otherwise we
7819 end up folding i * 4 + 2 to (i * 2 + 1) * 2 which has
7820 increased the number of multiplications necessary. */
7821 && TREE_CODE (arg10) != INTEGER_CST)
7823 alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
7824 build_int_cst (TREE_TYPE (arg00),
7829 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
/* Factor out the common multiplicand: (alt0 +- alt1) * same.  */
7834 return fold_build2_loc (loc, MULT_EXPR, type,
7835 fold_build2_loc (loc, code, type,
7836 fold_convert_loc (loc, type, alt0),
7837 fold_convert_loc (loc, type, alt1)),
7838 fold_convert_loc (loc, type, same));
/* NOTE(review): the return type, braces and the `return 0` / final
   `return total_bytes` lines of this function were dropped by the
   extraction; comments only are added.  */
7843 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7844 specified by EXPR into the buffer PTR of length LEN bytes.
7845 Return the number of bytes placed in the buffer, or zero
7849 native_encode_int (const_tree expr, unsigned char *ptr, int len)
7851 tree type = TREE_TYPE (expr);
7852 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7853 int byte, offset, word, words;
7854 unsigned char value;
7856 if (total_bytes > len)
7858 words = total_bytes / UNITS_PER_WORD;
7860 for (byte = 0; byte < total_bytes; byte++)
7862 int bitpos = byte * BITS_PER_UNIT;
/* Pick the byte out of the low or high HOST_WIDE_INT half.  */
7863 if (bitpos < HOST_BITS_PER_WIDE_INT)
7864 value = (unsigned char) (TREE_INT_CST_LOW (expr) >> bitpos);
7866 value = (unsigned char) (TREE_INT_CST_HIGH (expr)
7867 >> (bitpos - HOST_BITS_PER_WIDE_INT));
/* Compute the target byte offset honoring word and byte endianness.  */
7869 if (total_bytes > UNITS_PER_WORD)
7871 word = byte / UNITS_PER_WORD;
7872 if (WORDS_BIG_ENDIAN)
7873 word = (words - 1) - word;
7874 offset = word * UNITS_PER_WORD;
7875 if (BYTES_BIG_ENDIAN)
7876 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7878 offset += byte % UNITS_PER_WORD;
7881 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7882 ptr[offset] = value;
/* NOTE(review): the return type, the declaration of the `tmp` long array,
   braces and the `return 0` / `return total_bytes` lines were dropped by
   the extraction; comments only are added.  */
7888 /* Subroutine of native_encode_expr. Encode the REAL_CST
7889 specified by EXPR into the buffer PTR of length LEN bytes.
7890 Return the number of bytes placed in the buffer, or zero
7894 native_encode_real (const_tree expr, unsigned char *ptr, int len)
7896 tree type = TREE_TYPE (expr);
7897 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7898 int byte, offset, word, words, bitpos;
7899 unsigned char value;
7901 /* There are always 32 bits in each long, no matter the size of
7902 the host's long. We handle floating point representations with
7906 if (total_bytes > len)
7908 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7910 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7912 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7913 bitpos += BITS_PER_UNIT)
/* Endianness is handled within each 32-bit group, hence the `& 3`.  */
7915 byte = (bitpos / BITS_PER_UNIT) & 3;
7916 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7918 if (UNITS_PER_WORD < 4)
7920 word = byte / UNITS_PER_WORD;
7921 if (WORDS_BIG_ENDIAN)
7922 word = (words - 1) - word;
7923 offset = word * UNITS_PER_WORD;
7924 if (BYTES_BIG_ENDIAN)
7925 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7927 offset += byte % UNITS_PER_WORD;
7930 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7931 ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)] = value;
/* NOTE(review): the return type, local declarations (part, rsize, isize),
   braces and the failure checks after each encode were dropped by the
   extraction; comments only are added.  */
7936 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7937 specified by EXPR into the buffer PTR of length LEN bytes.
7938 Return the number of bytes placed in the buffer, or zero
7942 native_encode_complex (const_tree expr, unsigned char *ptr, int len)
/* Encode the real part first, then the imaginary part right after it.  */
7947 part = TREE_REALPART (expr);
7948 rsize = native_encode_expr (part, ptr, len);
7951 part = TREE_IMAGPART (expr);
7952 isize = native_encode_expr (part, ptr+rsize, len-rsize);
7955 return rsize + isize;
/* NOTE(review): the return type, braces, the `offset` initialization and
   loop structure around the trailing zero-padding, and the final return
   were dropped by the extraction; comments only are added.  */
7959 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7960 specified by EXPR into the buffer PTR of length LEN bytes.
7961 Return the number of bytes placed in the buffer, or zero
7965 native_encode_vector (const_tree expr, unsigned char *ptr, int len)
7967 int i, size, offset, count;
7968 tree itype, elem, elements;
7971 elements = TREE_VECTOR_CST_ELTS (expr);
7972 count = TYPE_VECTOR_SUBPARTS (TREE_TYPE (expr));
7973 itype = TREE_TYPE (TREE_TYPE (expr));
7974 size = GET_MODE_SIZE (TYPE_MODE (itype));
/* Encode each element in turn; missing trailing elements are zeroed.  */
7975 for (i = 0; i < count; i++)
7979 elem = TREE_VALUE (elements);
7980 elements = TREE_CHAIN (elements);
7987 if (native_encode_expr (elem, ptr+offset, len-offset) != size)
7992 if (offset + size > len)
7994 memset (ptr+offset, 0, size);
/* NOTE(review): the return type, braces, early `return 0`s and the final
   `return total_bytes` were dropped by the extraction; comments only are
   added.  */
8002 /* Subroutine of native_encode_expr. Encode the STRING_CST
8003 specified by EXPR into the buffer PTR of length LEN bytes.
8004 Return the number of bytes placed in the buffer, or zero
8008 native_encode_string (const_tree expr, unsigned char *ptr, int len)
8010 tree type = TREE_TYPE (expr);
8011 HOST_WIDE_INT total_bytes;
/* Only plain byte-element arrays with a known constant size qualify.  */
8013 if (TREE_CODE (type) != ARRAY_TYPE
8014 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
8015 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT
8016 || !host_integerp (TYPE_SIZE_UNIT (type), 0))
8018 total_bytes = tree_low_cst (TYPE_SIZE_UNIT (type), 0);
8019 if (total_bytes > len)
/* Zero-pad when the string literal is shorter than the array type.  */
8021 if (TREE_STRING_LENGTH (expr) < total_bytes)
8023 memcpy (ptr, TREE_STRING_POINTER (expr), TREE_STRING_LENGTH (expr));
8024 memset (ptr + TREE_STRING_LENGTH (expr), 0,
8025 total_bytes - TREE_STRING_LENGTH (expr));
8028 memcpy (ptr, TREE_STRING_POINTER (expr), total_bytes);
/* NOTE(review): the return type, braces, the `case` labels of the switch
   and its `default: return 0;` were dropped by the extraction; the
   dispatch order below is INTEGER_CST, REAL_CST, COMPLEX_CST, VECTOR_CST,
   STRING_CST.  Comments only are added.  */
8033 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
8034 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
8035 buffer PTR of length LEN bytes. Return the number of bytes
8036 placed in the buffer, or zero upon failure. */
8039 native_encode_expr (const_tree expr, unsigned char *ptr, int len)
8041 switch (TREE_CODE (expr))
8044 return native_encode_int (expr, ptr, len);
8047 return native_encode_real (expr, ptr, len);
8050 return native_encode_complex (expr, ptr, len);
8053 return native_encode_vector (expr, ptr, len);
8056 return native_encode_string (expr, ptr, len);
/* NOTE(review): the return type, braces and the early `return NULL_TREE`
   bodies were dropped by the extraction; comments only are added.  */
8064 /* Subroutine of native_interpret_expr. Interpret the contents of
8065 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
8066 If the buffer cannot be interpreted, return NULL_TREE. */
8069 native_interpret_int (tree type, const unsigned char *ptr, int len)
8071 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
8072 int byte, offset, word, words;
8073 unsigned char value;
8074 unsigned int HOST_WIDE_INT lo = 0;
8075 HOST_WIDE_INT hi = 0;
8077 if (total_bytes > len)
/* The value must fit in the lo/hi double-word accumulator.  */
8079 if (total_bytes * BITS_PER_UNIT > 2 * HOST_BITS_PER_WIDE_INT)
8081 words = total_bytes / UNITS_PER_WORD;
8083 for (byte = 0; byte < total_bytes; byte++)
8085 int bitpos = byte * BITS_PER_UNIT;
/* Locate the source byte honoring word and byte endianness (the mirror
   of the layout used by native_encode_int).  */
8086 if (total_bytes > UNITS_PER_WORD)
8088 word = byte / UNITS_PER_WORD;
8089 if (WORDS_BIG_ENDIAN)
8090 word = (words - 1) - word;
8091 offset = word * UNITS_PER_WORD;
8092 if (BYTES_BIG_ENDIAN)
8093 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
8095 offset += byte % UNITS_PER_WORD;
8098 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
8099 value = ptr[offset];
8101 if (bitpos < HOST_BITS_PER_WIDE_INT)
8102 lo |= (unsigned HOST_WIDE_INT) value << bitpos;
8104 hi |= (unsigned HOST_WIDE_INT) value
8105 << (bitpos - HOST_BITS_PER_WIDE_INT);
8108 return build_int_cst_wide_type (type, lo, hi);
/* NOTE(review): the return type, the declarations of `tmp` and `r`,
   braces and the early `return NULL_TREE` were dropped by the
   extraction; comments only are added.  */
8112 /* Subroutine of native_interpret_expr. Interpret the contents of
8113 the buffer PTR of length LEN as a REAL_CST of type TYPE.
8114 If the buffer cannot be interpreted, return NULL_TREE. */
8117 native_interpret_real (tree type, const unsigned char *ptr, int len)
8119 enum machine_mode mode = TYPE_MODE (type);
8120 int total_bytes = GET_MODE_SIZE (mode);
8121 int byte, offset, word, words, bitpos;
8122 unsigned char value;
8123 /* There are always 32 bits in each long, no matter the size of
8124 the host's long. We handle floating point representations with
8129 total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
8130 if (total_bytes > len || total_bytes > 24)
8132 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
8134 memset (tmp, 0, sizeof (tmp));
8135 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
8136 bitpos += BITS_PER_UNIT)
/* Endianness is handled within each 32-bit group (the mirror of
   native_encode_real).  */
8138 byte = (bitpos / BITS_PER_UNIT) & 3;
8139 if (UNITS_PER_WORD < 4)
8141 word = byte / UNITS_PER_WORD;
8142 if (WORDS_BIG_ENDIAN)
8143 word = (words - 1) - word;
8144 offset = word * UNITS_PER_WORD;
8145 if (BYTES_BIG_ENDIAN)
8146 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
8148 offset += byte % UNITS_PER_WORD;
8151 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
8152 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
8154 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
8157 real_from_target (&r, tmp, mode);
8158 return build_real (type, r);
/* NOTE(review): the return type, the `size` declaration, braces and the
   NULL checks/early returns after each interpret call were dropped by
   the extraction; comments only are added.  */
8162 /* Subroutine of native_interpret_expr. Interpret the contents of
8163 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
8164 If the buffer cannot be interpreted, return NULL_TREE. */
8167 native_interpret_complex (tree type, const unsigned char *ptr, int len)
8169 tree etype, rpart, ipart;
/* The real part occupies the first SIZE bytes, the imaginary part the
   next SIZE bytes.  */
8172 etype = TREE_TYPE (type);
8173 size = GET_MODE_SIZE (TYPE_MODE (etype));
8176 rpart = native_interpret_expr (etype, ptr, size);
8179 ipart = native_interpret_expr (etype, ptr+size, size);
8182 return build_complex (type, rpart, ipart);
/* NOTE(review): the return type, the declarations of i/size/count, braces
   and the NULL check/early return inside the loop were dropped by the
   extraction; comments only are added.  */
8186 /* Subroutine of native_interpret_expr. Interpret the contents of
8187 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
8188 If the buffer cannot be interpreted, return NULL_TREE. */
8191 native_interpret_vector (tree type, const unsigned char *ptr, int len)
8193 tree etype, elem, elements;
8196 etype = TREE_TYPE (type);
8197 size = GET_MODE_SIZE (TYPE_MODE (etype));
8198 count = TYPE_VECTOR_SUBPARTS (type);
8199 if (size * count > len)
/* Build the element list back-to-front so it ends up in order.  */
8202 elements = NULL_TREE;
8203 for (i = count - 1; i >= 0; i--)
8205 elem = native_interpret_expr (etype, ptr+(i*size), size);
8208 elements = tree_cons (NULL_TREE, elem, elements);
8210 return build_vector (type, elements);
/* NOTE(review): the return type, braces, the `case` labels of the switch
   and its `default: return NULL_TREE;` were dropped by the extraction;
   the dispatch order below is integral types, real, complex, vector.
   Comments only are added.  */
8214 /* Subroutine of fold_view_convert_expr. Interpret the contents of
8215 the buffer PTR of length LEN as a constant of type TYPE. For
8216 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
8217 we return a REAL_CST, etc... If the buffer cannot be interpreted,
8218 return NULL_TREE. */
8221 native_interpret_expr (tree type, const unsigned char *ptr, int len)
8223 switch (TREE_CODE (type))
8228 return native_interpret_int (type, ptr, len);
8231 return native_interpret_real (type, ptr, len);
8234 return native_interpret_complex (type, ptr, len);
8237 return native_interpret_vector (type, ptr, len);
/* NOTE(review): the return type, the `len` declaration, braces and the
   early `return NULL_TREE` bodies were dropped by the extraction;
   comments only are added.  */
8245 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
8246 TYPE at compile-time. If we're unable to perform the conversion
8247 return NULL_TREE. */
8250 fold_view_convert_expr (tree type, tree expr)
8252 /* We support up to 512-bit values (for V8DFmode). */
8253 unsigned char buffer[64];
8256 /* Check that the host and target are sane. */
8257 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
/* Round-trip through the target byte representation: encode EXPR, then
   reinterpret the bytes as TYPE.  */
8260 len = native_encode_expr (expr, buffer, sizeof (buffer));
8264 return native_interpret_expr (type, buffer, len);
/* NOTE(review): the return type, braces and the final `return t;` were
   dropped by the extraction; comments only are added.  */
8267 /* Build an expression for the address of T. Folds away INDIRECT_REF
8268 to avoid confusing the gimplify process. */
8271 build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
8273 /* The size of the object is not relevant when talking about its address. */
8274 if (TREE_CODE (t) == WITH_SIZE_EXPR)
8275 t = TREE_OPERAND (t, 0);
8277 /* Note: doesn't apply to ALIGN_INDIRECT_REF */
8278 if (TREE_CODE (t) == INDIRECT_REF
8279 || TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
/* &*p folds to p, with a NOP_EXPR added only if the pointer type
   differs from the requested PTRTYPE.  */
8281 t = TREE_OPERAND (t, 0);
8283 if (TREE_TYPE (t) != ptrtype)
8285 t = build1 (NOP_EXPR, ptrtype, t);
8286 SET_EXPR_LOCATION (t, loc);
8289 else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
/* &VIEW_CONVERT_EXPR<T>(x) becomes a converted &x.  */
8291 t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
8293 if (TREE_TYPE (t) != ptrtype)
8294 t = fold_convert_loc (loc, ptrtype, t);
8298 t = build1 (ADDR_EXPR, ptrtype, t);
8299 SET_EXPR_LOCATION (t, loc);
8305 /* Build an expression for the address of T. */
/* Convenience wrapper: derive the pointer type from T's own type and
   delegate to build_fold_addr_expr_with_type_loc.  */
8308 build_fold_addr_expr_loc (location_t loc, tree t)
8310 tree ptrtype = build_pointer_type (TREE_TYPE (t));
8312 return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
8315 /* Fold a unary expression of code CODE and type TYPE with operand
8316 OP0. Return the folded expression if folding is successful.
8317 Otherwise, return NULL_TREE. */
8320 fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
8324 enum tree_code_class kind = TREE_CODE_CLASS (code);
/* Only single-operand expression codes are handled here.  */
8326 gcc_assert (IS_EXPR_CODE_CLASS (kind)
8327 && TREE_CODE_LENGTH (code) == 1);
/* For conversions and ABS the signedness of the operand matters, so
   only conversions that preserve the sign are stripped.  */
8332 if (CONVERT_EXPR_CODE_P (code)
8333 || code == FLOAT_EXPR || code == ABS_EXPR)
8335 /* Don't use STRIP_NOPS, because signedness of argument type
8337 STRIP_SIGN_NOPS (arg0);
8341 /* Strip any conversions that don't change the mode. This
8342 is safe for every expression, except for a comparison
8343 expression because its signedness is derived from its
8346 Note that this is done as an internal manipulation within
8347 the constant folder, in order to find the simplest
8348 representation of the arguments so that their form can be
8349 studied. In any cases, the appropriate type conversions
8350 should be put back in the tree that will get out of the
/* Distribute any unary operation over COMPOUND_EXPR and COND_EXPR
   so the operation lands on the value-producing operand(s).  */
8356 if (TREE_CODE_CLASS (code) == tcc_unary)
8358 if (TREE_CODE (arg0) == COMPOUND_EXPR)
8359 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
8360 fold_build1_loc (loc, code, type,
8361 fold_convert_loc (loc, TREE_TYPE (op0),
8362 TREE_OPERAND (arg0, 1))));
8363 else if (TREE_CODE (arg0) == COND_EXPR)
/* Apply the operation to each non-void arm of the conditional.  */
8365 tree arg01 = TREE_OPERAND (arg0, 1);
8366 tree arg02 = TREE_OPERAND (arg0, 2);
8367 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
8368 arg01 = fold_build1_loc (loc, code, type,
8369 fold_convert_loc (loc,
8370 TREE_TYPE (op0), arg01));
8371 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
8372 arg02 = fold_build1_loc (loc, code, type,
8373 fold_convert_loc (loc,
8374 TREE_TYPE (op0), arg02));
8375 tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
8378 /* If this was a conversion, and all we did was to move into
8379 inside the COND_EXPR, bring it back out. But leave it if
8380 it is a conversion from integer to integer and the
8381 result precision is no wider than a word since such a
8382 conversion is cheap and may be optimized away by combine,
8383 while it couldn't if it were outside the COND_EXPR. Then return
8384 so we don't get into an infinite recursion loop taking the
8385 conversion out and then back in. */
8387 if ((CONVERT_EXPR_CODE_P (code)
8388 || code == NON_LVALUE_EXPR)
8389 && TREE_CODE (tem) == COND_EXPR
8390 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
8391 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
8392 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
8393 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
8394 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
8395 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
8396 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
8398 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
8399 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
8400 || flag_syntax_only))
/* Rebuild as (T)(cond ? a : b) with the conversion hoisted out.  */
8402 tem = build1 (code, type,
8404 TREE_TYPE (TREE_OPERAND
8405 (TREE_OPERAND (tem, 1), 0)),
8406 TREE_OPERAND (tem, 0),
8407 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
8408 TREE_OPERAND (TREE_OPERAND (tem, 2), 0)));
8409 SET_EXPR_LOCATION (tem, loc);
/* A comparison already yields 0/1; for boolean results just retype the
   comparison node, otherwise materialize it as cond ? (T)1 : (T)0.  */
8413 else if (COMPARISON_CLASS_P (arg0))
8415 if (TREE_CODE (type) == BOOLEAN_TYPE)
8417 arg0 = copy_node (arg0);
8418 TREE_TYPE (arg0) = type;
8421 else if (TREE_CODE (type) != INTEGER_TYPE)
8422 return fold_build3_loc (loc, COND_EXPR, type, arg0,
8423 fold_build1_loc (loc, code, type,
8425 fold_build1_loc (loc, code, type,
8426 integer_zero_node));
8433 /* Re-association barriers around constants and other re-association
8434 barriers can be removed. */
8435 if (CONSTANT_CLASS_P (op0)
8436 || TREE_CODE (op0) == PAREN_EXPR)
8437 return fold_convert_loc (loc, type, op0);
8442 case FIX_TRUNC_EXPR:
/* A conversion to the operand's own type is a no-op.  */
8443 if (TREE_TYPE (op0) == type)
8446 /* If we have (type) (a CMP b) and type is an integral type, return
8447 new expression involving the new type. */
8448 if (COMPARISON_CLASS_P (op0) && INTEGRAL_TYPE_P (type))
8449 return fold_build2_loc (loc, TREE_CODE (op0), type, TREE_OPERAND (op0, 0),
8450 TREE_OPERAND (op0, 1));
8452 /* Handle cases of two conversions in a row. */
/* Classify the three types involved: INSIDE (innermost operand),
   INTER (the intermediate conversion's type) and the FINAL type,
   recording integer/pointer/float/vector-ness, precision and sign.  */
8453 if (CONVERT_EXPR_P (op0))
8455 tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
8456 tree inter_type = TREE_TYPE (op0);
8457 int inside_int = INTEGRAL_TYPE_P (inside_type);
8458 int inside_ptr = POINTER_TYPE_P (inside_type);
8459 int inside_float = FLOAT_TYPE_P (inside_type);
8460 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
8461 unsigned int inside_prec = TYPE_PRECISION (inside_type);
8462 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
8463 int inter_int = INTEGRAL_TYPE_P (inter_type);
8464 int inter_ptr = POINTER_TYPE_P (inter_type);
8465 int inter_float = FLOAT_TYPE_P (inter_type);
8466 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
8467 unsigned int inter_prec = TYPE_PRECISION (inter_type);
8468 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
8469 int final_int = INTEGRAL_TYPE_P (type);
8470 int final_ptr = POINTER_TYPE_P (type);
8471 int final_float = FLOAT_TYPE_P (type);
8472 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
8473 unsigned int final_prec = TYPE_PRECISION (type);
8474 int final_unsignedp = TYPE_UNSIGNED (type);
8476 /* In addition to the cases of two conversions in a row
8477 handled below, if we are converting something to its own
8478 type via an object of identical or wider precision, neither
8479 conversion is needed. */
8480 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
8481 && (((inter_int || inter_ptr) && final_int)
8482 || (inter_float && final_float))
8483 && inter_prec >= final_prec)
8484 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
8486 /* Likewise, if the intermediate and initial types are either both
8487 float or both integer, we don't need the middle conversion if the
8488 former is wider than the latter and doesn't change the signedness
8489 (for integers). Avoid this if the final type is a pointer since
8490 then we sometimes need the middle conversion. Likewise if the
8491 final type has a precision not equal to the size of its mode. */
8492 if (((inter_int && inside_int)
8493 || (inter_float && inside_float)
8494 || (inter_vec && inside_vec))
8495 && inter_prec >= inside_prec
8496 && (inter_float || inter_vec
8497 || inter_unsignedp == inside_unsignedp)
8498 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
8499 && TYPE_MODE (type) == TYPE_MODE (inter_type))
8501 && (! final_vec || inter_prec == inside_prec))
8502 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
8504 /* If we have a sign-extension of a zero-extended value, we can
8505 replace that by a single zero-extension. */
8506 if (inside_int && inter_int && final_int
8507 && inside_prec < inter_prec && inter_prec < final_prec
8508 && inside_unsignedp && !inter_unsignedp)
8509 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
8511 /* Two conversions in a row are not needed unless:
8512 - some conversion is floating-point (overstrict for now), or
8513 - some conversion is a vector (overstrict for now), or
8514 - the intermediate type is narrower than both initial and
8516 - the intermediate type and innermost type differ in signedness,
8517 and the outermost type is wider than the intermediate, or
8518 - the initial type is a pointer type and the precisions of the
8519 intermediate and final types differ, or
8520 - the final type is a pointer type and the precisions of the
8521 initial and intermediate types differ. */
8522 if (! inside_float && ! inter_float && ! final_float
8523 && ! inside_vec && ! inter_vec && ! final_vec
8524 && (inter_prec >= inside_prec || inter_prec >= final_prec)
8525 && ! (inside_int && inter_int
8526 && inter_unsignedp != inside_unsignedp
8527 && inter_prec < final_prec)
8528 && ((inter_unsignedp && inter_prec > inside_prec)
8529 == (final_unsignedp && final_prec > inter_prec))
8530 && ! (inside_ptr && inter_prec != final_prec)
8531 && ! (final_ptr && inside_prec != inter_prec)
8532 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
8533 && TYPE_MODE (type) == TYPE_MODE (inter_type)))
8534 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
8537 /* Handle (T *)&A.B.C for A being of type T and B and C
8538 living at offset zero. This occurs frequently in
8539 C++ upcasting and then accessing the base. */
8540 if (TREE_CODE (op0) == ADDR_EXPR
8541 && POINTER_TYPE_P (type)
8542 && handled_component_p (TREE_OPERAND (op0, 0)))
8544 HOST_WIDE_INT bitsize, bitpos;
8546 enum machine_mode mode;
8547 int unsignedp, volatilep;
8548 tree base = TREE_OPERAND (op0, 0);
8549 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
8550 &mode, &unsignedp, &volatilep, false);
8551 /* If the reference was to a (constant) zero offset, we can use
8552 the address of the base if it has the same base type
8553 as the result type and the pointer type is unqualified. */
8554 if (! offset && bitpos == 0
8555 && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
8556 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
8557 && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
8558 return fold_convert_loc (loc, type,
8559 build_fold_addr_expr_loc (loc, base));
/* (T)(x = const) -> (x = const, (T)const): hoist the conversion out of
   the assignment, except when a bitfield store would truncate it.  */
8562 if (TREE_CODE (op0) == MODIFY_EXPR
8563 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
8564 /* Detect assigning a bitfield. */
8565 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
8567 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
8569 /* Don't leave an assignment inside a conversion
8570 unless assigning a bitfield. */
8571 tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
8572 /* First do the assignment, then return converted constant. */
8573 tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
8574 TREE_NO_WARNING (tem) = 1;
8575 TREE_USED (tem) = 1;
8576 SET_EXPR_LOCATION (tem, loc);
8580 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
8581 constants (if x has signed type, the sign bit cannot be set
8582 in c). This folds extension into the BIT_AND_EXPR.
8583 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
8584 very likely don't have maximal range for their precision and this
8585 transformation effectively doesn't preserve non-maximal ranges. */
8586 if (TREE_CODE (type) == INTEGER_TYPE
8587 && TREE_CODE (op0) == BIT_AND_EXPR
8588 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
8590 tree and_expr = op0;
8591 tree and0 = TREE_OPERAND (and_expr, 0);
8592 tree and1 = TREE_OPERAND (and_expr, 1);
/* The transformation is always safe when widening an unsigned value
   or when narrowing; for a signed widening, only if the mask clears
   the sign bit (checked below via CST).  */
8595 if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
8596 || (TYPE_PRECISION (type)
8597 <= TYPE_PRECISION (TREE_TYPE (and_expr))))
8599 else if (TYPE_PRECISION (TREE_TYPE (and1))
8600 <= HOST_BITS_PER_WIDE_INT
8601 && host_integerp (and1, 1))
8603 unsigned HOST_WIDE_INT cst;
/* CST keeps only the sign bit and above; a zero result means the
   mask cannot set the sign bit, so the fold is safe.  */
8605 cst = tree_low_cst (and1, 1);
8606 cst &= (HOST_WIDE_INT) -1
8607 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
8608 change = (cst == 0);
8609 #ifdef LOAD_EXTEND_OP
/* On targets whose loads zero-extend, prefer doing the AND in the
   unsigned variant of the operand type.  */
8611 && !flag_syntax_only
8612 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
8615 tree uns = unsigned_type_for (TREE_TYPE (and0));
8616 and0 = fold_convert_loc (loc, uns, and0);
8617 and1 = fold_convert_loc (loc, uns, and1);
/* Re-fit the mask constant into TYPE and emit (T)x & mask.  */
8623 tem = force_fit_type_double (type, TREE_INT_CST_LOW (and1),
8624 TREE_INT_CST_HIGH (and1), 0,
8625 TREE_OVERFLOW (and1));
8626 return fold_build2_loc (loc, BIT_AND_EXPR, type,
8627 fold_convert_loc (loc, type, and0), tem);
8631 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
8632 when one of the new casts will fold away. Conservatively we assume
8633 that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST. */
8634 if (POINTER_TYPE_P (type)
8635 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
8636 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8637 || TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
8638 || TREE_CODE (TREE_OPERAND (arg0, 1)) == NOP_EXPR))
8640 tree arg00 = TREE_OPERAND (arg0, 0);
8641 tree arg01 = TREE_OPERAND (arg0, 1);
8643 return fold_build2_loc (loc,
8644 TREE_CODE (arg0), type,
8645 fold_convert_loc (loc, type, arg00),
8646 fold_convert_loc (loc, sizetype, arg01));
8649 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
8650 of the same precision, and X is an integer type not narrower than
8651 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
8652 if (INTEGRAL_TYPE_P (type)
8653 && TREE_CODE (op0) == BIT_NOT_EXPR
8654 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
8655 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
8656 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
8658 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
8659 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
8660 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
8661 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
8662 fold_convert_loc (loc, type, tem));
8665 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
8666 type of X and Y (integer types only). */
8667 if (INTEGRAL_TYPE_P (type)
8668 && TREE_CODE (op0) == MULT_EXPR
8669 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
8670 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
8672 /* Be careful not to introduce new overflows. */
/* If TYPE does not wrap on overflow, do the narrower multiply in the
   corresponding unsigned type and convert back.  */
8674 if (TYPE_OVERFLOW_WRAPS (type))
8677 mult_type = unsigned_type_for (type);
8679 if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
8681 tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
8682 fold_convert_loc (loc, mult_type,
8683 TREE_OPERAND (op0, 0)),
8684 fold_convert_loc (loc, mult_type,
8685 TREE_OPERAND (op0, 1)));
8686 return fold_convert_loc (loc, type, tem);
/* Last resort for conversions: constant-fold directly.  */
8690 tem = fold_convert_const (code, type, op0);
8691 return tem ? tem : NULL_TREE;
8693 case ADDR_SPACE_CONVERT_EXPR:
8694 if (integer_zerop (arg0))
8695 return fold_convert_const (code, type, arg0);
8698 case FIXED_CONVERT_EXPR:
8699 tem = fold_convert_const (code, type, arg0);
8700 return tem ? tem : NULL_TREE;
8702 case VIEW_CONVERT_EXPR:
8703 if (TREE_TYPE (op0) == type)
/* Nested VIEW_CONVERTs collapse to a single one.  */
8705 if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
8706 return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
8707 type, TREE_OPERAND (op0, 0));
8709 /* For integral conversions with the same precision or pointer
8710 conversions use a NOP_EXPR instead. */
8711 if ((INTEGRAL_TYPE_P (type)
8712 || POINTER_TYPE_P (type))
8713 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
8714 || POINTER_TYPE_P (TREE_TYPE (op0)))
8715 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
8716 return fold_convert_loc (loc, type, op0);
8718 /* Strip inner integral conversions that do not change the precision. */
8719 if (CONVERT_EXPR_P (op0)
8720 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
8721 || POINTER_TYPE_P (TREE_TYPE (op0)))
8722 && (INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0)))
8723 || POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0))))
8724 && (TYPE_PRECISION (TREE_TYPE (op0))
8725 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op0, 0)))))
8726 return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
8727 type, TREE_OPERAND (op0, 0));
/* Otherwise try the byte-level reinterpretation helper.  */
8729 return fold_view_convert_expr (type, op0);
/* Negation: delegate to fold_negate_expr and retype the result.  */
8732 tem = fold_negate_expr (loc, arg0);
8734 return fold_convert_loc (loc, type, tem);
/* ABS: constants fold directly; abs(-x) -> abs(x).  */
8738 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
8739 return fold_abs_const (arg0, type);
8740 else if (TREE_CODE (arg0) == NEGATE_EXPR)
8741 return fold_build1_loc (loc, ABS_EXPR, type, TREE_OPERAND (arg0, 0));
8742 /* Convert fabs((double)float) into (double)fabsf(float). */
8743 else if (TREE_CODE (arg0) == NOP_EXPR
8744 && TREE_CODE (type) == REAL_TYPE)
8746 tree targ0 = strip_float_extensions (arg0);
8748 return fold_convert_loc (loc, type,
8749 fold_build1_loc (loc, ABS_EXPR,
8753 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
8754 else if (TREE_CODE (arg0) == ABS_EXPR)
8756 else if (tree_expr_nonnegative_p (arg0))
8759 /* Strip sign ops from argument. */
8760 if (TREE_CODE (type) == REAL_TYPE)
8762 tem = fold_strip_sign_ops (arg0);
8764 return fold_build1_loc (loc, ABS_EXPR, type,
8765 fold_convert_loc (loc, type, tem));
/* CONJ: no-op on non-complex; otherwise negate the imaginary part.  */
8770 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8771 return fold_convert_loc (loc, type, arg0);
8772 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8774 tree itype = TREE_TYPE (type);
8775 tree rpart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 0));
8776 tree ipart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 1));
8777 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart,
8778 negate_expr (ipart));
8780 if (TREE_CODE (arg0) == COMPLEX_CST)
8782 tree itype = TREE_TYPE (type);
8783 tree rpart = fold_convert_loc (loc, itype, TREE_REALPART (arg0));
8784 tree ipart = fold_convert_loc (loc, itype, TREE_IMAGPART (arg0));
8785 return build_complex (type, rpart, negate_expr (ipart));
/* conj(conj(x)) -> x.  */
8787 if (TREE_CODE (arg0) == CONJ_EXPR)
8788 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
/* BIT_NOT: constants fold; ~~x -> x.  */
8792 if (TREE_CODE (arg0) == INTEGER_CST)
8793 return fold_not_const (arg0, type);
8794 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
8795 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8796 /* Convert ~ (-A) to A - 1. */
8797 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
8798 return fold_build2_loc (loc, MINUS_EXPR, type,
8799 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0)),
8800 build_int_cst (type, 1));
8801 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
8802 else if (INTEGRAL_TYPE_P (type)
8803 && ((TREE_CODE (arg0) == MINUS_EXPR
8804 && integer_onep (TREE_OPERAND (arg0, 1)))
8805 || (TREE_CODE (arg0) == PLUS_EXPR
8806 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
8807 return fold_build1_loc (loc, NEGATE_EXPR, type,
8808 fold_convert_loc (loc, type,
8809 TREE_OPERAND (arg0, 0)));
8810 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
8811 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8812 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8813 fold_convert_loc (loc, type,
8814 TREE_OPERAND (arg0, 0)))))
8815 return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
8816 fold_convert_loc (loc, type,
8817 TREE_OPERAND (arg0, 1)));
8818 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8819 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8820 fold_convert_loc (loc, type,
8821 TREE_OPERAND (arg0, 1)))))
8822 return fold_build2_loc (loc, BIT_XOR_EXPR, type,
8823 fold_convert_loc (loc, type,
8824 TREE_OPERAND (arg0, 0)), tem);
8825 /* Perform BIT_NOT_EXPR on each element individually. */
8826 else if (TREE_CODE (arg0) == VECTOR_CST)
8828 tree elements = TREE_VECTOR_CST_ELTS (arg0), elem, list = NULL_TREE;
8829 int count = TYPE_VECTOR_SUBPARTS (type), i;
/* Walk the element list; missing trailing elements are implicitly
   zero, so their complement is all-ones (-1).  */
8831 for (i = 0; i < count; i++)
8835 elem = TREE_VALUE (elements);
8836 elem = fold_unary_loc (loc, BIT_NOT_EXPR, TREE_TYPE (type), elem);
8837 if (elem == NULL_TREE)
8839 elements = TREE_CHAIN (elements);
8842 elem = build_int_cst (TREE_TYPE (type), -1);
8843 list = tree_cons (NULL_TREE, elem, list);
8846 return build_vector (type, nreverse (list));
8851 case TRUTH_NOT_EXPR:
8852 /* The argument to invert_truthvalue must have Boolean type. */
8853 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
8854 arg0 = fold_convert_loc (loc, boolean_type_node, arg0);
8856 /* Note that the operand of this must be an int
8857 and its values must be 0 or 1.
8858 ("true" is a fixed value perhaps depending on the language,
8859 but we don't handle values other than 1 correctly yet.) */
8860 tem = fold_truth_not_expr (loc, arg0);
8863 return fold_convert_loc (loc, type, tem);
/* REALPART: of a non-complex value is the value itself.  */
8866 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8867 return fold_convert_loc (loc, type, arg0);
8868 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8869 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
8870 TREE_OPERAND (arg0, 1));
8871 if (TREE_CODE (arg0) == COMPLEX_CST)
8872 return fold_convert_loc (loc, type, TREE_REALPART (arg0));
/* real(x +- y) -> real(x) +- real(y).  */
8873 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8875 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8876 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8877 fold_build1_loc (loc, REALPART_EXPR, itype,
8878 TREE_OPERAND (arg0, 0)),
8879 fold_build1_loc (loc, REALPART_EXPR, itype,
8880 TREE_OPERAND (arg0, 1)));
8881 return fold_convert_loc (loc, type, tem);
/* real(conj(x)) -> real(x).  */
8883 if (TREE_CODE (arg0) == CONJ_EXPR)
8885 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8886 tem = fold_build1_loc (loc, REALPART_EXPR, itype,
8887 TREE_OPERAND (arg0, 0));
8888 return fold_convert_loc (loc, type, tem);
/* real(cexpi(x)) -> cos(x).  */
8890 if (TREE_CODE (arg0) == CALL_EXPR)
8892 tree fn = get_callee_fndecl (arg0);
8893 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8894 switch (DECL_FUNCTION_CODE (fn))
8896 CASE_FLT_FN (BUILT_IN_CEXPI):
8897 fn = mathfn_built_in (type, BUILT_IN_COS);
8899 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
/* IMAGPART: of a non-complex value is zero.  */
8909 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8910 return fold_convert_loc (loc, type, integer_zero_node);
8911 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8912 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 1),
8913 TREE_OPERAND (arg0, 0));
8914 if (TREE_CODE (arg0) == COMPLEX_CST)
8915 return fold_convert_loc (loc, type, TREE_IMAGPART (arg0));
/* imag(x +- y) -> imag(x) +- imag(y).  */
8916 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8918 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8919 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8920 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8921 TREE_OPERAND (arg0, 0)),
8922 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8923 TREE_OPERAND (arg0, 1)));
8924 return fold_convert_loc (loc, type, tem);
/* imag(conj(x)) -> -imag(x).  */
8926 if (TREE_CODE (arg0) == CONJ_EXPR)
8928 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8929 tem = fold_build1_loc (loc, IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
8930 return fold_convert_loc (loc, type, negate_expr (tem));
/* imag(cexpi(x)) -> sin(x).  */
8932 if (TREE_CODE (arg0) == CALL_EXPR)
8934 tree fn = get_callee_fndecl (arg0);
8935 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8936 switch (DECL_FUNCTION_CODE (fn))
8938 CASE_FLT_FN (BUILT_IN_CEXPI):
8939 fn = mathfn_built_in (type, BUILT_IN_SIN);
8941 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8951 /* Fold *&X to X if X is an lvalue. */
8952 if (TREE_CODE (op0) == ADDR_EXPR)
8954 tree op00 = TREE_OPERAND (op0, 0);
8955 if ((TREE_CODE (op00) == VAR_DECL
8956 || TREE_CODE (op00) == PARM_DECL
8957 || TREE_CODE (op00) == RESULT_DECL)
8958 && !TREE_READONLY (op00))
8965 } /* switch (code) */
8969 /* If the operation was a conversion do _not_ mark a resulting constant
8970 with TREE_OVERFLOW if the original constant was not. These conversions
8971 have implementation defined behavior and retaining the TREE_OVERFLOW
8972 flag here would confuse later passes such as VRP. */
/* Wrapper around fold_unary_loc: when a conversion of an INTEGER_CST
   produced an INTEGER_CST, copy the operand's overflow flag onto the
   result instead of keeping any flag set during folding.  Returns the
   (possibly adjusted) folded tree, or NULL_TREE if no folding happened.  */
8974 fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
8975 tree type, tree op0)
8977 tree res = fold_unary_loc (loc, code, type, op0);
8979 && TREE_CODE (res) == INTEGER_CST
8980 && TREE_CODE (op0) == INTEGER_CST
8981 && CONVERT_EXPR_CODE_P (code))
8982 TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
8987 /* Fold a binary expression of code CODE and type TYPE with operands
8988 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
8989 Return the folded expression if folding is successful. Otherwise,
8990 return NULL_TREE. */
8993 fold_minmax (location_t loc, enum tree_code code, tree type, tree op0, tree op1)
8995 enum tree_code compl_code;
/* COMPL_CODE is the dual of CODE; only MIN_EXPR/MAX_EXPR are handled.  */
8997 if (code == MIN_EXPR)
8998 compl_code = MAX_EXPR;
8999 else if (code == MAX_EXPR)
9000 compl_code = MIN_EXPR;
/* The four absorption identities below are written for MIN over MAX;
   by symmetry they also cover MAX over MIN.  omit_one_operand_loc keeps
   any side effects of the discarded operand.  */
9004 /* MIN (MAX (a, b), b) == b. */
9005 if (TREE_CODE (op0) == compl_code
9006 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
9007 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 0));
9009 /* MIN (MAX (b, a), b) == b. */
9010 if (TREE_CODE (op0) == compl_code
9011 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
9012 && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
9013 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 1));
9015 /* MIN (a, MAX (a, b)) == a. */
9016 if (TREE_CODE (op1) == compl_code
9017 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
9018 && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
9019 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 1));
9021 /* MIN (a, MAX (b, a)) == a. */
9022 if (TREE_CODE (op1) == compl_code
9023 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
9024 && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
9025 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 0));
9030 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
9031 by changing CODE to reduce the magnitude of constants involved in
9032 ARG0 of the comparison.
9033 Returns a canonicalized comparison tree if a simplification was
9034 possible, otherwise returns NULL_TREE.
9035 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
9036 valid if signed overflow is undefined. */
9039 maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
9040 tree arg0, tree arg1,
9041 bool *strict_overflow_p)
9043 enum tree_code code0 = TREE_CODE (arg0);
9044 tree t, cst0 = NULL_TREE;
9048 /* Match A +- CST code arg1 and CST code arg1. We can change the
9049 first form only if overflow is undefined. */
9050 if (!((TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9051 /* In principle pointers also have undefined overflow behavior,
9052 but that causes problems elsewhere. */
9053 && !POINTER_TYPE_P (TREE_TYPE (arg0))
9054 && (code0 == MINUS_EXPR
9055 || code0 == PLUS_EXPR)
9056 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9057 || code0 == INTEGER_CST))
9060 /* Identify the constant in arg0 and its sign. */
/* For the bare-constant form CST0 is ARG0 itself; for A +- CST it is
   the second operand.  SGN0 is its sign.  */
9061 if (code0 == INTEGER_CST)
9064 cst0 = TREE_OPERAND (arg0, 1);
9065 sgn0 = tree_int_cst_sgn (cst0);
9067 /* Overflowed constants and zero will cause problems. */
9068 if (integer_zerop (cst0)
9069 || TREE_OVERFLOW (cst0))
9072 /* See if we can reduce the magnitude of the constant in
9073 arg0 by changing the comparison code. */
9074 if (code0 == INTEGER_CST)
9076 /* CST <= arg1 -> CST-1 < arg1. */
9077 if (code == LE_EXPR && sgn0 == 1)
9079 /* -CST < arg1 -> -CST-1 <= arg1. */
9080 else if (code == LT_EXPR && sgn0 == -1)
9082 /* CST > arg1 -> CST-1 >= arg1. */
9083 else if (code == GT_EXPR && sgn0 == 1)
9085 /* -CST >= arg1 -> -CST-1 > arg1. */
9086 else if (code == GE_EXPR && sgn0 == -1)
9090 /* arg1 code' CST' might be more canonical. */
9095 /* A - CST < arg1 -> A - CST-1 <= arg1. */
9097 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR)
9099 /* A + CST > arg1 -> A + CST-1 >= arg1. */
9100 else if (code == GT_EXPR
9101 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
9103 /* A + CST <= arg1 -> A + CST-1 < arg1. */
9104 else if (code == LE_EXPR
9105 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
9107 /* A - CST >= arg1 -> A - CST-1 > arg1. */
9108 else if (code == GE_EXPR
9109 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
/* The A +- CST rewrites rely on signed overflow being undefined;
   record that so callers can warn under -Wstrict-overflow.  */
9113 *strict_overflow_p = true;
9116 /* Now build the constant reduced in magnitude. But not if that
9117 would produce one outside of its types range. */
9118 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
9120 && TYPE_MIN_VALUE (TREE_TYPE (cst0))
9121 && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
9123 && TYPE_MAX_VALUE (TREE_TYPE (cst0))
9124 && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
9125 /* We cannot swap the comparison here as that would cause us to
9126 endlessly recurse. */
/* Step CST0 one unit toward zero (add 1 if negative, subtract 1 if
   positive), then rebuild A +- CST' if ARG0 was not a bare constant.  */
9129 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
9130 cst0, build_int_cst (TREE_TYPE (cst0), 1), 0);
9131 if (code0 != INTEGER_CST)
9132 t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
9134 /* If swapping might yield to a more canonical form, do so. */
9136 return fold_build2_loc (loc, swap_tree_comparison (code), type, arg1, t);
9138 return fold_build2_loc (loc, code, type, t, arg1);
9141 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
9142 overflow further. Try to decrease the magnitude of constants involved
9143 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
9144 and put sole constants at the second argument position.
9145 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
9148 maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
9149 tree arg0, tree arg1)
9152 bool strict_overflow_p;
9153 const char * const warnmsg = G_("assuming signed overflow does not occur "
9154 "when reducing constant in comparison");
9156 /* Try canonicalization by simplifying arg0. */
/* First attempt: reduce the constant inside ARG0, warning if the
   simplification is only valid under undefined signed overflow.  */
9157 strict_overflow_p = false;
9158 t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
9159 &strict_overflow_p);
9162 if (strict_overflow_p)
9163 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
9167 /* Try canonicalization by simplifying arg1 using the swapped
/* Second attempt: swap the comparison and try to reduce the constant
   inside ARG1 instead.  */
9169 code = swap_tree_comparison (code);
9170 strict_overflow_p = false;
9171 t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
9172 &strict_overflow_p);
9173 if (t && strict_overflow_p)
9174 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
9178 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
9179 space. This is used to avoid issuing overflow warnings for
9180 expressions like &p->x which can not wrap. */
9183 pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
9185 unsigned HOST_WIDE_INT offset_low, total_low;
9186 HOST_WIDE_INT size, offset_high, total_high;
/* Only pointer bases are meaningful here.  */
9188 if (!POINTER_TYPE_P (TREE_TYPE (base)))
/* A missing offset contributes zero; a non-constant or overflowed
   offset means we must conservatively assume wrapping is possible.  */
9194 if (offset == NULL_TREE)
9199 else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
9203 offset_low = TREE_INT_CST_LOW (offset);
9204 offset_high = TREE_INT_CST_HIGH (offset);
/* TOTAL = OFFSET + BITPOS in bytes, computed in double-word precision
   so the addition itself cannot silently overflow.  */
9207 if (add_double_with_sign (offset_low, offset_high,
9208 bitpos / BITS_PER_UNIT, 0,
9209 &total_low, &total_high,
/* Any bits in the high word mean the total exceeds the host word and
   hence any object size.  */
9213 if (total_high != 0)
9216 size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
9220 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
/* ... actual object: use the (possibly larger) size of the object the
   address was taken of.  */
9222 if (TREE_CODE (base) == ADDR_EXPR)
9224 HOST_WIDE_INT base_size;
9226 base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
9227 if (base_size > 0 && size < base_size)
/* The access may wrap iff the total byte offset exceeds the size of
   the pointed-to object.  */
9231 return total_low > (unsigned HOST_WIDE_INT) size;
9234 /* Subroutine of fold_binary. This routine performs all of the
9235 transformations that are common to the equality/inequality
9236 operators (EQ_EXPR and NE_EXPR) and the ordering operators
9237 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
9238 fold_binary should call fold_binary. Fold a comparison with
9239 tree code CODE and type TYPE with operands OP0 and OP1. Return
9240 the folded comparison or NULL_TREE. */
9243 fold_comparison (location_t loc, enum tree_code code, tree type,
/* NOTE(review): this excerpt omits interspersed source lines (declarations,
   braces, and parts of conditions are missing -- e.g. the declaration of
   const2 below).  The comments added here describe only the logic that is
   visible; verify any change against the complete fold-const.c.  */
9246 tree arg0, arg1, tem;
/* Strip conversions that preserve the sign so the comparison can be
   analyzed on the underlying operands; the folded result is rebuilt
   with the caller-supplied TYPE.  */
9251 STRIP_SIGN_NOPS (arg0);
9252 STRIP_SIGN_NOPS (arg1);
/* First try to fold the comparison entirely at compile time.  */
9254 tem = fold_relational_const (code, type, arg0, arg1);
9255 if (tem != NULL_TREE)
9258 /* If one arg is a real or integer constant, put it last. */
9259 if (tree_swap_operands_p (arg0, arg1, true))
9260 return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
9262 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1. */
9263 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
9264 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9265 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9266 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
9267 && (TREE_CODE (arg1) == INTEGER_CST
9268 && !TREE_OVERFLOW (arg1)))
9270 tree const1 = TREE_OPERAND (arg0, 1);
9272 tree variable = TREE_OPERAND (arg0, 0);
/* For X - C1 the constant moves to the other side with the opposite
   operation, hence PLUS when arg0 is a MINUS_EXPR.  */
9275 lhs_add = TREE_CODE (arg0) != PLUS_EXPR;
/* NOTE(review): const2's declaration/initialization is not visible in
   this excerpt; presumably it is arg1 -- confirm against upstream.  */
9277 lhs = fold_build2_loc (loc, lhs_add ? PLUS_EXPR : MINUS_EXPR,
9278 TREE_TYPE (arg1), const2, const1);
9280 /* If the constant operation overflowed this can be
9281 simplified as a comparison against INT_MAX/INT_MIN. */
9282 if (TREE_CODE (lhs) == INTEGER_CST
9283 && TREE_OVERFLOW (lhs))
9285 int const1_sgn = tree_int_cst_sgn (const1);
9286 enum tree_code code2 = code;
9288 /* Get the sign of the constant on the lhs if the
9289 operation were VARIABLE + CONST1. */
9290 if (TREE_CODE (arg0) == MINUS_EXPR)
9291 const1_sgn = -const1_sgn;
9293 /* The sign of the constant determines if we overflowed
9294 INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
9295 Canonicalize to the INT_MIN overflow by swapping the comparison
9297 if (const1_sgn == -1)
9298 code2 = swap_tree_comparison (code);
9300 /* We now can look at the canonicalized case
9301 VARIABLE + 1 CODE2 INT_MIN
9302 and decide on the result. */
9303 if (code2 == LT_EXPR
9305 || code2 == EQ_EXPR)
9306 return omit_one_operand_loc (loc, type, boolean_false_node, variable);
9307 else if (code2 == NE_EXPR
9309 || code2 == GT_EXPR)
9310 return omit_one_operand_loc (loc, type, boolean_true_node, variable);
/* The transformed constant did not overflow: emit the strict-overflow
   warning (this fold assumes signed overflow is undefined) and build
   the simplified comparison VARIABLE CMP (C2 -+ C1).  */
9313 if (TREE_CODE (lhs) == TREE_CODE (arg1)
9314 && (TREE_CODE (lhs) != INTEGER_CST
9315 || !TREE_OVERFLOW (lhs)))
9317 fold_overflow_warning (("assuming signed overflow does not occur "
9318 "when changing X +- C1 cmp C2 to "
9320 WARN_STRICT_OVERFLOW_COMPARISON);
9321 return fold_build2_loc (loc, code, type, variable, lhs);
9325 /* For comparisons of pointers we can decompose it to a compile time
9326 comparison of the base objects and the offsets into the object.
9327 This requires at least one operand being an ADDR_EXPR or a
9328 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
9329 if (POINTER_TYPE_P (TREE_TYPE (arg0))
9330 && (TREE_CODE (arg0) == ADDR_EXPR
9331 || TREE_CODE (arg1) == ADDR_EXPR
9332 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
9333 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
9335 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
9336 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
9337 enum machine_mode mode;
9338 int volatilep, unsignedp;
9339 bool indirect_base0 = false, indirect_base1 = false;
9341 /* Get base and offset for the access. Strip ADDR_EXPR for
9342 get_inner_reference, but put it back by stripping INDIRECT_REF
9343 off the base object if possible. indirect_baseN will be true
9344 if baseN is not an address but refers to the object itself. */
9346 if (TREE_CODE (arg0) == ADDR_EXPR)
9348 base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
9349 &bitsize, &bitpos0, &offset0, &mode,
9350 &unsignedp, &volatilep, false);
9351 if (TREE_CODE (base0) == INDIRECT_REF)
9352 base0 = TREE_OPERAND (base0, 0);
9354 indirect_base0 = true;
9356 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
9358 base0 = TREE_OPERAND (arg0, 0);
9359 offset0 = TREE_OPERAND (arg0, 1);
/* Same decomposition for the second operand.  */
9363 if (TREE_CODE (arg1) == ADDR_EXPR)
9365 base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
9366 &bitsize, &bitpos1, &offset1, &mode,
9367 &unsignedp, &volatilep, false);
9368 if (TREE_CODE (base1) == INDIRECT_REF)
9369 base1 = TREE_OPERAND (base1, 0);
9371 indirect_base1 = true;
9373 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
9375 base1 = TREE_OPERAND (arg1, 0);
9376 offset1 = TREE_OPERAND (arg1, 1);
9379 /* If we have equivalent bases we might be able to simplify. */
9380 if (indirect_base0 == indirect_base1
9381 && operand_equal_p (base0, base1, 0))
9383 /* We can fold this expression to a constant if the non-constant
9384 offset parts are equal. */
9385 if ((offset0 == offset1
9386 || (offset0 && offset1
9387 && operand_equal_p (offset0, offset1, 0)))
9390 || POINTER_TYPE_OVERFLOW_UNDEFINED))
/* Warn when the result depends on pointer arithmetic not wrapping;
   pointer_may_wrap_p checks whether base + offset could overflow.  */
9395 && bitpos0 != bitpos1
9396 && (pointer_may_wrap_p (base0, offset0, bitpos0)
9397 || pointer_may_wrap_p (base1, offset1, bitpos1)))
9398 fold_overflow_warning (("assuming pointer wraparound does not "
9399 "occur when comparing P +- C1 with "
9401 WARN_STRICT_OVERFLOW_CONDITIONAL);
/* Equal bases and equal variable offsets: the comparison reduces to
   comparing the constant bit positions.  One case per tree code
   (EQ, NE, LT, LE, GE, GT) -- the switch labels are elided here.  */
9406 return constant_boolean_node (bitpos0 == bitpos1, type);
9408 return constant_boolean_node (bitpos0 != bitpos1, type);
9410 return constant_boolean_node (bitpos0 < bitpos1, type);
9412 return constant_boolean_node (bitpos0 <= bitpos1, type);
9414 return constant_boolean_node (bitpos0 >= bitpos1, type);
9416 return constant_boolean_node (bitpos0 > bitpos1, type);
9420 /* We can simplify the comparison to a comparison of the variable
9421 offset parts if the constant offset parts are equal.
9422 Be careful to use signed size type here because otherwise we
9423 mess with array offsets in the wrong way. This is possible
9424 because pointer arithmetic is restricted to retain within an
9425 object and overflow on pointer differences is undefined as of
9426 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
9427 else if (bitpos0 == bitpos1
9428 && ((code == EQ_EXPR || code == NE_EXPR)
9429 || POINTER_TYPE_OVERFLOW_UNDEFINED))
9431 tree signed_size_type_node;
9432 signed_size_type_node = signed_type_for (size_type_node);
9434 /* By converting to signed size type we cover middle-end pointer
9435 arithmetic which operates on unsigned pointer types of size
9436 type size and ARRAY_REF offsets which are properly sign or
9437 zero extended from their type in case it is narrower than
9439 if (offset0 == NULL_TREE)
9440 offset0 = build_int_cst (signed_size_type_node, 0);
9442 offset0 = fold_convert_loc (loc, signed_size_type_node,
9444 if (offset1 == NULL_TREE)
9445 offset1 = build_int_cst (signed_size_type_node, 0);
9447 offset1 = fold_convert_loc (loc, signed_size_type_node,
/* As above, warn if the simplification relies on pointers not
   wrapping (conditions partially elided in this excerpt).  */
9452 && (pointer_may_wrap_p (base0, offset0, bitpos0)
9453 || pointer_may_wrap_p (base1, offset1, bitpos1)))
9454 fold_overflow_warning (("assuming pointer wraparound does not "
9455 "occur when comparing P +- C1 with "
9457 WARN_STRICT_OVERFLOW_COMPARISON);
9459 return fold_build2_loc (loc, code, type, offset0, offset1);
9462 /* For non-equal bases we can simplify if they are addresses
9463 of local binding decls or constants. */
9464 else if (indirect_base0 && indirect_base1
9465 /* We know that !operand_equal_p (base0, base1, 0)
9466 because the if condition was false. But make
9467 sure two decls are not the same. */
9469 && TREE_CODE (arg0) == ADDR_EXPR
9470 && TREE_CODE (arg1) == ADDR_EXPR
9471 && (((TREE_CODE (base0) == VAR_DECL
9472 || TREE_CODE (base0) == PARM_DECL)
9473 && (targetm.binds_local_p (base0)
9474 || CONSTANT_CLASS_P (base1)))
9475 || CONSTANT_CLASS_P (base0))
9476 && (((TREE_CODE (base1) == VAR_DECL
9477 || TREE_CODE (base1) == PARM_DECL)
9478 && (targetm.binds_local_p (base1)
9479 || CONSTANT_CLASS_P (base0)))
9480 || CONSTANT_CLASS_P (base1)))
/* Distinct local/constant objects can never compare equal.  */
9482 if (code == EQ_EXPR)
9483 return omit_two_operands_loc (loc, type, boolean_false_node,
9485 else if (code == NE_EXPR)
9486 return omit_two_operands_loc (loc, type, boolean_true_node,
9489 /* For equal offsets we can simplify to a comparison of the
9491 else if (bitpos0 == bitpos1
9493 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
9495 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
9496 && ((offset0 == offset1)
9497 || (offset0 && offset1
9498 && operand_equal_p (offset0, offset1, 0))))
/* Rebuild addresses of the bases if they were stripped above and
   compare them directly.  */
9501 base0 = build_fold_addr_expr_loc (loc, base0);
9503 base1 = build_fold_addr_expr_loc (loc, base1);
9504 return fold_build2_loc (loc, code, type, base0, base1);
9508 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
9509 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
9510 the resulting offset is smaller in absolute value than the
9512 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9513 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
9514 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9515 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9516 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
9517 && (TREE_CODE (arg1) == INTEGER_CST
9518 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
9520 tree const1 = TREE_OPERAND (arg0, 1);
9521 tree const2 = TREE_OPERAND (arg1, 1);
9522 tree variable1 = TREE_OPERAND (arg0, 0);
9523 tree variable2 = TREE_OPERAND (arg1, 0);
9525 const char * const warnmsg = G_("assuming signed overflow does not "
9526 "occur when combining constants around "
9529 /* Put the constant on the side where it doesn't overflow and is
9530 of lower absolute value than before. */
9531 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9532 ? MINUS_EXPR : PLUS_EXPR,
9534 if (!TREE_OVERFLOW (cst)
9535 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2))
9537 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9538 return fold_build2_loc (loc, code, type,
9540 fold_build2_loc (loc,
9541 TREE_CODE (arg1), TREE_TYPE (arg1),
/* Otherwise try moving the combined constant to the other side.  */
9545 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9546 ? MINUS_EXPR : PLUS_EXPR,
9548 if (!TREE_OVERFLOW (cst)
9549 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1))
9551 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9552 return fold_build2_loc (loc, code, type,
9553 fold_build2_loc (loc, TREE_CODE (arg0), TREE_TYPE (arg0),
9559 /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
9560 signed arithmetic case. That form is created by the compiler
9561 often enough for folding it to be of value. One example is in
9562 computing loop trip counts after Operator Strength Reduction. */
9563 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9564 && TREE_CODE (arg0) == MULT_EXPR
9565 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9566 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9567 && integer_zerop (arg1))
9569 tree const1 = TREE_OPERAND (arg0, 1);
9570 tree const2 = arg1; /* zero */
9571 tree variable1 = TREE_OPERAND (arg0, 0);
9572 enum tree_code cmp_code = code;
9574 /* Handle unfolded multiplication by zero. */
9575 if (integer_zerop (const1))
9576 return fold_build2_loc (loc, cmp_code, type, const1, const2);
9578 fold_overflow_warning (("assuming signed overflow does not occur when "
9579 "eliminating multiplication in comparison "
9581 WARN_STRICT_OVERFLOW_COMPARISON);
9583 /* If const1 is negative we swap the sense of the comparison. */
9584 if (tree_int_cst_sgn (const1) < 0)
9585 cmp_code = swap_tree_comparison (cmp_code);
9587 return fold_build2_loc (loc, cmp_code, type, variable1, const2);
/* Try to put the comparison into a canonical form that later folds
   can recognize; return the canonicalized tree when successful.  */
9590 tem = maybe_canonicalize_comparison (loc, code, type, op0, op1);
/* Floating-point specific simplifications: narrow widened operands,
   cancel negations, and fold against REAL_CST special values.  */
9594 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
9596 tree targ0 = strip_float_extensions (arg0);
9597 tree targ1 = strip_float_extensions (arg1);
9598 tree newtype = TREE_TYPE (targ0);
9600 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
9601 newtype = TREE_TYPE (targ1);
9603 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
9604 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
9605 return fold_build2_loc (loc, code, type,
9606 fold_convert_loc (loc, newtype, targ0),
9607 fold_convert_loc (loc, newtype, targ1));
9609 /* (-a) CMP (-b) -> b CMP a */
9610 if (TREE_CODE (arg0) == NEGATE_EXPR
9611 && TREE_CODE (arg1) == NEGATE_EXPR)
9612 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg1, 0),
9613 TREE_OPERAND (arg0, 0));
9615 if (TREE_CODE (arg1) == REAL_CST)
9617 REAL_VALUE_TYPE cst;
9618 cst = TREE_REAL_CST (arg1);
9620 /* (-a) CMP CST -> a swap(CMP) (-CST) */
9621 if (TREE_CODE (arg0) == NEGATE_EXPR)
9622 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9623 TREE_OPERAND (arg0, 0),
9624 build_real (TREE_TYPE (arg1),
9625 REAL_VALUE_NEGATE (cst)));
9627 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
9628 /* a CMP (-0) -> a CMP 0 */
9629 if (REAL_VALUE_MINUS_ZERO (cst))
9630 return fold_build2_loc (loc, code, type, arg0,
9631 build_real (TREE_TYPE (arg1), dconst0));
9633 /* x != NaN is always true, other ops are always false. */
9634 if (REAL_VALUE_ISNAN (cst)
9635 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
9637 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
9638 return omit_one_operand_loc (loc, type, tem, arg0);
9641 /* Fold comparisons against infinity. */
9642 if (REAL_VALUE_ISINF (cst)
9643 && MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1))))
9645 tem = fold_inf_compare (loc, code, type, arg0, arg1);
9646 if (tem != NULL_TREE)
9651 /* If this is a comparison of a real constant with a PLUS_EXPR
9652 or a MINUS_EXPR of a real constant, we can convert it into a
9653 comparison with a revised real constant as long as no overflow
9654 occurs when unsafe_math_optimizations are enabled. */
9655 if (flag_unsafe_math_optimizations
9656 && TREE_CODE (arg1) == REAL_CST
9657 && (TREE_CODE (arg0) == PLUS_EXPR
9658 || TREE_CODE (arg0) == MINUS_EXPR)
9659 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
9660 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
9661 ? MINUS_EXPR : PLUS_EXPR,
9662 arg1, TREE_OPERAND (arg0, 1), 0))
9663 && !TREE_OVERFLOW (tem))
9664 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
9666 /* Likewise, we can simplify a comparison of a real constant with
9667 a MINUS_EXPR whose first operand is also a real constant, i.e.
9668 (c1 - x) < c2 becomes x > c1-c2. Reordering is allowed on
9669 floating-point types only if -fassociative-math is set. */
9670 if (flag_associative_math
9671 && TREE_CODE (arg1) == REAL_CST
9672 && TREE_CODE (arg0) == MINUS_EXPR
9673 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
9674 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
9676 && !TREE_OVERFLOW (tem))
9677 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9678 TREE_OPERAND (arg0, 1), tem);
9680 /* Fold comparisons against built-in math functions. */
9681 if (TREE_CODE (arg1) == REAL_CST
9682 && flag_unsafe_math_optimizations
9683 && ! flag_errno_math)
9685 enum built_in_function fcode = builtin_mathfn_code (arg0);
9687 if (fcode != END_BUILTINS)
9689 tem = fold_mathfn_compare (loc, fcode, code, type, arg0, arg1);
9690 if (tem != NULL_TREE)
/* Integer comparisons where one operand is a conversion: try doing
   the comparison in the narrower type or with changed signedness.  */
9696 if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
9697 && CONVERT_EXPR_P (arg0))
9699 /* If we are widening one operand of an integer comparison,
9700 see if the other operand is similarly being widened. Perhaps we
9701 can do the comparison in the narrower type. */
9702 tem = fold_widened_comparison (loc, code, type, arg0, arg1);
9706 /* Or if we are changing signedness. */
9707 tem = fold_sign_changed_comparison (loc, code, type, arg0, arg1);
9712 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
9713 constant, we can simplify it. */
9714 if (TREE_CODE (arg1) == INTEGER_CST
9715 && (TREE_CODE (arg0) == MIN_EXPR
9716 || TREE_CODE (arg0) == MAX_EXPR)
9717 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9719 tem = optimize_minmax_comparison (loc, code, type, op0, op1);
9724 /* Simplify comparison of something with itself. (For IEEE
9725 floating-point, we can only do some of these simplifications.) */
9726 if (operand_equal_p (arg0, arg1, 0))
/* X == X, X <= X, X >= X are true unless NaNs must be honored;
   the switch labels over CODE are elided in this excerpt.  */
9731 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9732 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9733 return constant_boolean_node (1, type);
9738 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9739 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9740 return constant_boolean_node (1, type);
9741 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg1);
9744 /* For NE, we can only do this simplification if integer
9745 or we don't honor IEEE floating point NaNs. */
9746 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
9747 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9749 /* ... fall through ... */
9752 return constant_boolean_node (0, type);
9758 /* If we are comparing an expression that just has comparisons
9759 of two integer values, arithmetic expressions of those comparisons,
9760 and constants, we can simplify it. There are only three cases
9761 to check: the two values can either be equal, the first can be
9762 greater, or the second can be greater. Fold the expression for
9763 those three values. Since each value must be 0 or 1, we have
9764 eight possibilities, each of which corresponds to the constant 0
9765 or 1 or one of the six possible comparisons.
9767 This handles common cases like (a > b) == 0 but also handles
9768 expressions like ((x > y) - (y > x)) > 0, which supposedly
9769 occur in macroized code. */
9771 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
9773 tree cval1 = 0, cval2 = 0;
9776 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
9777 /* Don't handle degenerate cases here; they should already
9778 have been handled anyway. */
9779 && cval1 != 0 && cval2 != 0
9780 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
9781 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
9782 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
9783 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
9784 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
9785 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
9786 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
9788 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
9789 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
9791 /* We can't just pass T to eval_subst in case cval1 or cval2
9792 was the same as ARG1. */
/* Evaluate the expression for cval1 > cval2, cval1 == cval2 and
   cval1 < cval2 by substituting extreme values (some substitution
   arguments are elided in this excerpt).  */
9795 = fold_build2_loc (loc, code, type,
9796 eval_subst (loc, arg0, cval1, maxval,
9800 = fold_build2_loc (loc, code, type,
9801 eval_subst (loc, arg0, cval1, maxval,
9805 = fold_build2_loc (loc, code, type,
9806 eval_subst (loc, arg0, cval1, minval,
9810 /* All three of these results should be 0 or 1. Confirm they are.
9811 Then use those values to select the proper code to use. */
9813 if (TREE_CODE (high_result) == INTEGER_CST
9814 && TREE_CODE (equal_result) == INTEGER_CST
9815 && TREE_CODE (low_result) == INTEGER_CST)
9817 /* Make a 3-bit mask with the high-order bit being the
9818 value for `>', the next for '=', and the low for '<'. */
9819 switch ((integer_onep (high_result) * 4)
9820 + (integer_onep (equal_result) * 2)
9821 + integer_onep (low_result))
/* Mask 0 means always false; mask 7 means always true; the
   intermediate cases (elided here) map to the six comparisons.  */
9825 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
9846 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
/* If either operand must only be evaluated once, wrap the rebuilt
   comparison in a SAVE_EXPR.  */
9851 tem = save_expr (build2 (code, type, cval1, cval2));
9852 SET_EXPR_LOCATION (tem, loc);
9855 return fold_build2_loc (loc, code, type, cval1, cval2);
9860 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
9861 into a single range test. */
9862 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
9863 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
9864 && TREE_CODE (arg1) == INTEGER_CST
9865 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9866 && !integer_zerop (TREE_OPERAND (arg0, 1))
9867 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9868 && !TREE_OVERFLOW (arg1))
9870 tem = fold_div_compare (loc, code, type, arg0, arg1);
9871 if (tem != NULL_TREE)
9875 /* Fold ~X op ~Y as Y op X. */
9876 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9877 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9879 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9880 return fold_build2_loc (loc, code, type,
9881 fold_convert_loc (loc, cmp_type,
9882 TREE_OPERAND (arg1, 0)),
9883 TREE_OPERAND (arg0, 0));
9886 /* Fold ~X op C as X op' ~C, where op' is the swapped comparison. */
9887 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9888 && TREE_CODE (arg1) == INTEGER_CST)
9890 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9891 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9892 TREE_OPERAND (arg0, 0),
9893 fold_build1_loc (loc, BIT_NOT_EXPR, cmp_type,
9894 fold_convert_loc (loc, cmp_type, arg1)));
9901 /* Subroutine of fold_binary. Optimize complex multiplications of the
9902 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
9903 argument EXPR represents the expression "z" of type TYPE. */
9906 fold_mult_zconjz (location_t loc, tree type, tree expr)
9908 tree itype = TREE_TYPE (type);
9909 tree rpart, ipart, tem;
/* Extract the real and imaginary parts of EXPR without re-evaluating
   it: directly from a COMPLEX_EXPR or COMPLEX_CST when possible,
   otherwise via REALPART_EXPR/IMAGPART_EXPR on a SAVE_EXPR.  */
9911 if (TREE_CODE (expr) == COMPLEX_EXPR)
9913 rpart = TREE_OPERAND (expr, 0);
9914 ipart = TREE_OPERAND (expr, 1);
9916 else if (TREE_CODE (expr) == COMPLEX_CST)
9918 rpart = TREE_REALPART (expr);
9919 ipart = TREE_IMAGPART (expr);
9923 expr = save_expr (expr);
9924 rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
9925 ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
/* Each part is used twice below, so guard against double evaluation.
   z * conj(z) = (r*r + i*i) + 0i, hence a COMPLEX_EXPR whose
   imaginary part is zero.  */
9928 rpart = save_expr (rpart);
9929 ipart = save_expr (ipart);
9930 tem = fold_build2_loc (loc, PLUS_EXPR, itype,
9931 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
9932 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
9933 return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
9934 fold_convert_loc (loc, itype, integer_zero_node));
9938 /* Subroutine of fold_binary. If P is the value of EXPR, computes
9939 power-of-two M and (arbitrary) N such that M divides (P-N). This condition
9940 guarantees that P and N have the same least significant log2(M) bits.
9941 N is not otherwise constrained. In particular, N is not normalized to
9942 0 <= N < M as is common. In general, the precise value of P is unknown.
9943 M is chosen as large as possible such that constant N can be determined.
9945 Returns M and sets *RESIDUE to N.
9947 If ALLOW_FUNC_ALIGN is true, do take functions' DECL_ALIGN_UNIT into
9948 account. This is not always possible due to PR 35705.
9951 static unsigned HOST_WIDE_INT
9952 get_pointer_modulus_and_residue (tree expr, unsigned HOST_WIDE_INT *residue,
9953 bool allow_func_align)
/* NOTE(review): several lines of this function are elided in this
   excerpt (initialization of *residue, some declarations and the final
   fallback return); verify against the complete fold-const.c.  */
9955 enum tree_code code;
9959 code = TREE_CODE (expr);
9960 if (code == ADDR_EXPR)
/* &object: the modulus is the declared alignment of the object and
   the residue is the constant byte offset of the accessed component.  */
9962 expr = TREE_OPERAND (expr, 0);
9963 if (handled_component_p (expr))
9965 HOST_WIDE_INT bitsize, bitpos;
9967 enum machine_mode mode;
9968 int unsignedp, volatilep;
9970 expr = get_inner_reference (expr, &bitsize, &bitpos, &offset,
9971 &mode, &unsignedp, &volatilep, false);
9972 *residue = bitpos / BITS_PER_UNIT;
9975 if (TREE_CODE (offset) == INTEGER_CST)
9976 *residue += TREE_INT_CST_LOW (offset);
9978 /* We don't handle more complicated offset expressions. */
/* Functions' DECL_ALIGN_UNIT is only trustworthy when the caller
   allows it (see PR 35705 referenced in the header comment).  */
9984 && (allow_func_align || TREE_CODE (expr) != FUNCTION_DECL))
9985 return DECL_ALIGN_UNIT (expr);
9987 else if (code == POINTER_PLUS_EXPR)
/* P + N: recurse on the base pointer, then refine modulus/residue
   using the addend.  */
9990 unsigned HOST_WIDE_INT modulus;
9991 enum tree_code inner_code;
9993 op0 = TREE_OPERAND (expr, 0);
9995 modulus = get_pointer_modulus_and_residue (op0, residue,
9998 op1 = TREE_OPERAND (expr, 1);
10000 inner_code = TREE_CODE (op1);
10001 if (inner_code == INTEGER_CST)
/* A constant addend only shifts the residue.  */
10003 *residue += TREE_INT_CST_LOW (op1);
10006 else if (inner_code == MULT_EXPR)
/* X * C: the addend is a multiple of C, so the base's modulus can be
   capped by the largest power of two dividing C.  */
10008 op1 = TREE_OPERAND (op1, 1);
10009 if (TREE_CODE (op1) == INTEGER_CST)
10011 unsigned HOST_WIDE_INT align;
10013 /* Compute the greatest power-of-2 divisor of op1. */
10014 align = TREE_INT_CST_LOW (op1);
10017 /* If align is non-zero and less than *modulus, replace
10018 *modulus with align.  If align is 0, then either op1 is 0
10019 or the greatest power-of-2 divisor of op1 doesn't fit in an
10020 unsigned HOST_WIDE_INT. In either case, no additional
10021 constraint is imposed. */
10023 modulus = MIN (modulus, align);
10030 /* If we get here, we were unable to determine anything useful about the
10036 /* Fold a binary expression of code CODE and type TYPE with operands
10037 OP0 and OP1. LOC is the location of the resulting expression.
10038 Return the folded expression if folding is successful. Otherwise,
10039 return NULL_TREE. */
10042 fold_binary_loc (location_t loc,
10043 enum tree_code code, tree type, tree op0, tree op1)
10045 enum tree_code_class kind = TREE_CODE_CLASS (code);
10046 tree arg0, arg1, tem;
10047 tree t1 = NULL_TREE;
10048 bool strict_overflow_p;
10050 gcc_assert (IS_EXPR_CODE_CLASS (kind)
10051 && TREE_CODE_LENGTH (code) == 2
10052 && op0 != NULL_TREE
10053 && op1 != NULL_TREE);
10058 /* Strip any conversions that don't change the mode. This is
10059 safe for every expression, except for a comparison expression
10060 because its signedness is derived from its operands. So, in
10061 the latter case, only strip conversions that don't change the
10062 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
10065 Note that this is done as an internal manipulation within the
10066 constant folder, in order to find the simplest representation
10067 of the arguments so that their form can be studied. In any
10068 cases, the appropriate type conversions should be put back in
10069 the tree that will get out of the constant folder. */
10071 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
10073 STRIP_SIGN_NOPS (arg0);
10074 STRIP_SIGN_NOPS (arg1);
10082 /* Note that TREE_CONSTANT isn't enough: static var addresses are
10083 constant but we can't do arithmetic on them. */
10084 if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
10085 || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
10086 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == FIXED_CST)
10087 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == INTEGER_CST)
10088 || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
10089 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST))
10091 if (kind == tcc_binary)
10093 /* Make sure type and arg0 have the same saturating flag. */
10094 gcc_assert (TYPE_SATURATING (type)
10095 == TYPE_SATURATING (TREE_TYPE (arg0)));
10096 tem = const_binop (code, arg0, arg1, 0);
10098 else if (kind == tcc_comparison)
10099 tem = fold_relational_const (code, type, arg0, arg1);
10103 if (tem != NULL_TREE)
10105 if (TREE_TYPE (tem) != type)
10106 tem = fold_convert_loc (loc, type, tem);
10111 /* If this is a commutative operation, and ARG0 is a constant, move it
10112 to ARG1 to reduce the number of tests below. */
10113 if (commutative_tree_code (code)
10114 && tree_swap_operands_p (arg0, arg1, true))
10115 return fold_build2_loc (loc, code, type, op1, op0);
10117 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
10119 First check for cases where an arithmetic operation is applied to a
10120 compound, conditional, or comparison operation. Push the arithmetic
10121 operation inside the compound or conditional to see if any folding
10122 can then be done. Convert comparison to conditional for this purpose.
10123 The also optimizes non-constant cases that used to be done in
10126 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
10127 one of the operands is a comparison and the other is a comparison, a
10128 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
10129 code below would make the expression more complex. Change it to a
10130 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
10131 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
10133 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
10134 || code == EQ_EXPR || code == NE_EXPR)
10135 && ((truth_value_p (TREE_CODE (arg0))
10136 && (truth_value_p (TREE_CODE (arg1))
10137 || (TREE_CODE (arg1) == BIT_AND_EXPR
10138 && integer_onep (TREE_OPERAND (arg1, 1)))))
10139 || (truth_value_p (TREE_CODE (arg1))
10140 && (truth_value_p (TREE_CODE (arg0))
10141 || (TREE_CODE (arg0) == BIT_AND_EXPR
10142 && integer_onep (TREE_OPERAND (arg0, 1)))))))
10144 tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
10145 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
10148 fold_convert_loc (loc, boolean_type_node, arg0),
10149 fold_convert_loc (loc, boolean_type_node, arg1));
10151 if (code == EQ_EXPR)
10152 tem = invert_truthvalue_loc (loc, tem);
10154 return fold_convert_loc (loc, type, tem);
10157 if (TREE_CODE_CLASS (code) == tcc_binary
10158 || TREE_CODE_CLASS (code) == tcc_comparison)
10160 if (TREE_CODE (arg0) == COMPOUND_EXPR)
10162 tem = fold_build2_loc (loc, code, type,
10163 fold_convert_loc (loc, TREE_TYPE (op0),
10164 TREE_OPERAND (arg0, 1)), op1);
10165 tem = build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0), tem);
10166 goto fold_binary_exit;
10168 if (TREE_CODE (arg1) == COMPOUND_EXPR
10169 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10171 tem = fold_build2_loc (loc, code, type, op0,
10172 fold_convert_loc (loc, TREE_TYPE (op1),
10173 TREE_OPERAND (arg1, 1)));
10174 tem = build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0), tem);
10175 goto fold_binary_exit;
10178 if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
10180 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
10182 /*cond_first_p=*/1);
10183 if (tem != NULL_TREE)
10187 if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1))
10189 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
10191 /*cond_first_p=*/0);
10192 if (tem != NULL_TREE)
10199 case POINTER_PLUS_EXPR:
10200 /* 0 +p index -> (type)index */
10201 if (integer_zerop (arg0))
10202 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10204 /* PTR +p 0 -> PTR */
10205 if (integer_zerop (arg1))
10206 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10208 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
10209 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10210 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
10211 return fold_convert_loc (loc, type,
10212 fold_build2_loc (loc, PLUS_EXPR, sizetype,
10213 fold_convert_loc (loc, sizetype,
10215 fold_convert_loc (loc, sizetype,
10218 /* index +p PTR -> PTR +p index */
10219 if (POINTER_TYPE_P (TREE_TYPE (arg1))
10220 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
10221 return fold_build2_loc (loc, POINTER_PLUS_EXPR, type,
10222 fold_convert_loc (loc, type, arg1),
10223 fold_convert_loc (loc, sizetype, arg0));
10225 /* (PTR +p B) +p A -> PTR +p (B + A) */
10226 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10229 tree arg01 = fold_convert_loc (loc, sizetype, TREE_OPERAND (arg0, 1));
10230 tree arg00 = TREE_OPERAND (arg0, 0);
10231 inner = fold_build2_loc (loc, PLUS_EXPR, sizetype,
10232 arg01, fold_convert_loc (loc, sizetype, arg1));
10233 return fold_convert_loc (loc, type,
10234 fold_build2_loc (loc, POINTER_PLUS_EXPR,
10239 /* PTR_CST +p CST -> CST1 */
10240 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
10241 return fold_build2_loc (loc, PLUS_EXPR, type, arg0,
10242 fold_convert_loc (loc, type, arg1));
10244 /* Try replacing &a[i1] +p c * i2 with &a[i1 + i2], if c is step
10245 of the array. Loop optimizer sometimes produce this type of
10247 if (TREE_CODE (arg0) == ADDR_EXPR)
10249 tem = try_move_mult_to_index (loc, arg0,
10250 fold_convert_loc (loc, sizetype, arg1));
10252 return fold_convert_loc (loc, type, tem);
10258 /* A + (-B) -> A - B */
10259 if (TREE_CODE (arg1) == NEGATE_EXPR)
10260 return fold_build2_loc (loc, MINUS_EXPR, type,
10261 fold_convert_loc (loc, type, arg0),
10262 fold_convert_loc (loc, type,
10263 TREE_OPERAND (arg1, 0)));
10264 /* (-A) + B -> B - A */
10265 if (TREE_CODE (arg0) == NEGATE_EXPR
10266 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
10267 return fold_build2_loc (loc, MINUS_EXPR, type,
10268 fold_convert_loc (loc, type, arg1),
10269 fold_convert_loc (loc, type,
10270 TREE_OPERAND (arg0, 0)));
10272 if (INTEGRAL_TYPE_P (type))
10274 /* Convert ~A + 1 to -A. */
10275 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10276 && integer_onep (arg1))
10277 return fold_build1_loc (loc, NEGATE_EXPR, type,
10278 fold_convert_loc (loc, type,
10279 TREE_OPERAND (arg0, 0)));
10281 /* ~X + X is -1. */
10282 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10283 && !TYPE_OVERFLOW_TRAPS (type))
10285 tree tem = TREE_OPERAND (arg0, 0);
10288 if (operand_equal_p (tem, arg1, 0))
10290 t1 = build_int_cst_type (type, -1);
10291 return omit_one_operand_loc (loc, type, t1, arg1);
10295 /* X + ~X is -1. */
10296 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10297 && !TYPE_OVERFLOW_TRAPS (type))
10299 tree tem = TREE_OPERAND (arg1, 0);
10302 if (operand_equal_p (arg0, tem, 0))
10304 t1 = build_int_cst_type (type, -1);
10305 return omit_one_operand_loc (loc, type, t1, arg0);
10309 /* X + (X / CST) * -CST is X % CST. */
10310 if (TREE_CODE (arg1) == MULT_EXPR
10311 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10312 && operand_equal_p (arg0,
10313 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
10315 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
10316 tree cst1 = TREE_OPERAND (arg1, 1);
10317 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
10319 if (sum && integer_zerop (sum))
10320 return fold_convert_loc (loc, type,
10321 fold_build2_loc (loc, TRUNC_MOD_EXPR,
10322 TREE_TYPE (arg0), arg0,
10327 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the
10328 same or one. Make sure type is not saturating.
10329 fold_plusminus_mult_expr will re-associate. */
10330 if ((TREE_CODE (arg0) == MULT_EXPR
10331 || TREE_CODE (arg1) == MULT_EXPR)
10332 && !TYPE_SATURATING (type)
10333 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10335 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10340 if (! FLOAT_TYPE_P (type))
10342 if (integer_zerop (arg1))
10343 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10345 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
10346 with a constant, and the two constants have no bits in common,
10347 we should treat this as a BIT_IOR_EXPR since this may produce more
10348 simplifications. */
10349 if (TREE_CODE (arg0) == BIT_AND_EXPR
10350 && TREE_CODE (arg1) == BIT_AND_EXPR
10351 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10352 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
10353 && integer_zerop (const_binop (BIT_AND_EXPR,
10354 TREE_OPERAND (arg0, 1),
10355 TREE_OPERAND (arg1, 1), 0)))
10357 code = BIT_IOR_EXPR;
10361 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
10362 (plus (plus (mult) (mult)) (foo)) so that we can
10363 take advantage of the factoring cases below. */
10364 if (((TREE_CODE (arg0) == PLUS_EXPR
10365 || TREE_CODE (arg0) == MINUS_EXPR)
10366 && TREE_CODE (arg1) == MULT_EXPR)
10367 || ((TREE_CODE (arg1) == PLUS_EXPR
10368 || TREE_CODE (arg1) == MINUS_EXPR)
10369 && TREE_CODE (arg0) == MULT_EXPR))
10371 tree parg0, parg1, parg, marg;
10372 enum tree_code pcode;
10374 if (TREE_CODE (arg1) == MULT_EXPR)
10375 parg = arg0, marg = arg1;
10377 parg = arg1, marg = arg0;
10378 pcode = TREE_CODE (parg);
10379 parg0 = TREE_OPERAND (parg, 0);
10380 parg1 = TREE_OPERAND (parg, 1);
10381 STRIP_NOPS (parg0);
10382 STRIP_NOPS (parg1);
10384 if (TREE_CODE (parg0) == MULT_EXPR
10385 && TREE_CODE (parg1) != MULT_EXPR)
10386 return fold_build2_loc (loc, pcode, type,
10387 fold_build2_loc (loc, PLUS_EXPR, type,
10388 fold_convert_loc (loc, type,
10390 fold_convert_loc (loc, type,
10392 fold_convert_loc (loc, type, parg1));
10393 if (TREE_CODE (parg0) != MULT_EXPR
10394 && TREE_CODE (parg1) == MULT_EXPR)
10396 fold_build2_loc (loc, PLUS_EXPR, type,
10397 fold_convert_loc (loc, type, parg0),
10398 fold_build2_loc (loc, pcode, type,
10399 fold_convert_loc (loc, type, marg),
10400 fold_convert_loc (loc, type,
10406 /* See if ARG1 is zero and X + ARG1 reduces to X. */
10407 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
10408 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10410 /* Likewise if the operands are reversed. */
10411 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10412 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10414 /* Convert X + -C into X - C. */
10415 if (TREE_CODE (arg1) == REAL_CST
10416 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
10418 tem = fold_negate_const (arg1, type);
10419 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
10420 return fold_build2_loc (loc, MINUS_EXPR, type,
10421 fold_convert_loc (loc, type, arg0),
10422 fold_convert_loc (loc, type, tem));
10425 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
10426 to __complex__ ( x, y ). This is not the same for SNaNs or
10427 if signed zeros are involved. */
10428 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10429 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10430 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10432 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10433 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10434 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10435 bool arg0rz = false, arg0iz = false;
10436 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10437 || (arg0i && (arg0iz = real_zerop (arg0i))))
10439 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10440 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10441 if (arg0rz && arg1i && real_zerop (arg1i))
10443 tree rp = arg1r ? arg1r
10444 : build1 (REALPART_EXPR, rtype, arg1);
10445 tree ip = arg0i ? arg0i
10446 : build1 (IMAGPART_EXPR, rtype, arg0);
10447 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10449 else if (arg0iz && arg1r && real_zerop (arg1r))
10451 tree rp = arg0r ? arg0r
10452 : build1 (REALPART_EXPR, rtype, arg0);
10453 tree ip = arg1i ? arg1i
10454 : build1 (IMAGPART_EXPR, rtype, arg1);
10455 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10460 if (flag_unsafe_math_optimizations
10461 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10462 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10463 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10466 /* Convert x+x into x*2.0. */
10467 if (operand_equal_p (arg0, arg1, 0)
10468 && SCALAR_FLOAT_TYPE_P (type))
10469 return fold_build2_loc (loc, MULT_EXPR, type, arg0,
10470 build_real (type, dconst2));
10472 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
10473 We associate floats only if the user has specified
10474 -fassociative-math. */
10475 if (flag_associative_math
10476 && TREE_CODE (arg1) == PLUS_EXPR
10477 && TREE_CODE (arg0) != MULT_EXPR)
10479 tree tree10 = TREE_OPERAND (arg1, 0);
10480 tree tree11 = TREE_OPERAND (arg1, 1);
10481 if (TREE_CODE (tree11) == MULT_EXPR
10482 && TREE_CODE (tree10) == MULT_EXPR)
10485 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
10486 return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
10489 /* Convert (b*c + d*e) + a into b*c + (d*e +a).
10490 We associate floats only if the user has specified
10491 -fassociative-math. */
10492 if (flag_associative_math
10493 && TREE_CODE (arg0) == PLUS_EXPR
10494 && TREE_CODE (arg1) != MULT_EXPR)
10496 tree tree00 = TREE_OPERAND (arg0, 0);
10497 tree tree01 = TREE_OPERAND (arg0, 1);
10498 if (TREE_CODE (tree01) == MULT_EXPR
10499 && TREE_CODE (tree00) == MULT_EXPR)
10502 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
10503 return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
10509 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
10510 is a rotate of A by C1 bits. */
10511 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
10512 is a rotate of A by B bits. */
10514 enum tree_code code0, code1;
10516 code0 = TREE_CODE (arg0);
10517 code1 = TREE_CODE (arg1);
10518 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
10519 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
10520 && operand_equal_p (TREE_OPERAND (arg0, 0),
10521 TREE_OPERAND (arg1, 0), 0)
10522 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
10523 TYPE_UNSIGNED (rtype))
10524 /* Only create rotates in complete modes. Other cases are not
10525 expanded properly. */
10526 && TYPE_PRECISION (rtype) == GET_MODE_PRECISION (TYPE_MODE (rtype)))
10528 tree tree01, tree11;
10529 enum tree_code code01, code11;
10531 tree01 = TREE_OPERAND (arg0, 1);
10532 tree11 = TREE_OPERAND (arg1, 1);
10533 STRIP_NOPS (tree01);
10534 STRIP_NOPS (tree11);
10535 code01 = TREE_CODE (tree01);
10536 code11 = TREE_CODE (tree11);
10537 if (code01 == INTEGER_CST
10538 && code11 == INTEGER_CST
10539 && TREE_INT_CST_HIGH (tree01) == 0
10540 && TREE_INT_CST_HIGH (tree11) == 0
10541 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
10542 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
10544 tem = build2 (LROTATE_EXPR,
10545 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10546 TREE_OPERAND (arg0, 0),
10547 code0 == LSHIFT_EXPR
10548 ? tree01 : tree11);
10549 SET_EXPR_LOCATION (tem, loc);
10550 return fold_convert_loc (loc, type, tem);
10552 else if (code11 == MINUS_EXPR)
10554 tree tree110, tree111;
10555 tree110 = TREE_OPERAND (tree11, 0);
10556 tree111 = TREE_OPERAND (tree11, 1);
10557 STRIP_NOPS (tree110);
10558 STRIP_NOPS (tree111);
10559 if (TREE_CODE (tree110) == INTEGER_CST
10560 && 0 == compare_tree_int (tree110,
10562 (TREE_TYPE (TREE_OPERAND
10564 && operand_equal_p (tree01, tree111, 0))
10566 fold_convert_loc (loc, type,
10567 build2 ((code0 == LSHIFT_EXPR
10570 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10571 TREE_OPERAND (arg0, 0), tree01));
10573 else if (code01 == MINUS_EXPR)
10575 tree tree010, tree011;
10576 tree010 = TREE_OPERAND (tree01, 0);
10577 tree011 = TREE_OPERAND (tree01, 1);
10578 STRIP_NOPS (tree010);
10579 STRIP_NOPS (tree011);
10580 if (TREE_CODE (tree010) == INTEGER_CST
10581 && 0 == compare_tree_int (tree010,
10583 (TREE_TYPE (TREE_OPERAND
10585 && operand_equal_p (tree11, tree011, 0))
10586 return fold_convert_loc
10588 build2 ((code0 != LSHIFT_EXPR
10591 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10592 TREE_OPERAND (arg0, 0), tree11));
10598 /* In most languages, can't associate operations on floats through
10599 parentheses. Rather than remember where the parentheses were, we
10600 don't associate floats at all, unless the user has specified
10601 -fassociative-math.
10602 And, we need to make sure type is not saturating. */
10604 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
10605 && !TYPE_SATURATING (type))
10607 tree var0, con0, lit0, minus_lit0;
10608 tree var1, con1, lit1, minus_lit1;
10611 /* Split both trees into variables, constants, and literals. Then
10612 associate each group together, the constants with literals,
10613 then the result with variables. This increases the chances of
10614 literals being recombined later and of generating relocatable
10615 expressions for the sum of a constant and literal. */
10616 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
10617 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
10618 code == MINUS_EXPR);
10620 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
10621 if (code == MINUS_EXPR)
10624 /* With undefined overflow we can only associate constants with one
10625 variable, and constants whose association doesn't overflow. */
10626 if ((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
10627 || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
10634 if (TREE_CODE (tmp0) == NEGATE_EXPR)
10635 tmp0 = TREE_OPERAND (tmp0, 0);
10636 if (TREE_CODE (tmp1) == NEGATE_EXPR)
10637 tmp1 = TREE_OPERAND (tmp1, 0);
10638 /* The only case we can still associate with two variables
10639 is if they are the same, modulo negation. */
10640 if (!operand_equal_p (tmp0, tmp1, 0))
10644 if (ok && lit0 && lit1)
10646 tree tmp0 = fold_convert (type, lit0);
10647 tree tmp1 = fold_convert (type, lit1);
10649 if (!TREE_OVERFLOW (tmp0) && !TREE_OVERFLOW (tmp1)
10650 && TREE_OVERFLOW (fold_build2 (code, type, tmp0, tmp1)))
10655 /* Only do something if we found more than two objects. Otherwise,
10656 nothing has changed and we risk infinite recursion. */
10658 && (2 < ((var0 != 0) + (var1 != 0)
10659 + (con0 != 0) + (con1 != 0)
10660 + (lit0 != 0) + (lit1 != 0)
10661 + (minus_lit0 != 0) + (minus_lit1 != 0))))
10663 var0 = associate_trees (loc, var0, var1, code, type);
10664 con0 = associate_trees (loc, con0, con1, code, type);
10665 lit0 = associate_trees (loc, lit0, lit1, code, type);
10666 minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1, code, type);
10668 /* Preserve the MINUS_EXPR if the negative part of the literal is
10669 greater than the positive part. Otherwise, the multiplicative
10670 folding code (i.e extract_muldiv) may be fooled in case
10671 unsigned constants are subtracted, like in the following
10672 example: ((X*2 + 4) - 8U)/2. */
10673 if (minus_lit0 && lit0)
10675 if (TREE_CODE (lit0) == INTEGER_CST
10676 && TREE_CODE (minus_lit0) == INTEGER_CST
10677 && tree_int_cst_lt (lit0, minus_lit0))
10679 minus_lit0 = associate_trees (loc, minus_lit0, lit0,
10685 lit0 = associate_trees (loc, lit0, minus_lit0,
10694 fold_convert_loc (loc, type,
10695 associate_trees (loc, var0, minus_lit0,
10696 MINUS_EXPR, type));
10699 con0 = associate_trees (loc, con0, minus_lit0,
10702 fold_convert_loc (loc, type,
10703 associate_trees (loc, var0, con0,
10708 con0 = associate_trees (loc, con0, lit0, code, type);
10710 fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
10718 /* Pointer simplifications for subtraction, simple reassociations. */
10719 if (POINTER_TYPE_P (TREE_TYPE (arg1)) && POINTER_TYPE_P (TREE_TYPE (arg0)))
10721 /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
10722 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
10723 && TREE_CODE (arg1) == POINTER_PLUS_EXPR)
10725 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10726 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10727 tree arg10 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10728 tree arg11 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10729 return fold_build2_loc (loc, PLUS_EXPR, type,
10730 fold_build2_loc (loc, MINUS_EXPR, type,
10732 fold_build2_loc (loc, MINUS_EXPR, type,
10735 /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1 simplifies. */
10736 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10738 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10739 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10740 tree tmp = fold_binary_loc (loc, MINUS_EXPR, type, arg00,
10741 fold_convert_loc (loc, type, arg1));
10743 return fold_build2_loc (loc, PLUS_EXPR, type, tmp, arg01);
10746 /* A - (-B) -> A + B */
10747 if (TREE_CODE (arg1) == NEGATE_EXPR)
10748 return fold_build2_loc (loc, PLUS_EXPR, type, op0,
10749 fold_convert_loc (loc, type,
10750 TREE_OPERAND (arg1, 0)));
10751 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
10752 if (TREE_CODE (arg0) == NEGATE_EXPR
10753 && (FLOAT_TYPE_P (type)
10754 || INTEGRAL_TYPE_P (type))
10755 && negate_expr_p (arg1)
10756 && reorder_operands_p (arg0, arg1))
10757 return fold_build2_loc (loc, MINUS_EXPR, type,
10758 fold_convert_loc (loc, type,
10759 negate_expr (arg1)),
10760 fold_convert_loc (loc, type,
10761 TREE_OPERAND (arg0, 0)));
10762 /* Convert -A - 1 to ~A. */
10763 if (INTEGRAL_TYPE_P (type)
10764 && TREE_CODE (arg0) == NEGATE_EXPR
10765 && integer_onep (arg1)
10766 && !TYPE_OVERFLOW_TRAPS (type))
10767 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
10768 fold_convert_loc (loc, type,
10769 TREE_OPERAND (arg0, 0)));
10771 /* Convert -1 - A to ~A. */
10772 if (INTEGRAL_TYPE_P (type)
10773 && integer_all_onesp (arg0))
10774 return fold_build1_loc (loc, BIT_NOT_EXPR, type, op1);
10777 /* X - (X / CST) * CST is X % CST. */
10778 if (INTEGRAL_TYPE_P (type)
10779 && TREE_CODE (arg1) == MULT_EXPR
10780 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10781 && operand_equal_p (arg0,
10782 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0)
10783 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg1, 0), 1),
10784 TREE_OPERAND (arg1, 1), 0))
10786 fold_convert_loc (loc, type,
10787 fold_build2_loc (loc, TRUNC_MOD_EXPR, TREE_TYPE (arg0),
10788 arg0, TREE_OPERAND (arg1, 1)));
10790 if (! FLOAT_TYPE_P (type))
10792 if (integer_zerop (arg0))
10793 return negate_expr (fold_convert_loc (loc, type, arg1));
10794 if (integer_zerop (arg1))
10795 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10797 /* Fold A - (A & B) into ~B & A. */
10798 if (!TREE_SIDE_EFFECTS (arg0)
10799 && TREE_CODE (arg1) == BIT_AND_EXPR)
10801 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
10803 tree arg10 = fold_convert_loc (loc, type,
10804 TREE_OPERAND (arg1, 0));
10805 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10806 fold_build1_loc (loc, BIT_NOT_EXPR,
10808 fold_convert_loc (loc, type, arg0));
10810 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10812 tree arg11 = fold_convert_loc (loc,
10813 type, TREE_OPERAND (arg1, 1));
10814 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10815 fold_build1_loc (loc, BIT_NOT_EXPR,
10817 fold_convert_loc (loc, type, arg0));
10821 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
10822 any power of 2 minus 1. */
10823 if (TREE_CODE (arg0) == BIT_AND_EXPR
10824 && TREE_CODE (arg1) == BIT_AND_EXPR
10825 && operand_equal_p (TREE_OPERAND (arg0, 0),
10826 TREE_OPERAND (arg1, 0), 0))
10828 tree mask0 = TREE_OPERAND (arg0, 1);
10829 tree mask1 = TREE_OPERAND (arg1, 1);
10830 tree tem = fold_build1_loc (loc, BIT_NOT_EXPR, type, mask0);
10832 if (operand_equal_p (tem, mask1, 0))
10834 tem = fold_build2_loc (loc, BIT_XOR_EXPR, type,
10835 TREE_OPERAND (arg0, 0), mask1);
10836 return fold_build2_loc (loc, MINUS_EXPR, type, tem, mask1);
10841 /* See if ARG1 is zero and X - ARG1 reduces to X. */
10842 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
10843 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10845 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
10846 ARG0 is zero and X + ARG0 reduces to X, since that would mean
10847 (-ARG1 + ARG0) reduces to -ARG1. */
10848 else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10849 return negate_expr (fold_convert_loc (loc, type, arg1));
10851 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
10852 __complex__ ( x, -y ). This is not the same for SNaNs or if
10853 signed zeros are involved. */
10854 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10855 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10856 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10858 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10859 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10860 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10861 bool arg0rz = false, arg0iz = false;
10862 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10863 || (arg0i && (arg0iz = real_zerop (arg0i))))
10865 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10866 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10867 if (arg0rz && arg1i && real_zerop (arg1i))
10869 tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10871 : build1 (REALPART_EXPR, rtype, arg1));
10872 tree ip = arg0i ? arg0i
10873 : build1 (IMAGPART_EXPR, rtype, arg0);
10874 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10876 else if (arg0iz && arg1r && real_zerop (arg1r))
10878 tree rp = arg0r ? arg0r
10879 : build1 (REALPART_EXPR, rtype, arg0);
10880 tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10882 : build1 (IMAGPART_EXPR, rtype, arg1));
10883 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10888 /* Fold &x - &x. This can happen from &x.foo - &x.
10889 This is unsafe for certain floats even in non-IEEE formats.
10890 In IEEE, it is unsafe because it does wrong for NaNs.
10891 Also note that operand_equal_p is always false if an operand
10894 if ((!FLOAT_TYPE_P (type) || !HONOR_NANS (TYPE_MODE (type)))
10895 && operand_equal_p (arg0, arg1, 0))
10896 return fold_convert_loc (loc, type, integer_zero_node);
10898 /* A - B -> A + (-B) if B is easily negatable. */
10899 if (negate_expr_p (arg1)
10900 && ((FLOAT_TYPE_P (type)
10901 /* Avoid this transformation if B is a positive REAL_CST. */
10902 && (TREE_CODE (arg1) != REAL_CST
10903 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
10904 || INTEGRAL_TYPE_P (type)))
10905 return fold_build2_loc (loc, PLUS_EXPR, type,
10906 fold_convert_loc (loc, type, arg0),
10907 fold_convert_loc (loc, type,
10908 negate_expr (arg1)));
10910 /* Try folding difference of addresses. */
10912 HOST_WIDE_INT diff;
10914 if ((TREE_CODE (arg0) == ADDR_EXPR
10915 || TREE_CODE (arg1) == ADDR_EXPR)
10916 && ptr_difference_const (arg0, arg1, &diff))
10917 return build_int_cst_type (type, diff);
10920 /* Fold &a[i] - &a[j] to i-j. */
10921 if (TREE_CODE (arg0) == ADDR_EXPR
10922 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
10923 && TREE_CODE (arg1) == ADDR_EXPR
10924 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
10926 tree aref0 = TREE_OPERAND (arg0, 0);
10927 tree aref1 = TREE_OPERAND (arg1, 0);
10928 if (operand_equal_p (TREE_OPERAND (aref0, 0),
10929 TREE_OPERAND (aref1, 0), 0))
10931 tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
10932 tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
10933 tree esz = array_ref_element_size (aref0);
10934 tree diff = build2 (MINUS_EXPR, type, op0, op1);
10935 return fold_build2_loc (loc, MULT_EXPR, type, diff,
10936 fold_convert_loc (loc, type, esz));
10941 if (FLOAT_TYPE_P (type)
10942 && flag_unsafe_math_optimizations
10943 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10944 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10945 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10948 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the
10949 same or one. Make sure type is not saturating.
10950 fold_plusminus_mult_expr will re-associate. */
10951 if ((TREE_CODE (arg0) == MULT_EXPR
10952 || TREE_CODE (arg1) == MULT_EXPR)
10953 && !TYPE_SATURATING (type)
10954 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10956 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10964 /* (-A) * (-B) -> A * B */
10965 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10966 return fold_build2_loc (loc, MULT_EXPR, type,
10967 fold_convert_loc (loc, type,
10968 TREE_OPERAND (arg0, 0)),
10969 fold_convert_loc (loc, type,
10970 negate_expr (arg1)));
10971 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10972 return fold_build2_loc (loc, MULT_EXPR, type,
10973 fold_convert_loc (loc, type,
10974 negate_expr (arg0)),
10975 fold_convert_loc (loc, type,
10976 TREE_OPERAND (arg1, 0)));
10978 if (! FLOAT_TYPE_P (type))
10980 if (integer_zerop (arg1))
10981 return omit_one_operand_loc (loc, type, arg1, arg0);
10982 if (integer_onep (arg1))
10983 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10984 /* Transform x * -1 into -x. Make sure to do the negation
10985 on the original operand with conversions not stripped
10986 because we can only strip non-sign-changing conversions. */
10987 if (integer_all_onesp (arg1))
10988 return fold_convert_loc (loc, type, negate_expr (op0));
10989 /* Transform x * -C into -x * C if x is easily negatable. */
10990 if (TREE_CODE (arg1) == INTEGER_CST
10991 && tree_int_cst_sgn (arg1) == -1
10992 && negate_expr_p (arg0)
10993 && (tem = negate_expr (arg1)) != arg1
10994 && !TREE_OVERFLOW (tem))
10995 return fold_build2_loc (loc, MULT_EXPR, type,
10996 fold_convert_loc (loc, type,
10997 negate_expr (arg0)),
11000 /* (a * (1 << b)) is (a << b) */
11001 if (TREE_CODE (arg1) == LSHIFT_EXPR
11002 && integer_onep (TREE_OPERAND (arg1, 0)))
11003 return fold_build2_loc (loc, LSHIFT_EXPR, type, op0,
11004 TREE_OPERAND (arg1, 1));
11005 if (TREE_CODE (arg0) == LSHIFT_EXPR
11006 && integer_onep (TREE_OPERAND (arg0, 0)))
11007 return fold_build2_loc (loc, LSHIFT_EXPR, type, op1,
11008 TREE_OPERAND (arg0, 1));
11010 /* (A + A) * C -> A * 2 * C */
11011 if (TREE_CODE (arg0) == PLUS_EXPR
11012 && TREE_CODE (arg1) == INTEGER_CST
11013 && operand_equal_p (TREE_OPERAND (arg0, 0),
11014 TREE_OPERAND (arg0, 1), 0))
11015 return fold_build2_loc (loc, MULT_EXPR, type,
11016 omit_one_operand_loc (loc, type,
11017 TREE_OPERAND (arg0, 0),
11018 TREE_OPERAND (arg0, 1)),
11019 fold_build2_loc (loc, MULT_EXPR, type,
11020 build_int_cst (type, 2) , arg1));
11022 strict_overflow_p = false;
11023 if (TREE_CODE (arg1) == INTEGER_CST
11024 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11025 &strict_overflow_p)))
11027 if (strict_overflow_p)
11028 fold_overflow_warning (("assuming signed overflow does not "
11029 "occur when simplifying "
11031 WARN_STRICT_OVERFLOW_MISC);
11032 return fold_convert_loc (loc, type, tem);
11035 /* Optimize z * conj(z) for integer complex numbers. */
11036 if (TREE_CODE (arg0) == CONJ_EXPR
11037 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11038 return fold_mult_zconjz (loc, type, arg1);
11039 if (TREE_CODE (arg1) == CONJ_EXPR
11040 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11041 return fold_mult_zconjz (loc, type, arg0);
11045 /* Maybe fold x * 0 to 0. The expressions aren't the same
11046 when x is NaN, since x * 0 is also NaN. Nor are they the
11047 same in modes with signed zeros, since multiplying a
11048 negative value by 0 gives -0, not +0. */
11049 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
11050 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
11051 && real_zerop (arg1))
11052 return omit_one_operand_loc (loc, type, arg1, arg0);
11053 /* In IEEE floating point, x*1 is not equivalent to x for snans.
11054 Likewise for complex arithmetic with signed zeros. */
11055 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11056 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
11057 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
11058 && real_onep (arg1))
11059 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11061 /* Transform x * -1.0 into -x. */
11062 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11063 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
11064 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
11065 && real_minus_onep (arg1))
11066 return fold_convert_loc (loc, type, negate_expr (arg0));
11068 /* Convert (C1/X)*C2 into (C1*C2)/X. This transformation may change
11069 the result for floating point types due to rounding so it is applied
11070 only if -fassociative-math was specify. */
11071 if (flag_associative_math
11072 && TREE_CODE (arg0) == RDIV_EXPR
11073 && TREE_CODE (arg1) == REAL_CST
11074 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
11076 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
11079 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
11080 TREE_OPERAND (arg0, 1));
11083 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
11084 if (operand_equal_p (arg0, arg1, 0))
11086 tree tem = fold_strip_sign_ops (arg0);
11087 if (tem != NULL_TREE)
11089 tem = fold_convert_loc (loc, type, tem);
11090 return fold_build2_loc (loc, MULT_EXPR, type, tem, tem);
11094 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
11095 This is not the same for NaNs or if signed zeros are
11097 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
11098 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
11099 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
11100 && TREE_CODE (arg1) == COMPLEX_CST
11101 && real_zerop (TREE_REALPART (arg1)))
11103 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
11104 if (real_onep (TREE_IMAGPART (arg1)))
11106 fold_build2_loc (loc, COMPLEX_EXPR, type,
11107 negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
11109 fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
11110 else if (real_minus_onep (TREE_IMAGPART (arg1)))
11112 fold_build2_loc (loc, COMPLEX_EXPR, type,
11113 fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
11114 negate_expr (fold_build1_loc (loc, REALPART_EXPR,
11118 /* Optimize z * conj(z) for floating point complex numbers.
11119 Guarded by flag_unsafe_math_optimizations as non-finite
11120 imaginary components don't produce scalar results. */
11121 if (flag_unsafe_math_optimizations
11122 && TREE_CODE (arg0) == CONJ_EXPR
11123 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11124 return fold_mult_zconjz (loc, type, arg1);
11125 if (flag_unsafe_math_optimizations
11126 && TREE_CODE (arg1) == CONJ_EXPR
11127 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11128 return fold_mult_zconjz (loc, type, arg0);
11130 if (flag_unsafe_math_optimizations)
11132 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
11133 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
11135 /* Optimizations of root(...)*root(...). */
11136 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
11139 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11140 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11142 /* Optimize sqrt(x)*sqrt(x) as x. */
11143 if (BUILTIN_SQRT_P (fcode0)
11144 && operand_equal_p (arg00, arg10, 0)
11145 && ! HONOR_SNANS (TYPE_MODE (type)))
11148 /* Optimize root(x)*root(y) as root(x*y). */
11149 rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11150 arg = fold_build2_loc (loc, MULT_EXPR, type, arg00, arg10);
11151 return build_call_expr_loc (loc, rootfn, 1, arg);
11154 /* Optimize expN(x)*expN(y) as expN(x+y). */
11155 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
11157 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11158 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
11159 CALL_EXPR_ARG (arg0, 0),
11160 CALL_EXPR_ARG (arg1, 0));
11161 return build_call_expr_loc (loc, expfn, 1, arg);
11164 /* Optimizations of pow(...)*pow(...). */
11165 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
11166 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
11167 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
11169 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11170 tree arg01 = CALL_EXPR_ARG (arg0, 1);
11171 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11172 tree arg11 = CALL_EXPR_ARG (arg1, 1);
11174 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
11175 if (operand_equal_p (arg01, arg11, 0))
11177 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11178 tree arg = fold_build2_loc (loc, MULT_EXPR, type,
11180 return build_call_expr_loc (loc, powfn, 2, arg, arg01);
11183 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
11184 if (operand_equal_p (arg00, arg10, 0))
11186 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11187 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
11189 return build_call_expr_loc (loc, powfn, 2, arg00, arg);
11193 /* Optimize tan(x)*cos(x) as sin(x). */
11194 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
11195 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
11196 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
11197 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
11198 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
11199 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
11200 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11201 CALL_EXPR_ARG (arg1, 0), 0))
11203 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
11205 if (sinfn != NULL_TREE)
11206 return build_call_expr_loc (loc, sinfn, 1,
11207 CALL_EXPR_ARG (arg0, 0));
11210 /* Optimize x*pow(x,c) as pow(x,c+1). */
11211 if (fcode1 == BUILT_IN_POW
11212 || fcode1 == BUILT_IN_POWF
11213 || fcode1 == BUILT_IN_POWL)
11215 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11216 tree arg11 = CALL_EXPR_ARG (arg1, 1);
11217 if (TREE_CODE (arg11) == REAL_CST
11218 && !TREE_OVERFLOW (arg11)
11219 && operand_equal_p (arg0, arg10, 0))
11221 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11225 c = TREE_REAL_CST (arg11);
11226 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
11227 arg = build_real (type, c);
11228 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
11232 /* Optimize pow(x,c)*x as pow(x,c+1). */
11233 if (fcode0 == BUILT_IN_POW
11234 || fcode0 == BUILT_IN_POWF
11235 || fcode0 == BUILT_IN_POWL)
11237 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11238 tree arg01 = CALL_EXPR_ARG (arg0, 1);
11239 if (TREE_CODE (arg01) == REAL_CST
11240 && !TREE_OVERFLOW (arg01)
11241 && operand_equal_p (arg1, arg00, 0))
11243 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11247 c = TREE_REAL_CST (arg01);
11248 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
11249 arg = build_real (type, c);
11250 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
11254 /* Optimize x*x as pow(x,2.0), which is expanded as x*x. */
11255 if (optimize_function_for_speed_p (cfun)
11256 && operand_equal_p (arg0, arg1, 0))
11258 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
11262 tree arg = build_real (type, dconst2);
11263 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
11272 if (integer_all_onesp (arg1))
11273 return omit_one_operand_loc (loc, type, arg1, arg0);
11274 if (integer_zerop (arg1))
11275 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11276 if (operand_equal_p (arg0, arg1, 0))
11277 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11279 /* ~X | X is -1. */
11280 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11281 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11283 t1 = fold_convert_loc (loc, type, integer_zero_node);
11284 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11285 return omit_one_operand_loc (loc, type, t1, arg1);
11288 /* X | ~X is -1. */
11289 if (TREE_CODE (arg1) == BIT_NOT_EXPR
11290 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11292 t1 = fold_convert_loc (loc, type, integer_zero_node);
11293 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11294 return omit_one_operand_loc (loc, type, t1, arg0);
11297 /* Canonicalize (X & C1) | C2. */
11298 if (TREE_CODE (arg0) == BIT_AND_EXPR
11299 && TREE_CODE (arg1) == INTEGER_CST
11300 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11302 unsigned HOST_WIDE_INT hi1, lo1, hi2, lo2, hi3, lo3, mlo, mhi;
11303 int width = TYPE_PRECISION (type), w;
11304 hi1 = TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1));
11305 lo1 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
11306 hi2 = TREE_INT_CST_HIGH (arg1);
11307 lo2 = TREE_INT_CST_LOW (arg1);
11309 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
11310 if ((hi1 & hi2) == hi1 && (lo1 & lo2) == lo1)
11311 return omit_one_operand_loc (loc, type, arg1,
11312 TREE_OPERAND (arg0, 0));
11314 if (width > HOST_BITS_PER_WIDE_INT)
11316 mhi = (unsigned HOST_WIDE_INT) -1
11317 >> (2 * HOST_BITS_PER_WIDE_INT - width);
11323 mlo = (unsigned HOST_WIDE_INT) -1
11324 >> (HOST_BITS_PER_WIDE_INT - width);
11327 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
11328 if ((~(hi1 | hi2) & mhi) == 0 && (~(lo1 | lo2) & mlo) == 0)
11329 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
11330 TREE_OPERAND (arg0, 0), arg1);
11332 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
11333 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
11334 mode which allows further optimizations. */
11341 for (w = BITS_PER_UNIT;
11342 w <= width && w <= HOST_BITS_PER_WIDE_INT;
11345 unsigned HOST_WIDE_INT mask
11346 = (unsigned HOST_WIDE_INT) -1 >> (HOST_BITS_PER_WIDE_INT - w);
11347 if (((lo1 | lo2) & mask) == mask
11348 && (lo1 & ~mask) == 0 && hi1 == 0)
11355 if (hi3 != hi1 || lo3 != lo1)
11356 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
11357 fold_build2_loc (loc, BIT_AND_EXPR, type,
11358 TREE_OPERAND (arg0, 0),
11359 build_int_cst_wide (type,
11364 /* (X & Y) | Y is (X, Y). */
11365 if (TREE_CODE (arg0) == BIT_AND_EXPR
11366 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11367 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
11368 /* (X & Y) | X is (Y, X). */
11369 if (TREE_CODE (arg0) == BIT_AND_EXPR
11370 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11371 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11372 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
11373 /* X | (X & Y) is (Y, X). */
11374 if (TREE_CODE (arg1) == BIT_AND_EXPR
11375 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
11376 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
11377 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
11378 /* X | (Y & X) is (Y, X). */
11379 if (TREE_CODE (arg1) == BIT_AND_EXPR
11380 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11381 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11382 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
11384 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
11385 if (t1 != NULL_TREE)
11388 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
11390 This results in more efficient code for machines without a NAND
11391 instruction. Combine will canonicalize to the first form
11392 which will allow use of NAND instructions provided by the
11393 backend if they exist. */
11394 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11395 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11398 fold_build1_loc (loc, BIT_NOT_EXPR, type,
11399 build2 (BIT_AND_EXPR, type,
11400 fold_convert_loc (loc, type,
11401 TREE_OPERAND (arg0, 0)),
11402 fold_convert_loc (loc, type,
11403 TREE_OPERAND (arg1, 0))));
11406 /* See if this can be simplified into a rotate first. If that
11407 is unsuccessful continue in the association code. */
11411 if (integer_zerop (arg1))
11412 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11413 if (integer_all_onesp (arg1))
11414 return fold_build1_loc (loc, BIT_NOT_EXPR, type, op0);
11415 if (operand_equal_p (arg0, arg1, 0))
11416 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11418 /* ~X ^ X is -1. */
11419 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11420 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11422 t1 = fold_convert_loc (loc, type, integer_zero_node);
11423 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11424 return omit_one_operand_loc (loc, type, t1, arg1);
11427 /* X ^ ~X is -1. */
11428 if (TREE_CODE (arg1) == BIT_NOT_EXPR
11429 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11431 t1 = fold_convert_loc (loc, type, integer_zero_node);
11432 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11433 return omit_one_operand_loc (loc, type, t1, arg0);
11436 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
11437 with a constant, and the two constants have no bits in common,
11438 we should treat this as a BIT_IOR_EXPR since this may produce more
11439 simplifications. */
11440 if (TREE_CODE (arg0) == BIT_AND_EXPR
11441 && TREE_CODE (arg1) == BIT_AND_EXPR
11442 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11443 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
11444 && integer_zerop (const_binop (BIT_AND_EXPR,
11445 TREE_OPERAND (arg0, 1),
11446 TREE_OPERAND (arg1, 1), 0)))
11448 code = BIT_IOR_EXPR;
11452	      /* (X | Y) ^ X -> Y & ~X.  */
11453 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11454 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11456 tree t2 = TREE_OPERAND (arg0, 1);
11457 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
11459 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11460 fold_convert_loc (loc, type, t2),
11461 fold_convert_loc (loc, type, t1));
11465	      /* (Y | X) ^ X -> Y & ~X.  */
11466 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11467 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11469 tree t2 = TREE_OPERAND (arg0, 0);
11470 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
11472 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11473 fold_convert_loc (loc, type, t2),
11474 fold_convert_loc (loc, type, t1));
11478	      /* X ^ (X | Y) -> Y & ~X.  */
11479 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11480 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
11482 tree t2 = TREE_OPERAND (arg1, 1);
11483 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
11485 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11486 fold_convert_loc (loc, type, t2),
11487 fold_convert_loc (loc, type, t1));
11491	      /* X ^ (Y | X) -> Y & ~X.  */
11492 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11493 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
11495 tree t2 = TREE_OPERAND (arg1, 0);
11496 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
11498 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11499 fold_convert_loc (loc, type, t2),
11500 fold_convert_loc (loc, type, t1));
11504 /* Convert ~X ^ ~Y to X ^ Y. */
11505 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11506 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11507 return fold_build2_loc (loc, code, type,
11508 fold_convert_loc (loc, type,
11509 TREE_OPERAND (arg0, 0)),
11510 fold_convert_loc (loc, type,
11511 TREE_OPERAND (arg1, 0)));
11513 /* Convert ~X ^ C to X ^ ~C. */
11514 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11515 && TREE_CODE (arg1) == INTEGER_CST)
11516 return fold_build2_loc (loc, code, type,
11517 fold_convert_loc (loc, type,
11518 TREE_OPERAND (arg0, 0)),
11519 fold_build1_loc (loc, BIT_NOT_EXPR, type, arg1));
11521 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
11522 if (TREE_CODE (arg0) == BIT_AND_EXPR
11523 && integer_onep (TREE_OPERAND (arg0, 1))
11524 && integer_onep (arg1))
11525 return fold_build2_loc (loc, EQ_EXPR, type, arg0,
11526 build_int_cst (TREE_TYPE (arg0), 0));
11528 /* Fold (X & Y) ^ Y as ~X & Y. */
11529 if (TREE_CODE (arg0) == BIT_AND_EXPR
11530 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11532 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11533 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11534 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11535 fold_convert_loc (loc, type, arg1));
11537 /* Fold (X & Y) ^ X as ~Y & X. */
11538 if (TREE_CODE (arg0) == BIT_AND_EXPR
11539 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11540 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11542 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11543 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11544 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11545 fold_convert_loc (loc, type, arg1));
11547 /* Fold X ^ (X & Y) as X & ~Y. */
11548 if (TREE_CODE (arg1) == BIT_AND_EXPR
11549 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11551 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11552 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11553 fold_convert_loc (loc, type, arg0),
11554 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11556 /* Fold X ^ (Y & X) as ~Y & X. */
11557 if (TREE_CODE (arg1) == BIT_AND_EXPR
11558 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11559 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11561 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11562 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11563 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11564 fold_convert_loc (loc, type, arg0));
11567 /* See if this can be simplified into a rotate first. If that
11568 is unsuccessful continue in the association code. */
11572 if (integer_all_onesp (arg1))
11573 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11574 if (integer_zerop (arg1))
11575 return omit_one_operand_loc (loc, type, arg1, arg0);
11576 if (operand_equal_p (arg0, arg1, 0))
11577 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11579 /* ~X & X is always zero. */
11580 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11581 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11582 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
11584 /* X & ~X is always zero. */
11585 if (TREE_CODE (arg1) == BIT_NOT_EXPR
11586 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11587 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11589 /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2). */
11590 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11591 && TREE_CODE (arg1) == INTEGER_CST
11592 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11594 tree tmp1 = fold_convert_loc (loc, type, arg1);
11595 tree tmp2 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11596 tree tmp3 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11597 tmp2 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp2, tmp1);
11598 tmp3 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp3, tmp1);
11600 fold_convert_loc (loc, type,
11601 fold_build2_loc (loc, BIT_IOR_EXPR,
11602 type, tmp2, tmp3));
11605 /* (X | Y) & Y is (X, Y). */
11606 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11607 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11608 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
11609 /* (X | Y) & X is (Y, X). */
11610 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11611 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11612 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11613 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
11614 /* X & (X | Y) is (Y, X). */
11615 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11616 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
11617 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
11618 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
11619 /* X & (Y | X) is (Y, X). */
11620 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11621 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11622 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11623 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
11625 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
11626 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11627 && integer_onep (TREE_OPERAND (arg0, 1))
11628 && integer_onep (arg1))
11630 tem = TREE_OPERAND (arg0, 0);
11631 return fold_build2_loc (loc, EQ_EXPR, type,
11632 fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem), tem,
11633 build_int_cst (TREE_TYPE (tem), 1)),
11634 build_int_cst (TREE_TYPE (tem), 0));
11636 /* Fold ~X & 1 as (X & 1) == 0. */
11637 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11638 && integer_onep (arg1))
11640 tem = TREE_OPERAND (arg0, 0);
11641 return fold_build2_loc (loc, EQ_EXPR, type,
11642 fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem), tem,
11643 build_int_cst (TREE_TYPE (tem), 1)),
11644 build_int_cst (TREE_TYPE (tem), 0));
11647 /* Fold (X ^ Y) & Y as ~X & Y. */
11648 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11649 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11651 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11652 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11653 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11654 fold_convert_loc (loc, type, arg1));
11656 /* Fold (X ^ Y) & X as ~Y & X. */
11657 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11658 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11659 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11661 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11662 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11663 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11664 fold_convert_loc (loc, type, arg1));
11666 /* Fold X & (X ^ Y) as X & ~Y. */
11667 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11668 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11670 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11671 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11672 fold_convert_loc (loc, type, arg0),
11673 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11675 /* Fold X & (Y ^ X) as ~Y & X. */
11676 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11677 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11678 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11680 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11681 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11682 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11683 fold_convert_loc (loc, type, arg0));
11686 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
11687 if (t1 != NULL_TREE)
11689 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
11690 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
11691 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
11694 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
11696 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
11697 && (~TREE_INT_CST_LOW (arg1)
11698 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
11700 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11703 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
11705 This results in more efficient code for machines without a NOR
11706 instruction. Combine will canonicalize to the first form
11707 which will allow use of NOR instructions provided by the
11708 backend if they exist. */
11709 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11710 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11712 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
11713 build2 (BIT_IOR_EXPR, type,
11714 fold_convert_loc (loc, type,
11715 TREE_OPERAND (arg0, 0)),
11716 fold_convert_loc (loc, type,
11717 TREE_OPERAND (arg1, 0))));
11720 /* If arg0 is derived from the address of an object or function, we may
11721 be able to fold this expression using the object or function's
11723 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && host_integerp (arg1, 1))
11725 unsigned HOST_WIDE_INT modulus, residue;
11726 unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (arg1);
11728 modulus = get_pointer_modulus_and_residue (arg0, &residue,
11729 integer_onep (arg1));
11731 /* This works because modulus is a power of 2. If this weren't the
11732 case, we'd have to replace it by its greatest power-of-2
11733 divisor: modulus & -modulus. */
11735 return build_int_cst (type, residue & low);
11738 /* Fold (X << C1) & C2 into (X << C1) & (C2 | ((1 << C1) - 1))
11739 (X >> C1) & C2 into (X >> C1) & (C2 | ~((type) -1 >> C1))
11740 if the new mask might be further optimized. */
11741 if ((TREE_CODE (arg0) == LSHIFT_EXPR
11742 || TREE_CODE (arg0) == RSHIFT_EXPR)
11743 && host_integerp (TREE_OPERAND (arg0, 1), 1)
11744 && host_integerp (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)))
11745 && tree_low_cst (TREE_OPERAND (arg0, 1), 1)
11746 < TYPE_PRECISION (TREE_TYPE (arg0))
11747 && TYPE_PRECISION (TREE_TYPE (arg0)) <= HOST_BITS_PER_WIDE_INT
11748 && tree_low_cst (TREE_OPERAND (arg0, 1), 1) > 0)
11750 unsigned int shiftc = tree_low_cst (TREE_OPERAND (arg0, 1), 1);
11751 unsigned HOST_WIDE_INT mask
11752 = tree_low_cst (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)));
11753 unsigned HOST_WIDE_INT newmask, zerobits = 0;
11754 tree shift_type = TREE_TYPE (arg0);
11756 if (TREE_CODE (arg0) == LSHIFT_EXPR)
11757 zerobits = ((((unsigned HOST_WIDE_INT) 1) << shiftc) - 1);
11758 else if (TREE_CODE (arg0) == RSHIFT_EXPR
11759 && TYPE_PRECISION (TREE_TYPE (arg0))
11760 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg0))))
11762 unsigned int prec = TYPE_PRECISION (TREE_TYPE (arg0));
11763 tree arg00 = TREE_OPERAND (arg0, 0);
11764 /* See if more bits can be proven as zero because of
11766 if (TREE_CODE (arg00) == NOP_EXPR
11767 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg00, 0))))
11769 tree inner_type = TREE_TYPE (TREE_OPERAND (arg00, 0));
11770 if (TYPE_PRECISION (inner_type)
11771 == GET_MODE_BITSIZE (TYPE_MODE (inner_type))
11772 && TYPE_PRECISION (inner_type) < prec)
11774 prec = TYPE_PRECISION (inner_type);
11775 /* See if we can shorten the right shift. */
11777 shift_type = inner_type;
11780 zerobits = ~(unsigned HOST_WIDE_INT) 0;
11781 zerobits >>= HOST_BITS_PER_WIDE_INT - shiftc;
11782 zerobits <<= prec - shiftc;
11783 /* For arithmetic shift if sign bit could be set, zerobits
11784 can contain actually sign bits, so no transformation is
11785 possible, unless MASK masks them all away. In that
11786 case the shift needs to be converted into logical shift. */
11787 if (!TYPE_UNSIGNED (TREE_TYPE (arg0))
11788 && prec == TYPE_PRECISION (TREE_TYPE (arg0)))
11790 if ((mask & zerobits) == 0)
11791 shift_type = unsigned_type_for (TREE_TYPE (arg0));
11797 /* ((X << 16) & 0xff00) is (X, 0). */
11798 if ((mask & zerobits) == mask)
11799 return omit_one_operand_loc (loc, type,
11800 build_int_cst (type, 0), arg0);
11802 newmask = mask | zerobits;
11803 if (newmask != mask && (newmask & (newmask + 1)) == 0)
11807 /* Only do the transformation if NEWMASK is some integer
11809 for (prec = BITS_PER_UNIT;
11810 prec < HOST_BITS_PER_WIDE_INT; prec <<= 1)
11811 if (newmask == (((unsigned HOST_WIDE_INT) 1) << prec) - 1)
11813 if (prec < HOST_BITS_PER_WIDE_INT
11814 || newmask == ~(unsigned HOST_WIDE_INT) 0)
11818 if (shift_type != TREE_TYPE (arg0))
11820 tem = fold_build2_loc (loc, TREE_CODE (arg0), shift_type,
11821 fold_convert_loc (loc, shift_type,
11822 TREE_OPERAND (arg0, 0)),
11823 TREE_OPERAND (arg0, 1));
11824 tem = fold_convert_loc (loc, type, tem);
11828 newmaskt = build_int_cst_type (TREE_TYPE (op1), newmask);
11829 if (!tree_int_cst_equal (newmaskt, arg1))
11830 return fold_build2_loc (loc, BIT_AND_EXPR, type, tem, newmaskt);
11838 /* Don't touch a floating-point divide by zero unless the mode
11839 of the constant can represent infinity. */
11840 if (TREE_CODE (arg1) == REAL_CST
11841 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
11842 && real_zerop (arg1))
11845 /* Optimize A / A to 1.0 if we don't care about
11846 NaNs or Infinities. Skip the transformation
11847 for non-real operands. */
11848 if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
11849 && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
11850 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
11851 && operand_equal_p (arg0, arg1, 0))
11853 tree r = build_real (TREE_TYPE (arg0), dconst1);
11855 return omit_two_operands_loc (loc, type, r, arg0, arg1);
11858 /* The complex version of the above A / A optimization. */
11859 if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
11860 && operand_equal_p (arg0, arg1, 0))
11862 tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
11863 if (! HONOR_NANS (TYPE_MODE (elem_type))
11864 && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
11866 tree r = build_real (elem_type, dconst1);
11867 /* omit_two_operands will call fold_convert for us. */
11868 return omit_two_operands_loc (loc, type, r, arg0, arg1);
11872 /* (-A) / (-B) -> A / B */
11873 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
11874 return fold_build2_loc (loc, RDIV_EXPR, type,
11875 TREE_OPERAND (arg0, 0),
11876 negate_expr (arg1));
11877 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
11878 return fold_build2_loc (loc, RDIV_EXPR, type,
11879 negate_expr (arg0),
11880 TREE_OPERAND (arg1, 0));
11882 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
11883 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11884 && real_onep (arg1))
11885 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11887 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
11888 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11889 && real_minus_onep (arg1))
11890 return non_lvalue_loc (loc, fold_convert_loc (loc, type,
11891 negate_expr (arg0)));
11893 /* If ARG1 is a constant, we can convert this to a multiply by the
11894 reciprocal. This does not have the same rounding properties,
11895 so only do this if -freciprocal-math. We can actually
11896 always safely do it if ARG1 is a power of two, but it's hard to
11897 tell if it is or not in a portable manner. */
11898 if (TREE_CODE (arg1) == REAL_CST)
11900 if (flag_reciprocal_math
11901 && 0 != (tem = const_binop (code, build_real (type, dconst1),
11903 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tem);
11904 /* Find the reciprocal if optimizing and the result is exact. */
11908 r = TREE_REAL_CST (arg1);
11909 if (exact_real_inverse (TYPE_MODE(TREE_TYPE(arg0)), &r))
11911 tem = build_real (type, r);
11912 return fold_build2_loc (loc, MULT_EXPR, type,
11913 fold_convert_loc (loc, type, arg0), tem);
11917 /* Convert A/B/C to A/(B*C). */
11918 if (flag_reciprocal_math
11919 && TREE_CODE (arg0) == RDIV_EXPR)
11920 return fold_build2_loc (loc, RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
11921 fold_build2_loc (loc, MULT_EXPR, type,
11922 TREE_OPERAND (arg0, 1), arg1));
11924 /* Convert A/(B/C) to (A/B)*C. */
11925 if (flag_reciprocal_math
11926 && TREE_CODE (arg1) == RDIV_EXPR)
11927 return fold_build2_loc (loc, MULT_EXPR, type,
11928 fold_build2_loc (loc, RDIV_EXPR, type, arg0,
11929 TREE_OPERAND (arg1, 0)),
11930 TREE_OPERAND (arg1, 1));
11932 /* Convert C1/(X*C2) into (C1/C2)/X. */
11933 if (flag_reciprocal_math
11934 && TREE_CODE (arg1) == MULT_EXPR
11935 && TREE_CODE (arg0) == REAL_CST
11936 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
11938 tree tem = const_binop (RDIV_EXPR, arg0,
11939 TREE_OPERAND (arg1, 1), 0);
11941 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
11942 TREE_OPERAND (arg1, 0));
11945 if (flag_unsafe_math_optimizations)
11947 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
11948 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
11950 /* Optimize sin(x)/cos(x) as tan(x). */
11951 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
11952 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
11953 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
11954 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11955 CALL_EXPR_ARG (arg1, 0), 0))
11957 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11959 if (tanfn != NULL_TREE)
11960 return build_call_expr_loc (loc, tanfn, 1, CALL_EXPR_ARG (arg0, 0));
11963 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
11964 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
11965 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
11966 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
11967 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11968 CALL_EXPR_ARG (arg1, 0), 0))
11970 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11972 if (tanfn != NULL_TREE)
11974 tree tmp = build_call_expr_loc (loc, tanfn, 1,
11975 CALL_EXPR_ARG (arg0, 0));
11976 return fold_build2_loc (loc, RDIV_EXPR, type,
11977 build_real (type, dconst1), tmp);
11981 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
11982 NaNs or Infinities. */
11983 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
11984 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
11985 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
11987 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11988 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11990 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
11991 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
11992 && operand_equal_p (arg00, arg01, 0))
11994 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11996 if (cosfn != NULL_TREE)
11997 return build_call_expr_loc (loc, cosfn, 1, arg00);
12001 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
12002 NaNs or Infinities. */
12003 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
12004 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
12005 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
12007 tree arg00 = CALL_EXPR_ARG (arg0, 0);
12008 tree arg01 = CALL_EXPR_ARG (arg1, 0);
12010 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
12011 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
12012 && operand_equal_p (arg00, arg01, 0))
12014 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
12016 if (cosfn != NULL_TREE)
12018 tree tmp = build_call_expr_loc (loc, cosfn, 1, arg00);
12019 return fold_build2_loc (loc, RDIV_EXPR, type,
12020 build_real (type, dconst1),
12026 /* Optimize pow(x,c)/x as pow(x,c-1). */
12027 if (fcode0 == BUILT_IN_POW
12028 || fcode0 == BUILT_IN_POWF
12029 || fcode0 == BUILT_IN_POWL)
12031 tree arg00 = CALL_EXPR_ARG (arg0, 0);
12032 tree arg01 = CALL_EXPR_ARG (arg0, 1);
12033 if (TREE_CODE (arg01) == REAL_CST
12034 && !TREE_OVERFLOW (arg01)
12035 && operand_equal_p (arg1, arg00, 0))
12037 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
12041 c = TREE_REAL_CST (arg01);
12042 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
12043 arg = build_real (type, c);
12044 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
12048 /* Optimize a/root(b/c) into a*root(c/b). */
12049 if (BUILTIN_ROOT_P (fcode1))
12051 tree rootarg = CALL_EXPR_ARG (arg1, 0);
12053 if (TREE_CODE (rootarg) == RDIV_EXPR)
12055 tree rootfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
12056 tree b = TREE_OPERAND (rootarg, 0);
12057 tree c = TREE_OPERAND (rootarg, 1);
12059 tree tmp = fold_build2_loc (loc, RDIV_EXPR, type, c, b);
12061 tmp = build_call_expr_loc (loc, rootfn, 1, tmp);
12062 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tmp);
12066 /* Optimize x/expN(y) into x*expN(-y). */
12067 if (BUILTIN_EXPONENT_P (fcode1))
12069 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
12070 tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
12071 arg1 = build_call_expr_loc (loc,
12073 fold_convert_loc (loc, type, arg));
12074 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
12077 /* Optimize x/pow(y,z) into x*pow(y,-z). */
12078 if (fcode1 == BUILT_IN_POW
12079 || fcode1 == BUILT_IN_POWF
12080 || fcode1 == BUILT_IN_POWL)
12082 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
12083 tree arg10 = CALL_EXPR_ARG (arg1, 0);
12084 tree arg11 = CALL_EXPR_ARG (arg1, 1);
12085 tree neg11 = fold_convert_loc (loc, type,
12086 negate_expr (arg11));
12087 arg1 = build_call_expr_loc (loc, powfn, 2, arg10, neg11);
12088 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
12093 case TRUNC_DIV_EXPR:
12094 case FLOOR_DIV_EXPR:
12095 /* Simplify A / (B << N) where A and B are positive and B is
12096 a power of 2, to A >> (N + log2(B)). */
12097 strict_overflow_p = false;
12098 if (TREE_CODE (arg1) == LSHIFT_EXPR
12099 && (TYPE_UNSIGNED (type)
12100 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
12102 tree sval = TREE_OPERAND (arg1, 0);
12103 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
12105 tree sh_cnt = TREE_OPERAND (arg1, 1);
12106 unsigned long pow2 = exact_log2 (TREE_INT_CST_LOW (sval));
12108 if (strict_overflow_p)
12109 fold_overflow_warning (("assuming signed overflow does not "
12110 "occur when simplifying A / (B << N)"),
12111 WARN_STRICT_OVERFLOW_MISC);
12113 sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
12114 sh_cnt, build_int_cst (NULL_TREE, pow2));
12115 return fold_build2_loc (loc, RSHIFT_EXPR, type,
12116 fold_convert_loc (loc, type, arg0), sh_cnt);
12120 /* For unsigned integral types, FLOOR_DIV_EXPR is the same as
12121 TRUNC_DIV_EXPR. Rewrite into the latter in this case. */
12122 if (INTEGRAL_TYPE_P (type)
12123 && TYPE_UNSIGNED (type)
12124 && code == FLOOR_DIV_EXPR)
12125 return fold_build2_loc (loc, TRUNC_DIV_EXPR, type, op0, op1);
12129 case ROUND_DIV_EXPR:
12130 case CEIL_DIV_EXPR:
12131 case EXACT_DIV_EXPR:
12132 if (integer_onep (arg1))
12133 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12134 if (integer_zerop (arg1))
12136 /* X / -1 is -X. */
12137 if (!TYPE_UNSIGNED (type)
12138 && TREE_CODE (arg1) == INTEGER_CST
12139 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
12140 && TREE_INT_CST_HIGH (arg1) == -1)
12141 return fold_convert_loc (loc, type, negate_expr (arg0));
12143 /* Convert -A / -B to A / B when the type is signed and overflow is
12145 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
12146 && TREE_CODE (arg0) == NEGATE_EXPR
12147 && negate_expr_p (arg1))
12149 if (INTEGRAL_TYPE_P (type))
12150 fold_overflow_warning (("assuming signed overflow does not occur "
12151 "when distributing negation across "
12153 WARN_STRICT_OVERFLOW_MISC);
12154 return fold_build2_loc (loc, code, type,
12155 fold_convert_loc (loc, type,
12156 TREE_OPERAND (arg0, 0)),
12157 fold_convert_loc (loc, type,
12158 negate_expr (arg1)));
12160 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
12161 && TREE_CODE (arg1) == NEGATE_EXPR
12162 && negate_expr_p (arg0))
12164 if (INTEGRAL_TYPE_P (type))
12165 fold_overflow_warning (("assuming signed overflow does not occur "
12166 "when distributing negation across "
12168 WARN_STRICT_OVERFLOW_MISC);
12169 return fold_build2_loc (loc, code, type,
12170 fold_convert_loc (loc, type,
12171 negate_expr (arg0)),
12172 fold_convert_loc (loc, type,
12173 TREE_OPERAND (arg1, 0)));
12176 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
12177 operation, EXACT_DIV_EXPR.
12179 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
12180 At one time others generated faster code, it's not clear if they do
12181 after the last round to changes to the DIV code in expmed.c. */
12182 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
12183 && multiple_of_p (type, arg0, arg1))
12184 return fold_build2_loc (loc, EXACT_DIV_EXPR, type, arg0, arg1);
12186 strict_overflow_p = false;
12187 if (TREE_CODE (arg1) == INTEGER_CST
12188 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
12189 &strict_overflow_p)))
12191 if (strict_overflow_p)
12192 fold_overflow_warning (("assuming signed overflow does not occur "
12193 "when simplifying division"),
12194 WARN_STRICT_OVERFLOW_MISC);
12195 return fold_convert_loc (loc, type, tem);
12200 case CEIL_MOD_EXPR:
12201 case FLOOR_MOD_EXPR:
12202 case ROUND_MOD_EXPR:
12203 case TRUNC_MOD_EXPR:
12204 /* X % 1 is always zero, but be sure to preserve any side
12206 if (integer_onep (arg1))
12207 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12209 /* X % 0, return X % 0 unchanged so that we can get the
12210 proper warnings and errors. */
12211 if (integer_zerop (arg1))
12214 /* 0 % X is always zero, but be sure to preserve any side
12215 effects in X. Place this after checking for X == 0. */
12216 if (integer_zerop (arg0))
12217 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
12219 /* X % -1 is zero. */
12220 if (!TYPE_UNSIGNED (type)
12221 && TREE_CODE (arg1) == INTEGER_CST
12222 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
12223 && TREE_INT_CST_HIGH (arg1) == -1)
12224 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12226 /* X % -C is the same as X % C. */
12227 if (code == TRUNC_MOD_EXPR
12228 && !TYPE_UNSIGNED (type)
12229 && TREE_CODE (arg1) == INTEGER_CST
12230 && !TREE_OVERFLOW (arg1)
12231 && TREE_INT_CST_HIGH (arg1) < 0
12232 && !TYPE_OVERFLOW_TRAPS (type)
12233 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
12234 && !sign_bit_p (arg1, arg1))
12235 return fold_build2_loc (loc, code, type,
12236 fold_convert_loc (loc, type, arg0),
12237 fold_convert_loc (loc, type,
12238 negate_expr (arg1)));
12240 /* X % -Y is the same as X % Y. */
12241 if (code == TRUNC_MOD_EXPR
12242 && !TYPE_UNSIGNED (type)
12243 && TREE_CODE (arg1) == NEGATE_EXPR
12244 && !TYPE_OVERFLOW_TRAPS (type))
12245 return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, arg0),
12246 fold_convert_loc (loc, type,
12247 TREE_OPERAND (arg1, 0)));
12249 strict_overflow_p = false;
12250 if (TREE_CODE (arg1) == INTEGER_CST
12251 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
12252 &strict_overflow_p)))
12254 if (strict_overflow_p)
12255 fold_overflow_warning (("assuming signed overflow does not occur "
12256 "when simplifying modulus"),
12257 WARN_STRICT_OVERFLOW_MISC);
12258 return fold_convert_loc (loc, type, tem);
12261 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
12262 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
12263 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
12264 && (TYPE_UNSIGNED (type)
12265 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
12268 /* Also optimize A % (C << N) where C is a power of 2,
12269 to A & ((C << N) - 1). */
12270 if (TREE_CODE (arg1) == LSHIFT_EXPR)
12271 c = TREE_OPERAND (arg1, 0);
12273 if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
12276 = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (arg1), arg1,
12277 build_int_cst (TREE_TYPE (arg1), 1));
12278 if (strict_overflow_p)
12279 fold_overflow_warning (("assuming signed overflow does not "
12280 "occur when simplifying "
12281 "X % (power of two)"),
12282 WARN_STRICT_OVERFLOW_MISC);
12283 return fold_build2_loc (loc, BIT_AND_EXPR, type,
12284 fold_convert_loc (loc, type, arg0),
12285 fold_convert_loc (loc, type, mask));
12293 if (integer_all_onesp (arg0))
12294 return omit_one_operand_loc (loc, type, arg0, arg1);
12298 /* Optimize -1 >> x for arithmetic right shifts. */
12299 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type)
12300 && tree_expr_nonnegative_p (arg1))
12301 return omit_one_operand_loc (loc, type, arg0, arg1);
12302 /* ... fall through ... */
12306 if (integer_zerop (arg1))
12307 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12308 if (integer_zerop (arg0))
12309 return omit_one_operand_loc (loc, type, arg0, arg1);
12311 /* Since negative shift count is not well-defined,
12312 don't try to compute it in the compiler. */
12313 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
12316 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
12317 if (TREE_CODE (op0) == code && host_integerp (arg1, false)
12318 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
12319 && host_integerp (TREE_OPERAND (arg0, 1), false)
12320 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
12322 HOST_WIDE_INT low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
12323 + TREE_INT_CST_LOW (arg1));
12325 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
12326 being well defined. */
12327 if (low >= TYPE_PRECISION (type))
12329 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
12330 low = low % TYPE_PRECISION (type);
12331 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
12332 return omit_one_operand_loc (loc, type, build_int_cst (type, 0),
12333 TREE_OPERAND (arg0, 0));
12335 low = TYPE_PRECISION (type) - 1;
12338 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12339 build_int_cst (type, low));
12342 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
12343 into x & ((unsigned)-1 >> c) for unsigned types. */
12344 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
12345 || (TYPE_UNSIGNED (type)
12346 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
12347 && host_integerp (arg1, false)
12348 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
12349 && host_integerp (TREE_OPERAND (arg0, 1), false)
12350 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
12352 HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
12353 HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
12359 arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12361 lshift = build_int_cst (type, -1);
12362 lshift = int_const_binop (code, lshift, arg1, 0);
12364 return fold_build2_loc (loc, BIT_AND_EXPR, type, arg00, lshift);
12368 /* Rewrite an LROTATE_EXPR by a constant into an
12369 RROTATE_EXPR by a new constant. */
12370 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
12372 tree tem = build_int_cst (TREE_TYPE (arg1),
12373 TYPE_PRECISION (type));
12374 tem = const_binop (MINUS_EXPR, tem, arg1, 0);
12375 return fold_build2_loc (loc, RROTATE_EXPR, type, op0, tem);
12378 /* If we have a rotate of a bit operation with the rotate count and
12379 the second operand of the bit operation both constant,
12380 permute the two operations. */
12381 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12382 && (TREE_CODE (arg0) == BIT_AND_EXPR
12383 || TREE_CODE (arg0) == BIT_IOR_EXPR
12384 || TREE_CODE (arg0) == BIT_XOR_EXPR)
12385 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12386 return fold_build2_loc (loc, TREE_CODE (arg0), type,
12387 fold_build2_loc (loc, code, type,
12388 TREE_OPERAND (arg0, 0), arg1),
12389 fold_build2_loc (loc, code, type,
12390 TREE_OPERAND (arg0, 1), arg1));
12392 /* Two consecutive rotates adding up to the precision of the
12393 type can be ignored. */
12394 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12395 && TREE_CODE (arg0) == RROTATE_EXPR
12396 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12397 && TREE_INT_CST_HIGH (arg1) == 0
12398 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
12399 && ((TREE_INT_CST_LOW (arg1)
12400 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
12401 == (unsigned int) TYPE_PRECISION (type)))
12402 return TREE_OPERAND (arg0, 0);
12404 /* Fold (X & C2) << C1 into (X << C1) & (C2 << C1)
12405 (X & C2) >> C1 into (X >> C1) & (C2 >> C1)
12406 if the latter can be further optimized. */
12407 if ((code == LSHIFT_EXPR || code == RSHIFT_EXPR)
12408 && TREE_CODE (arg0) == BIT_AND_EXPR
12409 && TREE_CODE (arg1) == INTEGER_CST
12410 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12412 tree mask = fold_build2_loc (loc, code, type,
12413 fold_convert_loc (loc, type,
12414 TREE_OPERAND (arg0, 1)),
12416 tree shift = fold_build2_loc (loc, code, type,
12417 fold_convert_loc (loc, type,
12418 TREE_OPERAND (arg0, 0)),
12420 tem = fold_binary_loc (loc, BIT_AND_EXPR, type, shift, mask);
12428 if (operand_equal_p (arg0, arg1, 0))
12429 return omit_one_operand_loc (loc, type, arg0, arg1);
12430 if (INTEGRAL_TYPE_P (type)
12431 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
12432 return omit_one_operand_loc (loc, type, arg1, arg0);
12433 tem = fold_minmax (loc, MIN_EXPR, type, arg0, arg1);
12439 if (operand_equal_p (arg0, arg1, 0))
12440 return omit_one_operand_loc (loc, type, arg0, arg1);
12441 if (INTEGRAL_TYPE_P (type)
12442 && TYPE_MAX_VALUE (type)
12443 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
12444 return omit_one_operand_loc (loc, type, arg1, arg0);
12445 tem = fold_minmax (loc, MAX_EXPR, type, arg0, arg1);
12450 case TRUTH_ANDIF_EXPR:
12451 /* Note that the operands of this must be ints
12452 and their values must be 0 or 1.
12453 ("true" is a fixed value perhaps depending on the language.) */
12454 /* If first arg is constant zero, return it. */
12455 if (integer_zerop (arg0))
12456 return fold_convert_loc (loc, type, arg0);
12457 case TRUTH_AND_EXPR:
12458 /* If either arg is constant true, drop it. */
12459 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12460 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12461 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
12462 /* Preserve sequence points. */
12463 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12464 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12465 /* If second arg is constant zero, result is zero, but first arg
12466 must be evaluated. */
12467 if (integer_zerop (arg1))
12468 return omit_one_operand_loc (loc, type, arg1, arg0);
12469 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
12470 case will be handled here. */
12471 if (integer_zerop (arg0))
12472 return omit_one_operand_loc (loc, type, arg0, arg1);
12474 /* !X && X is always false. */
12475 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12476 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12477 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
12478 /* X && !X is always false. */
12479 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12480 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12481 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12483 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
12484 means A >= Y && A != MAX, but in this case we know that
12487 if (!TREE_SIDE_EFFECTS (arg0)
12488 && !TREE_SIDE_EFFECTS (arg1))
12490 tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
12491 if (tem && !operand_equal_p (tem, arg0, 0))
12492 return fold_build2_loc (loc, code, type, tem, arg1);
12494 tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
12495 if (tem && !operand_equal_p (tem, arg1, 0))
12496 return fold_build2_loc (loc, code, type, arg0, tem);
12500 /* We only do these simplifications if we are optimizing. */
12504 /* Check for things like (A || B) && (A || C). We can convert this
12505 to A || (B && C). Note that either operator can be any of the four
12506 truth and/or operations and the transformation will still be
12507 valid. Also note that we only care about order for the
12508 ANDIF and ORIF operators. If B contains side effects, this
12509 might change the truth-value of A. */
12510 if (TREE_CODE (arg0) == TREE_CODE (arg1)
12511 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
12512 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
12513 || TREE_CODE (arg0) == TRUTH_AND_EXPR
12514 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
12515 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
12517 tree a00 = TREE_OPERAND (arg0, 0);
12518 tree a01 = TREE_OPERAND (arg0, 1);
12519 tree a10 = TREE_OPERAND (arg1, 0);
12520 tree a11 = TREE_OPERAND (arg1, 1);
12521 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
12522 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
12523 && (code == TRUTH_AND_EXPR
12524 || code == TRUTH_OR_EXPR));
12526 if (operand_equal_p (a00, a10, 0))
12527 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
12528 fold_build2_loc (loc, code, type, a01, a11));
12529 else if (commutative && operand_equal_p (a00, a11, 0))
12530 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
12531 fold_build2_loc (loc, code, type, a01, a10));
12532 else if (commutative && operand_equal_p (a01, a10, 0))
12533 return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
12534 fold_build2_loc (loc, code, type, a00, a11));
12536 /* This case if tricky because we must either have commutative
12537 operators or else A10 must not have side-effects. */
12539 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
12540 && operand_equal_p (a01, a11, 0))
12541 return fold_build2_loc (loc, TREE_CODE (arg0), type,
12542 fold_build2_loc (loc, code, type, a00, a10),
12546 /* See if we can build a range comparison. */
12547 if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
12550 /* Check for the possibility of merging component references. If our
12551 lhs is another similar operation, try to merge its rhs with our
12552 rhs. Then try to merge our lhs and rhs. */
12553 if (TREE_CODE (arg0) == code
12554 && 0 != (tem = fold_truthop (loc, code, type,
12555 TREE_OPERAND (arg0, 1), arg1)))
12556 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
12558 if ((tem = fold_truthop (loc, code, type, arg0, arg1)) != 0)
12563 case TRUTH_ORIF_EXPR:
12564 /* Note that the operands of this must be ints
12565 and their values must be 0 or true.
12566 ("true" is a fixed value perhaps depending on the language.) */
12567 /* If first arg is constant true, return it. */
12568 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12569 return fold_convert_loc (loc, type, arg0);
12570 case TRUTH_OR_EXPR:
12571 /* If either arg is constant zero, drop it. */
12572 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
12573 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12574 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
12575 /* Preserve sequence points. */
12576 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12577 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12578 /* If second arg is constant true, result is true, but we must
12579 evaluate first arg. */
12580 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
12581 return omit_one_operand_loc (loc, type, arg1, arg0);
12582 /* Likewise for first arg, but note this only occurs here for
12584 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12585 return omit_one_operand_loc (loc, type, arg0, arg1);
12587 /* !X || X is always true. */
12588 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12589 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12590 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12591 /* X || !X is always true. */
12592 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12593 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12594 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12598 case TRUTH_XOR_EXPR:
12599 /* If the second arg is constant zero, drop it. */
12600 if (integer_zerop (arg1))
12601 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12602 /* If the second arg is constant true, this is a logical inversion. */
12603 if (integer_onep (arg1))
12605 /* Only call invert_truthvalue if operand is a truth value. */
12606 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
12607 tem = fold_build1_loc (loc, TRUTH_NOT_EXPR, TREE_TYPE (arg0), arg0);
12609 tem = invert_truthvalue_loc (loc, arg0);
12610 return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
12612 /* Identical arguments cancel to zero. */
12613 if (operand_equal_p (arg0, arg1, 0))
12614 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12616 /* !X ^ X is always true. */
12617 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12618 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12619 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12621 /* X ^ !X is always true. */
12622 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12623 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12624 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12630 tem = fold_comparison (loc, code, type, op0, op1);
12631 if (tem != NULL_TREE)
12634 /* bool_var != 0 becomes bool_var. */
12635 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12636 && code == NE_EXPR)
12637 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12639 /* bool_var == 1 becomes bool_var. */
12640 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12641 && code == EQ_EXPR)
12642 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12644 /* bool_var != 1 becomes !bool_var. */
12645 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12646 && code == NE_EXPR)
12647 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
12648 fold_convert_loc (loc, type, arg0));
12650 /* bool_var == 0 becomes !bool_var. */
12651 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12652 && code == EQ_EXPR)
12653 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
12654 fold_convert_loc (loc, type, arg0));
12656 /* !exp != 0 becomes !exp */
12657 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
12658 && code == NE_EXPR)
12659 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12661 /* If this is an equality comparison of the address of two non-weak,
12662 unaliased symbols neither of which are extern (since we do not
12663 have access to attributes for externs), then we know the result. */
12664 if (TREE_CODE (arg0) == ADDR_EXPR
12665 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
12666 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
12667 && ! lookup_attribute ("alias",
12668 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
12669 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
12670 && TREE_CODE (arg1) == ADDR_EXPR
12671 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
12672 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
12673 && ! lookup_attribute ("alias",
12674 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
12675 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
12677 /* We know that we're looking at the address of two
12678 non-weak, unaliased, static _DECL nodes.
12680 It is both wasteful and incorrect to call operand_equal_p
12681 to compare the two ADDR_EXPR nodes. It is wasteful in that
12682 all we need to do is test pointer equality for the arguments
12683 to the two ADDR_EXPR nodes. It is incorrect to use
12684 operand_equal_p as that function is NOT equivalent to a
12685 C equality test. It can in fact return false for two
12686 objects which would test as equal using the C equality
12688 bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
12689 return constant_boolean_node (equal
12690 ? code == EQ_EXPR : code != EQ_EXPR,
12694 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
12695 a MINUS_EXPR of a constant, we can convert it into a comparison with
12696 a revised constant as long as no overflow occurs. */
12697 if (TREE_CODE (arg1) == INTEGER_CST
12698 && (TREE_CODE (arg0) == PLUS_EXPR
12699 || TREE_CODE (arg0) == MINUS_EXPR)
12700 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12701 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
12702 ? MINUS_EXPR : PLUS_EXPR,
12703 fold_convert_loc (loc, TREE_TYPE (arg0),
12705 TREE_OPERAND (arg0, 1), 0))
12706 && !TREE_OVERFLOW (tem))
12707 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
12709 /* Similarly for a NEGATE_EXPR. */
12710 if (TREE_CODE (arg0) == NEGATE_EXPR
12711 && TREE_CODE (arg1) == INTEGER_CST
12712 && 0 != (tem = negate_expr (arg1))
12713 && TREE_CODE (tem) == INTEGER_CST
12714 && !TREE_OVERFLOW (tem))
12715 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
12717 /* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2). */
12718 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12719 && TREE_CODE (arg1) == INTEGER_CST
12720 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12721 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12722 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg0),
12723 fold_convert_loc (loc,
12726 TREE_OPERAND (arg0, 1)));
12728 /* Transform comparisons of the form X +- Y CMP X to Y CMP 0. */
12729 if ((TREE_CODE (arg0) == PLUS_EXPR
12730 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
12731 || TREE_CODE (arg0) == MINUS_EXPR)
12732 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12733 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
12734 || POINTER_TYPE_P (TREE_TYPE (arg0))))
12736 tree val = TREE_OPERAND (arg0, 1);
12737 return omit_two_operands_loc (loc, type,
12738 fold_build2_loc (loc, code, type,
12740 build_int_cst (TREE_TYPE (val),
12742 TREE_OPERAND (arg0, 0), arg1);
12745 /* Transform comparisons of the form C - X CMP X if C % 2 == 1. */
12746 if (TREE_CODE (arg0) == MINUS_EXPR
12747 && TREE_CODE (TREE_OPERAND (arg0, 0)) == INTEGER_CST
12748 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0)
12749 && (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 0)) & 1) == 1)
12751 return omit_two_operands_loc (loc, type,
12753 ? boolean_true_node : boolean_false_node,
12754 TREE_OPERAND (arg0, 1), arg1);
12757 /* If we have X - Y == 0, we can convert that to X == Y and similarly
12758 for !=. Don't do this for ordered comparisons due to overflow. */
12759 if (TREE_CODE (arg0) == MINUS_EXPR
12760 && integer_zerop (arg1))
12761 return fold_build2_loc (loc, code, type,
12762 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
12764 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
12765 if (TREE_CODE (arg0) == ABS_EXPR
12766 && (integer_zerop (arg1) || real_zerop (arg1)))
12767 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), arg1);
12769 /* If this is an EQ or NE comparison with zero and ARG0 is
12770 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
12771 two operations, but the latter can be done in one less insn
12772 on machines that have only two-operand insns or on which a
12773 constant cannot be the first operand. */
12774 if (TREE_CODE (arg0) == BIT_AND_EXPR
12775 && integer_zerop (arg1))
12777 tree arg00 = TREE_OPERAND (arg0, 0);
12778 tree arg01 = TREE_OPERAND (arg0, 1);
12779 if (TREE_CODE (arg00) == LSHIFT_EXPR
12780 && integer_onep (TREE_OPERAND (arg00, 0)))
12782 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
12783 arg01, TREE_OPERAND (arg00, 1));
12784 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12785 build_int_cst (TREE_TYPE (arg0), 1));
12786 return fold_build2_loc (loc, code, type,
12787 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12790 else if (TREE_CODE (arg01) == LSHIFT_EXPR
12791 && integer_onep (TREE_OPERAND (arg01, 0)))
12793 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
12794 arg00, TREE_OPERAND (arg01, 1));
12795 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12796 build_int_cst (TREE_TYPE (arg0), 1));
12797 return fold_build2_loc (loc, code, type,
12798 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12803 /* If this is an NE or EQ comparison of zero against the result of a
12804 signed MOD operation whose second operand is a power of 2, make
12805 the MOD operation unsigned since it is simpler and equivalent. */
12806 if (integer_zerop (arg1)
12807 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
12808 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
12809 || TREE_CODE (arg0) == CEIL_MOD_EXPR
12810 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
12811 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
12812 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12814 tree newtype = unsigned_type_for (TREE_TYPE (arg0));
12815 tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
12816 fold_convert_loc (loc, newtype,
12817 TREE_OPERAND (arg0, 0)),
12818 fold_convert_loc (loc, newtype,
12819 TREE_OPERAND (arg0, 1)));
12821 return fold_build2_loc (loc, code, type, newmod,
12822 fold_convert_loc (loc, newtype, arg1));
12825 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
12826 C1 is a valid shift constant, and C2 is a power of two, i.e.
12828 if (TREE_CODE (arg0) == BIT_AND_EXPR
12829 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
12830 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
12832 && integer_pow2p (TREE_OPERAND (arg0, 1))
12833 && integer_zerop (arg1))
12835 tree itype = TREE_TYPE (arg0);
12836 unsigned HOST_WIDE_INT prec = TYPE_PRECISION (itype);
12837 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
12839 /* Check for a valid shift count. */
12840 if (TREE_INT_CST_HIGH (arg001) == 0
12841 && TREE_INT_CST_LOW (arg001) < prec)
12843 tree arg01 = TREE_OPERAND (arg0, 1);
12844 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12845 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
12846 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
12847 can be rewritten as (X & (C2 << C1)) != 0. */
12848 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
12850 tem = fold_build2_loc (loc, LSHIFT_EXPR, itype, arg01, arg001);
12851 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, arg000, tem);
12852 return fold_build2_loc (loc, code, type, tem, arg1);
12854 /* Otherwise, for signed (arithmetic) shifts,
12855 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
12856 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
12857 else if (!TYPE_UNSIGNED (itype))
12858 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
12859 arg000, build_int_cst (itype, 0));
12860 /* Otherwise, of unsigned (logical) shifts,
12861 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
12862 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
12864 return omit_one_operand_loc (loc, type,
12865 code == EQ_EXPR ? integer_one_node
12866 : integer_zero_node,
12871 /* If this is an NE comparison of zero with an AND of one, remove the
12872 comparison since the AND will give the correct value. */
12873 if (code == NE_EXPR
12874 && integer_zerop (arg1)
12875 && TREE_CODE (arg0) == BIT_AND_EXPR
12876 && integer_onep (TREE_OPERAND (arg0, 1)))
12877 return fold_convert_loc (loc, type, arg0);
12879 /* If we have (A & C) == C where C is a power of 2, convert this into
12880 (A & C) != 0. Similarly for NE_EXPR. */
12881 if (TREE_CODE (arg0) == BIT_AND_EXPR
12882 && integer_pow2p (TREE_OPERAND (arg0, 1))
12883 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12884 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12885 arg0, fold_convert_loc (loc, TREE_TYPE (arg0),
12886 integer_zero_node));
12888 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
12889 bit, then fold the expression into A < 0 or A >= 0. */
12890 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1, type);
12894 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
12895 Similarly for NE_EXPR. */
12896 if (TREE_CODE (arg0) == BIT_AND_EXPR
12897 && TREE_CODE (arg1) == INTEGER_CST
12898 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12900 tree notc = fold_build1_loc (loc, BIT_NOT_EXPR,
12901 TREE_TYPE (TREE_OPERAND (arg0, 1)),
12902 TREE_OPERAND (arg0, 1));
12903 tree dandnotc = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12905 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12906 if (integer_nonzerop (dandnotc))
12907 return omit_one_operand_loc (loc, type, rslt, arg0);
12910 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
12911 Similarly for NE_EXPR. */
12912 if (TREE_CODE (arg0) == BIT_IOR_EXPR
12913 && TREE_CODE (arg1) == INTEGER_CST
12914 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12916 tree notd = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
12917 tree candnotd = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12918 TREE_OPERAND (arg0, 1), notd);
12919 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12920 if (integer_nonzerop (candnotd))
12921 return omit_one_operand_loc (loc, type, rslt, arg0);
12924 /* If this is a comparison of a field, we may be able to simplify it. */
12925 if ((TREE_CODE (arg0) == COMPONENT_REF
12926 || TREE_CODE (arg0) == BIT_FIELD_REF)
12927 /* Handle the constant case even without -O
12928 to make sure the warnings are given. */
12929 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
12931 t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
12936 /* Optimize comparisons of strlen vs zero to a compare of the
12937 first character of the string vs zero. To wit,
12938 strlen(ptr) == 0 => *ptr == 0
12939 strlen(ptr) != 0 => *ptr != 0
12940 Other cases should reduce to one of these two (or a constant)
12941 due to the return value of strlen being unsigned. */
12942 if (TREE_CODE (arg0) == CALL_EXPR
12943 && integer_zerop (arg1))
12945 tree fndecl = get_callee_fndecl (arg0);
12948 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
12949 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
12950 && call_expr_nargs (arg0) == 1
12951 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
12953 tree iref = build_fold_indirect_ref_loc (loc,
12954 CALL_EXPR_ARG (arg0, 0));
12955 return fold_build2_loc (loc, code, type, iref,
12956 build_int_cst (TREE_TYPE (iref), 0));
12960 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
12961 of X. Similarly fold (X >> C) == 0 into X >= 0. */
12962 if (TREE_CODE (arg0) == RSHIFT_EXPR
12963 && integer_zerop (arg1)
12964 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12966 tree arg00 = TREE_OPERAND (arg0, 0);
12967 tree arg01 = TREE_OPERAND (arg0, 1);
12968 tree itype = TREE_TYPE (arg00);
12969 if (TREE_INT_CST_HIGH (arg01) == 0
12970 && TREE_INT_CST_LOW (arg01)
12971 == (unsigned HOST_WIDE_INT) (TYPE_PRECISION (itype) - 1))
12973 if (TYPE_UNSIGNED (itype))
12975 itype = signed_type_for (itype);
12976 arg00 = fold_convert_loc (loc, itype, arg00);
12978 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
12979 type, arg00, build_int_cst (itype, 0));
12983 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
12984 if (integer_zerop (arg1)
12985 && TREE_CODE (arg0) == BIT_XOR_EXPR)
12986 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12987 TREE_OPERAND (arg0, 1));
12989 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
12990 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12991 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12992 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12993 build_int_cst (TREE_TYPE (arg1), 0));
12994 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
12995 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12996 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12997 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
12998 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 1),
12999 build_int_cst (TREE_TYPE (arg1), 0));
13001 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
13002 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13003 && TREE_CODE (arg1) == INTEGER_CST
13004 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
13005 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
13006 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg1),
13007 TREE_OPERAND (arg0, 1), arg1));
13009 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
13010 (X & C) == 0 when C is a single bit. */
13011 if (TREE_CODE (arg0) == BIT_AND_EXPR
13012 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
13013 && integer_zerop (arg1)
13014 && integer_pow2p (TREE_OPERAND (arg0, 1)))
13016 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
13017 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
13018 TREE_OPERAND (arg0, 1));
13019 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
13023 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
13024 constant C is a power of two, i.e. a single bit. */
13025 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13026 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
13027 && integer_zerop (arg1)
13028 && integer_pow2p (TREE_OPERAND (arg0, 1))
13029 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13030 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
13032 tree arg00 = TREE_OPERAND (arg0, 0);
13033 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
13034 arg00, build_int_cst (TREE_TYPE (arg00), 0));
13037 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
13038 when is C is a power of two, i.e. a single bit. */
13039 if (TREE_CODE (arg0) == BIT_AND_EXPR
13040 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
13041 && integer_zerop (arg1)
13042 && integer_pow2p (TREE_OPERAND (arg0, 1))
13043 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13044 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
13046 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
13047 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
13048 arg000, TREE_OPERAND (arg0, 1));
13049 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
13050 tem, build_int_cst (TREE_TYPE (tem), 0));
13053 if (integer_zerop (arg1)
13054 && tree_expr_nonzero_p (arg0))
13056 tree res = constant_boolean_node (code==NE_EXPR, type);
13057 return omit_one_operand_loc (loc, type, res, arg0);
13060 /* Fold -X op -Y as X op Y, where op is eq/ne. */
13061 if (TREE_CODE (arg0) == NEGATE_EXPR
13062 && TREE_CODE (arg1) == NEGATE_EXPR)
13063 return fold_build2_loc (loc, code, type,
13064 TREE_OPERAND (arg0, 0),
13065 TREE_OPERAND (arg1, 0));
13067 /* Fold (X & C) op (Y & C) as (X ^ Y) & C op 0, and symmetries.  */
13068 if (TREE_CODE (arg0) == BIT_AND_EXPR
13069 && TREE_CODE (arg1) == BIT_AND_EXPR)
13071 tree arg00 = TREE_OPERAND (arg0, 0);
13072 tree arg01 = TREE_OPERAND (arg0, 1);
13073 tree arg10 = TREE_OPERAND (arg1, 0);
13074 tree arg11 = TREE_OPERAND (arg1, 1);
13075 tree itype = TREE_TYPE (arg0);
13077 if (operand_equal_p (arg01, arg11, 0))
13078 return fold_build2_loc (loc, code, type,
13079 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13080 fold_build2_loc (loc,
13081 BIT_XOR_EXPR, itype,
13084 build_int_cst (itype, 0));
13086 if (operand_equal_p (arg01, arg10, 0))
13087 return fold_build2_loc (loc, code, type,
13088 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13089 fold_build2_loc (loc,
13090 BIT_XOR_EXPR, itype,
13093 build_int_cst (itype, 0));
13095 if (operand_equal_p (arg00, arg11, 0))
13096 return fold_build2_loc (loc, code, type,
13097 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13098 fold_build2_loc (loc,
13099 BIT_XOR_EXPR, itype,
13102 build_int_cst (itype, 0));
13104 if (operand_equal_p (arg00, arg10, 0))
13105 return fold_build2_loc (loc, code, type,
13106 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13107 fold_build2_loc (loc,
13108 BIT_XOR_EXPR, itype,
13111 build_int_cst (itype, 0));
13114 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13115 && TREE_CODE (arg1) == BIT_XOR_EXPR)
13117 tree arg00 = TREE_OPERAND (arg0, 0);
13118 tree arg01 = TREE_OPERAND (arg0, 1);
13119 tree arg10 = TREE_OPERAND (arg1, 0);
13120 tree arg11 = TREE_OPERAND (arg1, 1);
13121 tree itype = TREE_TYPE (arg0);
13123 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
13124 operand_equal_p guarantees no side-effects so we don't need
13125 to use omit_one_operand on Z. */
13126 if (operand_equal_p (arg01, arg11, 0))
13127 return fold_build2_loc (loc, code, type, arg00, arg10);
13128 if (operand_equal_p (arg01, arg10, 0))
13129 return fold_build2_loc (loc, code, type, arg00, arg11);
13130 if (operand_equal_p (arg00, arg11, 0))
13131 return fold_build2_loc (loc, code, type, arg01, arg10);
13132 if (operand_equal_p (arg00, arg10, 0))
13133 return fold_build2_loc (loc, code, type, arg01, arg11);
13135 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
13136 if (TREE_CODE (arg01) == INTEGER_CST
13137 && TREE_CODE (arg11) == INTEGER_CST)
13138 return fold_build2_loc (loc, code, type,
13139 fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00,
13140 fold_build2_loc (loc,
13141 BIT_XOR_EXPR, itype,
13146 /* Attempt to simplify equality/inequality comparisons of complex
13147 values. Only lower the comparison if the result is known or
13148 can be simplified to a single scalar comparison. */
13149 if ((TREE_CODE (arg0) == COMPLEX_EXPR
13150 || TREE_CODE (arg0) == COMPLEX_CST)
13151 && (TREE_CODE (arg1) == COMPLEX_EXPR
13152 || TREE_CODE (arg1) == COMPLEX_CST))
13154 tree real0, imag0, real1, imag1;
13157 if (TREE_CODE (arg0) == COMPLEX_EXPR)
13159 real0 = TREE_OPERAND (arg0, 0);
13160 imag0 = TREE_OPERAND (arg0, 1);
13164 real0 = TREE_REALPART (arg0);
13165 imag0 = TREE_IMAGPART (arg0);
13168 if (TREE_CODE (arg1) == COMPLEX_EXPR)
13170 real1 = TREE_OPERAND (arg1, 0);
13171 imag1 = TREE_OPERAND (arg1, 1);
13175 real1 = TREE_REALPART (arg1);
13176 imag1 = TREE_IMAGPART (arg1);
13179 rcond = fold_binary_loc (loc, code, type, real0, real1);
13180 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
13182 if (integer_zerop (rcond))
13184 if (code == EQ_EXPR)
13185 return omit_two_operands_loc (loc, type, boolean_false_node,
13187 return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
13191 if (code == NE_EXPR)
13192 return omit_two_operands_loc (loc, type, boolean_true_node,
13194 return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
13198 icond = fold_binary_loc (loc, code, type, imag0, imag1);
13199 if (icond && TREE_CODE (icond) == INTEGER_CST)
13201 if (integer_zerop (icond))
13203 if (code == EQ_EXPR)
13204 return omit_two_operands_loc (loc, type, boolean_false_node,
13206 return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
13210 if (code == NE_EXPR)
13211 return omit_two_operands_loc (loc, type, boolean_true_node,
13213 return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
13224 tem = fold_comparison (loc, code, type, op0, op1);
13225 if (tem != NULL_TREE)
13228 /* Transform comparisons of the form X +- C CMP X. */
13229 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
13230 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
13231 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
13232 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
13233 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
13234 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
13236 tree arg01 = TREE_OPERAND (arg0, 1);
13237 enum tree_code code0 = TREE_CODE (arg0);
13240 if (TREE_CODE (arg01) == REAL_CST)
13241 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
13243 is_positive = tree_int_cst_sgn (arg01);
13245 /* (X - c) > X becomes false. */
13246 if (code == GT_EXPR
13247 && ((code0 == MINUS_EXPR && is_positive >= 0)
13248 || (code0 == PLUS_EXPR && is_positive <= 0)))
13250 if (TREE_CODE (arg01) == INTEGER_CST
13251 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13252 fold_overflow_warning (("assuming signed overflow does not "
13253 "occur when assuming that (X - c) > X "
13254 "is always false"),
13255 WARN_STRICT_OVERFLOW_ALL);
13256 return constant_boolean_node (0, type);
13259 /* Likewise (X + c) < X becomes false. */
13260 if (code == LT_EXPR
13261 && ((code0 == PLUS_EXPR && is_positive >= 0)
13262 || (code0 == MINUS_EXPR && is_positive <= 0)))
13264 if (TREE_CODE (arg01) == INTEGER_CST
13265 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13266 fold_overflow_warning (("assuming signed overflow does not "
13267 "occur when assuming that "
13268 "(X + c) < X is always false"),
13269 WARN_STRICT_OVERFLOW_ALL);
13270 return constant_boolean_node (0, type);
13273 /* Convert (X - c) <= X to true. */
13274 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
13276 && ((code0 == MINUS_EXPR && is_positive >= 0)
13277 || (code0 == PLUS_EXPR && is_positive <= 0)))
13279 if (TREE_CODE (arg01) == INTEGER_CST
13280 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13281 fold_overflow_warning (("assuming signed overflow does not "
13282 "occur when assuming that "
13283 "(X - c) <= X is always true"),
13284 WARN_STRICT_OVERFLOW_ALL);
13285 return constant_boolean_node (1, type);
13288 /* Convert (X + c) >= X to true. */
13289 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
13291 && ((code0 == PLUS_EXPR && is_positive >= 0)
13292 || (code0 == MINUS_EXPR && is_positive <= 0)))
13294 if (TREE_CODE (arg01) == INTEGER_CST
13295 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13296 fold_overflow_warning (("assuming signed overflow does not "
13297 "occur when assuming that "
13298 "(X + c) >= X is always true"),
13299 WARN_STRICT_OVERFLOW_ALL);
13300 return constant_boolean_node (1, type);
13303 if (TREE_CODE (arg01) == INTEGER_CST)
13305 /* Convert X + c > X and X - c < X to true for integers. */
13306 if (code == GT_EXPR
13307 && ((code0 == PLUS_EXPR && is_positive > 0)
13308 || (code0 == MINUS_EXPR && is_positive < 0)))
13310 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13311 fold_overflow_warning (("assuming signed overflow does "
13312 "not occur when assuming that "
13313 "(X + c) > X is always true"),
13314 WARN_STRICT_OVERFLOW_ALL);
13315 return constant_boolean_node (1, type);
13318 if (code == LT_EXPR
13319 && ((code0 == MINUS_EXPR && is_positive > 0)
13320 || (code0 == PLUS_EXPR && is_positive < 0)))
13322 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13323 fold_overflow_warning (("assuming signed overflow does "
13324 "not occur when assuming that "
13325 "(X - c) < X is always true"),
13326 WARN_STRICT_OVERFLOW_ALL);
13327 return constant_boolean_node (1, type);
13330 /* Convert X + c <= X and X - c >= X to false for integers. */
13331 if (code == LE_EXPR
13332 && ((code0 == PLUS_EXPR && is_positive > 0)
13333 || (code0 == MINUS_EXPR && is_positive < 0)))
13335 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13336 fold_overflow_warning (("assuming signed overflow does "
13337 "not occur when assuming that "
13338 "(X + c) <= X is always false"),
13339 WARN_STRICT_OVERFLOW_ALL);
13340 return constant_boolean_node (0, type);
13343 if (code == GE_EXPR
13344 && ((code0 == MINUS_EXPR && is_positive > 0)
13345 || (code0 == PLUS_EXPR && is_positive < 0)))
13347 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13348 fold_overflow_warning (("assuming signed overflow does "
13349 "not occur when assuming that "
13350 "(X - c) >= X is always false"),
13351 WARN_STRICT_OVERFLOW_ALL);
13352 return constant_boolean_node (0, type);
13357 /* Comparisons with the highest or lowest possible integer of
13358 the specified precision will have known values. */
13360 tree arg1_type = TREE_TYPE (arg1);
13361 unsigned int width = TYPE_PRECISION (arg1_type);
13363 if (TREE_CODE (arg1) == INTEGER_CST
13364 && width <= 2 * HOST_BITS_PER_WIDE_INT
13365 && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
13367 HOST_WIDE_INT signed_max_hi;
13368 unsigned HOST_WIDE_INT signed_max_lo;
13369 unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;
13371 if (width <= HOST_BITS_PER_WIDE_INT)
13373 signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
13378 if (TYPE_UNSIGNED (arg1_type))
13380 max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
13386 max_lo = signed_max_lo;
13387 min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
13393 width -= HOST_BITS_PER_WIDE_INT;
13394 signed_max_lo = -1;
13395 signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
13400 if (TYPE_UNSIGNED (arg1_type))
13402 max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
13407 max_hi = signed_max_hi;
13408 min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
13412 if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
13413 && TREE_INT_CST_LOW (arg1) == max_lo)
13417 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13420 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
13423 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13426 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
13428 /* The GE_EXPR and LT_EXPR cases above are not normally
13429 reached because of previous transformations. */
13434 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
13436 && TREE_INT_CST_LOW (arg1) == max_lo - 1)
13440 arg1 = const_binop (PLUS_EXPR, arg1,
13441 build_int_cst (TREE_TYPE (arg1), 1), 0);
13442 return fold_build2_loc (loc, EQ_EXPR, type,
13443 fold_convert_loc (loc,
13444 TREE_TYPE (arg1), arg0),
13447 arg1 = const_binop (PLUS_EXPR, arg1,
13448 build_int_cst (TREE_TYPE (arg1), 1), 0);
13449 return fold_build2_loc (loc, NE_EXPR, type,
13450 fold_convert_loc (loc, TREE_TYPE (arg1),
13456 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
13458 && TREE_INT_CST_LOW (arg1) == min_lo)
13462 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13465 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
13468 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13471 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
13476 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
13478 && TREE_INT_CST_LOW (arg1) == min_lo + 1)
13482 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
13483 return fold_build2_loc (loc, NE_EXPR, type,
13484 fold_convert_loc (loc,
13485 TREE_TYPE (arg1), arg0),
13488 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
13489 return fold_build2_loc (loc, EQ_EXPR, type,
13490 fold_convert_loc (loc, TREE_TYPE (arg1),
13497 else if (TREE_INT_CST_HIGH (arg1) == signed_max_hi
13498 && TREE_INT_CST_LOW (arg1) == signed_max_lo
13499 && TYPE_UNSIGNED (arg1_type)
13500 /* We will flip the signedness of the comparison operator
13501 associated with the mode of arg1, so the sign bit is
13502 specified by this mode. Check that arg1 is the signed
13503 max associated with this sign bit. */
13504 && width == GET_MODE_BITSIZE (TYPE_MODE (arg1_type))
13505 /* signed_type does not work on pointer types. */
13506 && INTEGRAL_TYPE_P (arg1_type))
13508 /* The following case also applies to X < signed_max+1
13509 and X >= signed_max+1 because of previous transformations.  */
13510 if (code == LE_EXPR || code == GT_EXPR)
13513 st = signed_type_for (TREE_TYPE (arg1));
13514 return fold_build2_loc (loc,
13515 code == LE_EXPR ? GE_EXPR : LT_EXPR,
13516 type, fold_convert_loc (loc, st, arg0),
13517 build_int_cst (st, 0));
13523 /* If we are comparing an ABS_EXPR with a constant, we can
13524 convert all the cases into explicit comparisons, but they may
13525 well not be faster than doing the ABS and one comparison.
13526 But ABS (X) <= C is a range comparison, which becomes a subtraction
13527 and a comparison, and is probably faster. */
13528 if (code == LE_EXPR
13529 && TREE_CODE (arg1) == INTEGER_CST
13530 && TREE_CODE (arg0) == ABS_EXPR
13531 && ! TREE_SIDE_EFFECTS (arg0)
13532 && (0 != (tem = negate_expr (arg1)))
13533 && TREE_CODE (tem) == INTEGER_CST
13534 && !TREE_OVERFLOW (tem))
13535 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
13536 build2 (GE_EXPR, type,
13537 TREE_OPERAND (arg0, 0), tem),
13538 build2 (LE_EXPR, type,
13539 TREE_OPERAND (arg0, 0), arg1));
13541 /* Convert ABS_EXPR<x> >= 0 to true. */
13542 strict_overflow_p = false;
13543 if (code == GE_EXPR
13544 && (integer_zerop (arg1)
13545 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
13546 && real_zerop (arg1)))
13547 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13549 if (strict_overflow_p)
13550 fold_overflow_warning (("assuming signed overflow does not occur "
13551 "when simplifying comparison of "
13552 "absolute value and zero"),
13553 WARN_STRICT_OVERFLOW_CONDITIONAL);
13554 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13557 /* Convert ABS_EXPR<x> < 0 to false. */
13558 strict_overflow_p = false;
13559 if (code == LT_EXPR
13560 && (integer_zerop (arg1) || real_zerop (arg1))
13561 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13563 if (strict_overflow_p)
13564 fold_overflow_warning (("assuming signed overflow does not occur "
13565 "when simplifying comparison of "
13566 "absolute value and zero"),
13567 WARN_STRICT_OVERFLOW_CONDITIONAL);
13568 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13571 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
13572 and similarly for >= into !=. */
13573 if ((code == LT_EXPR || code == GE_EXPR)
13574 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13575 && TREE_CODE (arg1) == LSHIFT_EXPR
13576 && integer_onep (TREE_OPERAND (arg1, 0)))
13578 tem = build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13579 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13580 TREE_OPERAND (arg1, 1)),
13581 build_int_cst (TREE_TYPE (arg0), 0));
13582 goto fold_binary_exit;
13585 if ((code == LT_EXPR || code == GE_EXPR)
13586 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13587 && CONVERT_EXPR_P (arg1)
13588 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
13589 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
13591 tem = build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13592 fold_convert_loc (loc, TREE_TYPE (arg0),
13593 build2 (RSHIFT_EXPR,
13594 TREE_TYPE (arg0), arg0,
13595 TREE_OPERAND (TREE_OPERAND (arg1, 0),
13597 build_int_cst (TREE_TYPE (arg0), 0));
13598 goto fold_binary_exit;
13603 case UNORDERED_EXPR:
13611 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
13613 t1 = fold_relational_const (code, type, arg0, arg1);
13614 if (t1 != NULL_TREE)
13618 /* If the first operand is NaN, the result is constant. */
13619 if (TREE_CODE (arg0) == REAL_CST
13620 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
13621 && (code != LTGT_EXPR || ! flag_trapping_math))
13623 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13624 ? integer_zero_node
13625 : integer_one_node;
13626 return omit_one_operand_loc (loc, type, t1, arg1);
13629 /* If the second operand is NaN, the result is constant. */
13630 if (TREE_CODE (arg1) == REAL_CST
13631 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
13632 && (code != LTGT_EXPR || ! flag_trapping_math))
13634 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13635 ? integer_zero_node
13636 : integer_one_node;
13637 return omit_one_operand_loc (loc, type, t1, arg0);
13640 /* Simplify unordered comparison of something with itself. */
13641 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
13642 && operand_equal_p (arg0, arg1, 0))
13643 return constant_boolean_node (1, type);
13645 if (code == LTGT_EXPR
13646 && !flag_trapping_math
13647 && operand_equal_p (arg0, arg1, 0))
13648 return constant_boolean_node (0, type);
13650 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
13652 tree targ0 = strip_float_extensions (arg0);
13653 tree targ1 = strip_float_extensions (arg1);
13654 tree newtype = TREE_TYPE (targ0);
13656 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
13657 newtype = TREE_TYPE (targ1);
13659 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
13660 return fold_build2_loc (loc, code, type,
13661 fold_convert_loc (loc, newtype, targ0),
13662 fold_convert_loc (loc, newtype, targ1));
13667 case COMPOUND_EXPR:
13668 /* When pedantic, a compound expression can be neither an lvalue
13669 nor an integer constant expression. */
13670 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
13672 /* Don't let (0, 0) be null pointer constant. */
13673 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
13674 : fold_convert_loc (loc, type, arg1);
13675 return pedantic_non_lvalue_loc (loc, tem);
13678 if ((TREE_CODE (arg0) == REAL_CST
13679 && TREE_CODE (arg1) == REAL_CST)
13680 || (TREE_CODE (arg0) == INTEGER_CST
13681 && TREE_CODE (arg1) == INTEGER_CST))
13682 return build_complex (type, arg0, arg1);
13686 /* An ASSERT_EXPR should never be passed to fold_binary. */
13687 gcc_unreachable ();
13691 } /* switch (code) */
13693 protected_set_expr_location (tem, loc);
13697 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
13698 a LABEL_EXPR; otherwise return NULL_TREE. Do not check the subtrees
/* NOTE(review): several lines of this function are elided from this
   excerpt (the embedded line numbering jumps); the switch's case labels
   and its return statements are not visible here.  DATA is required by
   the walk_tree callback signature but unused (ATTRIBUTE_UNUSED).  */
13702 contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
13704 switch (TREE_CODE (*tp))
/* Clearing *WALK_SUBTREES tells walk_tree not to descend into the
   operands of the current node.  */
13710 *walk_subtrees = 0;
13712 /* ... fall through ... */
13719 /* Return whether the sub-tree ST contains a label which is accessible from
13720 outside the sub-tree. */
13723 contains_label_p (tree st)
/* Walk ST, visiting each node only once, with contains_label_1 as the
   callback; a non-NULL result means a label was found in the sub-tree.
   NOTE(review): the declarator and return keyword lines are elided from
   this excerpt.  */
13726 (walk_tree_without_duplicates (&st, contains_label_1 , NULL) != NULL_TREE);
13729 /* Fold a ternary expression of code CODE and type TYPE with operands
13730 OP0, OP1, and OP2. Return the folded expression if folding is
13731 successful. Otherwise, return NULL_TREE. */
13734 fold_ternary_loc (location_t loc, enum tree_code code, tree type,
13735 tree op0, tree op1, tree op2)
13738 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
13739 enum tree_code_class kind = TREE_CODE_CLASS (code);
/* Only genuine three-operand expression codes may reach this folder.  */
13741 gcc_assert (IS_EXPR_CODE_CLASS (kind)
13742 && TREE_CODE_LENGTH (code) == 3)
13744 /* Strip any conversions that don't change the mode. This is safe
13745 for every expression, except for a comparison expression because
13746 its signedness is derived from its operands. So, in the latter
13747 case, only strip conversions that don't change the signedness.
13749 Note that this is done as an internal manipulation within the
13750 constant folder, in order to find the simplest representation of
13751 the arguments so that their form can be studied. In any case,
13752 the appropriate type conversions should be put back in the tree
13753 that will get out of the constant folder. */
/* Fold a COMPONENT_REF of a constant CONSTRUCTOR to the initializer
   value of the matching field, when it can be found.  */
13768 case COMPONENT_REF:
13769 if (TREE_CODE (arg0) == CONSTRUCTOR
13770 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
13772 unsigned HOST_WIDE_INT idx;
13774 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
/* NOTE(review): the "case COND_EXPR:" label appears to be elided from
   this excerpt; the folds below operate on a conditional expression
   (condition ARG0, arms OP1 and OP2).  */
13781 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
13782 so all simple results must be passed through pedantic_non_lvalue. */
13783 if (TREE_CODE (arg0) == INTEGER_CST)
13785 tree unused_op = integer_zerop (arg0) ? op1 : op2;
13786 tem = integer_zerop (arg0) ? op2 : op1;
13787 /* Only optimize constant conditions when the selected branch
13788 has the same type as the COND_EXPR. This avoids optimizing
13789 away "c ? x : throw", where the throw has a void type.
13790 Avoid throwing away that operand which contains label. */
13791 if ((!TREE_SIDE_EFFECTS (unused_op)
13792 || !contains_label_p (unused_op))
13793 && (! VOID_TYPE_P (TREE_TYPE (tem))
13794 || VOID_TYPE_P (type)))
13795 return pedantic_non_lvalue_loc (loc, tem);
13798 if (operand_equal_p (arg1, op2, 0))
13799 return pedantic_omit_one_operand_loc (loc, type, arg1, arg0);
13801 /* If we have A op B ? A : C, we may be able to convert this to a
13802 simpler expression, depending on the operation and the values
13803 of B and C. Signed zeros prevent all of these transformations,
13804 for reasons given above each one.
13806 Also try swapping the arguments and inverting the conditional. */
13807 if (COMPARISON_CLASS_P (arg0)
13808 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13809 arg1, TREE_OPERAND (arg0, 1))
13810 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
13812 tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
13817 if (COMPARISON_CLASS_P (arg0)
13818 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13820 TREE_OPERAND (arg0, 1))
13821 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
13823 tem = fold_truth_not_expr (loc, arg0);
13824 if (tem && COMPARISON_CLASS_P (tem))
13826 tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
13832 /* If the second operand is simpler than the third, swap them
13833 since that produces better jump optimization results. */
13834 if (truth_value_p (TREE_CODE (arg0))
13835 && tree_swap_operands_p (op1, op2, false))
13837 /* See if this can be inverted. If it can't, possibly because
13838 it was a floating-point inequality comparison, don't do
13840 tem = fold_truth_not_expr (loc, arg0);
13842 return fold_build3_loc (loc, code, type, tem, op2, op1);
13845 /* Convert A ? 1 : 0 to simply A. */
13846 if (integer_onep (op1)
13847 && integer_zerop (op2)
13848 /* If we try to convert OP0 to our type, the
13849 call to fold will try to move the conversion inside
13850 a COND, which will recurse. In that case, the COND_EXPR
13851 is probably the best choice, so leave it alone. */
13852 && type == TREE_TYPE (arg0))
13853 return pedantic_non_lvalue_loc (loc, arg0);
13855 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
13856 over COND_EXPR in cases such as floating point comparisons. */
13857 if (integer_zerop (op1)
13858 && integer_onep (op2)
13859 && truth_value_p (TREE_CODE (arg0)))
13860 return pedantic_non_lvalue_loc (loc,
13861 fold_convert_loc (loc, type,
13862 invert_truthvalue_loc (loc,
13865 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
13866 if (TREE_CODE (arg0) == LT_EXPR
13867 && integer_zerop (TREE_OPERAND (arg0, 1))
13868 && integer_zerop (op2)
13869 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
13871 /* sign_bit_p only checks ARG1 bits within A's precision.
13872 If <sign bit of A> has wider type than A, bits outside
13873 of A's precision in <sign bit of A> need to be checked.
13874 If they are all 0, this optimization needs to be done
13875 in unsigned A's type, if they are all 1 in signed A's type,
13876 otherwise this can't be done. */
13877 if (TYPE_PRECISION (TREE_TYPE (tem))
13878 < TYPE_PRECISION (TREE_TYPE (arg1))
13879 && TYPE_PRECISION (TREE_TYPE (tem))
13880 < TYPE_PRECISION (type))
/* Build a two-HOST_WIDE_INT mask covering the bits of ARG1 that lie
   outside TEM's precision but inside the (possibly clamped) outer
   precision, then test whether they are all ones or all zeros.  */
13882 unsigned HOST_WIDE_INT mask_lo;
13883 HOST_WIDE_INT mask_hi;
13884 int inner_width, outer_width;
13887 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
13888 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
13889 if (outer_width > TYPE_PRECISION (type))
13890 outer_width = TYPE_PRECISION (type);
13892 if (outer_width > HOST_BITS_PER_WIDE_INT)
13894 mask_hi = ((unsigned HOST_WIDE_INT) -1
13895 >> (2 * HOST_BITS_PER_WIDE_INT - outer_width));
13901 mask_lo = ((unsigned HOST_WIDE_INT) -1
13902 >> (HOST_BITS_PER_WIDE_INT - outer_width));
13904 if (inner_width > HOST_BITS_PER_WIDE_INT)
13906 mask_hi &= ~((unsigned HOST_WIDE_INT) -1
13907 >> (HOST_BITS_PER_WIDE_INT - inner_width));
13911 mask_lo &= ~((unsigned HOST_WIDE_INT) -1
13912 >> (HOST_BITS_PER_WIDE_INT - inner_width));
13914 if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == mask_hi
13915 && (TREE_INT_CST_LOW (arg1) & mask_lo) == mask_lo)
13917 tem_type = signed_type_for (TREE_TYPE (tem));
13918 tem = fold_convert_loc (loc, tem_type, tem);
13920 else if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == 0
13921 && (TREE_INT_CST_LOW (arg1) & mask_lo) == 0)
13923 tem_type = unsigned_type_for (TREE_TYPE (tem));
13924 tem = fold_convert_loc (loc, tem_type, tem);
13932 fold_convert_loc (loc, type,
13933 fold_build2_loc (loc, BIT_AND_EXPR,
13934 TREE_TYPE (tem), tem,
13935 fold_convert_loc (loc,
13940 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
13941 already handled above. */
13942 if (TREE_CODE (arg0) == BIT_AND_EXPR
13943 && integer_onep (TREE_OPERAND (arg0, 1))
13944 && integer_zerop (op2)
13945 && integer_pow2p (arg1))
13947 tree tem = TREE_OPERAND (arg0, 0);
13949 if (TREE_CODE (tem) == RSHIFT_EXPR
13950 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
13951 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
13952 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
13953 return fold_build2_loc (loc, BIT_AND_EXPR, type,
13954 TREE_OPERAND (tem, 0), arg1);
13957 /* A & N ? N : 0 is simply A & N if N is a power of two. This
13958 is probably obsolete because the first operand should be a
13959 truth value (that's why we have the two cases above), but let's
13960 leave it in until we can confirm this for all front-ends. */
13961 if (integer_zerop (op2)
13962 && TREE_CODE (arg0) == NE_EXPR
13963 && integer_zerop (TREE_OPERAND (arg0, 1))
13964 && integer_pow2p (arg1)
13965 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
13966 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13967 arg1, OEP_ONLY_CONST))
13968 return pedantic_non_lvalue_loc (loc,
13969 fold_convert_loc (loc, type,
13970 TREE_OPERAND (arg0, 0)));
13972 /* Convert A ? B : 0 into A && B if A and B are truth values. */
13973 if (integer_zerop (op2)
13974 && truth_value_p (TREE_CODE (arg0))
13975 && truth_value_p (TREE_CODE (arg1)))
13976 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
13977 fold_convert_loc (loc, type, arg0),
13980 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
13981 if (integer_onep (op2)
13982 && truth_value_p (TREE_CODE (arg0))
13983 && truth_value_p (TREE_CODE (arg1)))
13985 /* Only perform transformation if ARG0 is easily inverted. */
13986 tem = fold_truth_not_expr (loc, arg0);
13988 return fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
13989 fold_convert_loc (loc, type, tem),
13993 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
13994 if (integer_zerop (arg1)
13995 && truth_value_p (TREE_CODE (arg0))
13996 && truth_value_p (TREE_CODE (op2)))
13998 /* Only perform transformation if ARG0 is easily inverted. */
13999 tem = fold_truth_not_expr (loc, arg0);
14001 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
14002 fold_convert_loc (loc, type, tem),
14006 /* Convert A ? 1 : B into A || B if A and B are truth values. */
14007 if (integer_onep (arg1)
14008 && truth_value_p (TREE_CODE (arg0))
14009 && truth_value_p (TREE_CODE (op2)))
14010 return fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
14011 fold_convert_loc (loc, type, arg0),
14017 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
14018 of fold_ternary on them. */
14019 gcc_unreachable ();
14021 case BIT_FIELD_REF:
/* Extract a single element of a constant vector, when the referenced
   piece is exactly one aligned element.  */
14022 if ((TREE_CODE (arg0) == VECTOR_CST
14023 || (TREE_CODE (arg0) == CONSTRUCTOR && TREE_CONSTANT (arg0)))
14024 && type == TREE_TYPE (TREE_TYPE (arg0)))
14026 unsigned HOST_WIDE_INT width = tree_low_cst (arg1, 1);
14027 unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);
14030 && simple_cst_equal (arg1, TYPE_SIZE (type)) == 1
14031 && (idx % width) == 0
14032 && (idx = idx / width)
14033 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
14035 tree elements = NULL_TREE;
14037 if (TREE_CODE (arg0) == VECTOR_CST)
14038 elements = TREE_VECTOR_CST_ELTS (arg0);
14041 unsigned HOST_WIDE_INT idx;
/* Building the list by consing reverses the CONSTRUCTOR's element
   order; NOTE(review): the elided lines presumably account for the
   index direction — confirm against the full source.  */
14044 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (arg0), idx, value)
14045 elements = tree_cons (NULL_TREE, value, elements);
14047 while (idx-- > 0 && elements)
14048 elements = TREE_CHAIN (elements);
14050 return TREE_VALUE (elements);
14052 return fold_convert_loc (loc, type, integer_zero_node);
14056 /* A bit-field-ref that referenced the full argument can be stripped. */
14057 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
14058 && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_low_cst (arg1, 1)
14059 && integer_zerop (op2))
14060 return fold_convert_loc (loc, type, arg0);
14066 } /* switch (code) */
14069 /* Perform constant folding and related simplification of EXPR.
14070 The related simplifications include x*1 => x, x*0 => 0, etc.,
14071 and application of the associative law.
14072 NOP_EXPR conversions may be removed freely (as long as we
14073 are careful not to change the type of the overall expression).
14074 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
14075 but we can constant-fold them if they have constant operands. */
14077 #ifdef ENABLE_FOLD_CHECKING
14078 # define fold(x) fold_1 (x)
14079 static tree fold_1 (tree);
/* NOTE(review): the declarator line of the function body below is
   elided from this excerpt; with fold checking enabled it is compiled
   as fold_1 via the #define above.  */
14085 const tree t = expr;
14086 enum tree_code code = TREE_CODE (t);
14087 enum tree_code_class kind = TREE_CODE_CLASS (code);
14089 location_t loc = EXPR_LOCATION (expr);
14091 /* Return right away if a constant. */
14092 if (kind == tcc_constant)
14095 /* CALL_EXPR-like objects with variable numbers of operands are
14096 treated specially. */
14097 if (kind == tcc_vl_exp)
14099 if (code == CALL_EXPR)
14101 tem = fold_call_expr (loc, expr, false);
14102 return tem ? tem : expr;
/* Fixed-arity expressions dispatch on operand count to the matching
   fold_*_loc worker; a NULL result from the worker means "no
   simplification found", so the original expression is returned.  */
14107 if (IS_EXPR_CODE_CLASS (kind))
14109 tree type = TREE_TYPE (t);
14110 tree op0, op1, op2;
14112 switch (TREE_CODE_LENGTH (code))
14115 op0 = TREE_OPERAND (t, 0);
14116 tem = fold_unary_loc (loc, code, type, op0);
14117 return tem ? tem : expr;
14119 op0 = TREE_OPERAND (t, 0);
14120 op1 = TREE_OPERAND (t, 1);
14121 tem = fold_binary_loc (loc, code, type, op0, op1);
14122 return tem ? tem : expr;
14124 op0 = TREE_OPERAND (t, 0);
14125 op1 = TREE_OPERAND (t, 1);
14126 op2 = TREE_OPERAND (t, 2);
14127 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
14128 return tem ? tem : expr;
14138 tree op0 = TREE_OPERAND (t, 0);
14139 tree op1 = TREE_OPERAND (t, 1);
/* Constant index into a known CONSTRUCTOR: look up the initializer
   element directly.  */
14141 if (TREE_CODE (op1) == INTEGER_CST
14142 && TREE_CODE (op0) == CONSTRUCTOR
14143 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
14145 VEC(constructor_elt,gc) *elts = CONSTRUCTOR_ELTS (op0);
14146 unsigned HOST_WIDE_INT end = VEC_length (constructor_elt, elts);
14147 unsigned HOST_WIDE_INT begin = 0;
14149 /* Find a matching index by means of a binary search. */
14150 while (begin != end)
14152 unsigned HOST_WIDE_INT middle = (begin + end) / 2;
14153 tree index = VEC_index (constructor_elt, elts, middle)->index;
/* Element indexes are either single INTEGER_CSTs or RANGE_EXPRs
   with constant bounds; anything else terminates the search.  */
14155 if (TREE_CODE (index) == INTEGER_CST
14156 && tree_int_cst_lt (index, op1))
14157 begin = middle + 1;
14158 else if (TREE_CODE (index) == INTEGER_CST
14159 && tree_int_cst_lt (op1, index))
14161 else if (TREE_CODE (index) == RANGE_EXPR
14162 && tree_int_cst_lt (TREE_OPERAND (index, 1), op1))
14163 begin = middle + 1;
14164 else if (TREE_CODE (index) == RANGE_EXPR
14165 && tree_int_cst_lt (op1, TREE_OPERAND (index, 0)))
14168 return VEC_index (constructor_elt, elts, middle)->value;
/* NOTE(review): case label elided — presumably CONST_DECL, folded to
   its initializer; confirm against the full source.  */
14176 return fold (DECL_INITIAL (t));
14180 } /* switch (code) */
14183 #ifdef ENABLE_FOLD_CHECKING
14186 static void fold_checksum_tree (const_tree, struct md5_ctx *, htab_t);
14187 static void fold_check_failed (const_tree, const_tree);
14188 void print_fold_checksum (const_tree);
14190 /* When --enable-checking=fold, compute a digest of expr before
14191 and after actual fold call to see if fold did not accidentally
14192 change original expr. */
/* NOTE(review): elided listing — the wrapper's own header (`tree fold
   (tree expr)') and some surrounding lines are not visible here.  */
14198 struct md5_ctx ctx;
14199 unsigned char checksum_before[16], checksum_after[16];
/* Pointer-identity hash table so shared subtrees are checksummed once.  */
14202 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14203 md5_init_ctx (&ctx);
14204 fold_checksum_tree (expr, &ctx, ht);
14205 md5_finish_ctx (&ctx, checksum_before);
/* Do the real folding, then re-checksum the *original* EXPR: fold_1 must
   never mutate its argument in place.  */
14208 ret = fold_1 (expr);
14210 md5_init_ctx (&ctx);
14211 fold_checksum_tree (expr, &ctx, ht);
14212 md5_finish_ctx (&ctx, checksum_after);
14215 if (memcmp (checksum_before, checksum_after, 16))
14216 fold_check_failed (expr, ret);
/* Print the MD5 checksum of EXPR to stderr as 16 hex byte pairs followed
   by a newline.  Debugging aid for the fold-checking machinery.  */
14222 print_fold_checksum (const_tree expr)
14224 struct md5_ctx ctx;
14225 unsigned char checksum[16], cnt;
14228 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14229 md5_init_ctx (&ctx);
14230 fold_checksum_tree (expr, &ctx, ht);
14231 md5_finish_ctx (&ctx, checksum);
14233 for (cnt = 0; cnt < 16; ++cnt)
14234 fprintf (stderr, "%02x", checksum[cnt]);
14235 putc ('\n', stderr);
/* Report an internal compiler error when a fold call is detected to have
   modified its input tree.  Both arguments are currently unused.  */
14239 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
14241 internal_error ("fold check: original tree changed by fold")
/* Accumulate an MD5 digest of the tree rooted at EXPR into CTX, recursing
   through type, chain, operands and (for decls/types) the auxiliary fields.
   HT maps already-visited nodes (by pointer) so shared subtrees and cycles
   are processed only once.  Fields that fold is allowed to modify (e.g.
   DECL_ASSEMBLER_NAME, TYPE_POINTER_TO) are masked out by checksumming a
   scrubbed stack copy of the node instead of the node itself.
   NOTE(review): elided listing — `recursive_label', some case labels and
   braces between the numbered lines are not visible here.  */
14245 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx, htab_t ht)
14248 enum tree_code code;
14249 union tree_node buf;
/* BUF must be big enough to hold a scrubbed copy of any node we copy into
   it below; tree_function_decl is the largest such node.  */
14254 gcc_assert ((sizeof (struct tree_exp) + 5 * sizeof (tree)
14255 <= sizeof (struct tree_function_decl))
14256 && sizeof (struct tree_type) <= sizeof (struct tree_function_decl));
/* Visited-set check: INSERT returns the slot; bail out if already seen
   (elided lines presumably test and fill *slot — TODO confirm).  */
14259 slot = (const void **) htab_find_slot (ht, expr, INSERT);
14263 code = TREE_CODE (expr);
14264 if (TREE_CODE_CLASS (code) == tcc_declaration
14265 && DECL_ASSEMBLER_NAME_SET_P (expr))
14267 /* Allow DECL_ASSEMBLER_NAME to be modified. */
14268 memcpy ((char *) &buf, expr, tree_size (expr));
14269 SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
14270 expr = (tree) &buf;
14272 else if (TREE_CODE_CLASS (code) == tcc_type
14273 && (TYPE_POINTER_TO (expr)
14274 || TYPE_REFERENCE_TO (expr)
14275 || TYPE_CACHED_VALUES_P (expr)
14276 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
14277 || TYPE_NEXT_VARIANT (expr)))
14279 /* Allow these fields to be modified. */
14281 memcpy ((char *) &buf, expr, tree_size (expr));
14282 expr = tmp = (tree) &buf;
14283 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
14284 TYPE_POINTER_TO (tmp) = NULL;
14285 TYPE_REFERENCE_TO (tmp) = NULL;
14286 TYPE_NEXT_VARIANT (tmp) = NULL;
14287 if (TYPE_CACHED_VALUES_P (tmp))
14289 TYPE_CACHED_VALUES_P (tmp) = 0;
14290 TYPE_CACHED_VALUES (tmp) = NULL;
/* Digest the (possibly scrubbed) raw node bytes, then recurse into the
   structural links common to all nodes.  */
14293 md5_process_bytes (expr, tree_size (expr), ctx);
14294 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
/* TREE_CHAIN overlays other fields for types/decls/TREE_LIST/SSA_NAME,
   so only follow it for the remaining node classes.  */
14295 if (TREE_CODE_CLASS (code) != tcc_type
14296 && TREE_CODE_CLASS (code) != tcc_declaration
14297 && code != TREE_LIST
14298 && code != SSA_NAME)
14299 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
/* Per-class recursion: constants, lists/vectors, expressions with
   operands, declarations and types each carry different child links.  */
14300 switch (TREE_CODE_CLASS (code))
14306 md5_process_bytes (TREE_STRING_POINTER (expr),
14307 TREE_STRING_LENGTH (expr), ctx);
14310 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
14311 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
14314 fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
14320 case tcc_exceptional:
/* TREE_LIST: walk purpose/value, then iterate down the chain via the
   (elided) recursive_label rather than recursing on TREE_CHAIN.  */
14324 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
14325 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
14326 expr = TREE_CHAIN (expr);
14327 goto recursive_label;
14330 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
14331 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
14337 case tcc_expression:
14338 case tcc_reference:
14339 case tcc_comparison:
14342 case tcc_statement:
14344 len = TREE_OPERAND_LENGTH (expr);
14345 for (i = 0; i < len; ++i)
14346 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
14348 case tcc_declaration:
14349 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
14350 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
14351 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
14353 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
14354 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
14355 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
14356 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
14357 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
14359 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
14360 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
14362 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
14364 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
14365 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
14366 fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
14370 if (TREE_CODE (expr) == ENUMERAL_TYPE)
14371 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
14372 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
14373 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
14374 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
14375 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
14376 if (INTEGRAL_TYPE_P (expr)
14377 || SCALAR_FLOAT_TYPE_P (expr))
14379 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
14380 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
14382 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
14383 if (TREE_CODE (expr) == RECORD_TYPE
14384 || TREE_CODE (expr) == UNION_TYPE
14385 || TREE_CODE (expr) == QUAL_UNION_TYPE)
14386 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
14387 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
14394 /* Helper function for outputting the checksum of a tree T. When
14395 debugging with gdb, you can "define mynext" to be "next" followed
14396 by "call debug_fold_checksum (op0)", then just trace down till the
/* Prints the 16 checksum bytes in decimal, space-separated, to stderr.  */
14400 debug_fold_checksum (const_tree t)
14403 unsigned char checksum[16];
14404 struct md5_ctx ctx;
14405 htab_t ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14407 md5_init_ctx (&ctx);
14408 fold_checksum_tree (t, &ctx, ht);
14409 md5_finish_ctx (&ctx, checksum);
14412 for (i = 0; i < 16; i++)
14413 fprintf (stderr, "%d ", checksum[i]);
14415 fprintf (stderr, "\n");
14420 /* Fold a unary tree expression with code CODE of type TYPE with an
14421 operand OP0. LOC is the location of the resulting expression.
14422 Return a folded expression if successful. Otherwise, return a tree
14423 expression with code CODE of type TYPE with an operand OP0. */
14426 fold_build1_stat_loc (location_t loc,
14427 enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
14430 #ifdef ENABLE_FOLD_CHECKING
14431 unsigned char checksum_before[16], checksum_after[16];
14432 struct md5_ctx ctx;
/* Checksum OP0 before folding so we can verify it was not modified.  */
14435 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14436 md5_init_ctx (&ctx);
14437 fold_checksum_tree (op0, &ctx, ht);
14438 md5_finish_ctx (&ctx, checksum_before);
/* Try to fold; fall back to building a plain CODE node at LOC.  */
14442 tem = fold_unary_loc (loc, code, type, op0);
14445 tem = build1_stat (code, type, op0 PASS_MEM_STAT);
14446 SET_EXPR_LOCATION (tem, loc);
14449 #ifdef ENABLE_FOLD_CHECKING
14450 md5_init_ctx (&ctx);
14451 fold_checksum_tree (op0, &ctx, ht);
14452 md5_finish_ctx (&ctx, checksum_after);
14455 if (memcmp (checksum_before, checksum_after, 16))
14456 fold_check_failed (op0, tem);
14461 /* Fold a binary tree expression with code CODE of type TYPE with
14462 operands OP0 and OP1. LOC is the location of the resulting
14463 expression. Return a folded expression if successful. Otherwise,
14464 return a tree expression with code CODE of type TYPE with operands
14468 fold_build2_stat_loc (location_t loc,
14469 enum tree_code code, tree type, tree op0, tree op1
14473 #ifdef ENABLE_FOLD_CHECKING
14474 unsigned char checksum_before_op0[16],
14475 checksum_before_op1[16],
14476 checksum_after_op0[16],
14477 checksum_after_op1[16];
14478 struct md5_ctx ctx;
/* Checksum each operand before folding to detect accidental mutation.  */
14481 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14482 md5_init_ctx (&ctx);
14483 fold_checksum_tree (op0, &ctx, ht);
14484 md5_finish_ctx (&ctx, checksum_before_op0);
14487 md5_init_ctx (&ctx);
14488 fold_checksum_tree (op1, &ctx, ht);
14489 md5_finish_ctx (&ctx, checksum_before_op1);
/* Try to fold; fall back to building a plain CODE node at LOC.  */
14493 tem = fold_binary_loc (loc, code, type, op0, op1);
14496 tem = build2_stat (code, type, op0, op1 PASS_MEM_STAT);
14497 SET_EXPR_LOCATION (tem, loc);
14500 #ifdef ENABLE_FOLD_CHECKING
14501 md5_init_ctx (&ctx);
14502 fold_checksum_tree (op0, &ctx, ht);
14503 md5_finish_ctx (&ctx, checksum_after_op0);
14506 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
14507 fold_check_failed (op0, tem);
14509 md5_init_ctx (&ctx);
14510 fold_checksum_tree (op1, &ctx, ht);
14511 md5_finish_ctx (&ctx, checksum_after_op1);
14514 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14515 fold_check_failed (op1, tem);
14520 /* Fold a ternary tree expression with code CODE of type TYPE with
14521 operands OP0, OP1, and OP2. Return a folded expression if
14522 successful. Otherwise, return a tree expression with code CODE of
14523 type TYPE with operands OP0, OP1, and OP2. */
14526 fold_build3_stat_loc (location_t loc, enum tree_code code, tree type,
14527 tree op0, tree op1, tree op2 MEM_STAT_DECL)
14530 #ifdef ENABLE_FOLD_CHECKING
14531 unsigned char checksum_before_op0[16],
14532 checksum_before_op1[16],
14533 checksum_before_op2[16],
14534 checksum_after_op0[16],
14535 checksum_after_op1[16],
14536 checksum_after_op2[16];
14537 struct md5_ctx ctx;
/* Checksum each operand before folding to detect accidental mutation.  */
14540 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14541 md5_init_ctx (&ctx);
14542 fold_checksum_tree (op0, &ctx, ht);
14543 md5_finish_ctx (&ctx, checksum_before_op0);
14546 md5_init_ctx (&ctx);
14547 fold_checksum_tree (op1, &ctx, ht);
14548 md5_finish_ctx (&ctx, checksum_before_op1);
14551 md5_init_ctx (&ctx);
14552 fold_checksum_tree (op2, &ctx, ht);
14553 md5_finish_ctx (&ctx, checksum_before_op2);
/* Variable-length expressions (CALL_EXPR etc.) must not come here.  */
14557 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
14558 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
14561 tem = build3_stat (code, type, op0, op1, op2 PASS_MEM_STAT);
14562 SET_EXPR_LOCATION (tem, loc);
14565 #ifdef ENABLE_FOLD_CHECKING
14566 md5_init_ctx (&ctx);
14567 fold_checksum_tree (op0, &ctx, ht);
14568 md5_finish_ctx (&ctx, checksum_after_op0);
14571 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
14572 fold_check_failed (op0, tem);
14574 md5_init_ctx (&ctx);
14575 fold_checksum_tree (op1, &ctx, ht);
14576 md5_finish_ctx (&ctx, checksum_after_op1);
14579 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14580 fold_check_failed (op1, tem);
14582 md5_init_ctx (&ctx);
14583 fold_checksum_tree (op2, &ctx, ht);
14584 md5_finish_ctx (&ctx, checksum_after_op2);
14587 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
14588 fold_check_failed (op2, tem);
14593 /* Fold a CALL_EXPR expression of type TYPE with operands FN and NARGS
14594 arguments in ARGARRAY, and a null static chain.
14595 Return a folded expression if successful. Otherwise, return a CALL_EXPR
14596 of type TYPE from the given operands as constructed by build_call_array. */
14599 fold_build_call_array_loc (location_t loc, tree type, tree fn,
14600 int nargs, tree *argarray)
14603 #ifdef ENABLE_FOLD_CHECKING
14604 unsigned char checksum_before_fn[16],
14605 checksum_before_arglist[16],
14606 checksum_after_fn[16],
14607 checksum_after_arglist[16];
14608 struct md5_ctx ctx;
/* Checksum FN and (as one digest) the whole argument list before folding.  */
14612 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14613 md5_init_ctx (&ctx);
14614 fold_checksum_tree (fn, &ctx, ht);
14615 md5_finish_ctx (&ctx, checksum_before_fn);
14618 md5_init_ctx (&ctx);
14619 for (i = 0; i < nargs; i++)
14620 fold_checksum_tree (argarray[i], &ctx, ht);
14621 md5_finish_ctx (&ctx, checksum_before_arglist);
14625 tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
14627 #ifdef ENABLE_FOLD_CHECKING
14628 md5_init_ctx (&ctx);
14629 fold_checksum_tree (fn, &ctx, ht);
14630 md5_finish_ctx (&ctx, checksum_after_fn);
14633 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
14634 fold_check_failed (fn, tem);
14636 md5_init_ctx (&ctx);
14637 for (i = 0; i < nargs; i++)
14638 fold_checksum_tree (argarray[i], &ctx, ht);
14639 md5_finish_ctx (&ctx, checksum_after_arglist);
/* No single tree identifies the arglist, so report with NULL_TREE.  */
14642 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
14643 fold_check_failed (NULL_TREE, tem);
14648 /* Perform constant folding and related simplification of initializer
14649 expression EXPR. These behave identically to "fold_buildN" but ignore
14650 potential run-time traps and exceptions that fold must preserve. */
/* START_FOLD_INIT saves the trap/rounding-sensitive flags and clears them
   (plus sets folding_initializer); END_FOLD_INIT restores everything.
   The two macros must bracket each fold_buildN_loc call below.  */
14652 #define START_FOLD_INIT \
14653 int saved_signaling_nans = flag_signaling_nans;\
14654 int saved_trapping_math = flag_trapping_math;\
14655 int saved_rounding_math = flag_rounding_math;\
14656 int saved_trapv = flag_trapv;\
14657 int saved_folding_initializer = folding_initializer;\
14658 flag_signaling_nans = 0;\
14659 flag_trapping_math = 0;\
14660 flag_rounding_math = 0;\
14662 folding_initializer = 1;
14664 #define END_FOLD_INIT \
14665 flag_signaling_nans = saved_signaling_nans;\
14666 flag_trapping_math = saved_trapping_math;\
14667 flag_rounding_math = saved_rounding_math;\
14668 flag_trapv = saved_trapv;\
14669 folding_initializer = saved_folding_initializer;
/* Each wrapper below forwards to the corresponding fold_buildN_loc with the
   flags neutralized (START/END_FOLD_INIT invocations elided in this
   listing).  */
14672 fold_build1_initializer_loc (location_t loc, enum tree_code code,
14673 tree type, tree op)
14678 result = fold_build1_loc (loc, code, type, op);
14685 fold_build2_initializer_loc (location_t loc, enum tree_code code,
14686 tree type, tree op0, tree op1)
14691 result = fold_build2_loc (loc, code, type, op0, op1);
14698 fold_build3_initializer_loc (location_t loc, enum tree_code code,
14699 tree type, tree op0, tree op1, tree op2)
14704 result = fold_build3_loc (loc, code, type, op0, op1, op2);
14711 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
14712 int nargs, tree *argarray)
14717 result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
14723 #undef START_FOLD_INIT
14724 #undef END_FOLD_INIT
14726 /* Determine if first argument is a multiple of second argument. Return 0 if
14727 it is not, or we cannot easily determined it to be.
14729 An example of the sort of thing we care about (at this point; this routine
14730 could surely be made more general, and expanded to do what the *_DIV_EXPR's
14731 fold cases do now) is discovering that
14733 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14739 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
14741 This code also handles discovering that
14743 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14745 is a multiple of 8 so we don't have to worry about dealing with a
14746 possible remainder.
14748 Note that we *look* inside a SAVE_EXPR only to determine how it was
14749 calculated; it is not safe for fold to do much of anything else with the
14750 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
14751 at run time. For example, the latter example above *cannot* be implemented
14752 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
14753 evaluation time of the original SAVE_EXPR is not necessarily the same at
14754 the time the new expression is evaluated. The only optimization of this
14755 sort that would be valid is changing
14757 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
14761 SAVE_EXPR (I) * SAVE_EXPR (J)
14763 (where the same SAVE_EXPR (J) is used in the original and the
14764 transformed version). */
/* NOTE(review): elided listing — case labels and some returns between the
   numbered lines are not visible here.  */
14767 multiple_of_p (tree type, const_tree top, const_tree bottom)
/* Identical trees are trivially multiples of each other.  */
14769 if (operand_equal_p (top, bottom, 0))
/* Only integer types are handled.  */
14772 if (TREE_CODE (type) != INTEGER_TYPE)
14775 switch (TREE_CODE (top))
14778 /* Bitwise and provides a power of two multiple. If the mask is
14779 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
14780 if (!integer_pow2p (bottom))
14785 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
14786 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom))
/* Elided case (presumably MULT_EXPR / PLUS-like): both operands must be
   multiples — TODO confirm against the full source.  */
14790 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
14791 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom))
/* Elided case (presumably LSHIFT_EXPR): rewrite as a multiplication by a
   power of two and recurse, guarding against an overflowing shift count.  */
14794 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
14798 op1 = TREE_OPERAND (top, 1);
14799 /* const_binop may not detect overflow correctly,
14800 so check for it explicitly here. */
14801 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
14802 > TREE_INT_CST_LOW (op1)
14803 && TREE_INT_CST_HIGH (op1) == 0
14804 && 0 != (t1 = fold_convert (type,
14805 const_binop (LSHIFT_EXPR,
14808 && !TREE_OVERFLOW (t1))
14809 return multiple_of_p (type, t1, bottom);
14814 /* Can't handle conversions from non-integral or wider integral type. */
14815 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
14816 || (TYPE_PRECISION (type)
14817 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
14820 /* .. fall through ... */
14823 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
/* Elided case (presumably SAVE_EXPR/COND-like, operands 1 and 2).  */
14826 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
14827 && multiple_of_p (type, TREE_OPERAND (top, 2), bottom))
/* Constant TOP: decide by exact division, but refuse when signs could
   make the unsigned modulus meaningless.  */
14830 if (TREE_CODE (bottom) != INTEGER_CST
14831 || integer_zerop (bottom)
14832 || (TYPE_UNSIGNED (type)
14833 && (tree_int_cst_sgn (top) < 0
14834 || tree_int_cst_sgn (bottom) < 0)))
14836 return integer_zerop (int_const_binop (TRUNC_MOD_EXPR,
14844 /* Return true if CODE or TYPE is known to be non-negative. */
14847 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
14849 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
14850 && truth_value_p (code))
14851 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
14852 have a signed:1 type (where the value is -1 and 0). */
14857 /* Return true if (CODE OP0) is known to be non-negative. If the return
14858 value is based on the assumption that signed overflow is undefined,
14859 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14860 *STRICT_OVERFLOW_P. */
14863 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
14864 bool *strict_overflow_p)
/* Unsigned results are non-negative by definition.  */
14866 if (TYPE_UNSIGNED (type))
/* Elided case label (presumably ABS_EXPR) precedes this comment.  */
14872 /* We can't return 1 if flag_wrapv is set because
14873 ABS_EXPR<INT_MIN> = INT_MIN. */
14874 if (!INTEGRAL_TYPE_P (type))
14876 if (TYPE_OVERFLOW_UNDEFINED (type))
14878 *strict_overflow_p = true;
14883 case NON_LVALUE_EXPR:
14885 case FIX_TRUNC_EXPR:
/* Pass-throughs: sign of the result tracks the sign of OP0.  */
14886 return tree_expr_nonnegative_warnv_p (op0,
14887 strict_overflow_p);
/* Conversion case (elided label): decide by the source/target type pair.  */
14891 tree inner_type = TREE_TYPE (op0);
14892 tree outer_type = type;
14894 if (TREE_CODE (outer_type) == REAL_TYPE)
14896 if (TREE_CODE (inner_type) == REAL_TYPE)
14897 return tree_expr_nonnegative_warnv_p (op0,
14898 strict_overflow_p);
14899 if (TREE_CODE (inner_type) == INTEGER_TYPE)
14901 if (TYPE_UNSIGNED (inner_type))
14903 return tree_expr_nonnegative_warnv_p (op0,
14904 strict_overflow_p);
14907 else if (TREE_CODE (outer_type) == INTEGER_TYPE)
14909 if (TREE_CODE (inner_type) == REAL_TYPE)
14910 return tree_expr_nonnegative_warnv_p (op0,
14911 strict_overflow_p);
/* int -> int: non-negative only for a widening conversion from an
   unsigned source (zero extension).  */
14912 if (TREE_CODE (inner_type) == INTEGER_TYPE)
14913 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
14914 && TYPE_UNSIGNED (inner_type);
14920 return tree_simple_nonnegative_warnv_p (code, type);
14923 /* We don't know sign of `t', so be conservative and return false. */
14927 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
14928 value is based on the assumption that signed overflow is undefined,
14929 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14930 *STRICT_OVERFLOW_P. */
14933 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
14934 tree op1, bool *strict_overflow_p)
/* Unsigned results are non-negative by definition.  */
14936 if (TYPE_UNSIGNED (type))
14941 case POINTER_PLUS_EXPR:
14943 if (FLOAT_TYPE_P (type))
/* FP addition of two non-negatives is non-negative.  */
14944 return (tree_expr_nonnegative_warnv_p (op0,
14946 && tree_expr_nonnegative_warnv_p (op1,
14947 strict_overflow_p));
14949 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
14950 both unsigned and at least 2 bits shorter than the result. */
14951 if (TREE_CODE (type) == INTEGER_TYPE
14952 && TREE_CODE (op0) == NOP_EXPR
14953 && TREE_CODE (op1) == NOP_EXPR)
14955 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
14956 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
14957 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
14958 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
/* +1 accounts for the carry out of the addition itself.  */
14960 unsigned int prec = MAX (TYPE_PRECISION (inner1),
14961 TYPE_PRECISION (inner2)) + 1;
14962 return prec < TYPE_PRECISION (type);
/* MULT_EXPR case (elided label).  */
14968 if (FLOAT_TYPE_P (type))
14970 /* x * x for floating point x is always non-negative. */
14971 if (operand_equal_p (op0, op1, 0))
14973 return (tree_expr_nonnegative_warnv_p (op0,
14975 && tree_expr_nonnegative_warnv_p (op1,
14976 strict_overflow_p));
14979 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
14980 both unsigned and their total bits is shorter than the result. */
14981 if (TREE_CODE (type) == INTEGER_TYPE
14982 && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
14983 && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
/* For a NOP_EXPR use the pre-conversion type; for a constant, the
   constant's own type (elided `:' arms).  */
14985 tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
14986 ? TREE_TYPE (TREE_OPERAND (op0, 0))
14988 tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
14989 ? TREE_TYPE (TREE_OPERAND (op1, 0))
14992 bool unsigned0 = TYPE_UNSIGNED (inner0);
14993 bool unsigned1 = TYPE_UNSIGNED (inner1);
/* A non-negative constant behaves like an unsigned operand here.  */
14995 if (TREE_CODE (op0) == INTEGER_CST)
14996 unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
14998 if (TREE_CODE (op1) == INTEGER_CST)
14999 unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
15001 if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
15002 && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
/* For constants, use the minimal number of bits that represents the
   value rather than the full type precision.  */
15004 unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
15005 ? tree_int_cst_min_precision (op0, /*unsignedp=*/true)
15006 : TYPE_PRECISION (inner0);
15008 unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
15009 ? tree_int_cst_min_precision (op1, /*unsignedp=*/true)
15010 : TYPE_PRECISION (inner1);
15012 return precision0 + precision1 < TYPE_PRECISION (type);
/* Elided case(s) (presumably BIT_AND-like): either operand non-negative
   suffices — TODO confirm against the full source.  */
15019 return (tree_expr_nonnegative_warnv_p (op0,
15021 || tree_expr_nonnegative_warnv_p (op1,
15022 strict_overflow_p));
15028 case TRUNC_DIV_EXPR:
15029 case CEIL_DIV_EXPR:
15030 case FLOOR_DIV_EXPR:
15031 case ROUND_DIV_EXPR:
15032 return (tree_expr_nonnegative_warnv_p (op0,
15034 && tree_expr_nonnegative_warnv_p (op1,
15035 strict_overflow_p));
15037 case TRUNC_MOD_EXPR:
15038 case CEIL_MOD_EXPR:
15039 case FLOOR_MOD_EXPR:
15040 case ROUND_MOD_EXPR:
/* The remainder takes the sign of the dividend (OP0).  */
15041 return tree_expr_nonnegative_warnv_p (op0,
15042 strict_overflow_p);
15044 return tree_simple_nonnegative_warnv_p (code, type);
15047 /* We don't know sign of `t', so be conservative and return false. */
15051 /* Return true if T is known to be non-negative. If the return
15052 value is based on the assumption that signed overflow is undefined,
15053 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15054 *STRICT_OVERFLOW_P. */
15057 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15059 if (TYPE_UNSIGNED (TREE_TYPE (t)))
15062 switch (TREE_CODE (t))
/* Constants: inspect the stored value directly (case labels elided).  */
15065 return tree_int_cst_sgn (t) >= 0;
15068 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
15071 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
/* Elided case (presumably COND_EXPR): both selected arms must be
   non-negative.  */
15074 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15076 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
15077 strict_overflow_p));
15079 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
15082 /* We don't know sign of `t', so be conservative and return false. */
15086 /* Return true if T is known to be non-negative. If the return
15087 value is based on the assumption that signed overflow is undefined,
15088 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15089 *STRICT_OVERFLOW_P. */
15092 tree_call_nonnegative_warnv_p (tree type, tree fndecl,
15093 tree arg0, tree arg1, bool *strict_overflow_p)
15095 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
15096 switch (DECL_FUNCTION_CODE (fndecl))
/* Builtins whose mathematical range is inherently non-negative.  */
15098 CASE_FLT_FN (BUILT_IN_ACOS):
15099 CASE_FLT_FN (BUILT_IN_ACOSH):
15100 CASE_FLT_FN (BUILT_IN_CABS):
15101 CASE_FLT_FN (BUILT_IN_COSH):
15102 CASE_FLT_FN (BUILT_IN_ERFC):
15103 CASE_FLT_FN (BUILT_IN_EXP):
15104 CASE_FLT_FN (BUILT_IN_EXP10):
15105 CASE_FLT_FN (BUILT_IN_EXP2):
15106 CASE_FLT_FN (BUILT_IN_FABS):
15107 CASE_FLT_FN (BUILT_IN_FDIM):
15108 CASE_FLT_FN (BUILT_IN_HYPOT):
15109 CASE_FLT_FN (BUILT_IN_POW10):
15110 CASE_INT_FN (BUILT_IN_FFS):
15111 CASE_INT_FN (BUILT_IN_PARITY):
15112 CASE_INT_FN (BUILT_IN_POPCOUNT):
15113 case BUILT_IN_BSWAP32:
15114 case BUILT_IN_BSWAP64:
15118 CASE_FLT_FN (BUILT_IN_SQRT):
15119 /* sqrt(-0.0) is -0.0. */
15120 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
15122 return tree_expr_nonnegative_warnv_p (arg0,
15123 strict_overflow_p);
/* Builtins that preserve the sign of their first argument.  */
15125 CASE_FLT_FN (BUILT_IN_ASINH):
15126 CASE_FLT_FN (BUILT_IN_ATAN):
15127 CASE_FLT_FN (BUILT_IN_ATANH):
15128 CASE_FLT_FN (BUILT_IN_CBRT):
15129 CASE_FLT_FN (BUILT_IN_CEIL):
15130 CASE_FLT_FN (BUILT_IN_ERF):
15131 CASE_FLT_FN (BUILT_IN_EXPM1):
15132 CASE_FLT_FN (BUILT_IN_FLOOR):
15133 CASE_FLT_FN (BUILT_IN_FMOD):
15134 CASE_FLT_FN (BUILT_IN_FREXP):
15135 CASE_FLT_FN (BUILT_IN_LCEIL):
15136 CASE_FLT_FN (BUILT_IN_LDEXP):
15137 CASE_FLT_FN (BUILT_IN_LFLOOR):
15138 CASE_FLT_FN (BUILT_IN_LLCEIL):
15139 CASE_FLT_FN (BUILT_IN_LLFLOOR):
15140 CASE_FLT_FN (BUILT_IN_LLRINT):
15141 CASE_FLT_FN (BUILT_IN_LLROUND):
15142 CASE_FLT_FN (BUILT_IN_LRINT):
15143 CASE_FLT_FN (BUILT_IN_LROUND):
15144 CASE_FLT_FN (BUILT_IN_MODF):
15145 CASE_FLT_FN (BUILT_IN_NEARBYINT):
15146 CASE_FLT_FN (BUILT_IN_RINT):
15147 CASE_FLT_FN (BUILT_IN_ROUND):
15148 CASE_FLT_FN (BUILT_IN_SCALB):
15149 CASE_FLT_FN (BUILT_IN_SCALBLN):
15150 CASE_FLT_FN (BUILT_IN_SCALBN):
15151 CASE_FLT_FN (BUILT_IN_SIGNBIT):
15152 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
15153 CASE_FLT_FN (BUILT_IN_SINH):
15154 CASE_FLT_FN (BUILT_IN_TANH):
15155 CASE_FLT_FN (BUILT_IN_TRUNC):
15156 /* True if the 1st argument is nonnegative. */
15157 return tree_expr_nonnegative_warnv_p (arg0,
15158 strict_overflow_p);
15160 CASE_FLT_FN (BUILT_IN_FMAX):
15161 /* True if the 1st OR 2nd arguments are nonnegative. */
15162 return (tree_expr_nonnegative_warnv_p (arg0,
15164 || (tree_expr_nonnegative_warnv_p (arg1,
15165 strict_overflow_p)));
15167 CASE_FLT_FN (BUILT_IN_FMIN):
15168 /* True if the 1st AND 2nd arguments are nonnegative. */
15169 return (tree_expr_nonnegative_warnv_p (arg0,
15171 && (tree_expr_nonnegative_warnv_p (arg1,
15172 strict_overflow_p)));
15174 CASE_FLT_FN (BUILT_IN_COPYSIGN):
15175 /* True if the 2nd argument is nonnegative. */
15176 return tree_expr_nonnegative_warnv_p (arg1,
15177 strict_overflow_p);
15179 CASE_FLT_FN (BUILT_IN_POWI):
15180 /* True if the 1st argument is nonnegative or the second
15181 argument is an even integer. */
15182 if (TREE_CODE (arg1) == INTEGER_CST
15183 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
15185 return tree_expr_nonnegative_warnv_p (arg0,
15186 strict_overflow_p);
15188 CASE_FLT_FN (BUILT_IN_POW):
15189 /* True if the 1st argument is nonnegative or the second
15190 argument is an even integer valued real. */
15191 if (TREE_CODE (arg1) == REAL_CST)
15196 c = TREE_REAL_CST (arg1);
15197 n = real_to_integer (&c);
/* Round-trip through integer: an exponent is "even integer valued"
   only if it survives the conversion and its low bit is clear
   (evenness test elided in this listing).  */
15200 REAL_VALUE_TYPE cint;
15201 real_from_integer (&cint, VOIDmode, n,
15202 n < 0 ? -1 : 0, 0);
15203 if (real_identical (&c, &cint))
15207 return tree_expr_nonnegative_warnv_p (arg0,
15208 strict_overflow_p);
/* Not a recognized builtin: fall back to the type-based check.  */
15213 return tree_simple_nonnegative_warnv_p (CALL_EXPR,
15217 /* Return true if T is known to be non-negative. If the return
15218 value is based on the assumption that signed overflow is undefined,
15219 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15220 *STRICT_OVERFLOW_P. */
15223 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15225 enum tree_code code = TREE_CODE (t);
15226 if (TYPE_UNSIGNED (TREE_TYPE (t)))
/* TARGET_EXPR case (elided label): look at what the initializer stores
   into the temporary slot.  */
15233 tree temp = TARGET_EXPR_SLOT (t);
15234 t = TARGET_EXPR_INITIAL (t);
15236 /* If the initializer is non-void, then it's a normal expression
15237 that will be assigned to the slot. */
15238 if (!VOID_TYPE_P (t))
15239 return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);
15241 /* Otherwise, the initializer sets the slot in some way. One common
15242 way is an assignment statement at the end of the initializer. */
15245 if (TREE_CODE (t) == BIND_EXPR)
15246 t = expr_last (BIND_EXPR_BODY (t));
15247 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
15248 || TREE_CODE (t) == TRY_CATCH_EXPR)
15249 t = expr_last (TREE_OPERAND (t, 0));
15250 else if (TREE_CODE (t) == STATEMENT_LIST)
/* Found "slot = value" at the end: the slot is non-negative iff the
   assigned value is.  */
15255 if (TREE_CODE (t) == MODIFY_EXPR
15256 && TREE_OPERAND (t, 0) == temp)
15257 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15258 strict_overflow_p);
/* CALL_EXPR case (elided label): extract up to two arguments and defer
   to the builtin-aware helper.  */
15265 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
15266 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
15268 return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
15269 get_callee_fndecl (t),
15272 strict_overflow_p);
15274 case COMPOUND_EXPR:
/* A compound expression's value is its second operand.  */
15276 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15277 strict_overflow_p);
/* Elided case (presumably BIND_EXPR): value is the last expression of
   operand 1.  */
15279 return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
15280 strict_overflow_p);
/* Elided case (presumably SAVE_EXPR): value tracks operand 0.  */
15282 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
15283 strict_overflow_p);
15286 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
15290 /* We don't know sign of `t', so be conservative and return false. */
15294 /* Return true if T is known to be non-negative. If the return
15295 value is based on the assumption that signed overflow is undefined,
15296 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15297 *STRICT_OVERFLOW_P. */
/* Top-level dispatcher: routes T to the unary/binary/single/invalid
   helpers above according to its tree code class.  */
15300 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15302 enum tree_code code;
15303 if (t == error_mark_node)
15306 code = TREE_CODE (t);
15307 switch (TREE_CODE_CLASS (code))
15310 case tcc_comparison:
15311 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15313 TREE_OPERAND (t, 0),
15314 TREE_OPERAND (t, 1),
15315 strict_overflow_p);
15318 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15320 TREE_OPERAND (t, 0),
15321 strict_overflow_p);
15324 case tcc_declaration:
15325 case tcc_reference:
15326 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
/* Truth-valued expressions are handled as binary/unary despite being
   classified tcc_expression (other case labels elided).  */
15334 case TRUTH_AND_EXPR:
15335 case TRUTH_OR_EXPR:
15336 case TRUTH_XOR_EXPR:
15337 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15339 TREE_OPERAND (t, 0),
15340 TREE_OPERAND (t, 1),
15341 strict_overflow_p);
15342 case TRUTH_NOT_EXPR:
15343 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15345 TREE_OPERAND (t, 0),
15346 strict_overflow_p);
15353 case WITH_SIZE_EXPR:
15355 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
/* Everything else (TARGET_EXPR, CALL_EXPR, ...) goes to the catch-all.  */
15358 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p);
15362 /* Return true if `t' is known to be non-negative. Handle warnings
15363 about undefined signed overflow. */
15366 tree_expr_nonnegative_p (tree t)
15368 bool ret, strict_overflow_p;
15370 strict_overflow_p = false;
15371 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
15372 if (strict_overflow_p)
15373 fold_overflow_warning (("assuming signed overflow does not occur when "
15374 "determining that expression is always "
15376 WARN_STRICT_OVERFLOW_MISC);
15381 /* Return true when (CODE OP0) is an address and is known to be nonzero.
15382 For floating point we further ensure that T is not denormal.
15383 Similar logic is present in nonzero_address in rtlanal.h.
15385 If the return value is based on the assumption that signed overflow
15386 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15387 change *STRICT_OVERFLOW_P. */
15390 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
15391 bool *strict_overflow_p)
/* Elided case label (presumably ABS_EXPR-like): nonzero tracks OP0.  */
15396 return tree_expr_nonzero_warnv_p (op0,
15397 strict_overflow_p);
/* Conversion case (elided label): a widening-or-equal conversion cannot
   turn a nonzero value into zero.  */
15401 tree inner_type = TREE_TYPE (op0);
15402 tree outer_type = type;
15404 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
15405 && tree_expr_nonzero_warnv_p (op0,
15406 strict_overflow_p));
15410 case NON_LVALUE_EXPR:
15411 return tree_expr_nonzero_warnv_p (op0,
15412 strict_overflow_p);
15421 /* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
15422 For floating point we further ensure that T is not denormal.
15423 Similar logic is present in nonzero_address in rtlanal.h.
15425 If the return value is based on the assumption that signed overflow
15426 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15427 change *STRICT_OVERFLOW_P. */
/* NOTE(review): the `type' parameter line, the switch header, and
   several case labels/returns (MULT_EXPR, MIN_EXPR, MAX_EXPR,
   BIT_IOR_EXPR, default) are missing from this listing; the case
   bodies below are annotated with their apparent roles.  */
15430 tree_binary_nonzero_warnv_p (enum tree_code code,
15433 tree op1, bool *strict_overflow_p)
15435 bool sub_strict_overflow_p;
/* PLUS/POINTER_PLUS: a sum of nonnegative values with at least one
   nonzero operand cannot be zero on two's complement targets.  */
15438 case POINTER_PLUS_EXPR:
15440 if (TYPE_OVERFLOW_UNDEFINED (type))
15442 /* With the presence of negative values it is hard
15443 to say something. */
15444 sub_strict_overflow_p = false;
15445 if (!tree_expr_nonnegative_warnv_p (op0,
15446 &sub_strict_overflow_p)
15447 || !tree_expr_nonnegative_warnv_p (op1,
15448 &sub_strict_overflow_p))
15450 /* One of operands must be positive and the other non-negative. */
15451 /* We don't set *STRICT_OVERFLOW_P here: even if this value
15452 overflows, on a twos-complement machine the sum of two
15453 nonnegative numbers can never be zero. */
15454 return (tree_expr_nonzero_warnv_p (op0,
15456 || tree_expr_nonzero_warnv_p (op1,
15457 strict_overflow_p));
/* Multiplication case (label missing): nonzero * nonzero is nonzero
   only when overflow is undefined, so record the assumption.  */
15462 if (TYPE_OVERFLOW_UNDEFINED (type))
15464 if (tree_expr_nonzero_warnv_p (op0,
15466 && tree_expr_nonzero_warnv_p (op1,
15467 strict_overflow_p))
15469 *strict_overflow_p = true;
/* MIN case (label missing): both operands nonzero implies MIN nonzero;
   sub-flag is only propagated when the fact was actually used.  */
15476 sub_strict_overflow_p = false;
15477 if (tree_expr_nonzero_warnv_p (op0,
15478 &sub_strict_overflow_p)
15479 && tree_expr_nonzero_warnv_p (op1,
15480 &sub_strict_overflow_p))
15482 if (sub_strict_overflow_p)
15483 *strict_overflow_p = true;
/* MAX case (label missing): several sufficient conditions tried in
   turn, propagating the overflow assumption only on success.  */
15488 sub_strict_overflow_p = false;
15489 if (tree_expr_nonzero_warnv_p (op0,
15490 &sub_strict_overflow_p))
15492 if (sub_strict_overflow_p)
15493 *strict_overflow_p = true;
15495 /* When both operands are nonzero, then MAX must be too. */
15496 if (tree_expr_nonzero_warnv_p (op1,
15497 strict_overflow_p))
15500 /* MAX where operand 0 is positive is positive. */
15501 return tree_expr_nonnegative_warnv_p (op0,
15502 strict_overflow_p);
15504 /* MAX where operand 1 is positive is positive. */
15505 else if (tree_expr_nonzero_warnv_p (op1,
15506 &sub_strict_overflow_p)
15507 && tree_expr_nonnegative_warnv_p (op1,
15508 &sub_strict_overflow_p))
15510 if (sub_strict_overflow_p)
15511 *strict_overflow_p = true;
/* BIT_IOR case (label missing): either operand nonzero suffices.  */
15517 return (tree_expr_nonzero_warnv_p (op1,
15519 || tree_expr_nonzero_warnv_p (op0,
15520 strict_overflow_p));
15529 /* Return true when T is an address and is known to be nonzero.
15530 For floating point we further ensure that T is not denormal.
15531 Similar logic is present in nonzero_address in rtlanal.h.
15533 If the return value is based on the assumption that signed overflow
15534 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15535 change *STRICT_OVERFLOW_P. */
/* NOTE(review): case labels (INTEGER_CST, ADDR_EXPR, COND_EXPR) and
   some returns are missing from this listing.  */
15538 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
15540 bool sub_strict_overflow_p;
15541 switch (TREE_CODE (t))
/* Integer constant case (label missing): nonzero iff not literal 0.  */
15544 return !integer_zerop (t);
/* Address case (label missing): reason about the addressed base.  */
15548 tree base = get_base_address (TREE_OPERAND (t, 0));
15553 /* Weak declarations may link to NULL. Other things may also be NULL
15554 so protect with -fdelete-null-pointer-checks; but not variables
15555 allocated on the stack. */
15557 && (flag_delete_null_pointer_checks
15558 || (TREE_CODE (base) == VAR_DECL && !TREE_STATIC (base))))
15559 return !VAR_OR_FUNCTION_DECL_P (base) || !DECL_WEAK (base);
15561 /* Constants are never weak. */
15562 if (CONSTANT_CLASS_P (base))
/* Conditional case (label missing): nonzero when both arms are,
   propagating the overflow assumption only if actually used.  */
15569 sub_strict_overflow_p = false;
15570 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
15571 &sub_strict_overflow_p)
15572 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
15573 &sub_strict_overflow_p))
15575 if (sub_strict_overflow_p)
15576 *strict_overflow_p = true;
15587 /* Return true when T is an address and is known to be nonzero.
15588 For floating point we further ensure that T is not denormal.
15589 Similar logic is present in nonzero_address in rtlanal.h.
15591 If the return value is based on the assumption that signed overflow
15592 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15593 change *STRICT_OVERFLOW_P. */
/* Top-level nonzero predicate: dispatches on T's tree-code class to
   the unary/binary/single helpers above.
   NOTE(review): some case labels (tcc_unary, SAVE_EXPR, CALL_EXPR,
   default) and braces are missing from this listing.  */
15596 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
15598 tree type = TREE_TYPE (t);
15599 enum tree_code code;
15601 /* Doing something useful for floating point would need more work. */
15602 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
15605 code = TREE_CODE (t);
15606 switch (TREE_CODE_CLASS (code))
/* Unary class (label missing from listing).  */
15609 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
15610 strict_overflow_p);
15612 case tcc_comparison:
15613 return tree_binary_nonzero_warnv_p (code, type,
15614 TREE_OPERAND (t, 0),
15615 TREE_OPERAND (t, 1),
15616 strict_overflow_p);
15618 case tcc_declaration:
15619 case tcc_reference:
15620 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
15628 case TRUTH_NOT_EXPR:
15629 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
15630 strict_overflow_p);
15632 case TRUTH_AND_EXPR:
15633 case TRUTH_OR_EXPR:
15634 case TRUTH_XOR_EXPR:
15635 return tree_binary_nonzero_warnv_p (code, type,
15636 TREE_OPERAND (t, 0),
15637 TREE_OPERAND (t, 1),
15638 strict_overflow_p);
15645 case WITH_SIZE_EXPR:
15647 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
/* COMPOUND_EXPR yields its second operand; SAVE_EXPR (label missing)
   yields its wrapped operand.  */
15649 case COMPOUND_EXPR:
15652 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
15653 strict_overflow_p);
15656 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
15657 strict_overflow_p);
/* Call case (label missing): alloca never returns a null pointer.  */
15660 return alloca_call_p (t);
15668 /* Return true when T is an address and is known to be nonzero.
15669 Handle warnings about undefined signed overflow. */
/* Public wrapper mirroring tree_expr_nonnegative_p: runs the _warnv_p
   variant and emits the -Wstrict-overflow diagnostic here when the
   result relied on undefined signed overflow.
   NOTE(review): the `return ret;' line is missing from this listing.  */
15672 tree_expr_nonzero_p (tree t)
15674 bool ret, strict_overflow_p;
15676 strict_overflow_p = false;
15677 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
15678 if (strict_overflow_p)
15679 fold_overflow_warning (("assuming signed overflow does not occur when "
15680 "determining that expression is always "
15682 WARN_STRICT_OVERFLOW_MISC);
15686 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
15687 attempt to fold the expression to a constant without modifying TYPE,
15690 If the expression could be simplified to a constant, then return
15691 the constant. If the expression would not be simplified to a
15692 constant, then return NULL_TREE. */
/* Thin filter over fold_binary: keep the result only if it folded all
   the way down to a constant tree.  */
15695 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
15697 tree tem = fold_binary (code, type, op0, op1);
15698 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
15701 /* Given the components of a unary expression CODE, TYPE and OP0,
15702 attempt to fold the expression to a constant without modifying
15705 If the expression could be simplified to a constant, then return
15706 the constant. If the expression would not be simplified to a
15707 constant, then return NULL_TREE. */
/* Unary counterpart of fold_binary_to_constant.  */
15710 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
15712 tree tem = fold_unary (code, type, op0);
15713 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
15716 /* If EXP represents referencing an element in a constant string
15717 (either via pointer arithmetic or array indexing), return the
15718 tree representing the value accessed, otherwise return NULL. */
/* NOTE(review): variable declarations for `string'/`index', the else
   branch header, and parts of the final guard expression are missing
   from this listing.  */
15721 fold_read_from_constant_string (tree exp)
/* Only INDIRECT_REF / ARRAY_REF of integral element type qualify.  */
15723 if ((TREE_CODE (exp) == INDIRECT_REF
15724 || TREE_CODE (exp) == ARRAY_REF)
15725 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
15727 tree exp1 = TREE_OPERAND (exp, 0);
15730 location_t loc = EXPR_LOCATION (exp);
/* Pointer form: extract the string and constant byte offset.  */
15732 if (TREE_CODE (exp) == INDIRECT_REF)
15733 string = string_constant (exp1, &index);
/* Array form (else branch header missing): normalize the index by
   the array's lower bound.  */
15736 tree low_bound = array_ref_low_bound (exp);
15737 index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));
15739 /* Optimize the special-case of a zero lower bound.
15741 We convert the low_bound to sizetype to avoid some problems
15742 with constant folding. (E.g. suppose the lower bound is 1,
15743 and its mode is QI. Without the conversion, (ARRAY
15744 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
15745 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
15746 if (! integer_zerop (low_bound))
15747 index = size_diffop_loc (loc, index,
15748 fold_convert_loc (loc, sizetype, low_bound));
/* Final guard: the access must be a constant in-bounds index into a
   single-byte-element STRING_CST of matching mode.  */
15754 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
15755 && TREE_CODE (string) == STRING_CST
15756 && TREE_CODE (index) == INTEGER_CST
15757 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
15758 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
15760 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
15761 return build_int_cst_type (TREE_TYPE (exp),
15762 (TREE_STRING_POINTER (string)
15763 [TREE_INT_CST_LOW (index)]));
15768 /* Return the tree for neg (ARG0) when ARG0 is known to be either
15769 an integer constant, real, or fixed-point constant.
15771 TYPE is the type of the result. */
/* NOTE(review): case labels (INTEGER_CST, REAL_CST, FIXED_CST), break
   statements and the final `return t;' are missing from this listing.  */
15774 fold_negate_const (tree arg0, tree type)
15776 tree t = NULL_TREE;
15778 switch (TREE_CODE (arg0))
/* Integer case: negate the double-word value and refit it to TYPE,
   flagging overflow only for signed types.  */
15782 unsigned HOST_WIDE_INT low;
15783 HOST_WIDE_INT high;
15784 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
15785 TREE_INT_CST_HIGH (arg0),
15787 t = force_fit_type_double (type, low, high, 1,
15788 (overflow | TREE_OVERFLOW (arg0))
15789 && !TYPE_UNSIGNED (type));
/* Real case: sign flip cannot overflow.  */
15794 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
/* Fixed-point case: honor the type's saturation semantics.  */
15799 FIXED_VALUE_TYPE f;
15800 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
15801 &(TREE_FIXED_CST (arg0)), NULL,
15802 TYPE_SATURATING (type));
15803 t = build_fixed (type, f);
15804 /* Propagate overflow flags. */
15805 if (overflow_p | TREE_OVERFLOW (arg0))
15806 TREE_OVERFLOW (t) = 1;
/* Any other constant kind violates the documented precondition.  */
15811 gcc_unreachable ();
15817 /* Return the tree for abs (ARG0) when ARG0 is known to be either
15818 an integer constant or real constant.
15820 TYPE is the type of the result. */
/* NOTE(review): case labels, the `t = arg0;' assignments for the
   trivial branches, breaks and final return are missing from this
   listing.  */
15823 fold_abs_const (tree arg0, tree type)
15825 tree t = NULL_TREE;
15827 switch (TREE_CODE (arg0))
15830 /* If the value is unsigned, then the absolute value is
15831 the same as the ordinary value. */
15832 if (TYPE_UNSIGNED (type))
15834 /* Similarly, if the value is non-negative. */
15835 else if (INT_CST_LT (integer_minus_one_node, arg0))
15837 /* If the value is negative, then the absolute value is
/* ... its negation: negate the double-word value and refit.  */
15841 unsigned HOST_WIDE_INT low;
15842 HOST_WIDE_INT high;
15843 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
15844 TREE_INT_CST_HIGH (arg0),
15846 t = force_fit_type_double (type, low, high, -1,
15847 overflow | TREE_OVERFLOW (arg0));
/* Real case: flip the sign only when the constant is negative.  */
15852 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
15853 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
/* Only integer and real constants are valid here.  */
15859 gcc_unreachable ();
15865 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
15866 constant. TYPE is the type of the result. */
/* Bitwise complement of a double-word integer constant, refit to TYPE
   and propagating ARG0's overflow flag.
   NOTE(review): the final `return t;' is missing from this listing.  */
15869 fold_not_const (tree arg0, tree type)
15871 tree t = NULL_TREE;
15873 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
15875 t = force_fit_type_double (type, ~TREE_INT_CST_LOW (arg0),
15876 ~TREE_INT_CST_HIGH (arg0), 0,
15877 TREE_OVERFLOW (arg0));
15882 /* Given CODE, a relational operator, the target type, TYPE and two
15883 constant operands OP0 and OP1, return the result of the
15884 relational operation. If the result is not a compile time
15885 constant, then return NULL_TREE. */
/* NOTE(review): several lines are missing from this listing, including
   the NaN-case result assignments, the swap of op0/op1, the
   invert-tracking statements, and the trailing `return NULL_TREE;'.  */
15888 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
15890 int result, invert;
15892 /* From here on, the only cases we handle are when the result is
15893 known to be a constant. */
/* Floating-point constants: handle NaNs specially, then delegate the
   ordered comparison to real_compare.  */
15895 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
15897 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
15898 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
15900 /* Handle the cases where either operand is a NaN. */
15901 if (real_isnan (c0) || real_isnan (c1))
15911 case UNORDERED_EXPR:
/* Ordered comparisons of a NaN cannot be folded when they might trap
   (listing gap: the per-code result assignments are missing).  */
15925 if (flag_trapping_math)
15931 gcc_unreachable ();
15934 return constant_boolean_node (result, type);
15937 return constant_boolean_node (real_compare (code, c0, c1), type);
/* Fixed-point constants compare directly.  */
15940 if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
15942 const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
15943 const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
15944 return constant_boolean_node (fixed_compare (code, c0, c1), type);
15947 /* Handle equality/inequality of complex constants. */
15948 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
15950 tree rcond = fold_relational_const (code, type,
15951 TREE_REALPART (op0),
15952 TREE_REALPART (op1));
15953 tree icond = fold_relational_const (code, type,
15954 TREE_IMAGPART (op0),
15955 TREE_IMAGPART (op1));
/* EQ holds iff both parts are equal; NE iff either part differs.  */
15956 if (code == EQ_EXPR)
15957 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
15958 else if (code == NE_EXPR)
15959 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
15964 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
15966 To compute GT, swap the arguments and do LT.
15967 To compute GE, do LT and invert the result.
15968 To compute LE, swap the arguments, do LT and invert the result.
15969 To compute NE, do EQ and invert the result.
15971 Therefore, the code below must handle only EQ and LT. */
15973 if (code == LE_EXPR || code == GT_EXPR)
15978 code = swap_tree_comparison (code);
15981 /* Note that it is safe to invert for real values here because we
15982 have already handled the one case that it matters. */
15985 if (code == NE_EXPR || code == GE_EXPR)
15988 code = invert_tree_comparison (code, false);
15991 /* Compute a result for LT or EQ if args permit;
15992 Otherwise return T. */
15993 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
15995 if (code == EQ_EXPR)
15996 result = tree_int_cst_equal (op0, op1);
/* Choose signed or unsigned LT based on the operand type.  */
15997 else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
15998 result = INT_CST_LT_UNSIGNED (op0, op1);
16000 result = INT_CST_LT (op0, op1);
16007 return constant_boolean_node (result, type);
16010 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
16011 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
/* NOTE(review): the early `return expr;' bodies of the guards below
   are missing from this listing.  */
16015 fold_build_cleanup_point_expr (tree type, tree expr)
16017 /* If the expression does not have side effects then we don't have to wrap
16018 it with a cleanup point expression. */
16019 if (!TREE_SIDE_EFFECTS (expr))
16022 /* If the expression is a return, check to see if the expression inside the
16023 return has no side effects or the right hand side of the modify expression
16024 inside the return. If either don't have side effects set we don't need to
16025 wrap the expression in a cleanup point expression. Note we don't check the
16026 left hand side of the modify because it should always be a return decl. */
16027 if (TREE_CODE (expr) == RETURN_EXPR)
16029 tree op = TREE_OPERAND (expr, 0);
16030 if (!op || !TREE_SIDE_EFFECTS (op))
16032 op = TREE_OPERAND (op, 1);
16033 if (!TREE_SIDE_EFFECTS (op))
/* Otherwise a cleanup point really is needed.  */
16037 return build1 (CLEANUP_POINT_EXPR, type, expr);
16040 /* Given a pointer value OP0 and a type TYPE, return a simplified version
16041 of an indirection through OP0, or NULL_TREE if no simplification is
/* Each clause below pattern-matches one shape of pointer expression
   and rewrites *OP0 into a direct reference.
   NOTE(review): declarations of `sub'/`subtype', the STRIP_NOPS of
   OP0, some returns and the final `return NULL_TREE;' are missing
   from this listing.  */
16045 fold_indirect_ref_1 (location_t loc, tree type, tree op0)
16051 subtype = TREE_TYPE (sub);
16052 if (!POINTER_TYPE_P (subtype))
/* Case 1: dereferencing an ADDR_EXPR.  */
16055 if (TREE_CODE (sub) == ADDR_EXPR)
16057 tree op = TREE_OPERAND (sub, 0);
16058 tree optype = TREE_TYPE (op);
16059 /* *&CONST_DECL -> to the value of the const decl. */
16060 if (TREE_CODE (op) == CONST_DECL)
16061 return DECL_INITIAL (op);
16062 /* *&p => p; make sure to handle *&"str"[cst] here. */
16063 if (type == optype)
16065 tree fop = fold_read_from_constant_string (op);
16071 /* *(foo *)&fooarray => fooarray[0] */
16072 else if (TREE_CODE (optype) == ARRAY_TYPE
16073 && type == TREE_TYPE (optype))
16075 tree type_domain = TYPE_DOMAIN (optype);
16076 tree min_val = size_zero_node;
16077 if (type_domain && TYPE_MIN_VALUE (type_domain))
16078 min_val = TYPE_MIN_VALUE (type_domain);
16079 op0 = build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
16080 SET_EXPR_LOCATION (op0, loc);
16083 /* *(foo *)&complexfoo => __real__ complexfoo */
16084 else if (TREE_CODE (optype) == COMPLEX_TYPE
16085 && type == TREE_TYPE (optype))
16086 return fold_build1_loc (loc, REALPART_EXPR, type, op);
16087 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
16088 else if (TREE_CODE (optype) == VECTOR_TYPE
16089 && type == TREE_TYPE (optype))
16091 tree part_width = TYPE_SIZE (type);
16092 tree index = bitsize_int (0);
16093 return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width, index);
/* Case 2: address plus constant offset into a vector.  */
16097 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
16098 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
16099 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
16101 tree op00 = TREE_OPERAND (sub, 0);
16102 tree op01 = TREE_OPERAND (sub, 1);
16106 op00type = TREE_TYPE (op00);
16107 if (TREE_CODE (op00) == ADDR_EXPR
16108 && TREE_CODE (TREE_TYPE (op00type)) == VECTOR_TYPE
16109 && type == TREE_TYPE (TREE_TYPE (op00type)))
16111 HOST_WIDE_INT offset = tree_low_cst (op01, 0);
16112 tree part_width = TYPE_SIZE (type);
16113 unsigned HOST_WIDE_INT part_widthi = tree_low_cst (part_width, 0)/BITS_PER_UNIT;
16114 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
16115 tree index = bitsize_int (indexi);
/* Only fold when the element index stays within the vector.  */
16117 if (offset/part_widthi <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (op00type)))
16118 return fold_build3_loc (loc,
16119 BIT_FIELD_REF, type, TREE_OPERAND (op00, 0),
16120 part_width, index);
/* Case 3: address plus offset equal to the element size of a complex
   value selects its imaginary part.  */
16126 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
16127 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
16128 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
16130 tree op00 = TREE_OPERAND (sub, 0);
16131 tree op01 = TREE_OPERAND (sub, 1);
16135 op00type = TREE_TYPE (op00);
16136 if (TREE_CODE (op00) == ADDR_EXPR
16137 && TREE_CODE (TREE_TYPE (op00type)) == COMPLEX_TYPE
16138 && type == TREE_TYPE (TREE_TYPE (op00type)))
16140 tree size = TYPE_SIZE_UNIT (type);
16141 if (tree_int_cst_equal (size, op01))
16142 return fold_build1_loc (loc, IMAGPART_EXPR, type,
16143 TREE_OPERAND (op00, 0));
/* Case 4: pointer to array of TYPE.  */
16147 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
16148 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
16149 && type == TREE_TYPE (TREE_TYPE (subtype)))
16152 tree min_val = size_zero_node;
16153 sub = build_fold_indirect_ref_loc (loc, sub);
16154 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
16155 if (type_domain && TYPE_MIN_VALUE (type_domain))
16156 min_val = TYPE_MIN_VALUE (type_domain);
16157 op0 = build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
16158 SET_EXPR_LOCATION (op0, loc);
16165 /* Builds an expression for an indirection through T, simplifying some
/* Try fold_indirect_ref_1 first; if it cannot simplify, fall back to
   building a plain INDIRECT_REF at LOC.
   NOTE(review): the `if (sub) return sub;' guard and final return are
   missing from this listing.  */
16169 build_fold_indirect_ref_loc (location_t loc, tree t)
16171 tree type = TREE_TYPE (TREE_TYPE (t));
16172 tree sub = fold_indirect_ref_1 (loc, type, t);
16177 t = build1 (INDIRECT_REF, type, t);
16178 SET_EXPR_LOCATION (t, loc);
16182 /* Given an INDIRECT_REF T, return either T or a simplified version. */
/* Unlike build_fold_indirect_ref_loc, this never wraps: if no
   simplification is found, the original T is returned unchanged
   (the tail of this function is missing from the listing).  */
16185 fold_indirect_ref_loc (location_t loc, tree t)
16187 tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));
16195 /* Strip non-trapping, non-side-effecting tree nodes from an expression
16196 whose result is ignored. The type of the returned tree need not be
16197 the same as the original expression. */
/* Iteratively peels wrappers whose value is unused, stopping at the
   first node that carries the side effects.
   NOTE(review): the `for (;;)' loop header, several case labels
   (tcc_unary, COND_EXPR, default), breaks and returns are missing
   from this listing.  */
16200 fold_ignored_result (tree t)
/* A side-effect-free expression contributes nothing when ignored.  */
16202 if (!TREE_SIDE_EFFECTS (t))
16203 return integer_zero_node;
16206 switch (TREE_CODE_CLASS (TREE_CODE (t)))
/* Unary class (label missing): the operand holds the side effects.  */
16209 t = TREE_OPERAND (t, 0);
/* Binary/comparison: keep whichever operand still has side effects.  */
16213 case tcc_comparison:
16214 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
16215 t = TREE_OPERAND (t, 0);
16216 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
16217 t = TREE_OPERAND (t, 1);
16222 case tcc_expression:
16223 switch (TREE_CODE (t))
16225 case COMPOUND_EXPR:
16226 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
16228 t = TREE_OPERAND (t, 0);
/* COND_EXPR case (label missing): only strippable when neither arm
   has side effects.  */
16232 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
16233 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
16235 t = TREE_OPERAND (t, 0);
16248 /* Return the value of VALUE, rounded up to a multiple of DIVISOR.
16249 This can only be applied to objects of a sizetype. */
/* NOTE(review): the `tree t;' declaration, the early `return value;'
   bodies, the carry-propagation branch of the constant path, and the
   final return are missing from this listing.  */
16252 round_up_loc (location_t loc, tree value, int divisor)
16254 tree div = NULL_TREE;
16256 gcc_assert (divisor > 0);
/* divisor == 1 guard (body missing): nothing to do.  */
16260 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
16261 have to do anything. Only do this when we are not given a const,
16262 because in that case, this check is more expensive than just
16264 if (TREE_CODE (value) != INTEGER_CST)
16266 div = build_int_cst (TREE_TYPE (value), divisor);
16268 if (multiple_of_p (TREE_TYPE (value), value, div))
16272 /* If divisor is a power of two, simplify this to bit manipulation. */
16273 if (divisor == (divisor & -divisor))
/* Power-of-two, constant VALUE: round the low word up in place.  */
16275 if (TREE_CODE (value) == INTEGER_CST)
16277 unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (value);
16278 unsigned HOST_WIDE_INT high;
16281 if ((low & (divisor - 1)) == 0)
16284 overflow_p = TREE_OVERFLOW (value);
16285 high = TREE_INT_CST_HIGH (value);
16286 low &= ~(divisor - 1);
/* (listing gap: the `low += divisor' / carry-into-high step and the
   non-constant bit-mask branch header are missing here.)  */
16295 return force_fit_type_double (TREE_TYPE (value), low, high,
/* Power-of-two, non-constant VALUE: (value + divisor-1) & -divisor.  */
16302 t = build_int_cst (TREE_TYPE (value), divisor - 1);
16303 value = size_binop_loc (loc, PLUS_EXPR, value, t);
16304 t = build_int_cst (TREE_TYPE (value), -divisor);
16305 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
/* General divisor: ceil-divide then multiply back.  */
16311 div = build_int_cst (TREE_TYPE (value), divisor);
16312 value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
16313 value = size_binop_loc (loc, MULT_EXPR, value, div);
16319 /* Likewise, but round down. */
/* Mirror of round_up_loc: returns VALUE rounded down to a multiple of
   DIVISOR.  NOTE(review): the `tree t;' declaration, early-return
   bodies and final return are missing from this listing.  */
16322 round_down_loc (location_t loc, tree value, int divisor)
16324 tree div = NULL_TREE;
16326 gcc_assert (divisor > 0);
16330 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
16331 have to do anything. Only do this when we are not given a const,
16332 because in that case, this check is more expensive than just
16334 if (TREE_CODE (value) != INTEGER_CST)
16336 div = build_int_cst (TREE_TYPE (value), divisor);
16338 if (multiple_of_p (TREE_TYPE (value), value, div))
16342 /* If divisor is a power of two, simplify this to bit manipulation. */
16343 if (divisor == (divisor & -divisor))
/* Rounding down to a power of two is a single mask: value & -divisor.  */
16347 t = build_int_cst (TREE_TYPE (value), -divisor);
16348 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
/* General divisor: floor-divide then multiply back.  */
16353 div = build_int_cst (TREE_TYPE (value), divisor);
16354 value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
16355 value = size_binop_loc (loc, MULT_EXPR, value, div);
16361 /* Returns the pointer to the base of the object addressed by EXP and
16362 extracts the information about the offset of the access, storing it
16363 to PBITPOS and POFFSET. */
/* For ADDR_EXPR, peel the reference with get_inner_reference and
   re-take the address of the core; otherwise EXP itself is the core
   with a zero offset.  NOTE(review): the `tree core;' declaration,
   the else branch header, `*pbitpos = 0;' and the returns are missing
   from this listing.  */
16366 split_address_to_core_and_offset (tree exp,
16367 HOST_WIDE_INT *pbitpos, tree *poffset)
16370 enum machine_mode mode;
16371 int unsignedp, volatilep;
16372 HOST_WIDE_INT bitsize;
16373 location_t loc = EXPR_LOCATION (exp);
16375 if (TREE_CODE (exp) == ADDR_EXPR)
16377 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
16378 poffset, &mode, &unsignedp, &volatilep,
16380 core = build_fold_addr_expr_loc (loc, core);
/* Non-address case: no variable offset component.  */
16386 *poffset = NULL_TREE;
16392 /* Returns true if addresses of E1 and E2 differ by a constant, false
16393 otherwise. If they do, E1 - E2 is stored in *DIFF. */
/* Splits both addresses into core + offset; they can only differ by a
   constant when the cores are identical and the byte offsets resolve
   to a compile-time integer.  NOTE(review): the `tree core1, core2;'
   declarations, some `return false;' bodies, the `*diff = 0;' branch
   and the final `return true;' are missing from this listing.  */
16396 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
16399 HOST_WIDE_INT bitpos1, bitpos2;
16400 tree toffset1, toffset2, tdiff, type;
16402 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
16403 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
/* Bit positions must be byte-aligned and the cores structurally equal.  */
16405 if (bitpos1 % BITS_PER_UNIT != 0
16406 || bitpos2 % BITS_PER_UNIT != 0
16407 || !operand_equal_p (core1, core2, 0))
/* Both variable offsets present: their difference must fold to a
   host-word-sized constant.  */
16410 if (toffset1 && toffset2)
16412 type = TREE_TYPE (toffset1);
16413 if (type != TREE_TYPE (toffset2))
16414 toffset2 = fold_convert (type, toffset2);
16416 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
16417 if (!cst_and_fits_in_hwi (tdiff))
16420 *diff = int_cst_value (tdiff);
16422 else if (toffset1 || toffset2)
16424 /* If only one of the offsets is non-constant, the difference cannot
/* Add the constant byte-position delta on top of the offset delta.  */
16431 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
16435 /* Simplify the floating point expression EXP when the sign of the
16436 result is not significant. Return NULL_TREE if no simplification
16440 fold_strip_sign_ops (tree exp)
16443 location_t loc = EXPR_LOCATION (exp);
16445 switch (TREE_CODE (exp))
16449 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
16450 return arg0 ? arg0 : TREE_OPERAND (exp, 0);
16454 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
16456 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
16457 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
16458 if (arg0 != NULL_TREE || arg1 != NULL_TREE)
16459 return fold_build2_loc (loc, TREE_CODE (exp), TREE_TYPE (exp),
16460 arg0 ? arg0 : TREE_OPERAND (exp, 0),
16461 arg1 ? arg1 : TREE_OPERAND (exp, 1));
16464 case COMPOUND_EXPR:
16465 arg0 = TREE_OPERAND (exp, 0);
16466 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
16468 return fold_build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (exp), arg0, arg1);
16472 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
16473 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
16475 return fold_build3_loc (loc,
16476 COND_EXPR, TREE_TYPE (exp), TREE_OPERAND (exp, 0),
16477 arg0 ? arg0 : TREE_OPERAND (exp, 1),
16478 arg1 ? arg1 : TREE_OPERAND (exp, 2));
16483 const enum built_in_function fcode = builtin_mathfn_code (exp);
16486 CASE_FLT_FN (BUILT_IN_COPYSIGN):
16487 /* Strip copysign function call, return the 1st argument. */
16488 arg0 = CALL_EXPR_ARG (exp, 0);
16489 arg1 = CALL_EXPR_ARG (exp, 1);
16490 return omit_one_operand_loc (loc, TREE_TYPE (exp), arg0, arg1);
16493 /* Strip sign ops from the argument of "odd" math functions. */
16494 if (negate_mathfn_p (fcode))
16496 arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
16498 return build_call_expr_loc (loc, get_callee_fndecl (exp), 1, arg0);