1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 /*@@ This file should be rewritten to use an arbitrary precision
23 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
24 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
25 @@ The routines that translate from the ap rep should
26 @@ warn if precision et. al. is lost.
27 @@ This would also make life easier when this technology is used
28 @@ for cross-compilers. */
30 /* The entry points in this file are fold, size_int_wide, size_binop
31 and force_fit_type_double.
33 fold takes a tree as argument and returns a simplified tree.
35 size_binop takes a tree code for an arithmetic operation
36 and two operands that are trees, and produces a tree for the
37 result, assuming the type comes from `sizetype'.
39 size_int takes an integer value, and creates a tree constant
40 with type from `sizetype'.
42 force_fit_type_double takes a constant, an overflowable flag and a
43 prior overflow indicator. It forces the value to fit the type and
46 Note: Since the folders get called on non-gimple code as well as
47 gimple code, we need to handle GIMPLE tuples as well as their
48 corresponding tree equivalents. */
52 #include "coretypes.h"
57 #include "fixed-value.h"
66 #include "langhooks.h"
70 /* Nonzero if we are folding constants inside an initializer; zero
72 int folding_initializer = 0;
74 /* The following constants represent a bit based encoding of GCC's
75 comparison operators. This encoding simplifies transformations
76 on relational comparison operators, such as AND and OR. */
77 enum comparison_code {
96 static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
97 static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *);
98 static bool negate_mathfn_p (enum built_in_function);
99 static bool negate_expr_p (tree);
100 static tree negate_expr (tree);
101 static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
102 static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
103 static tree const_binop (enum tree_code, tree, tree, int);
104 static enum comparison_code comparison_to_compcode (enum tree_code);
105 static enum tree_code compcode_to_comparison (enum comparison_code);
106 static int operand_equal_for_comparison_p (tree, tree, tree);
107 static int twoval_comparison_p (tree, tree *, tree *, int *);
108 static tree eval_subst (location_t, tree, tree, tree, tree, tree);
109 static tree pedantic_omit_one_operand_loc (location_t, tree, tree, tree);
110 static tree distribute_bit_expr (location_t, enum tree_code, tree, tree, tree);
111 static tree make_bit_field_ref (location_t, tree, tree,
112 HOST_WIDE_INT, HOST_WIDE_INT, int);
113 static tree optimize_bit_field_compare (location_t, enum tree_code,
115 static tree decode_field_reference (location_t, tree, HOST_WIDE_INT *,
117 enum machine_mode *, int *, int *,
119 static int all_ones_mask_p (const_tree, int);
120 static tree sign_bit_p (tree, const_tree);
121 static int simple_operand_p (const_tree);
122 static tree range_binop (enum tree_code, tree, tree, int, tree, int);
123 static tree range_predecessor (tree);
124 static tree range_successor (tree);
125 extern tree make_range (tree, int *, tree *, tree *, bool *);
126 extern bool merge_ranges (int *, tree *, tree *, int, tree, tree, int,
128 static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
129 static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
130 static tree unextend (tree, int, int, tree);
131 static tree fold_truthop (location_t, enum tree_code, tree, tree, tree);
132 static tree optimize_minmax_comparison (location_t, enum tree_code,
134 static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
135 static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
136 static tree fold_binary_op_with_conditional_arg (location_t,
137 enum tree_code, tree,
140 static tree fold_mathfn_compare (location_t,
141 enum built_in_function, enum tree_code,
143 static tree fold_inf_compare (location_t, enum tree_code, tree, tree, tree);
144 static tree fold_div_compare (location_t, enum tree_code, tree, tree, tree);
145 static bool reorder_operands_p (const_tree, const_tree);
146 static tree fold_negate_const (tree, tree);
147 static tree fold_not_const (tree, tree);
148 static tree fold_relational_const (enum tree_code, tree, tree, tree);
149 static tree fold_convert_const (enum tree_code, tree, tree);
152 /* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
153 overflow. Suppose A, B and SUM have the same respective signs as A1, B1,
154 and SUM1. Then this yields nonzero if overflow occurred during the
157 Overflow occurs if A and B have the same sign, but A and SUM differ in
158 sign. Use `^' to test whether signs differ, and `< 0' to isolate the
160 #define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
162 /* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
163 We do that by representing the two-word integer in 4 words, with only
164 HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
165 number. The value of the word is LOWPART + HIGHPART * BASE. */
168 ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
169 #define HIGHPART(x) \
170 ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
171 #define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)
173 /* Unpack a two-word integer into 4 words.
174 LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
175 WORDS points to the array of HOST_WIDE_INTs. */
178 encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
180 words[0] = LOWPART (low);
181 words[1] = HIGHPART (low);
182 words[2] = LOWPART (hi);
183 words[3] = HIGHPART (hi);
186 /* Pack an array of 4 words into a two-word integer.
187 WORDS points to the array of words.
188 The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces. */
191 decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
194 *low = words[0] + words[1] * BASE;
195 *hi = words[2] + words[3] * BASE;
198 /* Force the double-word integer L1, H1 to be within the range of the
199 integer type TYPE. Stores the properly truncated and sign-extended
200 double-word integer in *LV, *HV. Returns true if the operation
201 overflows, that is, argument and result are different. */
204 fit_double_type (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
205 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, const_tree type)
207 unsigned HOST_WIDE_INT low0 = l1;
208 HOST_WIDE_INT high0 = h1;
209 unsigned int prec = TYPE_PRECISION (type);
210 int sign_extended_type;
212 /* Size types *are* sign extended. */
213 sign_extended_type = (!TYPE_UNSIGNED (type)
214 || (TREE_CODE (type) == INTEGER_TYPE
215 && TYPE_IS_SIZETYPE (type)));
217 /* First clear all bits that are beyond the type's precision. */
218 if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
220 else if (prec > HOST_BITS_PER_WIDE_INT)
221 h1 &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
225 if (prec < HOST_BITS_PER_WIDE_INT)
226 l1 &= ~((HOST_WIDE_INT) (-1) << prec);
229 /* Then do sign extension if necessary. */
230 if (!sign_extended_type)
231 /* No sign extension */;
232 else if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
233 /* Correct width already. */;
234 else if (prec > HOST_BITS_PER_WIDE_INT)
236 /* Sign extend top half? */
237 if (h1 & ((unsigned HOST_WIDE_INT)1
238 << (prec - HOST_BITS_PER_WIDE_INT - 1)))
239 h1 |= (HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT);
241 else if (prec == HOST_BITS_PER_WIDE_INT)
243 if ((HOST_WIDE_INT)l1 < 0)
248 /* Sign extend bottom half? */
249 if (l1 & ((unsigned HOST_WIDE_INT)1 << (prec - 1)))
252 l1 |= (HOST_WIDE_INT)(-1) << prec;
259 /* If the value didn't fit, signal overflow. */
260 return l1 != low0 || h1 != high0;
263 /* We force the double-int HIGH:LOW to the range of the type TYPE by
264 sign or zero extending it.
265 OVERFLOWABLE indicates if we are interested
266 in overflow of the value, when >0 we are only interested in signed
267 overflow, for <0 we are interested in any overflow. OVERFLOWED
268 indicates whether overflow has already occurred. CONST_OVERFLOWED
269 indicates whether constant overflow has already occurred. We force
270 T's value to be within range of T's type (by setting to 0 or 1 all
271 the bits outside the type's range). We set TREE_OVERFLOWED if,
272 OVERFLOWED is nonzero,
273 or OVERFLOWABLE is >0 and signed overflow occurs
274 or OVERFLOWABLE is <0 and any overflow occurs
275 We return a new tree node for the extended double-int. The node
276 is shared if no overflow flags are set. */
279 force_fit_type_double (tree type, unsigned HOST_WIDE_INT low,
280 HOST_WIDE_INT high, int overflowable,
283 int sign_extended_type;
286 /* Size types *are* sign extended. */
287 sign_extended_type = (!TYPE_UNSIGNED (type)
288 || (TREE_CODE (type) == INTEGER_TYPE
289 && TYPE_IS_SIZETYPE (type)));
291 overflow = fit_double_type (low, high, &low, &high, type);
293 /* If we need to set overflow flags, return a new unshared node. */
294 if (overflowed || overflow)
298 || (overflowable > 0 && sign_extended_type))
300 tree t = make_node (INTEGER_CST);
301 TREE_INT_CST_LOW (t) = low;
302 TREE_INT_CST_HIGH (t) = high;
303 TREE_TYPE (t) = type;
304 TREE_OVERFLOW (t) = 1;
309 /* Else build a shared node. */
310 return build_int_cst_wide (type, low, high);
313 /* Add two doubleword integers with doubleword result.
314 Return nonzero if the operation overflows according to UNSIGNED_P.
315 Each argument is given as two `HOST_WIDE_INT' pieces.
316 One argument is L1 and H1; the other, L2 and H2.
317 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
320 add_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
321 unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
322 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
325 unsigned HOST_WIDE_INT l;
329 h = (HOST_WIDE_INT) ((unsigned HOST_WIDE_INT) h1
330 + (unsigned HOST_WIDE_INT) h2
337 return ((unsigned HOST_WIDE_INT) h < (unsigned HOST_WIDE_INT) h1
341 return OVERFLOW_SUM_SIGN (h1, h2, h);
344 /* Negate a doubleword integer with doubleword result.
345 Return nonzero if the operation overflows, assuming it's signed.
346 The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
347 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
350 neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
351 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
357 return (*hv & h1) < 0;
367 /* Multiply two doubleword integers with doubleword result.
368 Return nonzero if the operation overflows according to UNSIGNED_P.
369 Each argument is given as two `HOST_WIDE_INT' pieces.
370 One argument is L1 and H1; the other, L2 and H2.
371 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
374 mul_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
375 unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
376 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
379 HOST_WIDE_INT arg1[4];
380 HOST_WIDE_INT arg2[4];
381 HOST_WIDE_INT prod[4 * 2];
382 unsigned HOST_WIDE_INT carry;
384 unsigned HOST_WIDE_INT toplow, neglow;
385 HOST_WIDE_INT tophigh, neghigh;
387 encode (arg1, l1, h1);
388 encode (arg2, l2, h2);
390 memset (prod, 0, sizeof prod);
392 for (i = 0; i < 4; i++)
395 for (j = 0; j < 4; j++)
398 /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000. */
399 carry += arg1[i] * arg2[j];
400 /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF. */
402 prod[k] = LOWPART (carry);
403 carry = HIGHPART (carry);
408 decode (prod, lv, hv);
409 decode (prod + 4, &toplow, &tophigh);
411 /* Unsigned overflow is immediate. */
413 return (toplow | tophigh) != 0;
415 /* Check for signed overflow by calculating the signed representation of the
416 top half of the result; it should agree with the low half's sign bit. */
419 neg_double (l2, h2, &neglow, &neghigh);
420 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
424 neg_double (l1, h1, &neglow, &neghigh);
425 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
427 return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
430 /* Shift the doubleword integer in L1, H1 left by COUNT places
431 keeping only PREC bits of result.
432 Shift right if COUNT is negative.
433 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
434 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
437 lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
438 HOST_WIDE_INT count, unsigned int prec,
439 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
441 unsigned HOST_WIDE_INT signmask;
445 rshift_double (l1, h1, -count, prec, lv, hv, arith);
449 if (SHIFT_COUNT_TRUNCATED)
452 if (count >= 2 * HOST_BITS_PER_WIDE_INT)
454 /* Shifting by the host word size is undefined according to the
455 ANSI standard, so we must handle this as a special case. */
459 else if (count >= HOST_BITS_PER_WIDE_INT)
461 *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
466 *hv = (((unsigned HOST_WIDE_INT) h1 << count)
467 | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
471 /* Sign extend all bits that are beyond the precision. */
473 signmask = -((prec > HOST_BITS_PER_WIDE_INT
474 ? ((unsigned HOST_WIDE_INT) *hv
475 >> (prec - HOST_BITS_PER_WIDE_INT - 1))
476 : (*lv >> (prec - 1))) & 1);
478 if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
480 else if (prec >= HOST_BITS_PER_WIDE_INT)
482 *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
483 *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
488 *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
489 *lv |= signmask << prec;
493 /* Shift the doubleword integer in L1, H1 right by COUNT places
494 keeping only PREC bits of result. COUNT must be positive.
495 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
496 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
499 rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
500 HOST_WIDE_INT count, unsigned int prec,
501 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
504 unsigned HOST_WIDE_INT signmask;
507 ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
510 if (SHIFT_COUNT_TRUNCATED)
513 if (count >= 2 * HOST_BITS_PER_WIDE_INT)
515 /* Shifting by the host word size is undefined according to the
516 ANSI standard, so we must handle this as a special case. */
520 else if (count >= HOST_BITS_PER_WIDE_INT)
523 *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
527 *hv = (unsigned HOST_WIDE_INT) h1 >> count;
529 | ((unsigned HOST_WIDE_INT) h1 << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
532 /* Zero / sign extend all bits that are beyond the precision. */
534 if (count >= (HOST_WIDE_INT)prec)
539 else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
541 else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
543 *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
544 *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
549 *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
550 *lv |= signmask << (prec - count);
554 /* Rotate the doubleword integer in L1, H1 left by COUNT places
555 keeping only PREC bits of result.
556 Rotate right if COUNT is negative.
557 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
560 lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
561 HOST_WIDE_INT count, unsigned int prec,
562 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
564 unsigned HOST_WIDE_INT s1l, s2l;
565 HOST_WIDE_INT s1h, s2h;
571 lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
572 rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
577 /* Rotate the doubleword integer in L1, H1 left by COUNT places
578 keeping only PREC bits of result. COUNT must be positive.
579 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
582 rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
583 HOST_WIDE_INT count, unsigned int prec,
584 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
586 unsigned HOST_WIDE_INT s1l, s2l;
587 HOST_WIDE_INT s1h, s2h;
593 rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
594 lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
599 /* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
600 for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
601 CODE is a tree code for a kind of division, one of
602 TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
604 It controls how the quotient is rounded to an integer.
605 Return nonzero if the operation overflows.
606 UNS nonzero says do unsigned division. */
609 div_and_round_double (enum tree_code code, int uns,
610 unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
611 HOST_WIDE_INT hnum_orig,
612 unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
613 HOST_WIDE_INT hden_orig,
614 unsigned HOST_WIDE_INT *lquo,
615 HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
619 HOST_WIDE_INT num[4 + 1]; /* extra element for scaling. */
620 HOST_WIDE_INT den[4], quo[4];
622 unsigned HOST_WIDE_INT work;
623 unsigned HOST_WIDE_INT carry = 0;
624 unsigned HOST_WIDE_INT lnum = lnum_orig;
625 HOST_WIDE_INT hnum = hnum_orig;
626 unsigned HOST_WIDE_INT lden = lden_orig;
627 HOST_WIDE_INT hden = hden_orig;
630 if (hden == 0 && lden == 0)
631 overflow = 1, lden = 1;
633 /* Calculate quotient sign and convert operands to unsigned. */
639 /* (minimum integer) / (-1) is the only overflow case. */
640 if (neg_double (lnum, hnum, &lnum, &hnum)
641 && ((HOST_WIDE_INT) lden & hden) == -1)
647 neg_double (lden, hden, &lden, &hden);
651 if (hnum == 0 && hden == 0)
652 { /* single precision */
654 /* This unsigned division rounds toward zero. */
660 { /* trivial case: dividend < divisor */
661 /* hden != 0 already checked. */
668 memset (quo, 0, sizeof quo);
670 memset (num, 0, sizeof num); /* to zero 9th element */
671 memset (den, 0, sizeof den);
673 encode (num, lnum, hnum);
674 encode (den, lden, hden);
676 /* Special code for when the divisor < BASE. */
677 if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
679 /* hnum != 0 already checked. */
680 for (i = 4 - 1; i >= 0; i--)
682 work = num[i] + carry * BASE;
683 quo[i] = work / lden;
689 /* Full double precision division,
690 with thanks to Don Knuth's "Seminumerical Algorithms". */
691 int num_hi_sig, den_hi_sig;
692 unsigned HOST_WIDE_INT quo_est, scale;
694 /* Find the highest nonzero divisor digit. */
695 for (i = 4 - 1;; i--)
702 /* Insure that the first digit of the divisor is at least BASE/2.
703 This is required by the quotient digit estimation algorithm. */
705 scale = BASE / (den[den_hi_sig] + 1);
707 { /* scale divisor and dividend */
709 for (i = 0; i <= 4 - 1; i++)
711 work = (num[i] * scale) + carry;
712 num[i] = LOWPART (work);
713 carry = HIGHPART (work);
718 for (i = 0; i <= 4 - 1; i++)
720 work = (den[i] * scale) + carry;
721 den[i] = LOWPART (work);
722 carry = HIGHPART (work);
723 if (den[i] != 0) den_hi_sig = i;
730 for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
732 /* Guess the next quotient digit, quo_est, by dividing the first
733 two remaining dividend digits by the high order quotient digit.
734 quo_est is never low and is at most 2 high. */
735 unsigned HOST_WIDE_INT tmp;
737 num_hi_sig = i + den_hi_sig + 1;
738 work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
739 if (num[num_hi_sig] != den[den_hi_sig])
740 quo_est = work / den[den_hi_sig];
744 /* Refine quo_est so it's usually correct, and at most one high. */
745 tmp = work - quo_est * den[den_hi_sig];
747 && (den[den_hi_sig - 1] * quo_est
748 > (tmp * BASE + num[num_hi_sig - 2])))
751 /* Try QUO_EST as the quotient digit, by multiplying the
752 divisor by QUO_EST and subtracting from the remaining dividend.
753 Keep in mind that QUO_EST is the I - 1st digit. */
756 for (j = 0; j <= den_hi_sig; j++)
758 work = quo_est * den[j] + carry;
759 carry = HIGHPART (work);
760 work = num[i + j] - LOWPART (work);
761 num[i + j] = LOWPART (work);
762 carry += HIGHPART (work) != 0;
765 /* If quo_est was high by one, then num[i] went negative and
766 we need to correct things. */
767 if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
770 carry = 0; /* add divisor back in */
771 for (j = 0; j <= den_hi_sig; j++)
773 work = num[i + j] + den[j] + carry;
774 carry = HIGHPART (work);
775 num[i + j] = LOWPART (work);
778 num [num_hi_sig] += carry;
781 /* Store the quotient digit. */
786 decode (quo, lquo, hquo);
789 /* If result is negative, make it so. */
791 neg_double (*lquo, *hquo, lquo, hquo);
793 /* Compute trial remainder: rem = num - (quo * den) */
794 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
795 neg_double (*lrem, *hrem, lrem, hrem);
796 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
801 case TRUNC_MOD_EXPR: /* round toward zero */
802 case EXACT_DIV_EXPR: /* for this one, it shouldn't matter */
806 case FLOOR_MOD_EXPR: /* round toward negative infinity */
807 if (quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio < 0 && rem != 0 */
810 add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
818 case CEIL_MOD_EXPR: /* round toward positive infinity */
819 if (!quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio > 0 && rem != 0 */
821 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
829 case ROUND_MOD_EXPR: /* round to closest integer */
831 unsigned HOST_WIDE_INT labs_rem = *lrem;
832 HOST_WIDE_INT habs_rem = *hrem;
833 unsigned HOST_WIDE_INT labs_den = lden, ltwice;
834 HOST_WIDE_INT habs_den = hden, htwice;
836 /* Get absolute values. */
838 neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
840 neg_double (lden, hden, &labs_den, &habs_den);
842 /* If (2 * abs (lrem) >= abs (lden)), adjust the quotient. */
843 mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
844 labs_rem, habs_rem, <wice, &htwice);
846 if (((unsigned HOST_WIDE_INT) habs_den
847 < (unsigned HOST_WIDE_INT) htwice)
848 || (((unsigned HOST_WIDE_INT) habs_den
849 == (unsigned HOST_WIDE_INT) htwice)
850 && (labs_den <= ltwice)))
854 add_double (*lquo, *hquo,
855 (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
858 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
870 /* Compute true remainder: rem = num - (quo * den) */
871 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
872 neg_double (*lrem, *hrem, lrem, hrem);
873 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
877 /* If ARG2 divides ARG1 with zero remainder, carries out the division
878 of type CODE and returns the quotient.
879 Otherwise returns NULL_TREE. */
882 div_if_zero_remainder (enum tree_code code, const_tree arg1, const_tree arg2)
884 unsigned HOST_WIDE_INT int1l, int2l;
885 HOST_WIDE_INT int1h, int2h;
886 unsigned HOST_WIDE_INT quol, reml;
887 HOST_WIDE_INT quoh, remh;
890 /* The sign of the division is according to operand two, that
891 does the correct thing for POINTER_PLUS_EXPR where we want
892 a signed division. */
893 uns = TYPE_UNSIGNED (TREE_TYPE (arg2));
894 if (TREE_CODE (TREE_TYPE (arg2)) == INTEGER_TYPE
895 && TYPE_IS_SIZETYPE (TREE_TYPE (arg2)))
898 int1l = TREE_INT_CST_LOW (arg1);
899 int1h = TREE_INT_CST_HIGH (arg1);
900 int2l = TREE_INT_CST_LOW (arg2);
901 int2h = TREE_INT_CST_HIGH (arg2);
903 div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
904 &quol, &quoh, &reml, &remh);
905 if (remh != 0 || reml != 0)
908 return build_int_cst_wide (TREE_TYPE (arg1), quol, quoh);
911 /* This is nonzero if we should defer warnings about undefined
912 overflow. This facility exists because these warnings are a
913 special case. The code to estimate loop iterations does not want
914 to issue any warnings, since it works with expressions which do not
915 occur in user code. Various bits of cleanup code call fold(), but
916 only use the result if it has certain characteristics (e.g., is a
917 constant); that code only wants to issue a warning if the result is
920 static int fold_deferring_overflow_warnings;
922 /* If a warning about undefined overflow is deferred, this is the
923 warning. Note that this may cause us to turn two warnings into
924 one, but that is fine since it is sufficient to only give one
925 warning per expression. */
927 static const char* fold_deferred_overflow_warning;
929 /* If a warning about undefined overflow is deferred, this is the
930 level at which the warning should be emitted. */
932 static enum warn_strict_overflow_code fold_deferred_overflow_code;
934 /* Start deferring overflow warnings. We could use a stack here to
935 permit nested calls, but at present it is not necessary. */
938 fold_defer_overflow_warnings (void)
940 ++fold_deferring_overflow_warnings;
943 /* Stop deferring overflow warnings. If there is a pending warning,
944 and ISSUE is true, then issue the warning if appropriate. STMT is
945 the statement with which the warning should be associated (used for
946 location information); STMT may be NULL. CODE is the level of the
947 warning--a warn_strict_overflow_code value. This function will use
948 the smaller of CODE and the deferred code when deciding whether to
949 issue the warning. CODE may be zero to mean to always use the
953 fold_undefer_overflow_warnings (bool issue, const_gimple stmt, int code)
958 gcc_assert (fold_deferring_overflow_warnings > 0);
959 --fold_deferring_overflow_warnings;
960 if (fold_deferring_overflow_warnings > 0)
962 if (fold_deferred_overflow_warning != NULL
964 && code < (int) fold_deferred_overflow_code)
965 fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
969 warnmsg = fold_deferred_overflow_warning;
970 fold_deferred_overflow_warning = NULL;
972 if (!issue || warnmsg == NULL)
975 if (gimple_no_warning_p (stmt))
978 /* Use the smallest code level when deciding to issue the
980 if (code == 0 || code > (int) fold_deferred_overflow_code)
981 code = fold_deferred_overflow_code;
983 if (!issue_strict_overflow_warning (code))
987 locus = input_location;
989 locus = gimple_location (stmt);
990 warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
993 /* Stop deferring overflow warnings, ignoring any deferred
997 fold_undefer_and_ignore_overflow_warnings (void)
999 fold_undefer_overflow_warnings (false, NULL, 0);
1002 /* Whether we are deferring overflow warnings. */
1005 fold_deferring_overflow_warnings_p (void)
1007 return fold_deferring_overflow_warnings > 0;
1010 /* This is called when we fold something based on the fact that signed
1011 overflow is undefined. */
1014 fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
1016 if (fold_deferring_overflow_warnings > 0)
1018 if (fold_deferred_overflow_warning == NULL
1019 || wc < fold_deferred_overflow_code)
1021 fold_deferred_overflow_warning = gmsgid;
1022 fold_deferred_overflow_code = wc;
1025 else if (issue_strict_overflow_warning (wc))
1026 warning (OPT_Wstrict_overflow, gmsgid);
1029 /* Return true if the built-in mathematical function specified by CODE
1030 is odd, i.e. -f(x) == f(-x). */
1033 negate_mathfn_p (enum built_in_function code)
1037 CASE_FLT_FN (BUILT_IN_ASIN):
1038 CASE_FLT_FN (BUILT_IN_ASINH):
1039 CASE_FLT_FN (BUILT_IN_ATAN):
1040 CASE_FLT_FN (BUILT_IN_ATANH):
1041 CASE_FLT_FN (BUILT_IN_CASIN):
1042 CASE_FLT_FN (BUILT_IN_CASINH):
1043 CASE_FLT_FN (BUILT_IN_CATAN):
1044 CASE_FLT_FN (BUILT_IN_CATANH):
1045 CASE_FLT_FN (BUILT_IN_CBRT):
1046 CASE_FLT_FN (BUILT_IN_CPROJ):
1047 CASE_FLT_FN (BUILT_IN_CSIN):
1048 CASE_FLT_FN (BUILT_IN_CSINH):
1049 CASE_FLT_FN (BUILT_IN_CTAN):
1050 CASE_FLT_FN (BUILT_IN_CTANH):
1051 CASE_FLT_FN (BUILT_IN_ERF):
1052 CASE_FLT_FN (BUILT_IN_LLROUND):
1053 CASE_FLT_FN (BUILT_IN_LROUND):
1054 CASE_FLT_FN (BUILT_IN_ROUND):
1055 CASE_FLT_FN (BUILT_IN_SIN):
1056 CASE_FLT_FN (BUILT_IN_SINH):
1057 CASE_FLT_FN (BUILT_IN_TAN):
1058 CASE_FLT_FN (BUILT_IN_TANH):
1059 CASE_FLT_FN (BUILT_IN_TRUNC):
1062 CASE_FLT_FN (BUILT_IN_LLRINT):
1063 CASE_FLT_FN (BUILT_IN_LRINT):
1064 CASE_FLT_FN (BUILT_IN_NEARBYINT):
1065 CASE_FLT_FN (BUILT_IN_RINT):
1066 return !flag_rounding_math;
1074 /* Check whether we may negate an integer constant T without causing
1078 may_negate_without_overflow_p (const_tree t)
1080 unsigned HOST_WIDE_INT val;
1084 gcc_assert (TREE_CODE (t) == INTEGER_CST);
1086 type = TREE_TYPE (t);
1087 if (TYPE_UNSIGNED (type))
1090 prec = TYPE_PRECISION (type);
1091 if (prec > HOST_BITS_PER_WIDE_INT)
1093 if (TREE_INT_CST_LOW (t) != 0)
1095 prec -= HOST_BITS_PER_WIDE_INT;
1096 val = TREE_INT_CST_HIGH (t);
1099 val = TREE_INT_CST_LOW (t);
1100 if (prec < HOST_BITS_PER_WIDE_INT)
1101 val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
1102 return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
1105 /* Determine whether an expression T can be cheaply negated using
1106 the function negate_expr without introducing undefined overflow. */
1109 negate_expr_p (tree t)
1116 type = TREE_TYPE (t);
1118 STRIP_SIGN_NOPS (t);
1119 switch (TREE_CODE (t))
1122 if (TYPE_OVERFLOW_WRAPS (type))
1125 /* Check that -CST will not overflow type. */
1126 return may_negate_without_overflow_p (t);
1128 return (INTEGRAL_TYPE_P (type)
1129 && TYPE_OVERFLOW_WRAPS (type));
1136 /* We want to canonicalize to positive real constants. Pretend
1137 that only negative ones can be easily negated. */
1138 return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
1141 return negate_expr_p (TREE_REALPART (t))
1142 && negate_expr_p (TREE_IMAGPART (t));
1145 return negate_expr_p (TREE_OPERAND (t, 0))
1146 && negate_expr_p (TREE_OPERAND (t, 1));
1149 return negate_expr_p (TREE_OPERAND (t, 0));
1152 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
1153 || HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
1155 /* -(A + B) -> (-B) - A. */
1156 if (negate_expr_p (TREE_OPERAND (t, 1))
1157 && reorder_operands_p (TREE_OPERAND (t, 0),
1158 TREE_OPERAND (t, 1)))
1160 /* -(A + B) -> (-A) - B. */
1161 return negate_expr_p (TREE_OPERAND (t, 0));
1164 /* We can't turn -(A-B) into B-A when we honor signed zeros. */
1165 return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
1166 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
1167 && reorder_operands_p (TREE_OPERAND (t, 0),
1168 TREE_OPERAND (t, 1));
1171 if (TYPE_UNSIGNED (TREE_TYPE (t)))
1177 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
1178 return negate_expr_p (TREE_OPERAND (t, 1))
1179 || negate_expr_p (TREE_OPERAND (t, 0));
1182 case TRUNC_DIV_EXPR:
1183 case ROUND_DIV_EXPR:
1184 case FLOOR_DIV_EXPR:
1186 case EXACT_DIV_EXPR:
1187 /* In general we can't negate A / B, because if A is INT_MIN and
1188 B is 1, we may turn this into INT_MIN / -1 which is undefined
1189 and actually traps on some architectures. But if overflow is
1190 undefined, we can negate, because - (INT_MIN / 1) is an
1192 if (INTEGRAL_TYPE_P (TREE_TYPE (t))
1193 && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
1195 return negate_expr_p (TREE_OPERAND (t, 1))
1196 || negate_expr_p (TREE_OPERAND (t, 0));
1199 /* Negate -((double)float) as (double)(-float). */
1200 if (TREE_CODE (type) == REAL_TYPE)
1202 tree tem = strip_float_extensions (t);
1204 return negate_expr_p (tem);
1209 /* Negate -f(x) as f(-x). */
1210 if (negate_mathfn_p (builtin_mathfn_code (t)))
1211 return negate_expr_p (CALL_EXPR_ARG (t, 0));
1215 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
1216 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
1218 tree op1 = TREE_OPERAND (t, 1);
1219 if (TREE_INT_CST_HIGH (op1) == 0
1220 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
1221 == TREE_INT_CST_LOW (op1))
1232 /* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
1233 simplification is possible.
1234 If negate_expr_p would return true for T, NULL_TREE will never be
1238 fold_negate_expr (location_t loc, tree t)
1240 tree type = TREE_TYPE (t);
1243 switch (TREE_CODE (t))
1245 /* Convert - (~A) to A + 1. */
1247 if (INTEGRAL_TYPE_P (type))
1248 return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
1249 build_int_cst (type, 1));
1253 tem = fold_negate_const (t, type);
1254 if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
1255 || !TYPE_OVERFLOW_TRAPS (type))
1260 tem = fold_negate_const (t, type);
1261 /* Two's complement FP formats, such as c4x, may overflow. */
1262 if (!TREE_OVERFLOW (tem) || !flag_trapping_math)
1267 tem = fold_negate_const (t, type);
1272 tree rpart = negate_expr (TREE_REALPART (t));
1273 tree ipart = negate_expr (TREE_IMAGPART (t));
1275 if ((TREE_CODE (rpart) == REAL_CST
1276 && TREE_CODE (ipart) == REAL_CST)
1277 || (TREE_CODE (rpart) == INTEGER_CST
1278 && TREE_CODE (ipart) == INTEGER_CST))
1279 return build_complex (type, rpart, ipart);
1284 if (negate_expr_p (t))
1285 return fold_build2_loc (loc, COMPLEX_EXPR, type,
1286 fold_negate_expr (loc, TREE_OPERAND (t, 0)),
1287 fold_negate_expr (loc, TREE_OPERAND (t, 1)));
1291 if (negate_expr_p (t))
1292 return fold_build1_loc (loc, CONJ_EXPR, type,
1293 fold_negate_expr (loc, TREE_OPERAND (t, 0)));
1297 return TREE_OPERAND (t, 0);
1300 if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
1301 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
1303 /* -(A + B) -> (-B) - A. */
1304 if (negate_expr_p (TREE_OPERAND (t, 1))
1305 && reorder_operands_p (TREE_OPERAND (t, 0),
1306 TREE_OPERAND (t, 1)))
1308 tem = negate_expr (TREE_OPERAND (t, 1));
1309 return fold_build2_loc (loc, MINUS_EXPR, type,
1310 tem, TREE_OPERAND (t, 0));
1313 /* -(A + B) -> (-A) - B. */
1314 if (negate_expr_p (TREE_OPERAND (t, 0)))
1316 tem = negate_expr (TREE_OPERAND (t, 0));
1317 return fold_build2_loc (loc, MINUS_EXPR, type,
1318 tem, TREE_OPERAND (t, 1));
1324 /* - (A - B) -> B - A */
1325 if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
1326 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
1327 && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
1328 return fold_build2_loc (loc, MINUS_EXPR, type,
1329 TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
1333 if (TYPE_UNSIGNED (type))
1339 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
1341 tem = TREE_OPERAND (t, 1);
1342 if (negate_expr_p (tem))
1343 return fold_build2_loc (loc, TREE_CODE (t), type,
1344 TREE_OPERAND (t, 0), negate_expr (tem));
1345 tem = TREE_OPERAND (t, 0);
1346 if (negate_expr_p (tem))
1347 return fold_build2_loc (loc, TREE_CODE (t), type,
1348 negate_expr (tem), TREE_OPERAND (t, 1));
1352 case TRUNC_DIV_EXPR:
1353 case ROUND_DIV_EXPR:
1354 case FLOOR_DIV_EXPR:
1356 case EXACT_DIV_EXPR:
1357 /* In general we can't negate A / B, because if A is INT_MIN and
1358 B is 1, we may turn this into INT_MIN / -1 which is undefined
1359 and actually traps on some architectures. But if overflow is
1360 undefined, we can negate, because - (INT_MIN / 1) is an
1362 if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
1364 const char * const warnmsg = G_("assuming signed overflow does not "
1365 "occur when negating a division");
1366 tem = TREE_OPERAND (t, 1);
1367 if (negate_expr_p (tem))
1369 if (INTEGRAL_TYPE_P (type)
1370 && (TREE_CODE (tem) != INTEGER_CST
1371 || integer_onep (tem)))
1372 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
1373 return fold_build2_loc (loc, TREE_CODE (t), type,
1374 TREE_OPERAND (t, 0), negate_expr (tem));
1376 tem = TREE_OPERAND (t, 0);
1377 if (negate_expr_p (tem))
1379 if (INTEGRAL_TYPE_P (type)
1380 && (TREE_CODE (tem) != INTEGER_CST
1381 || tree_int_cst_equal (tem, TYPE_MIN_VALUE (type))))
1382 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
1383 return fold_build2_loc (loc, TREE_CODE (t), type,
1384 negate_expr (tem), TREE_OPERAND (t, 1));
1390 /* Convert -((double)float) into (double)(-float). */
1391 if (TREE_CODE (type) == REAL_TYPE)
1393 tem = strip_float_extensions (t);
1394 if (tem != t && negate_expr_p (tem))
1395 return fold_convert_loc (loc, type, negate_expr (tem));
1400 /* Negate -f(x) as f(-x). */
1401 if (negate_mathfn_p (builtin_mathfn_code (t))
1402 && negate_expr_p (CALL_EXPR_ARG (t, 0)))
1406 fndecl = get_callee_fndecl (t);
1407 arg = negate_expr (CALL_EXPR_ARG (t, 0));
1408 return build_call_expr_loc (loc, fndecl, 1, arg);
1413 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
1414 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
1416 tree op1 = TREE_OPERAND (t, 1);
1417 if (TREE_INT_CST_HIGH (op1) == 0
1418 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
1419 == TREE_INT_CST_LOW (op1))
1421 tree ntype = TYPE_UNSIGNED (type)
1422 ? signed_type_for (type)
1423 : unsigned_type_for (type);
1424 tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
1425 temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
1426 return fold_convert_loc (loc, type, temp);
1438 /* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T can not be
1439 negated in a simpler way. Also allow for T to be NULL_TREE, in which case
1440 return NULL_TREE. */
1443 negate_expr (tree t)
1451 loc = EXPR_LOCATION (t);
1452 type = TREE_TYPE (t);
1453 STRIP_SIGN_NOPS (t);
1455 tem = fold_negate_expr (loc, t);
1458 tem = build1 (NEGATE_EXPR, TREE_TYPE (t), t);
1459 SET_EXPR_LOCATION (tem, loc);
1461 return fold_convert_loc (loc, type, tem);
1464 /* Split a tree IN into a constant, literal and variable parts that could be
1465 combined with CODE to make IN. "constant" means an expression with
1466 TREE_CONSTANT but that isn't an actual constant. CODE must be a
1467 commutative arithmetic operation. Store the constant part into *CONP,
1468 the literal in *LITP and return the variable part. If a part isn't
1469 present, set it to null. If the tree does not decompose in this way,
1470 return the entire tree as the variable part and the other parts as null.
1472 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR. In that
1473 case, we negate an operand that was subtracted. Except if it is a
1474 literal for which we use *MINUS_LITP instead.
1476 If NEGATE_P is true, we are negating all of IN, again except a literal
1477 for which we use *MINUS_LITP instead.
1479 If IN is itself a literal or constant, return it as appropriate.
1481 Note that we do not guarantee that any of the three values will be the
1482 same type as IN, but they will have the same signedness and mode. */
1485 split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
1486 tree *minus_litp, int negate_p)
1494 /* Strip any conversions that don't change the machine mode or signedness. */
1495 STRIP_SIGN_NOPS (in);
1497 if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
1498 || TREE_CODE (in) == FIXED_CST)
1500 else if (TREE_CODE (in) == code
1501 || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
1502 && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
1503 /* We can associate addition and subtraction together (even
1504 though the C standard doesn't say so) for integers because
1505 the value is not affected. For reals, the value might be
1506 affected, so we can't. */
1507 && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
1508 || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
1510 tree op0 = TREE_OPERAND (in, 0);
1511 tree op1 = TREE_OPERAND (in, 1);
1512 int neg1_p = TREE_CODE (in) == MINUS_EXPR;
1513 int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;
1515 /* First see if either of the operands is a literal, then a constant. */
1516 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
1517 || TREE_CODE (op0) == FIXED_CST)
1518 *litp = op0, op0 = 0;
1519 else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
1520 || TREE_CODE (op1) == FIXED_CST)
1521 *litp = op1, neg_litp_p = neg1_p, op1 = 0;
1523 if (op0 != 0 && TREE_CONSTANT (op0))
1524 *conp = op0, op0 = 0;
1525 else if (op1 != 0 && TREE_CONSTANT (op1))
1526 *conp = op1, neg_conp_p = neg1_p, op1 = 0;
1528 /* If we haven't dealt with either operand, this is not a case we can
1529 decompose. Otherwise, VAR is either of the ones remaining, if any. */
1530 if (op0 != 0 && op1 != 0)
1535 var = op1, neg_var_p = neg1_p;
1537 /* Now do any needed negations. */
1539 *minus_litp = *litp, *litp = 0;
1541 *conp = negate_expr (*conp);
1543 var = negate_expr (var);
1545 else if (TREE_CONSTANT (in))
1553 *minus_litp = *litp, *litp = 0;
1554 else if (*minus_litp)
1555 *litp = *minus_litp, *minus_litp = 0;
1556 *conp = negate_expr (*conp);
1557 var = negate_expr (var);
1563 /* Re-associate trees split by the above function. T1 and T2 are
1564 either expressions to associate or null. Return the new
1565 expression, if any. LOC is the location of the new expression. If
1566 we build an operation, do it in TYPE and with CODE. */
1569 associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
1578 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
1579 try to fold this since we will have infinite recursion. But do
1580 deal with any NEGATE_EXPRs. */
1581 if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
1582 || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
1584 if (code == PLUS_EXPR)
1586 if (TREE_CODE (t1) == NEGATE_EXPR)
1587 tem = build2 (MINUS_EXPR, type, fold_convert_loc (loc, type, t2),
1588 fold_convert_loc (loc, type, TREE_OPERAND (t1, 0)));
1589 else if (TREE_CODE (t2) == NEGATE_EXPR)
1590 tem = build2 (MINUS_EXPR, type, fold_convert_loc (loc, type, t1),
1591 fold_convert_loc (loc, type, TREE_OPERAND (t2, 0)));
1592 else if (integer_zerop (t2))
1593 return fold_convert_loc (loc, type, t1);
1595 else if (code == MINUS_EXPR)
1597 if (integer_zerop (t2))
1598 return fold_convert_loc (loc, type, t1);
1601 tem = build2 (code, type, fold_convert_loc (loc, type, t1),
1602 fold_convert_loc (loc, type, t2));
1603 goto associate_trees_exit;
1606 return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
1607 fold_convert_loc (loc, type, t2));
1608 associate_trees_exit:
1609 protected_set_expr_location (tem, loc);
1613 /* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
1614 for use in int_const_binop, size_binop and size_diffop. */
1617 int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
1619 if (TREE_CODE (type1) != INTEGER_TYPE && !POINTER_TYPE_P (type1))
1621 if (TREE_CODE (type2) != INTEGER_TYPE && !POINTER_TYPE_P (type2))
1636 return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
1637 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
1638 && TYPE_MODE (type1) == TYPE_MODE (type2);
1642 /* Combine two integer constants ARG1 and ARG2 under operation CODE
1643 to produce a new constant. Return NULL_TREE if we don't know how
1644 to evaluate CODE at compile-time.
1646 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
1649 int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2, int notrunc)
1651 unsigned HOST_WIDE_INT int1l, int2l;
1652 HOST_WIDE_INT int1h, int2h;
1653 unsigned HOST_WIDE_INT low;
1655 unsigned HOST_WIDE_INT garbagel;
1656 HOST_WIDE_INT garbageh;
1658 tree type = TREE_TYPE (arg1);
1659 int uns = TYPE_UNSIGNED (type);
1661 = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
1664 int1l = TREE_INT_CST_LOW (arg1);
1665 int1h = TREE_INT_CST_HIGH (arg1);
1666 int2l = TREE_INT_CST_LOW (arg2);
1667 int2h = TREE_INT_CST_HIGH (arg2);
1672 low = int1l | int2l, hi = int1h | int2h;
1676 low = int1l ^ int2l, hi = int1h ^ int2h;
1680 low = int1l & int2l, hi = int1h & int2h;
1686 /* It's unclear from the C standard whether shifts can overflow.
1687 The following code ignores overflow; perhaps a C standard
1688 interpretation ruling is needed. */
1689 lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
1696 lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
1701 overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
1705 neg_double (int2l, int2h, &low, &hi);
1706 add_double (int1l, int1h, low, hi, &low, &hi);
1707 overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
1711 overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
1714 case TRUNC_DIV_EXPR:
1715 case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
1716 case EXACT_DIV_EXPR:
1717 /* This is a shortcut for a common special case. */
1718 if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1719 && !TREE_OVERFLOW (arg1)
1720 && !TREE_OVERFLOW (arg2)
1721 && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1723 if (code == CEIL_DIV_EXPR)
1726 low = int1l / int2l, hi = 0;
1730 /* ... fall through ... */
1732 case ROUND_DIV_EXPR:
1733 if (int2h == 0 && int2l == 0)
1735 if (int2h == 0 && int2l == 1)
1737 low = int1l, hi = int1h;
1740 if (int1l == int2l && int1h == int2h
1741 && ! (int1l == 0 && int1h == 0))
1746 overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
1747 &low, &hi, &garbagel, &garbageh);
1750 case TRUNC_MOD_EXPR:
1751 case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
1752 /* This is a shortcut for a common special case. */
1753 if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1754 && !TREE_OVERFLOW (arg1)
1755 && !TREE_OVERFLOW (arg2)
1756 && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1758 if (code == CEIL_MOD_EXPR)
1760 low = int1l % int2l, hi = 0;
1764 /* ... fall through ... */
1766 case ROUND_MOD_EXPR:
1767 if (int2h == 0 && int2l == 0)
1769 overflow = div_and_round_double (code, uns,
1770 int1l, int1h, int2l, int2h,
1771 &garbagel, &garbageh, &low, &hi);
1777 low = (((unsigned HOST_WIDE_INT) int1h
1778 < (unsigned HOST_WIDE_INT) int2h)
1779 || (((unsigned HOST_WIDE_INT) int1h
1780 == (unsigned HOST_WIDE_INT) int2h)
1783 low = (int1h < int2h
1784 || (int1h == int2h && int1l < int2l));
1786 if (low == (code == MIN_EXPR))
1787 low = int1l, hi = int1h;
1789 low = int2l, hi = int2h;
1798 t = build_int_cst_wide (TREE_TYPE (arg1), low, hi);
1800 /* Propagate overflow flags ourselves. */
1801 if (((!uns || is_sizetype) && overflow)
1802 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
1805 TREE_OVERFLOW (t) = 1;
1809 t = force_fit_type_double (TREE_TYPE (arg1), low, hi, 1,
1810 ((!uns || is_sizetype) && overflow)
1811 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
1816 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1817 constant. We assume ARG1 and ARG2 have the same data type, or at least
1818 are the same kind of constant and the same machine mode. Return zero if
1819 combining the constants is not allowed in the current operating mode.
1821 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
1824 const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
1826 /* Sanity check for the recursive cases. */
1833 if (TREE_CODE (arg1) == INTEGER_CST)
1834 return int_const_binop (code, arg1, arg2, notrunc);
1836 if (TREE_CODE (arg1) == REAL_CST)
1838 enum machine_mode mode;
1841 REAL_VALUE_TYPE value;
1842 REAL_VALUE_TYPE result;
1846 /* The following codes are handled by real_arithmetic. */
1861 d1 = TREE_REAL_CST (arg1);
1862 d2 = TREE_REAL_CST (arg2);
1864 type = TREE_TYPE (arg1);
1865 mode = TYPE_MODE (type);
1867 /* Don't perform operation if we honor signaling NaNs and
1868 either operand is a NaN. */
1869 if (HONOR_SNANS (mode)
1870 && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
1873 /* Don't perform operation if it would raise a division
1874 by zero exception. */
1875 if (code == RDIV_EXPR
1876 && REAL_VALUES_EQUAL (d2, dconst0)
1877 && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
1880 /* If either operand is a NaN, just return it. Otherwise, set up
1881 for floating-point trap; we return an overflow. */
1882 if (REAL_VALUE_ISNAN (d1))
1884 else if (REAL_VALUE_ISNAN (d2))
1887 inexact = real_arithmetic (&value, code, &d1, &d2);
1888 real_convert (&result, mode, &value);
1890 /* Don't constant fold this floating point operation if
1891 the result has overflowed and flag_trapping_math. */
1892 if (flag_trapping_math
1893 && MODE_HAS_INFINITIES (mode)
1894 && REAL_VALUE_ISINF (result)
1895 && !REAL_VALUE_ISINF (d1)
1896 && !REAL_VALUE_ISINF (d2))
1899 /* Don't constant fold this floating point operation if the
1900 result may dependent upon the run-time rounding mode and
1901 flag_rounding_math is set, or if GCC's software emulation
1902 is unable to accurately represent the result. */
1903 if ((flag_rounding_math
1904 || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
1905 && (inexact || !real_identical (&result, &value)))
1908 t = build_real (type, result);
1910 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
1914 if (TREE_CODE (arg1) == FIXED_CST)
1916 FIXED_VALUE_TYPE f1;
1917 FIXED_VALUE_TYPE f2;
1918 FIXED_VALUE_TYPE result;
1923 /* The following codes are handled by fixed_arithmetic. */
1929 case TRUNC_DIV_EXPR:
1930 f2 = TREE_FIXED_CST (arg2);
1935 f2.data.high = TREE_INT_CST_HIGH (arg2);
1936 f2.data.low = TREE_INT_CST_LOW (arg2);
1944 f1 = TREE_FIXED_CST (arg1);
1945 type = TREE_TYPE (arg1);
1946 sat_p = TYPE_SATURATING (type);
1947 overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
1948 t = build_fixed (type, result);
1949 /* Propagate overflow flags. */
1950 if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
1951 TREE_OVERFLOW (t) = 1;
1955 if (TREE_CODE (arg1) == COMPLEX_CST)
1957 tree type = TREE_TYPE (arg1);
1958 tree r1 = TREE_REALPART (arg1);
1959 tree i1 = TREE_IMAGPART (arg1);
1960 tree r2 = TREE_REALPART (arg2);
1961 tree i2 = TREE_IMAGPART (arg2);
1968 real = const_binop (code, r1, r2, notrunc);
1969 imag = const_binop (code, i1, i2, notrunc);
1973 if (COMPLEX_FLOAT_TYPE_P (type))
1974 return do_mpc_arg2 (arg1, arg2, type,
1975 /* do_nonfinite= */ folding_initializer,
1978 real = const_binop (MINUS_EXPR,
1979 const_binop (MULT_EXPR, r1, r2, notrunc),
1980 const_binop (MULT_EXPR, i1, i2, notrunc),
1982 imag = const_binop (PLUS_EXPR,
1983 const_binop (MULT_EXPR, r1, i2, notrunc),
1984 const_binop (MULT_EXPR, i1, r2, notrunc),
1989 if (COMPLEX_FLOAT_TYPE_P (type))
1990 return do_mpc_arg2 (arg1, arg2, type,
1991 /* do_nonfinite= */ folding_initializer,
1994 case TRUNC_DIV_EXPR:
1996 case FLOOR_DIV_EXPR:
1997 case ROUND_DIV_EXPR:
1998 if (flag_complex_method == 0)
2000 /* Keep this algorithm in sync with
2001 tree-complex.c:expand_complex_div_straight().
2003 Expand complex division to scalars, straightforward algorithm.
2004 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
2008 = const_binop (PLUS_EXPR,
2009 const_binop (MULT_EXPR, r2, r2, notrunc),
2010 const_binop (MULT_EXPR, i2, i2, notrunc),
2013 = const_binop (PLUS_EXPR,
2014 const_binop (MULT_EXPR, r1, r2, notrunc),
2015 const_binop (MULT_EXPR, i1, i2, notrunc),
2018 = const_binop (MINUS_EXPR,
2019 const_binop (MULT_EXPR, i1, r2, notrunc),
2020 const_binop (MULT_EXPR, r1, i2, notrunc),
2023 real = const_binop (code, t1, magsquared, notrunc);
2024 imag = const_binop (code, t2, magsquared, notrunc);
2028 /* Keep this algorithm in sync with
2029 tree-complex.c:expand_complex_div_wide().
2031 Expand complex division to scalars, modified algorithm to minimize
2032 overflow with wide input ranges. */
2033 tree compare = fold_build2 (LT_EXPR, boolean_type_node,
2034 fold_abs_const (r2, TREE_TYPE (type)),
2035 fold_abs_const (i2, TREE_TYPE (type)));
2037 if (integer_nonzerop (compare))
2039 /* In the TRUE branch, we compute
2041 div = (br * ratio) + bi;
2042 tr = (ar * ratio) + ai;
2043 ti = (ai * ratio) - ar;
2046 tree ratio = const_binop (code, r2, i2, notrunc);
2047 tree div = const_binop (PLUS_EXPR, i2,
2048 const_binop (MULT_EXPR, r2, ratio,
2051 real = const_binop (MULT_EXPR, r1, ratio, notrunc);
2052 real = const_binop (PLUS_EXPR, real, i1, notrunc);
2053 real = const_binop (code, real, div, notrunc);
2055 imag = const_binop (MULT_EXPR, i1, ratio, notrunc);
2056 imag = const_binop (MINUS_EXPR, imag, r1, notrunc);
2057 imag = const_binop (code, imag, div, notrunc);
2061 /* In the FALSE branch, we compute
2063 divisor = (d * ratio) + c;
2064 tr = (b * ratio) + a;
2065 ti = b - (a * ratio);
2068 tree ratio = const_binop (code, i2, r2, notrunc);
2069 tree div = const_binop (PLUS_EXPR, r2,
2070 const_binop (MULT_EXPR, i2, ratio,
2074 real = const_binop (MULT_EXPR, i1, ratio, notrunc);
2075 real = const_binop (PLUS_EXPR, real, r1, notrunc);
2076 real = const_binop (code, real, div, notrunc);
2078 imag = const_binop (MULT_EXPR, r1, ratio, notrunc);
2079 imag = const_binop (MINUS_EXPR, i1, imag, notrunc);
2080 imag = const_binop (code, imag, div, notrunc);
2090 return build_complex (type, real, imag);
2093 if (TREE_CODE (arg1) == VECTOR_CST)
2095 tree type = TREE_TYPE(arg1);
2096 int count = TYPE_VECTOR_SUBPARTS (type), i;
2097 tree elements1, elements2, list = NULL_TREE;
2099 if(TREE_CODE(arg2) != VECTOR_CST)
2102 elements1 = TREE_VECTOR_CST_ELTS (arg1);
2103 elements2 = TREE_VECTOR_CST_ELTS (arg2);
2105 for (i = 0; i < count; i++)
2107 tree elem1, elem2, elem;
2109 /* The trailing elements can be empty and should be treated as 0 */
2111 elem1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2114 elem1 = TREE_VALUE(elements1);
2115 elements1 = TREE_CHAIN (elements1);
2119 elem2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2122 elem2 = TREE_VALUE(elements2);
2123 elements2 = TREE_CHAIN (elements2);
2126 elem = const_binop (code, elem1, elem2, notrunc);
2128 /* It is possible that const_binop cannot handle the given
2129 code and return NULL_TREE */
2130 if(elem == NULL_TREE)
2133 list = tree_cons (NULL_TREE, elem, list);
2135 return build_vector(type, nreverse(list));
2140 /* Create a size type INT_CST node with NUMBER sign extended. KIND
2141 indicates which particular sizetype to create. */
2144 size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
2146 return build_int_cst (sizetype_tab[(int) kind], number);
2149 /* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE
2150 is a tree code. The type of the result is taken from the operands.
2151 Both must be equivalent integer types, ala int_binop_types_match_p.
2152 If the operands are constant, so is the result. */
2155 size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
2157 tree type = TREE_TYPE (arg0);
2159 if (arg0 == error_mark_node || arg1 == error_mark_node)
2160 return error_mark_node;
2162 gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
2165 /* Handle the special case of two integer constants faster. */
2166 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2168 /* And some specific cases even faster than that. */
2169 if (code == PLUS_EXPR)
2171 if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
2173 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
2176 else if (code == MINUS_EXPR)
2178 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
2181 else if (code == MULT_EXPR)
2183 if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
2187 /* Handle general case of two integer constants. */
2188 return int_const_binop (code, arg0, arg1, 0);
2191 return fold_build2_loc (loc, code, type, arg0, arg1);
2194 /* Given two values, either both of sizetype or both of bitsizetype,
2195 compute the difference between the two values. Return the value
2196 in signed type corresponding to the type of the operands. */
2199 size_diffop_loc (location_t loc, tree arg0, tree arg1)
2201 tree type = TREE_TYPE (arg0);
2204 gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
2207 /* If the type is already signed, just do the simple thing. */
2208 if (!TYPE_UNSIGNED (type))
2209 return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);
2211 if (type == sizetype)
2213 else if (type == bitsizetype)
2214 ctype = sbitsizetype;
2216 ctype = signed_type_for (type);
2218 /* If either operand is not a constant, do the conversions to the signed
2219 type and subtract. The hardware will do the right thing with any
2220 overflow in the subtraction. */
2221 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
2222 return size_binop_loc (loc, MINUS_EXPR,
2223 fold_convert_loc (loc, ctype, arg0),
2224 fold_convert_loc (loc, ctype, arg1));
2226 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
2227 Otherwise, subtract the other way, convert to CTYPE (we know that can't
2228 overflow) and negate (which can't either). Special-case a result
2229 of zero while we're here. */
2230 if (tree_int_cst_equal (arg0, arg1))
2231 return build_int_cst (ctype, 0);
2232 else if (tree_int_cst_lt (arg1, arg0))
2233 return fold_convert_loc (loc, ctype,
2234 size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
2236 return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
2237 fold_convert_loc (loc, ctype,
2238 size_binop_loc (loc,
2243 /* A subroutine of fold_convert_const handling conversions of an
2244 INTEGER_CST to another integer type. */
2247 fold_convert_const_int_from_int (tree type, const_tree arg1)
2251 /* Given an integer constant, make new constant with new type,
2252 appropriately sign-extended or truncated. */
2253 t = force_fit_type_double (type, TREE_INT_CST_LOW (arg1),
2254 TREE_INT_CST_HIGH (arg1),
2255 /* Don't set the overflow when
2256 converting from a pointer, */
2257 !POINTER_TYPE_P (TREE_TYPE (arg1))
2258 /* or to a sizetype with same signedness
2259 and the precision is unchanged.
2260 ??? sizetype is always sign-extended,
2261 but its signedness depends on the
2262 frontend. Thus we see spurious overflows
2263 here if we do not check this. */
2264 && !((TYPE_PRECISION (TREE_TYPE (arg1))
2265 == TYPE_PRECISION (type))
2266 && (TYPE_UNSIGNED (TREE_TYPE (arg1))
2267 == TYPE_UNSIGNED (type))
2268 && ((TREE_CODE (TREE_TYPE (arg1)) == INTEGER_TYPE
2269 && TYPE_IS_SIZETYPE (TREE_TYPE (arg1)))
2270 || (TREE_CODE (type) == INTEGER_TYPE
2271 && TYPE_IS_SIZETYPE (type)))),
2272 (TREE_INT_CST_HIGH (arg1) < 0
2273 && (TYPE_UNSIGNED (type)
2274 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
2275 | TREE_OVERFLOW (arg1));
2280 /* A subroutine of fold_convert_const handling conversions a REAL_CST
2281 to an integer type. */
2284 fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
2289 /* The following code implements the floating point to integer
2290 conversion rules required by the Java Language Specification,
2291 that IEEE NaNs are mapped to zero and values that overflow
2292 the target precision saturate, i.e. values greater than
2293 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
2294 are mapped to INT_MIN. These semantics are allowed by the
2295 C and C++ standards that simply state that the behavior of
2296 FP-to-integer conversion is unspecified upon overflow. */
2298 HOST_WIDE_INT high, low;
2300 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
2304 case FIX_TRUNC_EXPR:
2305 real_trunc (&r, VOIDmode, &x);
2312 /* If R is NaN, return zero and show we have an overflow. */
2313 if (REAL_VALUE_ISNAN (r))
2320 /* See if R is less than the lower bound or greater than the
2325 tree lt = TYPE_MIN_VALUE (type);
2326 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
2327 if (REAL_VALUES_LESS (r, l))
2330 high = TREE_INT_CST_HIGH (lt);
2331 low = TREE_INT_CST_LOW (lt);
2337 tree ut = TYPE_MAX_VALUE (type);
2340 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
2341 if (REAL_VALUES_LESS (u, r))
2344 high = TREE_INT_CST_HIGH (ut);
2345 low = TREE_INT_CST_LOW (ut);
2351 REAL_VALUE_TO_INT (&low, &high, r);
2353 t = force_fit_type_double (type, low, high, -1,
2354 overflow | TREE_OVERFLOW (arg1));
2358 /* A subroutine of fold_convert_const handling conversions of a
2359 FIXED_CST to an integer type. */
2362 fold_convert_const_int_from_fixed (tree type, const_tree arg1)
2365 double_int temp, temp_trunc;
2368 /* Right shift FIXED_CST to temp by fbit. */
2369 temp = TREE_FIXED_CST (arg1).data;
2370 mode = TREE_FIXED_CST (arg1).mode;
2371 if (GET_MODE_FBIT (mode) < 2 * HOST_BITS_PER_WIDE_INT)
2373 lshift_double (temp.low, temp.high,
2374 - GET_MODE_FBIT (mode), 2 * HOST_BITS_PER_WIDE_INT,
2375 &temp.low, &temp.high, SIGNED_FIXED_POINT_MODE_P (mode));
2377 /* Left shift temp to temp_trunc by fbit. */
2378 lshift_double (temp.low, temp.high,
2379 GET_MODE_FBIT (mode), 2 * HOST_BITS_PER_WIDE_INT,
2380 &temp_trunc.low, &temp_trunc.high,
2381 SIGNED_FIXED_POINT_MODE_P (mode));
2388 temp_trunc.high = 0;
2391 /* If FIXED_CST is negative, we need to round the value toward 0.
2392 By checking if the fractional bits are not zero to add 1 to temp. */
2393 if (SIGNED_FIXED_POINT_MODE_P (mode) && temp_trunc.high < 0
2394 && !double_int_equal_p (TREE_FIXED_CST (arg1).data, temp_trunc))
2399 temp = double_int_add (temp, one);
2402 /* Given a fixed-point constant, make new constant with new type,
2403 appropriately sign-extended or truncated. */
2404 t = force_fit_type_double (type, temp.low, temp.high, -1,
2406 && (TYPE_UNSIGNED (type)
2407 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
2408 | TREE_OVERFLOW (arg1));
2413 /* A subroutine of fold_convert_const handling conversions a REAL_CST
2414 to another floating point type. */
2417 fold_convert_const_real_from_real (tree type, const_tree arg1)
2419 REAL_VALUE_TYPE value;
2422 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
2423 t = build_real (type, value);
2425 /* If converting an infinity or NAN to a representation that doesn't
2426 have one, set the overflow bit so that we can produce some kind of
2427 error message at the appropriate point if necessary. It's not the
2428 most user-friendly message, but it's better than nothing. */
2429 if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
2430 && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
2431 TREE_OVERFLOW (t) = 1;
2432 else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
2433 && !MODE_HAS_NANS (TYPE_MODE (type)))
2434 TREE_OVERFLOW (t) = 1;
2435 /* Regular overflow, conversion produced an infinity in a mode that
2436 can't represent them. */
2437 else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
2438 && REAL_VALUE_ISINF (value)
2439 && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
2440 TREE_OVERFLOW (t) = 1;
2442 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2446 /* A subroutine of fold_convert_const handling conversions a FIXED_CST
2447 to a floating point type. */
2450 fold_convert_const_real_from_fixed (tree type, const_tree arg1)
2452 REAL_VALUE_TYPE value;
2455 real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
2456 t = build_real (type, value);
2458 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2462 /* A subroutine of fold_convert_const handling conversions a FIXED_CST
2463 to another fixed-point type. */
2466 fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
2468 FIXED_VALUE_TYPE value;
2472 overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
2473 TYPE_SATURATING (type));
2474 t = build_fixed (type, value);
2476 /* Propagate overflow flags. */
2477 if (overflow_p | TREE_OVERFLOW (arg1))
2478 TREE_OVERFLOW (t) = 1;
2482 /* A subroutine of fold_convert_const handling conversions an INTEGER_CST
2483 to a fixed-point type. */
2486 fold_convert_const_fixed_from_int (tree type, const_tree arg1)
2488 FIXED_VALUE_TYPE value;
2492 overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type),
2493 TREE_INT_CST (arg1),
2494 TYPE_UNSIGNED (TREE_TYPE (arg1)),
2495 TYPE_SATURATING (type));
2496 t = build_fixed (type, value);
2498 /* Propagate overflow flags. */
2499 if (overflow_p | TREE_OVERFLOW (arg1))
2500 TREE_OVERFLOW (t) = 1;
2504 /* A subroutine of fold_convert_const handling conversions a REAL_CST
2505 to a fixed-point type. */
2508 fold_convert_const_fixed_from_real (tree type, const_tree arg1)
2510 FIXED_VALUE_TYPE value;
2514 overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
2515 &TREE_REAL_CST (arg1),
2516 TYPE_SATURATING (type));
2517 t = build_fixed (type, value);
2519 /* Propagate overflow flags. */
2520 if (overflow_p | TREE_OVERFLOW (arg1))
2521 TREE_OVERFLOW (t) = 1;
2525 /* Attempt to fold type conversion operation CODE of expression ARG1 to
2526 type TYPE. If no simplification can be done return NULL_TREE. */
2529 fold_convert_const (enum tree_code code, tree type, tree arg1)
2531 if (TREE_TYPE (arg1) == type)
2534 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
2535 || TREE_CODE (type) == OFFSET_TYPE)
2537 if (TREE_CODE (arg1) == INTEGER_CST)
2538 return fold_convert_const_int_from_int (type, arg1);
2539 else if (TREE_CODE (arg1) == REAL_CST)
2540 return fold_convert_const_int_from_real (code, type, arg1);
2541 else if (TREE_CODE (arg1) == FIXED_CST)
2542 return fold_convert_const_int_from_fixed (type, arg1);
2544 else if (TREE_CODE (type) == REAL_TYPE)
2546 if (TREE_CODE (arg1) == INTEGER_CST)
2547 return build_real_from_int_cst (type, arg1);
2548 else if (TREE_CODE (arg1) == REAL_CST)
2549 return fold_convert_const_real_from_real (type, arg1);
2550 else if (TREE_CODE (arg1) == FIXED_CST)
2551 return fold_convert_const_real_from_fixed (type, arg1);
2553 else if (TREE_CODE (type) == FIXED_POINT_TYPE)
2555 if (TREE_CODE (arg1) == FIXED_CST)
2556 return fold_convert_const_fixed_from_fixed (type, arg1);
2557 else if (TREE_CODE (arg1) == INTEGER_CST)
2558 return fold_convert_const_fixed_from_int (type, arg1);
2559 else if (TREE_CODE (arg1) == REAL_CST)
2560 return fold_convert_const_fixed_from_real (type, arg1);
2565 /* Construct a vector of zero elements of vector type TYPE. */
2568 build_zero_vector (tree type)
2573 elem = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2574 units = TYPE_VECTOR_SUBPARTS (type);
2577 for (i = 0; i < units; i++)
2578 list = tree_cons (NULL_TREE, elem, list);
2579 return build_vector (type, list);
2582 /* Returns true, if ARG is convertible to TYPE using a NOP_EXPR. */
2585 fold_convertible_p (const_tree type, const_tree arg)
2587 tree orig = TREE_TYPE (arg);
2592 if (TREE_CODE (arg) == ERROR_MARK
2593 || TREE_CODE (type) == ERROR_MARK
2594 || TREE_CODE (orig) == ERROR_MARK)
2597 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2600 switch (TREE_CODE (type))
2602 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2603 case POINTER_TYPE: case REFERENCE_TYPE:
2605 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2606 || TREE_CODE (orig) == OFFSET_TYPE)
2608 return (TREE_CODE (orig) == VECTOR_TYPE
2609 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2612 case FIXED_POINT_TYPE:
2616 return TREE_CODE (type) == TREE_CODE (orig);
2623 /* Convert expression ARG to type TYPE. Used by the middle-end for
2624 simple conversions in preference to calling the front-end's convert. */
2627 fold_convert_loc (location_t loc, tree type, tree arg)
2629 tree orig = TREE_TYPE (arg);
2635 if (TREE_CODE (arg) == ERROR_MARK
2636 || TREE_CODE (type) == ERROR_MARK
2637 || TREE_CODE (orig) == ERROR_MARK)
2638 return error_mark_node;
2640 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2641 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2643 switch (TREE_CODE (type))
2646 case REFERENCE_TYPE:
2647 /* Handle conversions between pointers to different address spaces. */
2648 if (POINTER_TYPE_P (orig)
2649 && (TYPE_ADDR_SPACE (TREE_TYPE (type))
2650 != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
2651 return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
2654 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2656 if (TREE_CODE (arg) == INTEGER_CST)
2658 tem = fold_convert_const (NOP_EXPR, type, arg);
2659 if (tem != NULL_TREE)
2662 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2663 || TREE_CODE (orig) == OFFSET_TYPE)
2664 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2665 if (TREE_CODE (orig) == COMPLEX_TYPE)
2666 return fold_convert_loc (loc, type,
2667 fold_build1_loc (loc, REALPART_EXPR,
2668 TREE_TYPE (orig), arg));
2669 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
2670 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2671 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2674 if (TREE_CODE (arg) == INTEGER_CST)
2676 tem = fold_convert_const (FLOAT_EXPR, type, arg);
2677 if (tem != NULL_TREE)
2680 else if (TREE_CODE (arg) == REAL_CST)
2682 tem = fold_convert_const (NOP_EXPR, type, arg);
2683 if (tem != NULL_TREE)
2686 else if (TREE_CODE (arg) == FIXED_CST)
2688 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2689 if (tem != NULL_TREE)
2693 switch (TREE_CODE (orig))
2696 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2697 case POINTER_TYPE: case REFERENCE_TYPE:
2698 return fold_build1_loc (loc, FLOAT_EXPR, type, arg);
2701 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2703 case FIXED_POINT_TYPE:
2704 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2707 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2708 return fold_convert_loc (loc, type, tem);
2714 case FIXED_POINT_TYPE:
2715 if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
2716 || TREE_CODE (arg) == REAL_CST)
2718 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2719 if (tem != NULL_TREE)
2720 goto fold_convert_exit;
2723 switch (TREE_CODE (orig))
2725 case FIXED_POINT_TYPE:
2730 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2733 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2734 return fold_convert_loc (loc, type, tem);
2741 switch (TREE_CODE (orig))
2744 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2745 case POINTER_TYPE: case REFERENCE_TYPE:
2747 case FIXED_POINT_TYPE:
2748 return fold_build2_loc (loc, COMPLEX_EXPR, type,
2749 fold_convert_loc (loc, TREE_TYPE (type), arg),
2750 fold_convert_loc (loc, TREE_TYPE (type),
2751 integer_zero_node));
2756 if (TREE_CODE (arg) == COMPLEX_EXPR)
2758 rpart = fold_convert_loc (loc, TREE_TYPE (type),
2759 TREE_OPERAND (arg, 0));
2760 ipart = fold_convert_loc (loc, TREE_TYPE (type),
2761 TREE_OPERAND (arg, 1));
2762 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2765 arg = save_expr (arg);
2766 rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2767 ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
2768 rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
2769 ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
2770 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2778 if (integer_zerop (arg))
2779 return build_zero_vector (type);
2780 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2781 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2782 || TREE_CODE (orig) == VECTOR_TYPE);
2783 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2786 tem = fold_ignored_result (arg);
2787 if (TREE_CODE (tem) == MODIFY_EXPR)
2788 goto fold_convert_exit;
2789 return fold_build1_loc (loc, NOP_EXPR, type, tem);
2795 protected_set_expr_location (tem, loc);
2799 /* Return false if expr can be assumed not to be an lvalue, true
2803 maybe_lvalue_p (const_tree x)
2805 /* We only need to wrap lvalue tree codes. */
2806 switch (TREE_CODE (x))
2817 case ALIGN_INDIRECT_REF:
2818 case MISALIGNED_INDIRECT_REF:
2820 case ARRAY_RANGE_REF:
2826 case PREINCREMENT_EXPR:
2827 case PREDECREMENT_EXPR:
2829 case TRY_CATCH_EXPR:
2830 case WITH_CLEANUP_EXPR:
2839 /* Assume the worst for front-end tree codes. */
2840 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2848 /* Return an expr equal to X but certainly not valid as an lvalue. */
2851 non_lvalue_loc (location_t loc, tree x)
2853 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2858 if (! maybe_lvalue_p (x))
2860 x = build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
2861 SET_EXPR_LOCATION (x, loc);
2865 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2866 Zero means allow extended lvalues. */
2868 int pedantic_lvalues;
2870 /* When pedantic, return an expr equal to X but certainly not valid as a
2871 pedantic lvalue. Otherwise, return X. */
2874 pedantic_non_lvalue_loc (location_t loc, tree x)
2876 if (pedantic_lvalues)
2877 return non_lvalue_loc (loc, x);
2878 protected_set_expr_location (x, loc);
2882 /* Given a tree comparison code, return the code that is the logical inverse
2883 of the given code. It is not safe to do this for floating-point
2884 comparisons, except for NE_EXPR and EQ_EXPR, so we receive a machine mode
2885 as well: if reversing the comparison is unsafe, return ERROR_MARK. */
2888 invert_tree_comparison (enum tree_code code, bool honor_nans)
2890 if (honor_nans && flag_trapping_math)
2900 return honor_nans ? UNLE_EXPR : LE_EXPR;
2902 return honor_nans ? UNLT_EXPR : LT_EXPR;
2904 return honor_nans ? UNGE_EXPR : GE_EXPR;
2906 return honor_nans ? UNGT_EXPR : GT_EXPR;
2920 return UNORDERED_EXPR;
2921 case UNORDERED_EXPR:
2922 return ORDERED_EXPR;
2928 /* Similar, but return the comparison that results if the operands are
2929 swapped. This is safe for floating-point. */
2932 swap_tree_comparison (enum tree_code code)
2939 case UNORDERED_EXPR:
2965 /* Convert a comparison tree code from an enum tree_code representation
2966 into a compcode bit-based encoding. This function is the inverse of
2967 compcode_to_comparison. */
2969 static enum comparison_code
2970 comparison_to_compcode (enum tree_code code)
2987 return COMPCODE_ORD;
2988 case UNORDERED_EXPR:
2989 return COMPCODE_UNORD;
2991 return COMPCODE_UNLT;
2993 return COMPCODE_UNEQ;
2995 return COMPCODE_UNLE;
2997 return COMPCODE_UNGT;
2999 return COMPCODE_LTGT;
3001 return COMPCODE_UNGE;
3007 /* Convert a compcode bit-based encoding of a comparison operator back
3008 to GCC's enum tree_code representation. This function is the
3009 inverse of comparison_to_compcode. */
3011 static enum tree_code
3012 compcode_to_comparison (enum comparison_code code)
3029 return ORDERED_EXPR;
3030 case COMPCODE_UNORD:
3031 return UNORDERED_EXPR;
3049 /* Return a tree for the comparison which is the combination of
3050 doing the AND or OR (depending on CODE) of the two operations LCODE
3051 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
3052 the possibility of trapping if the mode has NaNs, and return NULL_TREE
3053 if this makes the transformation invalid. */
3056 combine_comparisons (location_t loc,
3057 enum tree_code code, enum tree_code lcode,
3058 enum tree_code rcode, tree truth_type,
3059 tree ll_arg, tree lr_arg)
3061 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
3062 enum comparison_code lcompcode = comparison_to_compcode (lcode);
3063 enum comparison_code rcompcode = comparison_to_compcode (rcode);
3068 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
3069 compcode = lcompcode & rcompcode;
3072 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
3073 compcode = lcompcode | rcompcode;
3082 /* Eliminate unordered comparisons, as well as LTGT and ORD
3083 which are not used unless the mode has NaNs. */
3084 compcode &= ~COMPCODE_UNORD;
3085 if (compcode == COMPCODE_LTGT)
3086 compcode = COMPCODE_NE;
3087 else if (compcode == COMPCODE_ORD)
3088 compcode = COMPCODE_TRUE;
3090 else if (flag_trapping_math)
3092 /* Check that the original operation and the optimized ones will trap
3093 under the same condition. */
3094 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
3095 && (lcompcode != COMPCODE_EQ)
3096 && (lcompcode != COMPCODE_ORD);
3097 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
3098 && (rcompcode != COMPCODE_EQ)
3099 && (rcompcode != COMPCODE_ORD);
3100 bool trap = (compcode & COMPCODE_UNORD) == 0
3101 && (compcode != COMPCODE_EQ)
3102 && (compcode != COMPCODE_ORD);
3104 /* In a short-circuited boolean expression the LHS might be
3105 such that the RHS, if evaluated, will never trap. For
3106 example, in ORD (x, y) && (x < y), we evaluate the RHS only
3107 if neither x nor y is NaN. (This is a mixed blessing: for
3108 example, the expression above will never trap, hence
3109 optimizing it to x < y would be invalid). */
3110 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
3111 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
3114 /* If the comparison was short-circuited, and only the RHS
3115 trapped, we may now generate a spurious trap. */
3117 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
3120 /* If we changed the conditions that cause a trap, we lose. */
3121 if ((ltrap || rtrap) != trap)
3125 if (compcode == COMPCODE_TRUE)
3126 return constant_boolean_node (true, truth_type);
3127 else if (compcode == COMPCODE_FALSE)
3128 return constant_boolean_node (false, truth_type);
3131 enum tree_code tcode;
3133 tcode = compcode_to_comparison ((enum comparison_code) compcode);
3134 return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
3138 /* Return nonzero if two operands (typically of the same tree node)
3139 are necessarily equal. If either argument has side-effects this
3140 function returns zero. FLAGS modifies behavior as follows:
3142 If OEP_ONLY_CONST is set, only return nonzero for constants.
3143 This function tests whether the operands are indistinguishable;
3144 it does not test whether they are equal using C's == operation.
3145 The distinction is important for IEEE floating point, because
3146 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
3147 (2) two NaNs may be indistinguishable, but NaN!=NaN.
3149 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
3150 even though it may hold multiple values during a function.
3151 This is because a GCC tree node guarantees that nothing else is
3152 executed between the evaluation of its "operands" (which may often
3153 be evaluated in arbitrary order). Hence if the operands themselves
3154 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
3155 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
3156 unset means assuming isochronic (or instantaneous) tree equivalence.
3157 Unless comparing arbitrary expression trees, such as from different
3158 statements, this flag can usually be left unset.
3160 If OEP_PURE_SAME is set, then pure functions with identical arguments
3161 are considered the same. It is used when the caller has other ways
3162 to ensure that global memory is unchanged in between. */
3165 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
3167 /* If either is ERROR_MARK, they aren't equal. */
3168 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK)
3171 /* Check equality of integer constants before bailing out due to
3172 precision differences. */
3173 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
3174 return tree_int_cst_equal (arg0, arg1);
3176 /* If both types don't have the same signedness, then we can't consider
3177 them equal. We must check this before the STRIP_NOPS calls
3178 because they may change the signedness of the arguments. As pointers
3179 strictly don't have a signedness, require either two pointers or
3180 two non-pointers as well. */
3181 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
3182 || POINTER_TYPE_P (TREE_TYPE (arg0)) != POINTER_TYPE_P (TREE_TYPE (arg1)))
3185 /* We cannot consider pointers to different address space equal. */
3186 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && POINTER_TYPE_P (TREE_TYPE (arg1))
3187 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
3188 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
3191 /* If both types don't have the same precision, then it is not safe
3193 if (TYPE_PRECISION (TREE_TYPE (arg0)) != TYPE_PRECISION (TREE_TYPE (arg1)))
3199 /* In case both args are comparisons but with different comparison
3200 code, try to swap the comparison operands of one arg to produce
3201 a match and compare that variant. */
3202 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3203 && COMPARISON_CLASS_P (arg0)
3204 && COMPARISON_CLASS_P (arg1))
3206 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
3208 if (TREE_CODE (arg0) == swap_code)
3209 return operand_equal_p (TREE_OPERAND (arg0, 0),
3210 TREE_OPERAND (arg1, 1), flags)
3211 && operand_equal_p (TREE_OPERAND (arg0, 1),
3212 TREE_OPERAND (arg1, 0), flags);
3215 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3216 /* This is needed for conversions and for COMPONENT_REF.
3217 Might as well play it safe and always test this. */
3218 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
3219 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
3220 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
3223 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
3224 We don't care about side effects in that case because the SAVE_EXPR
3225 takes care of that for us. In all other cases, two expressions are
3226 equal if they have no side effects. If we have two identical
3227 expressions with side effects that should be treated the same due
3228 to the only side effects being identical SAVE_EXPR's, that will
3229 be detected in the recursive calls below. */
3230 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
3231 && (TREE_CODE (arg0) == SAVE_EXPR
3232 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
3235 /* Next handle constant cases, those for which we can return 1 even
3236 if ONLY_CONST is set. */
3237 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
3238 switch (TREE_CODE (arg0))
3241 return tree_int_cst_equal (arg0, arg1);
3244 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
3245 TREE_FIXED_CST (arg1));
3248 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
3249 TREE_REAL_CST (arg1)))
3253 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
3255 /* If we do not distinguish between signed and unsigned zero,
3256 consider them equal. */
3257 if (real_zerop (arg0) && real_zerop (arg1))
3266 v1 = TREE_VECTOR_CST_ELTS (arg0);
3267 v2 = TREE_VECTOR_CST_ELTS (arg1);
3270 if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
3273 v1 = TREE_CHAIN (v1);
3274 v2 = TREE_CHAIN (v2);
3281 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
3283 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
3287 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
3288 && ! memcmp (TREE_STRING_POINTER (arg0),
3289 TREE_STRING_POINTER (arg1),
3290 TREE_STRING_LENGTH (arg0)));
3293 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
3299 if (flags & OEP_ONLY_CONST)
3302 /* Define macros to test an operand from arg0 and arg1 for equality and a
3303 variant that allows null and views null as being different from any
3304 non-null value. In the latter case, if either is null, the both
3305 must be; otherwise, do the normal comparison. */
3306 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
3307 TREE_OPERAND (arg1, N), flags)
3309 #define OP_SAME_WITH_NULL(N) \
3310 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
3311 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
3313 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
3316 /* Two conversions are equal only if signedness and modes match. */
3317 switch (TREE_CODE (arg0))
3320 case FIX_TRUNC_EXPR:
3321 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
3322 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
3332 case tcc_comparison:
3334 if (OP_SAME (0) && OP_SAME (1))
3337 /* For commutative ops, allow the other order. */
3338 return (commutative_tree_code (TREE_CODE (arg0))
3339 && operand_equal_p (TREE_OPERAND (arg0, 0),
3340 TREE_OPERAND (arg1, 1), flags)
3341 && operand_equal_p (TREE_OPERAND (arg0, 1),
3342 TREE_OPERAND (arg1, 0), flags));
3345 /* If either of the pointer (or reference) expressions we are
3346 dereferencing contain a side effect, these cannot be equal. */
3347 if (TREE_SIDE_EFFECTS (arg0)
3348 || TREE_SIDE_EFFECTS (arg1))
3351 switch (TREE_CODE (arg0))
3354 case ALIGN_INDIRECT_REF:
3355 case MISALIGNED_INDIRECT_REF:
3361 case ARRAY_RANGE_REF:
3362 /* Operands 2 and 3 may be null.
3363 Compare the array index by value if it is constant first as we
3364 may have different types but same value here. */
3366 && (tree_int_cst_equal (TREE_OPERAND (arg0, 1),
3367 TREE_OPERAND (arg1, 1))
3369 && OP_SAME_WITH_NULL (2)
3370 && OP_SAME_WITH_NULL (3));
3373 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
3374 may be NULL when we're called to compare MEM_EXPRs. */
3375 return OP_SAME_WITH_NULL (0)
3377 && OP_SAME_WITH_NULL (2);
3380 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
3386 case tcc_expression:
3387 switch (TREE_CODE (arg0))
3390 case TRUTH_NOT_EXPR:
3393 case TRUTH_ANDIF_EXPR:
3394 case TRUTH_ORIF_EXPR:
3395 return OP_SAME (0) && OP_SAME (1);
3397 case TRUTH_AND_EXPR:
3399 case TRUTH_XOR_EXPR:
3400 if (OP_SAME (0) && OP_SAME (1))
3403 /* Otherwise take into account this is a commutative operation. */
3404 return (operand_equal_p (TREE_OPERAND (arg0, 0),
3405 TREE_OPERAND (arg1, 1), flags)
3406 && operand_equal_p (TREE_OPERAND (arg0, 1),
3407 TREE_OPERAND (arg1, 0), flags));
3410 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
3417 switch (TREE_CODE (arg0))
3420 /* If the CALL_EXPRs call different functions, then they
3421 clearly can not be equal. */
3422 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
3427 unsigned int cef = call_expr_flags (arg0);
3428 if (flags & OEP_PURE_SAME)
3429 cef &= ECF_CONST | ECF_PURE;
3436 /* Now see if all the arguments are the same. */
3438 const_call_expr_arg_iterator iter0, iter1;
3440 for (a0 = first_const_call_expr_arg (arg0, &iter0),
3441 a1 = first_const_call_expr_arg (arg1, &iter1);
3443 a0 = next_const_call_expr_arg (&iter0),
3444 a1 = next_const_call_expr_arg (&iter1))
3445 if (! operand_equal_p (a0, a1, flags))
3448 /* If we get here and both argument lists are exhausted
3449 then the CALL_EXPRs are equal. */
3450 return ! (a0 || a1);
3456 case tcc_declaration:
3457 /* Consider __builtin_sqrt equal to sqrt. */
3458 return (TREE_CODE (arg0) == FUNCTION_DECL
3459 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
3460 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
3461 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
3468 #undef OP_SAME_WITH_NULL
3471 /* Similar to operand_equal_p, but see if ARG0 might have been made by
3472 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
3474 When in doubt, return 0. */
3477 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
3479 int unsignedp1, unsignedpo;
3480 tree primarg0, primarg1, primother;
3481 unsigned int correct_width;
3483 if (operand_equal_p (arg0, arg1, 0))
3486 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
3487 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
3490 /* Discard any conversions that don't change the modes of ARG0 and ARG1
3491 and see if the inner values are the same. This removes any
3492 signedness comparison, which doesn't matter here. */
3493 primarg0 = arg0, primarg1 = arg1;
3494 STRIP_NOPS (primarg0);
3495 STRIP_NOPS (primarg1);
3496 if (operand_equal_p (primarg0, primarg1, 0))
3499 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
3500 actual comparison operand, ARG0.
3502 First throw away any conversions to wider types
3503 already present in the operands. */
3505 primarg1 = get_narrower (arg1, &unsignedp1);
3506 primother = get_narrower (other, &unsignedpo);
3508 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
3509 if (unsignedp1 == unsignedpo
3510 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
3511 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
3513 tree type = TREE_TYPE (arg0);
3515 /* Make sure shorter operand is extended the right way
3516 to match the longer operand. */
3517 primarg1 = fold_convert (signed_or_unsigned_type_for
3518 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
3520 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
3527 /* See if ARG is an expression that is either a comparison or is performing
3528 arithmetic on comparisons. The comparisons must only be comparing
3529 two different values, which will be stored in *CVAL1 and *CVAL2; if
3530 they are nonzero it means that some operands have already been found.
3531 No variables may be used anywhere else in the expression except in the
3532 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
3533 the expression and save_expr needs to be called with CVAL1 and CVAL2.
3535 If this is true, return 1. Otherwise, return zero. */
3538 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
3540 enum tree_code code = TREE_CODE (arg);
3541 enum tree_code_class tclass = TREE_CODE_CLASS (code);
3543 /* We can handle some of the tcc_expression cases here. */
3544 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
3546 else if (tclass == tcc_expression
3547 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
3548 || code == COMPOUND_EXPR))
3549 tclass = tcc_binary;
3551 else if (tclass == tcc_expression && code == SAVE_EXPR
3552 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
3554 /* If we've already found a CVAL1 or CVAL2, this expression is
3555 two complex to handle. */
3556 if (*cval1 || *cval2)
3566 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
3569 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
3570 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3571 cval1, cval2, save_p));
3576 case tcc_expression:
3577 if (code == COND_EXPR)
3578 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
3579 cval1, cval2, save_p)
3580 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3581 cval1, cval2, save_p)
3582 && twoval_comparison_p (TREE_OPERAND (arg, 2),
3583 cval1, cval2, save_p));
3586 case tcc_comparison:
3587 /* First see if we can handle the first operand, then the second. For
3588 the second operand, we know *CVAL1 can't be zero. It must be that
3589 one side of the comparison is each of the values; test for the
3590 case where this isn't true by failing if the two operands
3593 if (operand_equal_p (TREE_OPERAND (arg, 0),
3594 TREE_OPERAND (arg, 1), 0))
3598 *cval1 = TREE_OPERAND (arg, 0);
3599 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
3601 else if (*cval2 == 0)
3602 *cval2 = TREE_OPERAND (arg, 0);
3603 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
3608 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
3610 else if (*cval2 == 0)
3611 *cval2 = TREE_OPERAND (arg, 1);
3612 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
3624 /* ARG is a tree that is known to contain just arithmetic operations and
3625 comparisons. Evaluate the operations in the tree substituting NEW0 for
3626 any occurrence of OLD0 as an operand of a comparison and likewise for
3630 eval_subst (location_t loc, tree arg, tree old0, tree new0,
3631 tree old1, tree new1)
3633 tree type = TREE_TYPE (arg);
3634 enum tree_code code = TREE_CODE (arg);
3635 enum tree_code_class tclass = TREE_CODE_CLASS (code);
3637 /* We can handle some of the tcc_expression cases here. */
3638 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
3640 else if (tclass == tcc_expression
3641 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
3642 tclass = tcc_binary;
3647 return fold_build1_loc (loc, code, type,
3648 eval_subst (loc, TREE_OPERAND (arg, 0),
3649 old0, new0, old1, new1));
3652 return fold_build2_loc (loc, code, type,
3653 eval_subst (loc, TREE_OPERAND (arg, 0),
3654 old0, new0, old1, new1),
3655 eval_subst (loc, TREE_OPERAND (arg, 1),
3656 old0, new0, old1, new1));
3658 case tcc_expression:
3662 return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
3666 return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
3670 return fold_build3_loc (loc, code, type,
3671 eval_subst (loc, TREE_OPERAND (arg, 0),
3672 old0, new0, old1, new1),
3673 eval_subst (loc, TREE_OPERAND (arg, 1),
3674 old0, new0, old1, new1),
3675 eval_subst (loc, TREE_OPERAND (arg, 2),
3676 old0, new0, old1, new1));
3680 /* Fall through - ??? */
3682 case tcc_comparison:
3684 tree arg0 = TREE_OPERAND (arg, 0);
3685 tree arg1 = TREE_OPERAND (arg, 1);
3687 /* We need to check both for exact equality and tree equality. The
3688 former will be true if the operand has a side-effect. In that
3689 case, we know the operand occurred exactly once. */
3691 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
3693 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
3696 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
3698 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
3701 return fold_build2_loc (loc, code, type, arg0, arg1);
3709 /* Return a tree for the case when the result of an expression is RESULT
3710 converted to TYPE and OMITTED was previously an operand of the expression
3711 but is now not needed (e.g., we folded OMITTED * 0).
3713 If OMITTED has side effects, we must evaluate it. Otherwise, just do
3714 the conversion of RESULT to TYPE. */
3717 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
3719 tree t = fold_convert_loc (loc, type, result);
3721 /* If the resulting operand is an empty statement, just return the omitted
3722 statement casted to void. */
3723 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3725 t = build1 (NOP_EXPR, void_type_node, fold_ignored_result (omitted));
3726 goto omit_one_operand_exit;
3729 if (TREE_SIDE_EFFECTS (omitted))
3731 t = build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
3732 goto omit_one_operand_exit;
3735 return non_lvalue_loc (loc, t);
3737 omit_one_operand_exit:
3738 protected_set_expr_location (t, loc);
3742 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
3745 pedantic_omit_one_operand_loc (location_t loc, tree type, tree result,
3748 tree t = fold_convert_loc (loc, type, result);
3750 /* If the resulting operand is an empty statement, just return the omitted
3751 statement casted to void. */
3752 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3754 t = build1 (NOP_EXPR, void_type_node, fold_ignored_result (omitted));
3755 goto pedantic_omit_one_operand_exit;
3758 if (TREE_SIDE_EFFECTS (omitted))
3760 t = build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
3761 goto pedantic_omit_one_operand_exit;
3764 return pedantic_non_lvalue_loc (loc, t);
3766 pedantic_omit_one_operand_exit:
3767 protected_set_expr_location (t, loc);
3771 /* Return a tree for the case when the result of an expression is RESULT
3772 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3773 of the expression but are now not needed.
3775 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3776 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3777 evaluated before OMITTED2. Otherwise, if neither has side effects,
3778 just do the conversion of RESULT to TYPE. */
3781 omit_two_operands_loc (location_t loc, tree type, tree result,
3782 tree omitted1, tree omitted2)
3784 tree t = fold_convert_loc (loc, type, result);
3786 if (TREE_SIDE_EFFECTS (omitted2))
3788 t = build2 (COMPOUND_EXPR, type, omitted2, t);
3789 SET_EXPR_LOCATION (t, loc);
3791 if (TREE_SIDE_EFFECTS (omitted1))
3793 t = build2 (COMPOUND_EXPR, type, omitted1, t);
3794 SET_EXPR_LOCATION (t, loc);
3797 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
3801 /* Return a simplified tree node for the truth-negation of ARG. This
3802 never alters ARG itself. We assume that ARG is an operation that
3803 returns a truth value (0 or 1).
3805 FIXME: one would think we would fold the result, but it causes
3806 problems with the dominator optimizer. */
3809 fold_truth_not_expr (location_t loc, tree arg)
3811 tree t, type = TREE_TYPE (arg);
3812 enum tree_code code = TREE_CODE (arg);
3813 location_t loc1, loc2;
3815 /* If this is a comparison, we can simply invert it, except for
3816 floating-point non-equality comparisons, in which case we just
3817 enclose a TRUTH_NOT_EXPR around what we have. */
3819 if (TREE_CODE_CLASS (code) == tcc_comparison)
3821 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3822 if (FLOAT_TYPE_P (op_type)
3823 && flag_trapping_math
3824 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3825 && code != NE_EXPR && code != EQ_EXPR)
3828 code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (op_type)));
3829 if (code == ERROR_MARK)
3832 t = build2 (code, type, TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
3833 SET_EXPR_LOCATION (t, loc);
3840 return constant_boolean_node (integer_zerop (arg), type);
3842 case TRUTH_AND_EXPR:
3843 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
3844 loc2 = EXPR_LOCATION (TREE_OPERAND (arg, 1));
3845 if (loc1 == UNKNOWN_LOCATION)
3847 if (loc2 == UNKNOWN_LOCATION)
3849 t = build2 (TRUTH_OR_EXPR, type,
3850 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3851 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3855 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
3856 loc2 = EXPR_LOCATION (TREE_OPERAND (arg, 1));
3857 if (loc1 == UNKNOWN_LOCATION)
3859 if (loc2 == UNKNOWN_LOCATION)
3861 t = build2 (TRUTH_AND_EXPR, type,
3862 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3863 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3866 case TRUTH_XOR_EXPR:
3867 /* Here we can invert either operand. We invert the first operand
3868 unless the second operand is a TRUTH_NOT_EXPR in which case our
3869 result is the XOR of the first operand with the inside of the
3870 negation of the second operand. */
3872 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3873 t = build2 (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3874 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3876 t = build2 (TRUTH_XOR_EXPR, type,
3877 invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
3878 TREE_OPERAND (arg, 1));
3881 case TRUTH_ANDIF_EXPR:
3882 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
3883 loc2 = EXPR_LOCATION (TREE_OPERAND (arg, 1));
3884 if (loc1 == UNKNOWN_LOCATION)
3886 if (loc2 == UNKNOWN_LOCATION)
3888 t = build2 (TRUTH_ORIF_EXPR, type,
3889 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3890 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3893 case TRUTH_ORIF_EXPR:
3894 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
3895 loc2 = EXPR_LOCATION (TREE_OPERAND (arg, 1));
3896 if (loc1 == UNKNOWN_LOCATION)
3898 if (loc2 == UNKNOWN_LOCATION)
3900 t = build2 (TRUTH_ANDIF_EXPR, type,
3901 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3902 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3905 case TRUTH_NOT_EXPR:
3906 return TREE_OPERAND (arg, 0);
3910 tree arg1 = TREE_OPERAND (arg, 1);
3911 tree arg2 = TREE_OPERAND (arg, 2);
3913 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 1));
3914 loc2 = EXPR_LOCATION (TREE_OPERAND (arg, 2));
3915 if (loc1 == UNKNOWN_LOCATION)
3917 if (loc2 == UNKNOWN_LOCATION)
3920 /* A COND_EXPR may have a throw as one operand, which
3921 then has void type. Just leave void operands
3923 t = build3 (COND_EXPR, type, TREE_OPERAND (arg, 0),
3924 VOID_TYPE_P (TREE_TYPE (arg1))
3925 ? arg1 : invert_truthvalue_loc (loc1, arg1),
3926 VOID_TYPE_P (TREE_TYPE (arg2))
3927 ? arg2 : invert_truthvalue_loc (loc2, arg2));
3932 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 1));
3933 if (loc1 == UNKNOWN_LOCATION)
3935 t = build2 (COMPOUND_EXPR, type,
3936 TREE_OPERAND (arg, 0),
3937 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
3940 case NON_LVALUE_EXPR:
3941 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
3942 if (loc1 == UNKNOWN_LOCATION)
3944 return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
3947 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3949 t = build1 (TRUTH_NOT_EXPR, type, arg);
3953 /* ... fall through ... */
3956 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
3957 if (loc1 == UNKNOWN_LOCATION)
3959 t = build1 (TREE_CODE (arg), type,
3960 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3964 if (!integer_onep (TREE_OPERAND (arg, 1)))
3966 t = build2 (EQ_EXPR, type, arg, build_int_cst (type, 0));
3970 t = build1 (TRUTH_NOT_EXPR, type, arg);
3973 case CLEANUP_POINT_EXPR:
3974 loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
3975 if (loc1 == UNKNOWN_LOCATION)
3977 t = build1 (CLEANUP_POINT_EXPR, type,
3978 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3987 SET_EXPR_LOCATION (t, loc);
3992 /* Return a simplified tree node for the truth-negation of ARG. This
3993 never alters ARG itself. We assume that ARG is an operation that
3994 returns a truth value (0 or 1).
3996 FIXME: one would think we would fold the result, but it causes
3997 problems with the dominator optimizer. */
4000 invert_truthvalue_loc (location_t loc, tree arg)
4004 if (TREE_CODE (arg) == ERROR_MARK)
4007 tem = fold_truth_not_expr (loc, arg);
4010 tem = build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg), arg);
4011 SET_EXPR_LOCATION (tem, loc);
4017 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
4018 operands are another bit-wise operation with a common input. If so,
4019 distribute the bit operations to save an operation and possibly two if
4020 constants are involved. For example, convert
4021 (A | B) & (A | C) into A | (B & C)
4022 Further simplification will occur if B and C are constants.
4024 If this optimization cannot be done, 0 will be returned. */
4027 distribute_bit_expr (location_t loc, enum tree_code code, tree type,
4028 tree arg0, tree arg1)
4033 if (TREE_CODE (arg0) != TREE_CODE (arg1)
4034 || TREE_CODE (arg0) == code
4035 || (TREE_CODE (arg0) != BIT_AND_EXPR
4036 && TREE_CODE (arg0) != BIT_IOR_EXPR))
4039 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
4041 common = TREE_OPERAND (arg0, 0);
4042 left = TREE_OPERAND (arg0, 1);
4043 right = TREE_OPERAND (arg1, 1);
4045 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
4047 common = TREE_OPERAND (arg0, 0);
4048 left = TREE_OPERAND (arg0, 1);
4049 right = TREE_OPERAND (arg1, 0);
4051 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
4053 common = TREE_OPERAND (arg0, 1);
4054 left = TREE_OPERAND (arg0, 0);
4055 right = TREE_OPERAND (arg1, 1);
4057 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
4059 common = TREE_OPERAND (arg0, 1);
4060 left = TREE_OPERAND (arg0, 0);
4061 right = TREE_OPERAND (arg1, 0);
4066 common = fold_convert_loc (loc, type, common);
4067 left = fold_convert_loc (loc, type, left);
4068 right = fold_convert_loc (loc, type, right);
4069 return fold_build2_loc (loc, TREE_CODE (arg0), type, common,
4070 fold_build2_loc (loc, code, type, left, right));
4073 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
4074 with code CODE. This optimization is unsafe. */
4076 distribute_real_division (location_t loc, enum tree_code code, tree type,
4077 tree arg0, tree arg1)
4079 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
4080 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
4082 /* (A / C) +- (B / C) -> (A +- B) / C. */
4084 && operand_equal_p (TREE_OPERAND (arg0, 1),
4085 TREE_OPERAND (arg1, 1), 0))
4086 return fold_build2_loc (loc, mul0 ? MULT_EXPR : RDIV_EXPR, type,
4087 fold_build2_loc (loc, code, type,
4088 TREE_OPERAND (arg0, 0),
4089 TREE_OPERAND (arg1, 0)),
4090 TREE_OPERAND (arg0, 1));
4092 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
4093 if (operand_equal_p (TREE_OPERAND (arg0, 0),
4094 TREE_OPERAND (arg1, 0), 0)
4095 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
4096 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
4098 REAL_VALUE_TYPE r0, r1;
4099 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
4100 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
4102 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
4104 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
4105 real_arithmetic (&r0, code, &r0, &r1);
4106 return fold_build2_loc (loc, MULT_EXPR, type,
4107 TREE_OPERAND (arg0, 0),
4108 build_real (type, r0));
4114 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
4115 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
4118 make_bit_field_ref (location_t loc, tree inner, tree type,
4119 HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos, int unsignedp)
4121 tree result, bftype;
4125 tree size = TYPE_SIZE (TREE_TYPE (inner));
4126 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
4127 || POINTER_TYPE_P (TREE_TYPE (inner)))
4128 && host_integerp (size, 0)
4129 && tree_low_cst (size, 0) == bitsize)
4130 return fold_convert_loc (loc, type, inner);
4134 if (TYPE_PRECISION (bftype) != bitsize
4135 || TYPE_UNSIGNED (bftype) == !unsignedp)
4136 bftype = build_nonstandard_integer_type (bitsize, 0);
4138 result = build3 (BIT_FIELD_REF, bftype, inner,
4139 size_int (bitsize), bitsize_int (bitpos));
4140 SET_EXPR_LOCATION (result, loc);
4143 result = fold_convert_loc (loc, type, result);
4148 /* Optimize a bit-field compare.
4150 There are two cases: First is a compare against a constant and the
4151 second is a comparison of two items where the fields are at the same
4152 bit position relative to the start of a chunk (byte, halfword, word)
4153 large enough to contain it. In these cases we can avoid the shift
4154 implicit in bitfield extractions.
4156 For constants, we emit a compare of the shifted constant with the
4157 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
4158 compared. For two fields at the same position, we do the ANDs with the
4159 similar mask and compare the result of the ANDs.
4161 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
4162 COMPARE_TYPE is the type of the comparison, and LHS and RHS
4163 are the left and right operands of the comparison, respectively.
4165 If the optimization described above can be done, we return the resulting
4166 tree. Otherwise we return zero. */
4169 optimize_bit_field_compare (location_t loc, enum tree_code code,
4170 tree compare_type, tree lhs, tree rhs)
4172 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
4173 tree type = TREE_TYPE (lhs);
4174 tree signed_type, unsigned_type;
4175 int const_p = TREE_CODE (rhs) == INTEGER_CST;
4176 enum machine_mode lmode, rmode, nmode;
4177 int lunsignedp, runsignedp;
4178 int lvolatilep = 0, rvolatilep = 0;
4179 tree linner, rinner = NULL_TREE;
4183 /* Get all the information about the extractions being done. If the bit size
4184 if the same as the size of the underlying object, we aren't doing an
4185 extraction at all and so can do nothing. We also don't want to
4186 do anything if the inner expression is a PLACEHOLDER_EXPR since we
4187 then will no longer be able to replace it. */
4188 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
4189 &lunsignedp, &lvolatilep, false);
4190 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
4191 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
4196 /* If this is not a constant, we can only do something if bit positions,
4197 sizes, and signedness are the same. */
4198 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
4199 &runsignedp, &rvolatilep, false);
4201 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
4202 || lunsignedp != runsignedp || offset != 0
4203 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
4207 /* See if we can find a mode to refer to this field. We should be able to,
4208 but fail if we can't. */
4209 nmode = get_best_mode (lbitsize, lbitpos,
4210 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
4211 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
4212 TYPE_ALIGN (TREE_TYPE (rinner))),
4213 word_mode, lvolatilep || rvolatilep);
4214 if (nmode == VOIDmode)
4217 /* Set signed and unsigned types of the precision of this mode for the
4219 signed_type = lang_hooks.types.type_for_mode (nmode, 0);
4220 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
4222 /* Compute the bit position and size for the new reference and our offset
4223 within it. If the new reference is the same size as the original, we
4224 won't optimize anything, so return zero. */
4225 nbitsize = GET_MODE_BITSIZE (nmode);
4226 nbitpos = lbitpos & ~ (nbitsize - 1);
4228 if (nbitsize == lbitsize)
4231 if (BYTES_BIG_ENDIAN)
4232 lbitpos = nbitsize - lbitsize - lbitpos;
4234 /* Make the mask to be used against the extracted field. */
4235 mask = build_int_cst_type (unsigned_type, -1);
4236 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
4237 mask = const_binop (RSHIFT_EXPR, mask,
4238 size_int (nbitsize - lbitsize - lbitpos), 0);
4241 /* If not comparing with constant, just rework the comparison
4243 return fold_build2_loc (loc, code, compare_type,
4244 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
4245 make_bit_field_ref (loc, linner,
4250 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
4251 make_bit_field_ref (loc, rinner,
4257 /* Otherwise, we are handling the constant case. See if the constant is too
4258 big for the field. Warn and return a tree of for 0 (false) if so. We do
4259 this not only for its own sake, but to avoid having to test for this
4260 error case below. If we didn't, we might generate wrong code.
4262 For unsigned fields, the constant shifted right by the field length should
4263 be all zero. For signed fields, the high-order bits should agree with
4268 if (! integer_zerop (const_binop (RSHIFT_EXPR,
4269 fold_convert_loc (loc,
4270 unsigned_type, rhs),
4271 size_int (lbitsize), 0)))
4273 warning (0, "comparison is always %d due to width of bit-field",
4275 return constant_boolean_node (code == NE_EXPR, compare_type);
4280 tree tem = const_binop (RSHIFT_EXPR,
4281 fold_convert_loc (loc, signed_type, rhs),
4282 size_int (lbitsize - 1), 0);
4283 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
4285 warning (0, "comparison is always %d due to width of bit-field",
4287 return constant_boolean_node (code == NE_EXPR, compare_type);
4291 /* Single-bit compares should always be against zero. */
4292 if (lbitsize == 1 && ! integer_zerop (rhs))
4294 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
4295 rhs = build_int_cst (type, 0);
4298 /* Make a new bitfield reference, shift the constant over the
4299 appropriate number of bits and mask it with the computed mask
4300 (in case this was a signed field). If we changed it, make a new one. */
4301 lhs = make_bit_field_ref (loc, linner, unsigned_type, nbitsize, nbitpos, 1);
4304 TREE_SIDE_EFFECTS (lhs) = 1;
4305 TREE_THIS_VOLATILE (lhs) = 1;
4308 rhs = const_binop (BIT_AND_EXPR,
4309 const_binop (LSHIFT_EXPR,
4310 fold_convert_loc (loc, unsigned_type, rhs),
4311 size_int (lbitpos), 0),
4314 lhs = build2 (code, compare_type,
4315 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask),
4317 SET_EXPR_LOCATION (lhs, loc);
4321 /* Subroutine for fold_truthop: decode a field reference.
4323 If EXP is a comparison reference, we return the innermost reference.
4325 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
4326 set to the starting bit number.
4328 If the innermost field can be completely contained in a mode-sized
4329 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
4331 *PVOLATILEP is set to 1 if the any expression encountered is volatile;
4332 otherwise it is not changed.
4334 *PUNSIGNEDP is set to the signedness of the field.
4336 *PMASK is set to the mask used. This is either contained in a
4337 BIT_AND_EXPR or derived from the width of the field.
4339 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
4341 Return 0 if this is not a component reference or is one that we can't
4342 do anything with. */
4345 decode_field_reference (location_t loc, tree exp, HOST_WIDE_INT *pbitsize,
4346 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
4347 int *punsignedp, int *pvolatilep,
4348 tree *pmask, tree *pand_mask)
4350 tree outer_type = 0;
4352 tree mask, inner, offset;
4354 unsigned int precision;
4356 /* All the optimizations using this function assume integer fields.
4357 There are problems with FP fields since the type_for_size call
4358 below can fail for, e.g., XFmode. */
4359 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
4362 /* We are interested in the bare arrangement of bits, so strip everything
4363 that doesn't affect the machine mode. However, record the type of the
4364 outermost expression if it may matter below. */
4365 if (CONVERT_EXPR_P (exp)
4366 || TREE_CODE (exp) == NON_LVALUE_EXPR)
4367 outer_type = TREE_TYPE (exp);
4370 if (TREE_CODE (exp) == BIT_AND_EXPR)
4372 and_mask = TREE_OPERAND (exp, 1);
4373 exp = TREE_OPERAND (exp, 0);
4374 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
4375 if (TREE_CODE (and_mask) != INTEGER_CST)
4379 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
4380 punsignedp, pvolatilep, false);
4381 if ((inner == exp && and_mask == 0)
4382 || *pbitsize < 0 || offset != 0
4383 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
4386 /* If the number of bits in the reference is the same as the bitsize of
4387 the outer type, then the outer type gives the signedness. Otherwise
4388 (in case of a small bitfield) the signedness is unchanged. */
4389 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
4390 *punsignedp = TYPE_UNSIGNED (outer_type);
4392 /* Compute the mask to access the bitfield. */
4393 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
4394 precision = TYPE_PRECISION (unsigned_type);
4396 mask = build_int_cst_type (unsigned_type, -1);
4398 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
4399 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
4401 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
4403 mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
4404 fold_convert_loc (loc, unsigned_type, and_mask), mask);
4407 *pand_mask = and_mask;
4411 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
4415 all_ones_mask_p (const_tree mask, int size)
4417 tree type = TREE_TYPE (mask);
4418 unsigned int precision = TYPE_PRECISION (type);
4421 tmask = build_int_cst_type (signed_type_for (type), -1);
4424 tree_int_cst_equal (mask,
4425 const_binop (RSHIFT_EXPR,
4426 const_binop (LSHIFT_EXPR, tmask,
4427 size_int (precision - size),
4429 size_int (precision - size), 0));
4432 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
4433 represents the sign bit of EXP's type. If EXP represents a sign
4434 or zero extension, also test VAL against the unextended type.
4435 The return value is the (sub)expression whose sign bit is VAL,
4436 or NULL_TREE otherwise. */
4439 sign_bit_p (tree exp, const_tree val)
4441 unsigned HOST_WIDE_INT mask_lo, lo;
4442 HOST_WIDE_INT mask_hi, hi;
4446 /* Tree EXP must have an integral type. */
4447 t = TREE_TYPE (exp);
4448 if (! INTEGRAL_TYPE_P (t))
4451 /* Tree VAL must be an integer constant. */
4452 if (TREE_CODE (val) != INTEGER_CST
4453 || TREE_OVERFLOW (val))
4456 width = TYPE_PRECISION (t);
4457 if (width > HOST_BITS_PER_WIDE_INT)
4459 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
4462 mask_hi = ((unsigned HOST_WIDE_INT) -1
4463 >> (2 * HOST_BITS_PER_WIDE_INT - width));
4469 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
4472 mask_lo = ((unsigned HOST_WIDE_INT) -1
4473 >> (HOST_BITS_PER_WIDE_INT - width));
4476 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
4477 treat VAL as if it were unsigned. */
4478 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
4479 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
4482 /* Handle extension from a narrower type. */
4483 if (TREE_CODE (exp) == NOP_EXPR
4484 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
4485 return sign_bit_p (TREE_OPERAND (exp, 0), val);
4490 /* Subroutine for fold_truthop: determine if an operand is simple enough
4491 to be evaluated unconditionally. */
4494 simple_operand_p (const_tree exp)
4496 /* Strip any conversions that don't change the machine mode. */
4499 return (CONSTANT_CLASS_P (exp)
4500 || TREE_CODE (exp) == SSA_NAME
4502 && ! TREE_ADDRESSABLE (exp)
4503 && ! TREE_THIS_VOLATILE (exp)
4504 && ! DECL_NONLOCAL (exp)
4505 /* Don't regard global variables as simple. They may be
4506 allocated in ways unknown to the compiler (shared memory,
4507 #pragma weak, etc). */
4508 && ! TREE_PUBLIC (exp)
4509 && ! DECL_EXTERNAL (exp)
4510 /* Loading a static variable is unduly expensive, but global
4511 registers aren't expensive. */
4512 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
4515 /* The following functions are subroutines to fold_range_test and allow it to
4516 try to change a logical combination of comparisons into a range test.
4519 X == 2 || X == 3 || X == 4 || X == 5
4523 (unsigned) (X - 2) <= 3
4525 We describe each set of comparisons as being either inside or outside
4526 a range, using a variable named like IN_P, and then describe the
4527 range with a lower and upper bound. If one of the bounds is omitted,
4528 it represents either the highest or lowest value of the type.
4530 In the comments below, we represent a range by two numbers in brackets
4531 preceded by a "+" to designate being inside that range, or a "-" to
4532 designate being outside that range, so the condition can be inverted by
4533 flipping the prefix. An omitted bound is represented by a "-". For
4534 example, "- [-, 10]" means being outside the range starting at the lowest
4535 possible value and ending at 10, in other words, being greater than 10.
4536 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
4539 We set up things so that the missing bounds are handled in a consistent
4540 manner so neither a missing bound nor "true" and "false" need to be
4541 handled using a special case. */
4543 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
4544 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
4545 and UPPER1_P are nonzero if the respective argument is an upper bound
4546 and zero for a lower. TYPE, if nonzero, is the type of the result; it
4547 must be specified for a comparison. ARG1 will be converted to ARG0's
4548 type if both are specified. */
4551 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
4552 tree arg1, int upper1_p)
4558 /* If neither arg represents infinity, do the normal operation.
4559 Else, if not a comparison, return infinity. Else handle the special
4560 comparison rules. Note that most of the cases below won't occur, but
4561 are handled for consistency. */
4563 if (arg0 != 0 && arg1 != 0)
4565 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
4566 arg0, fold_convert (TREE_TYPE (arg0), arg1));
4568 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
4571 if (TREE_CODE_CLASS (code) != tcc_comparison)
4574 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
4575 for neither. In real maths, we cannot assume open ended ranges are
4576 the same. But, this is computer arithmetic, where numbers are finite.
4577 We can therefore make the transformation of any unbounded range with
4578 the value Z, Z being greater than any representable number. This permits
4579 us to treat unbounded ranges as equal. */
4580 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
4581 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
4585 result = sgn0 == sgn1;
4588 result = sgn0 != sgn1;
4591 result = sgn0 < sgn1;
4594 result = sgn0 <= sgn1;
4597 result = sgn0 > sgn1;
4600 result = sgn0 >= sgn1;
4606 return constant_boolean_node (result, type);
4609 /* Given EXP, a logical expression, set the range it is testing into
4610 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
4611 actually being tested. *PLOW and *PHIGH will be made of the same
4612 type as the returned expression. If EXP is not a comparison, we
4613 will most likely not be returning a useful value and range. Set
4614 *STRICT_OVERFLOW_P to true if the return value is only valid
4615 because signed overflow is undefined; otherwise, do not change
4616 *STRICT_OVERFLOW_P. */
4619 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
4620 bool *strict_overflow_p)
4622 enum tree_code code;
4623 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
4624 tree exp_type = NULL_TREE, arg0_type = NULL_TREE;
4626 tree low, high, n_low, n_high;
4627 location_t loc = EXPR_LOCATION (exp);
4629 /* Start with simply saying "EXP != 0" and then look at the code of EXP
4630 and see if we can refine the range. Some of the cases below may not
4631 happen, but it doesn't seem worth worrying about this. We "continue"
4632 the outer loop when we've changed something; otherwise we "break"
4633 the switch, which will "break" the while. */
4636 low = high = build_int_cst (TREE_TYPE (exp), 0);
4640 code = TREE_CODE (exp);
4641 exp_type = TREE_TYPE (exp);
4643 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
4645 if (TREE_OPERAND_LENGTH (exp) > 0)
4646 arg0 = TREE_OPERAND (exp, 0);
4647 if (TREE_CODE_CLASS (code) == tcc_comparison
4648 || TREE_CODE_CLASS (code) == tcc_unary
4649 || TREE_CODE_CLASS (code) == tcc_binary)
4650 arg0_type = TREE_TYPE (arg0);
4651 if (TREE_CODE_CLASS (code) == tcc_binary
4652 || TREE_CODE_CLASS (code) == tcc_comparison
4653 || (TREE_CODE_CLASS (code) == tcc_expression
4654 && TREE_OPERAND_LENGTH (exp) > 1))
4655 arg1 = TREE_OPERAND (exp, 1);
4660 case TRUTH_NOT_EXPR:
4661 in_p = ! in_p, exp = arg0;
4664 case EQ_EXPR: case NE_EXPR:
4665 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
4666 /* We can only do something if the range is testing for zero
4667 and if the second operand is an integer constant. Note that
4668 saying something is "in" the range we make is done by
4669 complementing IN_P since it will set in the initial case of
4670 being not equal to zero; "out" is leaving it alone. */
4671 if (low == 0 || high == 0
4672 || ! integer_zerop (low) || ! integer_zerop (high)
4673 || TREE_CODE (arg1) != INTEGER_CST)
4678 case NE_EXPR: /* - [c, c] */
4681 case EQ_EXPR: /* + [c, c] */
4682 in_p = ! in_p, low = high = arg1;
4684 case GT_EXPR: /* - [-, c] */
4685 low = 0, high = arg1;
4687 case GE_EXPR: /* + [c, -] */
4688 in_p = ! in_p, low = arg1, high = 0;
4690 case LT_EXPR: /* - [c, -] */
4691 low = arg1, high = 0;
4693 case LE_EXPR: /* + [-, c] */
4694 in_p = ! in_p, low = 0, high = arg1;
4700 /* If this is an unsigned comparison, we also know that EXP is
4701 greater than or equal to zero. We base the range tests we make
4702 on that fact, so we record it here so we can parse existing
4703 range tests. We test arg0_type since often the return type
4704 of, e.g. EQ_EXPR, is boolean. */
4705 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
4707 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4709 build_int_cst (arg0_type, 0),
4713 in_p = n_in_p, low = n_low, high = n_high;
4715 /* If the high bound is missing, but we have a nonzero low
4716 bound, reverse the range so it goes from zero to the low bound
4718 if (high == 0 && low && ! integer_zerop (low))
4721 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
4722 integer_one_node, 0);
4723 low = build_int_cst (arg0_type, 0);
4731 /* (-x) IN [a,b] -> x in [-b, -a] */
4732 n_low = range_binop (MINUS_EXPR, exp_type,
4733 build_int_cst (exp_type, 0),
4735 n_high = range_binop (MINUS_EXPR, exp_type,
4736 build_int_cst (exp_type, 0),
4738 low = n_low, high = n_high;
4744 exp = build2 (MINUS_EXPR, exp_type, negate_expr (arg0),
4745 build_int_cst (exp_type, 1));
4746 SET_EXPR_LOCATION (exp, loc);
4749 case PLUS_EXPR: case MINUS_EXPR:
4750 if (TREE_CODE (arg1) != INTEGER_CST)
4753 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
4754 move a constant to the other side. */
4755 if (!TYPE_UNSIGNED (arg0_type)
4756 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4759 /* If EXP is signed, any overflow in the computation is undefined,
4760 so we don't worry about it so long as our computations on
4761 the bounds don't overflow. For unsigned, overflow is defined
4762 and this is exactly the right thing. */
4763 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4764 arg0_type, low, 0, arg1, 0);
4765 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4766 arg0_type, high, 1, arg1, 0);
4767 if ((n_low != 0 && TREE_OVERFLOW (n_low))
4768 || (n_high != 0 && TREE_OVERFLOW (n_high)))
4771 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
4772 *strict_overflow_p = true;
4774 /* Check for an unsigned range which has wrapped around the maximum
4775 value thus making n_high < n_low, and normalize it. */
4776 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
4778 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
4779 integer_one_node, 0);
4780 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
4781 integer_one_node, 0);
4783 /* If the range is of the form +/- [ x+1, x ], we won't
4784 be able to normalize it. But then, it represents the
4785 whole range or the empty set, so make it
4787 if (tree_int_cst_equal (n_low, low)
4788 && tree_int_cst_equal (n_high, high))
4794 low = n_low, high = n_high;
4799 CASE_CONVERT: case NON_LVALUE_EXPR:
4800 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4803 if (! INTEGRAL_TYPE_P (arg0_type)
4804 || (low != 0 && ! int_fits_type_p (low, arg0_type))
4805 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4808 n_low = low, n_high = high;
4811 n_low = fold_convert_loc (loc, arg0_type, n_low);
4814 n_high = fold_convert_loc (loc, arg0_type, n_high);
4817 /* If we're converting arg0 from an unsigned type, to exp,
4818 a signed type, we will be doing the comparison as unsigned.
4819 The tests above have already verified that LOW and HIGH
4822 So we have to ensure that we will handle large unsigned
4823 values the same way that the current signed bounds treat
4826 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4830 /* For fixed-point modes, we need to pass the saturating flag
4831 as the 2nd parameter. */
4832 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
4833 equiv_type = lang_hooks.types.type_for_mode
4834 (TYPE_MODE (arg0_type),
4835 TYPE_SATURATING (arg0_type));
4837 equiv_type = lang_hooks.types.type_for_mode
4838 (TYPE_MODE (arg0_type), 1);
4840 /* A range without an upper bound is, naturally, unbounded.
4841 Since convert would have cropped a very large value, use
4842 the max value for the destination type. */
4844 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4845 : TYPE_MAX_VALUE (arg0_type);
4847 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4848 high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
4849 fold_convert_loc (loc, arg0_type,
4851 build_int_cst (arg0_type, 1));
4853 /* If the low bound is specified, "and" the range with the
4854 range for which the original unsigned value will be
4858 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4859 1, n_low, n_high, 1,
4860 fold_convert_loc (loc, arg0_type,
4865 in_p = (n_in_p == in_p);
4869 /* Otherwise, "or" the range with the range of the input
4870 that will be interpreted as negative. */
4871 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4872 0, n_low, n_high, 1,
4873 fold_convert_loc (loc, arg0_type,
4878 in_p = (in_p != n_in_p);
4883 low = n_low, high = n_high;
4893 /* If EXP is a constant, we can evaluate whether this is true or false. */
4894 if (TREE_CODE (exp) == INTEGER_CST)
4896 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4898 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4904 *pin_p = in_p, *plow = low, *phigh = high;
4908 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4909 type, TYPE, return an expression to test if EXP is in (or out of, depending
4910 on IN_P) the range. Return 0 if the test couldn't be created. */
4913 build_range_check (location_t loc, tree type, tree exp, int in_p,
4914 tree low, tree high)
4916 tree etype = TREE_TYPE (exp), value;
4918 #ifdef HAVE_canonicalize_funcptr_for_compare
4919 /* Disable this optimization for function pointer expressions
4920 on targets that require function pointer canonicalization. */
4921 if (HAVE_canonicalize_funcptr_for_compare
4922 && TREE_CODE (etype) == POINTER_TYPE
4923 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4929 value = build_range_check (loc, type, exp, 1, low, high);
4931 return invert_truthvalue_loc (loc, value);
4936 if (low == 0 && high == 0)
4937 return build_int_cst (type, 1);
4940 return fold_build2_loc (loc, LE_EXPR, type, exp,
4941 fold_convert_loc (loc, etype, high));
4944 return fold_build2_loc (loc, GE_EXPR, type, exp,
4945 fold_convert_loc (loc, etype, low));
4947 if (operand_equal_p (low, high, 0))
4948 return fold_build2_loc (loc, EQ_EXPR, type, exp,
4949 fold_convert_loc (loc, etype, low));
4951 if (integer_zerop (low))
4953 if (! TYPE_UNSIGNED (etype))
4955 etype = unsigned_type_for (etype);
4956 high = fold_convert_loc (loc, etype, high);
4957 exp = fold_convert_loc (loc, etype, exp);
4959 return build_range_check (loc, type, exp, 1, 0, high);
4962 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4963 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4965 unsigned HOST_WIDE_INT lo;
4969 prec = TYPE_PRECISION (etype);
4970 if (prec <= HOST_BITS_PER_WIDE_INT)
4973 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
4977 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
4978 lo = (unsigned HOST_WIDE_INT) -1;
4981 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
4983 if (TYPE_UNSIGNED (etype))
4985 tree signed_etype = signed_type_for (etype);
4986 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
4988 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
4990 etype = signed_etype;
4991 exp = fold_convert_loc (loc, etype, exp);
4993 return fold_build2_loc (loc, GT_EXPR, type, exp,
4994 build_int_cst (etype, 0));
4998 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4999 This requires wrap-around arithmetics for the type of the expression.
5000 First make sure that arithmetics in this type is valid, then make sure
5001 that it wraps around. */
5002 if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
5003 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
5004 TYPE_UNSIGNED (etype));
5006 if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
5008 tree utype, minv, maxv;
5010 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
5011 for the type in question, as we rely on this here. */
5012 utype = unsigned_type_for (etype);
5013 maxv = fold_convert_loc (loc, utype, TYPE_MAX_VALUE (etype));
5014 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
5015 integer_one_node, 1);
5016 minv = fold_convert_loc (loc, utype, TYPE_MIN_VALUE (etype));
5018 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
5025 high = fold_convert_loc (loc, etype, high);
5026 low = fold_convert_loc (loc, etype, low);
5027 exp = fold_convert_loc (loc, etype, exp);
5029 value = const_binop (MINUS_EXPR, high, low, 0);
5032 if (POINTER_TYPE_P (etype))
5034 if (value != 0 && !TREE_OVERFLOW (value))
5036 low = fold_convert_loc (loc, sizetype, low);
5037 low = fold_build1_loc (loc, NEGATE_EXPR, sizetype, low);
5038 return build_range_check (loc, type,
5039 fold_build2_loc (loc, POINTER_PLUS_EXPR,
5041 1, build_int_cst (etype, 0), value);
5046 if (value != 0 && !TREE_OVERFLOW (value))
5047 return build_range_check (loc, type,
5048 fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
5049 1, build_int_cst (etype, 0), value);
5054 /* Return the predecessor of VAL in its type, handling the infinite case. */
5057 range_predecessor (tree val)
5059 tree type = TREE_TYPE (val);
5061 if (INTEGRAL_TYPE_P (type)
5062 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
5065 return range_binop (MINUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
5068 /* Return the successor of VAL in its type, handling the infinite case. */
5071 range_successor (tree val)
5073 tree type = TREE_TYPE (val);
5075 if (INTEGRAL_TYPE_P (type)
5076 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
5079 return range_binop (PLUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
5082 /* Given two ranges, see if we can merge them into one. Return 1 if we
5083 can, 0 if we can't. Set the output range into the specified parameters. */
5086 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
5087 tree high0, int in1_p, tree low1, tree high1)
5095 int lowequal = ((low0 == 0 && low1 == 0)
5096 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
5097 low0, 0, low1, 0)));
5098 int highequal = ((high0 == 0 && high1 == 0)
5099 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
5100 high0, 1, high1, 1)));
5102 /* Make range 0 be the range that starts first, or ends last if they
5103 start at the same value. Swap them if it isn't. */
5104 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
5107 && integer_onep (range_binop (GT_EXPR, integer_type_node,
5108 high1, 1, high0, 1))))
5110 temp = in0_p, in0_p = in1_p, in1_p = temp;
5111 tem = low0, low0 = low1, low1 = tem;
5112 tem = high0, high0 = high1, high1 = tem;
5115 /* Now flag two cases, whether the ranges are disjoint or whether the
5116 second range is totally subsumed in the first. Note that the tests
5117 below are simplified by the ones above. */
5118 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
5119 high0, 1, low1, 0));
5120 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
5121 high1, 1, high0, 1));
5123 /* We now have four cases, depending on whether we are including or
5124 excluding the two ranges. */
5127 /* If they don't overlap, the result is false. If the second range
5128 is a subset it is the result. Otherwise, the range is from the start
5129 of the second to the end of the first. */
5131 in_p = 0, low = high = 0;
5133 in_p = 1, low = low1, high = high1;
5135 in_p = 1, low = low1, high = high0;
5138 else if (in0_p && ! in1_p)
5140 /* If they don't overlap, the result is the first range. If they are
5141 equal, the result is false. If the second range is a subset of the
5142 first, and the ranges begin at the same place, we go from just after
5143 the end of the second range to the end of the first. If the second
5144 range is not a subset of the first, or if it is a subset and both
5145 ranges end at the same place, the range starts at the start of the
5146 first range and ends just before the second range.
5147 Otherwise, we can't describe this as a single range. */
5149 in_p = 1, low = low0, high = high0;
5150 else if (lowequal && highequal)
5151 in_p = 0, low = high = 0;
5152 else if (subset && lowequal)
5154 low = range_successor (high1);
5159 /* We are in the weird situation where high0 > high1 but
5160 high1 has no successor. Punt. */
5164 else if (! subset || highequal)
5167 high = range_predecessor (low1);
5171 /* low0 < low1 but low1 has no predecessor. Punt. */
5179 else if (! in0_p && in1_p)
5181 /* If they don't overlap, the result is the second range. If the second
5182 is a subset of the first, the result is false. Otherwise,
5183 the range starts just after the first range and ends at the
5184 end of the second. */
5186 in_p = 1, low = low1, high = high1;
5187 else if (subset || highequal)
5188 in_p = 0, low = high = 0;
5191 low = range_successor (high0);
5196 /* high1 > high0 but high0 has no successor. Punt. */
5204 /* The case where we are excluding both ranges. Here the complex case
5205 is if they don't overlap. In that case, the only time we have a
5206 range is if they are adjacent. If the second is a subset of the
5207 first, the result is the first. Otherwise, the range to exclude
5208 starts at the beginning of the first range and ends at the end of the
5212 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
5213 range_successor (high0),
5215 in_p = 0, low = low0, high = high1;
5218 /* Canonicalize - [min, x] into - [-, x]. */
5219 if (low0 && TREE_CODE (low0) == INTEGER_CST)
5220 switch (TREE_CODE (TREE_TYPE (low0)))
5223 if (TYPE_PRECISION (TREE_TYPE (low0))
5224 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
5228 if (tree_int_cst_equal (low0,
5229 TYPE_MIN_VALUE (TREE_TYPE (low0))))
5233 if (TYPE_UNSIGNED (TREE_TYPE (low0))
5234 && integer_zerop (low0))
5241 /* Canonicalize - [x, max] into - [x, -]. */
5242 if (high1 && TREE_CODE (high1) == INTEGER_CST)
5243 switch (TREE_CODE (TREE_TYPE (high1)))
5246 if (TYPE_PRECISION (TREE_TYPE (high1))
5247 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
5251 if (tree_int_cst_equal (high1,
5252 TYPE_MAX_VALUE (TREE_TYPE (high1))))
5256 if (TYPE_UNSIGNED (TREE_TYPE (high1))
5257 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
5259 integer_one_node, 1)))
5266 /* The ranges might be also adjacent between the maximum and
5267 minimum values of the given type. For
5268 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
5269 return + [x + 1, y - 1]. */
5270 if (low0 == 0 && high1 == 0)
5272 low = range_successor (high0);
5273 high = range_predecessor (low1);
5274 if (low == 0 || high == 0)
5284 in_p = 0, low = low0, high = high0;
5286 in_p = 0, low = low0, high = high1;
5289 *pin_p = in_p, *plow = low, *phigh = high;
5294 /* Subroutine of fold, looking inside expressions of the form
5295 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
5296 of the COND_EXPR. This function is being used also to optimize
5297 A op B ? C : A, by reversing the comparison first.
5299 Return a folded expression whose code is not a COND_EXPR
5300 anymore, or NULL_TREE if no folding opportunity is found. */
5303 fold_cond_expr_with_comparison (location_t loc, tree type,
5304 tree arg0, tree arg1, tree arg2)
5306 enum tree_code comp_code = TREE_CODE (arg0);
5307 tree arg00 = TREE_OPERAND (arg0, 0);
5308 tree arg01 = TREE_OPERAND (arg0, 1);
5309 tree arg1_type = TREE_TYPE (arg1);
5315 /* If we have A op 0 ? A : -A, consider applying the following
5318 A == 0? A : -A same as -A
5319 A != 0? A : -A same as A
5320 A >= 0? A : -A same as abs (A)
5321 A > 0? A : -A same as abs (A)
5322 A <= 0? A : -A same as -abs (A)
5323 A < 0? A : -A same as -abs (A)
5325 None of these transformations work for modes with signed
5326 zeros. If A is +/-0, the first two transformations will
5327 change the sign of the result (from +0 to -0, or vice
5328 versa). The last four will fix the sign of the result,
5329 even though the original expressions could be positive or
5330 negative, depending on the sign of A.
5332 Note that all these transformations are correct if A is
5333 NaN, since the two alternatives (A and -A) are also NaNs. */
5334 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
5335 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
5336 ? real_zerop (arg01)
5337 : integer_zerop (arg01))
5338 && ((TREE_CODE (arg2) == NEGATE_EXPR
5339 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
5340 /* In the case that A is of the form X-Y, '-A' (arg2) may
5341 have already been folded to Y-X, check for that. */
5342 || (TREE_CODE (arg1) == MINUS_EXPR
5343 && TREE_CODE (arg2) == MINUS_EXPR
5344 && operand_equal_p (TREE_OPERAND (arg1, 0),
5345 TREE_OPERAND (arg2, 1), 0)
5346 && operand_equal_p (TREE_OPERAND (arg1, 1),
5347 TREE_OPERAND (arg2, 0), 0))))
5352 tem = fold_convert_loc (loc, arg1_type, arg1);
5353 return pedantic_non_lvalue_loc (loc,
5354 fold_convert_loc (loc, type,
5355 negate_expr (tem)));
5358 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
5361 if (flag_trapping_math)
5366 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
5367 arg1 = fold_convert_loc (loc, signed_type_for
5368 (TREE_TYPE (arg1)), arg1);
5369 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
5370 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
5373 if (flag_trapping_math)
5377 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
5378 arg1 = fold_convert_loc (loc, signed_type_for
5379 (TREE_TYPE (arg1)), arg1);
5380 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
5381 return negate_expr (fold_convert_loc (loc, type, tem));
5383 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5387 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
5388 A == 0 ? A : 0 is always 0 unless A is -0. Note that
5389 both transformations are correct when A is NaN: A != 0
5390 is then true, and A == 0 is false. */
5392 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
5393 && integer_zerop (arg01) && integer_zerop (arg2))
5395 if (comp_code == NE_EXPR)
5396 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
5397 else if (comp_code == EQ_EXPR)
5398 return build_int_cst (type, 0);
5401 /* Try some transformations of A op B ? A : B.
5403 A == B? A : B same as B
5404 A != B? A : B same as A
5405 A >= B? A : B same as max (A, B)
5406 A > B? A : B same as max (B, A)
5407 A <= B? A : B same as min (A, B)
5408 A < B? A : B same as min (B, A)
5410 As above, these transformations don't work in the presence
5411 of signed zeros. For example, if A and B are zeros of
5412 opposite sign, the first two transformations will change
5413 the sign of the result. In the last four, the original
5414 expressions give different results for (A=+0, B=-0) and
5415 (A=-0, B=+0), but the transformed expressions do not.
5417 The first two transformations are correct if either A or B
5418 is a NaN. In the first transformation, the condition will
5419 be false, and B will indeed be chosen. In the case of the
5420 second transformation, the condition A != B will be true,
5421 and A will be chosen.
5423 The conversions to max() and min() are not correct if B is
5424 a number and A is not. The conditions in the original
5425 expressions will be false, so all four give B. The min()
5426 and max() versions would give a NaN instead. */
5427 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
5428 && operand_equal_for_comparison_p (arg01, arg2, arg00)
5429 /* Avoid these transformations if the COND_EXPR may be used
5430 as an lvalue in the C++ front-end. PR c++/19199. */
5432 || (strcmp (lang_hooks.name, "GNU C++") != 0
5433 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
5434 || ! maybe_lvalue_p (arg1)
5435 || ! maybe_lvalue_p (arg2)))
5437 tree comp_op0 = arg00;
5438 tree comp_op1 = arg01;
5439 tree comp_type = TREE_TYPE (comp_op0);
5441 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
5442 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
5452 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg2));
5454 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
5459 /* In C++ a ?: expression can be an lvalue, so put the
5460 operand which will be used if they are equal first
5461 so that we can convert this back to the
5462 corresponding COND_EXPR. */
5463 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
5465 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
5466 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
5467 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
5468 ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
5469 : fold_build2_loc (loc, MIN_EXPR, comp_type,
5470 comp_op1, comp_op0);
5471 return pedantic_non_lvalue_loc (loc,
5472 fold_convert_loc (loc, type, tem));
5479 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
5481 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
5482 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
5483 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
5484 ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
5485 : fold_build2_loc (loc, MAX_EXPR, comp_type,
5486 comp_op1, comp_op0);
5487 return pedantic_non_lvalue_loc (loc,
5488 fold_convert_loc (loc, type, tem));
5492 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
5493 return pedantic_non_lvalue_loc (loc,
5494 fold_convert_loc (loc, type, arg2));
5497 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
5498 return pedantic_non_lvalue_loc (loc,
5499 fold_convert_loc (loc, type, arg1));
5502 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5507 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
5508 we might still be able to simplify this. For example,
5509 if C1 is one less or one more than C2, this might have started
5510 out as a MIN or MAX and been transformed by this function.
5511 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
5513 if (INTEGRAL_TYPE_P (type)
5514 && TREE_CODE (arg01) == INTEGER_CST
5515 && TREE_CODE (arg2) == INTEGER_CST)
5519 if (TREE_CODE (arg1) == INTEGER_CST)
5521 /* We can replace A with C1 in this case. */
5522 arg1 = fold_convert_loc (loc, type, arg01);
5523 return fold_build3_loc (loc, COND_EXPR, type, arg0, arg1, arg2);
5526 /* If C1 is C2 + 1, this is min(A, C2), but use ARG00's type for
5527 MIN_EXPR, to preserve the signedness of the comparison. */
5528 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
5530 && operand_equal_p (arg01,
5531 const_binop (PLUS_EXPR, arg2,
5532 build_int_cst (type, 1), 0),
5535 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
5536 fold_convert_loc (loc, TREE_TYPE (arg00),
5538 return pedantic_non_lvalue_loc (loc,
5539 fold_convert_loc (loc, type, tem));
5544 /* If C1 is C2 - 1, this is min(A, C2), with the same care
5546 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
5548 && operand_equal_p (arg01,
5549 const_binop (MINUS_EXPR, arg2,
5550 build_int_cst (type, 1), 0),
5553 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
5554 fold_convert_loc (loc, TREE_TYPE (arg00),
5556 return pedantic_non_lvalue_loc (loc,
5557 fold_convert_loc (loc, type, tem));
5562 /* If C1 is C2 - 1, this is max(A, C2), but use ARG00's type for
5563 MAX_EXPR, to preserve the signedness of the comparison. */
5564 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
5566 && operand_equal_p (arg01,
5567 const_binop (MINUS_EXPR, arg2,
5568 build_int_cst (type, 1), 0),
5571 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
5572 fold_convert_loc (loc, TREE_TYPE (arg00),
5574 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
5579 /* If C1 is C2 + 1, this is max(A, C2), with the same care as above. */
5580 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
5582 && operand_equal_p (arg01,
5583 const_binop (PLUS_EXPR, arg2,
5584 build_int_cst (type, 1), 0),
5587 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
5588 fold_convert_loc (loc, TREE_TYPE (arg00),
5590 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
5604 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
5605 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
5606 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
5610 /* EXP is some logical combination of boolean tests. See if we can
5611 merge it into some range test. Return the new tree if so. */
5614 fold_range_test (location_t loc, enum tree_code code, tree type,
5617 int or_op = (code == TRUTH_ORIF_EXPR
5618 || code == TRUTH_OR_EXPR);
5619 int in0_p, in1_p, in_p;
5620 tree low0, low1, low, high0, high1, high;
5621 bool strict_overflow_p = false;
5622 tree lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
5623 tree rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
5625 const char * const warnmsg = G_("assuming signed overflow does not occur "
5626 "when simplifying range test");
5628 /* If this is an OR operation, invert both sides; we will invert
5629 again at the end. */
5631 in0_p = ! in0_p, in1_p = ! in1_p;
5633 /* If both expressions are the same, if we can merge the ranges, and we
5634 can build the range test, return it or it inverted. If one of the
5635 ranges is always true or always false, consider it to be the same
5636 expression as the other. */
5637 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
5638 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
5640 && 0 != (tem = (build_range_check (UNKNOWN_LOCATION, type,
5642 : rhs != 0 ? rhs : integer_zero_node,
5645 if (strict_overflow_p)
5646 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
5647 return or_op ? invert_truthvalue_loc (loc, tem) : tem;
5650 /* On machines where the branch cost is expensive, if this is a
5651 short-circuited branch and the underlying object on both sides
5652 is the same, make a non-short-circuit operation. */
5653 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
5654 && lhs != 0 && rhs != 0
5655 && (code == TRUTH_ANDIF_EXPR
5656 || code == TRUTH_ORIF_EXPR)
5657 && operand_equal_p (lhs, rhs, 0))
5659 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
5660 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
5661 which cases we can't do this. */
5662 if (simple_operand_p (lhs))
5664 tem = build2 (code == TRUTH_ANDIF_EXPR
5665 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5667 SET_EXPR_LOCATION (tem, loc);
5671 else if (lang_hooks.decls.global_bindings_p () == 0
5672 && ! CONTAINS_PLACEHOLDER_P (lhs))
5674 tree common = save_expr (lhs);
5676 if (0 != (lhs = build_range_check (loc, type, common,
5677 or_op ? ! in0_p : in0_p,
5679 && (0 != (rhs = build_range_check (loc, type, common,
5680 or_op ? ! in1_p : in1_p,
5683 if (strict_overflow_p)
5684 fold_overflow_warning (warnmsg,
5685 WARN_STRICT_OVERFLOW_COMPARISON);
5686 tem = build2 (code == TRUTH_ANDIF_EXPR
5687 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5689 SET_EXPR_LOCATION (tem, loc);
5698 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
5699 bit value. Arrange things so the extra bits will be set to zero if and
5700 only if C is signed-extended to its full width. If MASK is nonzero,
5701 it is an INTEGER_CST that should be AND'ed with the extra bits. */
5704 unextend (tree c, int p, int unsignedp, tree mask)
5706 tree type = TREE_TYPE (c);
5707 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
5710 if (p == modesize || unsignedp)
5713 /* We work by getting just the sign bit into the low-order bit, then
5714 into the high-order bit, then sign-extend. We then XOR that value
5716 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
5717 temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
5719 /* We must use a signed type in order to get an arithmetic right shift.
5720 However, we must also avoid introducing accidental overflows, so that
5721 a subsequent call to integer_zerop will work. Hence we must
5722 do the type conversion here. At this point, the constant is either
5723 zero or one, and the conversion to a signed type can never overflow.
5724 We could get an overflow if this conversion is done anywhere else. */
5725 if (TYPE_UNSIGNED (type))
5726 temp = fold_convert (signed_type_for (type), temp);
5728 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
5729 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
5731 temp = const_binop (BIT_AND_EXPR, temp,
5732 fold_convert (TREE_TYPE (c), mask),
5734 /* If necessary, convert the type back to match the type of C. */
5735 if (TYPE_UNSIGNED (type))
5736 temp = fold_convert (type, temp);
5738 return fold_convert (type,
5739 const_binop (BIT_XOR_EXPR, c, temp, 0));
5742 /* Find ways of folding logical expressions of LHS and RHS:
5743 Try to merge two comparisons to the same innermost item.
5744 Look for range tests like "ch >= '0' && ch <= '9'".
5745 Look for combinations of simple terms on machines with expensive branches
5746 and evaluate the RHS unconditionally.
5748 For example, if we have p->a == 2 && p->b == 4 and we can make an
5749 object large enough to span both A and B, we can do this with a comparison
5750 against the object ANDed with the a mask.
5752 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5753 operations to do this with one comparison.
5755 We check for both normal comparisons and the BIT_AND_EXPRs made this by
5756 function and the one above.
5758 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5759 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5761 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5764 We return the simplified tree or 0 if no optimization is possible. */
5767 fold_truthop (location_t loc, enum tree_code code, tree truth_type,
5770 /* If this is the "or" of two comparisons, we can do something if
5771 the comparisons are NE_EXPR. If this is the "and", we can do something
5772 if the comparisons are EQ_EXPR. I.e.,
5773 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5775 WANTED_CODE is this operation code. For single bit fields, we can
5776 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5777 comparison for one-bit fields. */
5779 enum tree_code wanted_code;
5780 enum tree_code lcode, rcode;
5781 tree ll_arg, lr_arg, rl_arg, rr_arg;
5782 tree ll_inner, lr_inner, rl_inner, rr_inner;
5783 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5784 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5785 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5786 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5787 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5788 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5789 enum machine_mode lnmode, rnmode;
5790 tree ll_mask, lr_mask, rl_mask, rr_mask;
5791 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5792 tree l_const, r_const;
5793 tree lntype, rntype, result;
5794 HOST_WIDE_INT first_bit, end_bit;
5796 tree orig_lhs = lhs, orig_rhs = rhs;
5797 enum tree_code orig_code = code;
5799 /* Start by getting the comparison codes. Fail if anything is volatile.
5800 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5801 it were surrounded with a NE_EXPR. */
5803 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5806 lcode = TREE_CODE (lhs);
5807 rcode = TREE_CODE (rhs);
5809 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5811 lhs = build2 (NE_EXPR, truth_type, lhs,
5812 build_int_cst (TREE_TYPE (lhs), 0));
5816 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5818 rhs = build2 (NE_EXPR, truth_type, rhs,
5819 build_int_cst (TREE_TYPE (rhs), 0));
5823 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5824 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5827 ll_arg = TREE_OPERAND (lhs, 0);
5828 lr_arg = TREE_OPERAND (lhs, 1);
5829 rl_arg = TREE_OPERAND (rhs, 0);
5830 rr_arg = TREE_OPERAND (rhs, 1);
5832 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5833 if (simple_operand_p (ll_arg)
5834 && simple_operand_p (lr_arg))
5837 if (operand_equal_p (ll_arg, rl_arg, 0)
5838 && operand_equal_p (lr_arg, rr_arg, 0))
5840 result = combine_comparisons (loc, code, lcode, rcode,
5841 truth_type, ll_arg, lr_arg);
5845 else if (operand_equal_p (ll_arg, rr_arg, 0)
5846 && operand_equal_p (lr_arg, rl_arg, 0))
5848 result = combine_comparisons (loc, code, lcode,
5849 swap_tree_comparison (rcode),
5850 truth_type, ll_arg, lr_arg);
5856 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5857 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5859 /* If the RHS can be evaluated unconditionally and its operands are
5860 simple, it wins to evaluate the RHS unconditionally on machines
5861 with expensive branches. In this case, this isn't a comparison
5862 that can be merged. Avoid doing this if the RHS is a floating-point
5863 comparison since those can trap. */
5865 if (BRANCH_COST (optimize_function_for_speed_p (cfun),
5867 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5868 && simple_operand_p (rl_arg)
5869 && simple_operand_p (rr_arg))
5871 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5872 if (code == TRUTH_OR_EXPR
5873 && lcode == NE_EXPR && integer_zerop (lr_arg)
5874 && rcode == NE_EXPR && integer_zerop (rr_arg)
5875 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5876 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5878 result = build2 (NE_EXPR, truth_type,
5879 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5881 build_int_cst (TREE_TYPE (ll_arg), 0));
5882 goto fold_truthop_exit;
5885 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5886 if (code == TRUTH_AND_EXPR
5887 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5888 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5889 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5890 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5892 result = build2 (EQ_EXPR, truth_type,
5893 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5895 build_int_cst (TREE_TYPE (ll_arg), 0));
5896 goto fold_truthop_exit;
5899 if (LOGICAL_OP_NON_SHORT_CIRCUIT)
5901 if (code != orig_code || lhs != orig_lhs || rhs != orig_rhs)
5903 result = build2 (code, truth_type, lhs, rhs);
5904 goto fold_truthop_exit;
5910 /* See if the comparisons can be merged. Then get all the parameters for
5913 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5914 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5918 ll_inner = decode_field_reference (loc, ll_arg,
5919 &ll_bitsize, &ll_bitpos, &ll_mode,
5920 &ll_unsignedp, &volatilep, &ll_mask,
5922 lr_inner = decode_field_reference (loc, lr_arg,
5923 &lr_bitsize, &lr_bitpos, &lr_mode,
5924 &lr_unsignedp, &volatilep, &lr_mask,
5926 rl_inner = decode_field_reference (loc, rl_arg,
5927 &rl_bitsize, &rl_bitpos, &rl_mode,
5928 &rl_unsignedp, &volatilep, &rl_mask,
5930 rr_inner = decode_field_reference (loc, rr_arg,
5931 &rr_bitsize, &rr_bitpos, &rr_mode,
5932 &rr_unsignedp, &volatilep, &rr_mask,
5935 /* It must be true that the inner operation on the lhs of each
5936 comparison must be the same if we are to be able to do anything.
5937 Then see if we have constants. If not, the same must be true for
5939 if (volatilep || ll_inner == 0 || rl_inner == 0
5940 || ! operand_equal_p (ll_inner, rl_inner, 0))
5943 if (TREE_CODE (lr_arg) == INTEGER_CST
5944 && TREE_CODE (rr_arg) == INTEGER_CST)
5945 l_const = lr_arg, r_const = rr_arg;
5946 else if (lr_inner == 0 || rr_inner == 0
5947 || ! operand_equal_p (lr_inner, rr_inner, 0))
5950 l_const = r_const = 0;
5952 /* If either comparison code is not correct for our logical operation,
5953 fail. However, we can convert a one-bit comparison against zero into
5954 the opposite comparison against that bit being set in the field. */
5956 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5957 if (lcode != wanted_code)
5959 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5961 /* Make the left operand unsigned, since we are only interested
5962 in the value of one bit. Otherwise we are doing the wrong
5971 /* This is analogous to the code for l_const above. */
5972 if (rcode != wanted_code)
5974 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5983 /* See if we can find a mode that contains both fields being compared on
5984 the left. If we can't, fail. Otherwise, update all constants and masks
5985 to be relative to a field of that size. */
5986 first_bit = MIN (ll_bitpos, rl_bitpos);
5987 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5988 lnmode = get_best_mode (end_bit - first_bit, first_bit,
5989 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5991 if (lnmode == VOIDmode)
5994 lnbitsize = GET_MODE_BITSIZE (lnmode);
5995 lnbitpos = first_bit & ~ (lnbitsize - 1);
5996 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5997 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5999 if (BYTES_BIG_ENDIAN)
6001 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
6002 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
6005 ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
6006 size_int (xll_bitpos), 0);
6007 rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
6008 size_int (xrl_bitpos), 0);
6012 l_const = fold_convert_loc (loc, lntype, l_const);
6013 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
6014 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
6015 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
6016 fold_build1_loc (loc, BIT_NOT_EXPR,
6020 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
6022 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
6027 r_const = fold_convert_loc (loc, lntype, r_const);
6028 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
6029 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
6030 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
6031 fold_build1_loc (loc, BIT_NOT_EXPR,
6035 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
6037 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
6041 /* If the right sides are not constant, do the same for it. Also,
6042 disallow this optimization if a size or signedness mismatch occurs
6043 between the left and right sides. */
6046 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
6047 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
6048 /* Make sure the two fields on the right
6049 correspond to the left without being swapped. */
6050 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
6053 first_bit = MIN (lr_bitpos, rr_bitpos);
6054 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
6055 rnmode = get_best_mode (end_bit - first_bit, first_bit,
6056 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
6058 if (rnmode == VOIDmode)
6061 rnbitsize = GET_MODE_BITSIZE (rnmode);
6062 rnbitpos = first_bit & ~ (rnbitsize - 1);
6063 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
6064 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
6066 if (BYTES_BIG_ENDIAN)
6068 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
6069 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
6072 lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
6074 size_int (xlr_bitpos), 0);
6075 rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
6077 size_int (xrr_bitpos), 0);
6079 /* Make a mask that corresponds to both fields being compared.
6080 Do this for both items being compared. If the operands are the
6081 same size and the bits being compared are in the same position
6082 then we can do this by masking both and comparing the masked
6084 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
6085 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
6086 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
6088 lhs = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
6089 ll_unsignedp || rl_unsignedp);
6090 if (! all_ones_mask_p (ll_mask, lnbitsize))
6091 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
6093 rhs = make_bit_field_ref (loc, lr_inner, rntype, rnbitsize, rnbitpos,
6094 lr_unsignedp || rr_unsignedp);
6095 if (! all_ones_mask_p (lr_mask, rnbitsize))
6096 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
6098 result = build2 (wanted_code, truth_type, lhs, rhs);
6099 goto fold_truthop_exit;
6102 /* There is still another way we can do something: If both pairs of
6103 fields being compared are adjacent, we may be able to make a wider
6104 field containing them both.
6106 Note that we still must mask the lhs/rhs expressions. Furthermore,
6107 the mask must be shifted to account for the shift done by
6108 make_bit_field_ref. */
6109 if ((ll_bitsize + ll_bitpos == rl_bitpos
6110 && lr_bitsize + lr_bitpos == rr_bitpos)
6111 || (ll_bitpos == rl_bitpos + rl_bitsize
6112 && lr_bitpos == rr_bitpos + rr_bitsize))
6116 lhs = make_bit_field_ref (loc, ll_inner, lntype,
6117 ll_bitsize + rl_bitsize,
6118 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
6119 rhs = make_bit_field_ref (loc, lr_inner, rntype,
6120 lr_bitsize + rr_bitsize,
6121 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
6123 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
6124 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
6125 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
6126 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);
6128 /* Convert to the smaller type before masking out unwanted bits. */
6130 if (lntype != rntype)
6132 if (lnbitsize > rnbitsize)
6134 lhs = fold_convert_loc (loc, rntype, lhs);
6135 ll_mask = fold_convert_loc (loc, rntype, ll_mask);
6138 else if (lnbitsize < rnbitsize)
6140 rhs = fold_convert_loc (loc, lntype, rhs);
6141 lr_mask = fold_convert_loc (loc, lntype, lr_mask);
6146 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
6147 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
6149 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
6150 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
6152 result = build2 (wanted_code, truth_type, lhs, rhs);
6153 goto fold_truthop_exit;
6159 /* Handle the case of comparisons with constants. If there is something in
6160 common between the masks, those bits of the constants must be the same.
6161 If not, the condition is always false. Test for this to avoid generating
6162 incorrect code below. */
6163 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
6164 if (! integer_zerop (result)
6165 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
6166 const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
6168 if (wanted_code == NE_EXPR)
6170 warning (0, "%<or%> of unmatched not-equal tests is always 1");
6171 return constant_boolean_node (true, truth_type);
6175 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
6176 return constant_boolean_node (false, truth_type);
6180 /* Construct the expression we will return. First get the component
6181 reference we will make. Unless the mask is all ones the width of
6182 that field, perform the mask operation. Then compare with the
6184 result = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
6185 ll_unsignedp || rl_unsignedp);
6187 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
6188 if (! all_ones_mask_p (ll_mask, lnbitsize))
6190 result = build2 (BIT_AND_EXPR, lntype, result, ll_mask);
6191 SET_EXPR_LOCATION (result, loc);
6194 result = build2 (wanted_code, truth_type, result,
6195 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
6198 SET_EXPR_LOCATION (result, loc);
6202 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
6206 optimize_minmax_comparison (location_t loc, enum tree_code code, tree type,
6210 enum tree_code op_code;
6213 int consts_equal, consts_lt;
6216 STRIP_SIGN_NOPS (arg0);
6218 op_code = TREE_CODE (arg0);
6219 minmax_const = TREE_OPERAND (arg0, 1);
6220 comp_const = fold_convert_loc (loc, TREE_TYPE (arg0), op1);
6221 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
6222 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
6223 inner = TREE_OPERAND (arg0, 0);
6225 /* If something does not permit us to optimize, return the original tree. */
6226 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
6227 || TREE_CODE (comp_const) != INTEGER_CST
6228 || TREE_OVERFLOW (comp_const)
6229 || TREE_CODE (minmax_const) != INTEGER_CST
6230 || TREE_OVERFLOW (minmax_const))
6233 /* Now handle all the various comparison codes. We only handle EQ_EXPR
6234 and GT_EXPR, doing the rest with recursive calls using logical
6238 case NE_EXPR: case LT_EXPR: case LE_EXPR:
6241 = optimize_minmax_comparison (loc,
6242 invert_tree_comparison (code, false),
6245 return invert_truthvalue_loc (loc, tem);
6251 fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
6252 optimize_minmax_comparison
6253 (loc, EQ_EXPR, type, arg0, comp_const),
6254 optimize_minmax_comparison
6255 (loc, GT_EXPR, type, arg0, comp_const));
6258 if (op_code == MAX_EXPR && consts_equal)
6259 /* MAX (X, 0) == 0 -> X <= 0 */
6260 return fold_build2_loc (loc, LE_EXPR, type, inner, comp_const);
6262 else if (op_code == MAX_EXPR && consts_lt)
6263 /* MAX (X, 0) == 5 -> X == 5 */
6264 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
6266 else if (op_code == MAX_EXPR)
6267 /* MAX (X, 0) == -1 -> false */
6268 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
6270 else if (consts_equal)
6271 /* MIN (X, 0) == 0 -> X >= 0 */
6272 return fold_build2_loc (loc, GE_EXPR, type, inner, comp_const);
6275 /* MIN (X, 0) == 5 -> false */
6276 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
6279 /* MIN (X, 0) == -1 -> X == -1 */
6280 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
6283 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
6284 /* MAX (X, 0) > 0 -> X > 0
6285 MAX (X, 0) > 5 -> X > 5 */
6286 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
6288 else if (op_code == MAX_EXPR)
6289 /* MAX (X, 0) > -1 -> true */
6290 return omit_one_operand_loc (loc, type, integer_one_node, inner);
6292 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
6293 /* MIN (X, 0) > 0 -> false
6294 MIN (X, 0) > 5 -> false */
6295 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
6298 /* MIN (X, 0) > -1 -> X > -1 */
6299 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
6306 /* T is an integer expression that is being multiplied, divided, or taken a
6307 modulus (CODE says which and what kind of divide or modulus) by a
6308 constant C. See if we can eliminate that operation by folding it with
6309 other operations already in T. WIDE_TYPE, if non-null, is a type that
6310 should be used for the computation if wider than our type.
6312 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
6313 (X * 2) + (Y * 4). We must, however, be assured that either the original
6314 expression would not overflow or that overflow is undefined for the type
6315 in the language in question.
6317 If we return a non-null expression, it is an equivalent form of the
6318 original computation, but need not be in the original type.
6320 We set *STRICT_OVERFLOW_P to true if the return values depends on
6321 signed overflow being undefined. Otherwise we do not change
6322 *STRICT_OVERFLOW_P. */
6325 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
6326 bool *strict_overflow_p)
6328 /* To avoid exponential search depth, refuse to allow recursion past
6329 three levels. Beyond that (1) it's highly unlikely that we'll find
6330 something interesting and (2) we've probably processed it before
6331 when we built the inner expression. */
6340 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
6347 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
6348 bool *strict_overflow_p)
6350 tree type = TREE_TYPE (t);
6351 enum tree_code tcode = TREE_CODE (t);
6352 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
6353 > GET_MODE_SIZE (TYPE_MODE (type)))
6354 ? wide_type : type);
6356 int same_p = tcode == code;
6357 tree op0 = NULL_TREE, op1 = NULL_TREE;
6358 bool sub_strict_overflow_p;
6360 /* Don't deal with constants of zero here; they confuse the code below. */
6361 if (integer_zerop (c))
6364 if (TREE_CODE_CLASS (tcode) == tcc_unary)
6365 op0 = TREE_OPERAND (t, 0);
6367 if (TREE_CODE_CLASS (tcode) == tcc_binary)
6368 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
6370 /* Note that we need not handle conditional operations here since fold
6371 already handles those cases. So just do arithmetic here. */
6375 /* For a constant, we can always simplify if we are a multiply
6376 or (for divide and modulus) if it is a multiple of our constant. */
6377 if (code == MULT_EXPR
6378 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
6379 return const_binop (code, fold_convert (ctype, t),
6380 fold_convert (ctype, c), 0);
6383 CASE_CONVERT: case NON_LVALUE_EXPR:
6384 /* If op0 is an expression ... */
6385 if ((COMPARISON_CLASS_P (op0)
6386 || UNARY_CLASS_P (op0)
6387 || BINARY_CLASS_P (op0)
6388 || VL_EXP_CLASS_P (op0)
6389 || EXPRESSION_CLASS_P (op0))
6390 /* ... and has wrapping overflow, and its type is smaller
6391 than ctype, then we cannot pass through as widening. */
6392 && ((TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0))
6393 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
6394 && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
6395 && (TYPE_PRECISION (ctype)
6396 > TYPE_PRECISION (TREE_TYPE (op0))))
6397 /* ... or this is a truncation (t is narrower than op0),
6398 then we cannot pass through this narrowing. */
6399 || (TYPE_PRECISION (type)
6400 < TYPE_PRECISION (TREE_TYPE (op0)))
6401 /* ... or signedness changes for division or modulus,
6402 then we cannot pass through this conversion. */
6403 || (code != MULT_EXPR
6404 && (TYPE_UNSIGNED (ctype)
6405 != TYPE_UNSIGNED (TREE_TYPE (op0))))
6406 /* ... or has undefined overflow while the converted to
6407 type has not, we cannot do the operation in the inner type
6408 as that would introduce undefined overflow. */
6409 || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
6410 && !TYPE_OVERFLOW_UNDEFINED (type))))
6413 /* Pass the constant down and see if we can make a simplification. If
6414 we can, replace this expression with the inner simplification for
6415 possible later conversion to our or some other type. */
6416 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
6417 && TREE_CODE (t2) == INTEGER_CST
6418 && !TREE_OVERFLOW (t2)
6419 && (0 != (t1 = extract_muldiv (op0, t2, code,
6421 ? ctype : NULL_TREE,
6422 strict_overflow_p))))
6427 /* If widening the type changes it from signed to unsigned, then we
6428 must avoid building ABS_EXPR itself as unsigned. */
6429 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
6431 tree cstype = (*signed_type_for) (ctype);
6432 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
6435 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
6436 return fold_convert (ctype, t1);
6440 /* If the constant is negative, we cannot simplify this. */
6441 if (tree_int_cst_sgn (c) == -1)
6445 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
6447 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
6450 case MIN_EXPR: case MAX_EXPR:
6451 /* If widening the type changes the signedness, then we can't perform
6452 this optimization as that changes the result. */
6453 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
6456 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
6457 sub_strict_overflow_p = false;
6458 if ((t1 = extract_muldiv (op0, c, code, wide_type,
6459 &sub_strict_overflow_p)) != 0
6460 && (t2 = extract_muldiv (op1, c, code, wide_type,
6461 &sub_strict_overflow_p)) != 0)
6463 if (tree_int_cst_sgn (c) < 0)
6464 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
6465 if (sub_strict_overflow_p)
6466 *strict_overflow_p = true;
6467 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6468 fold_convert (ctype, t2));
6472 case LSHIFT_EXPR: case RSHIFT_EXPR:
6473 /* If the second operand is constant, this is a multiplication
6474 or floor division, by a power of two, so we can treat it that
6475 way unless the multiplier or divisor overflows. Signed
6476 left-shift overflow is implementation-defined rather than
6477 undefined in C90, so do not convert signed left shift into
6479 if (TREE_CODE (op1) == INTEGER_CST
6480 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
6481 /* const_binop may not detect overflow correctly,
6482 so check for it explicitly here. */
6483 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
6484 && TREE_INT_CST_HIGH (op1) == 0
6485 && 0 != (t1 = fold_convert (ctype,
6486 const_binop (LSHIFT_EXPR,
6489 && !TREE_OVERFLOW (t1))
6490 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
6491 ? MULT_EXPR : FLOOR_DIV_EXPR,
6493 fold_convert (ctype, op0),
6495 c, code, wide_type, strict_overflow_p);
6498 case PLUS_EXPR: case MINUS_EXPR:
6499 /* See if we can eliminate the operation on both sides. If we can, we
6500 can return a new PLUS or MINUS. If we can't, the only remaining
6501 cases where we can do anything are if the second operand is a
6503 sub_strict_overflow_p = false;
6504 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
6505 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
6506 if (t1 != 0 && t2 != 0
6507 && (code == MULT_EXPR
6508 /* If not multiplication, we can only do this if both operands
6509 are divisible by c. */
6510 || (multiple_of_p (ctype, op0, c)
6511 && multiple_of_p (ctype, op1, c))))
6513 if (sub_strict_overflow_p)
6514 *strict_overflow_p = true;
6515 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6516 fold_convert (ctype, t2));
6519 /* If this was a subtraction, negate OP1 and set it to be an addition.
6520 This simplifies the logic below. */
6521 if (tcode == MINUS_EXPR)
6523 tcode = PLUS_EXPR, op1 = negate_expr (op1);
6524 /* If OP1 was not easily negatable, the constant may be OP0. */
6525 if (TREE_CODE (op0) == INTEGER_CST)
6536 if (TREE_CODE (op1) != INTEGER_CST)
6539 /* If either OP1 or C are negative, this optimization is not safe for
6540 some of the division and remainder types while for others we need
6541 to change the code. */
6542 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
6544 if (code == CEIL_DIV_EXPR)
6545 code = FLOOR_DIV_EXPR;
6546 else if (code == FLOOR_DIV_EXPR)
6547 code = CEIL_DIV_EXPR;
6548 else if (code != MULT_EXPR
6549 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
6553 /* If it's a multiply or a division/modulus operation of a multiple
6554 of our constant, do the operation and verify it doesn't overflow. */
6555 if (code == MULT_EXPR
6556 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
6558 op1 = const_binop (code, fold_convert (ctype, op1),
6559 fold_convert (ctype, c), 0);
6560 /* We allow the constant to overflow with wrapping semantics. */
6562 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
6568 /* If we have an unsigned type is not a sizetype, we cannot widen
6569 the operation since it will change the result if the original
6570 computation overflowed. */
6571 if (TYPE_UNSIGNED (ctype)
6572 && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
6576 /* If we were able to eliminate our operation from the first side,
6577 apply our operation to the second side and reform the PLUS. */
6578 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
6579 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
6581 /* The last case is if we are a multiply. In that case, we can
6582 apply the distributive law to commute the multiply and addition
6583 if the multiplication of the constants doesn't overflow. */
6584 if (code == MULT_EXPR)
6585 return fold_build2 (tcode, ctype,
6586 fold_build2 (code, ctype,
6587 fold_convert (ctype, op0),
6588 fold_convert (ctype, c)),
6594 /* We have a special case here if we are doing something like
6595 (C * 8) % 4 since we know that's zero. */
6596 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
6597 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
6598 /* If the multiplication can overflow we cannot optimize this.
6599 ??? Until we can properly mark individual operations as
6600 not overflowing we need to treat sizetype special here as
6601 stor-layout relies on this opimization to make
6602 DECL_FIELD_BIT_OFFSET always a constant. */
6603 && (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
6604 || (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
6605 && TYPE_IS_SIZETYPE (TREE_TYPE (t))))
6606 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
6607 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
6609 *strict_overflow_p = true;
6610 return omit_one_operand (type, integer_zero_node, op0);
6613 /* ... fall through ... */
6615 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
6616 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
6617 /* If we can extract our operation from the LHS, do so and return a
6618 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
6619 do something only if the second operand is a constant. */
6621 && (t1 = extract_muldiv (op0, c, code, wide_type,
6622 strict_overflow_p)) != 0)
6623 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6624 fold_convert (ctype, op1));
6625 else if (tcode == MULT_EXPR && code == MULT_EXPR
6626 && (t1 = extract_muldiv (op1, c, code, wide_type,
6627 strict_overflow_p)) != 0)
6628 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6629 fold_convert (ctype, t1));
6630 else if (TREE_CODE (op1) != INTEGER_CST)
6633 /* If these are the same operation types, we can associate them
6634 assuming no overflow. */
6636 && 0 != (t1 = int_const_binop (MULT_EXPR,
6637 fold_convert (ctype, op1),
6638 fold_convert (ctype, c), 1))
6639 && 0 != (t1 = force_fit_type_double (ctype, TREE_INT_CST_LOW (t1),
6640 TREE_INT_CST_HIGH (t1),
6641 (TYPE_UNSIGNED (ctype)
6642 && tcode != MULT_EXPR) ? -1 : 1,
6643 TREE_OVERFLOW (t1)))
6644 && !TREE_OVERFLOW (t1))
6645 return fold_build2 (tcode, ctype, fold_convert (ctype, op0), t1);
6647 /* If these operations "cancel" each other, we have the main
6648 optimizations of this pass, which occur when either constant is a
6649 multiple of the other, in which case we replace this with either an
6650 operation or CODE or TCODE.
6652 If we have an unsigned type that is not a sizetype, we cannot do
6653 this since it will change the result if the original computation
6655 if ((TYPE_OVERFLOW_UNDEFINED (ctype)
6656 || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
6657 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
6658 || (tcode == MULT_EXPR
6659 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
6660 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
6661 && code != MULT_EXPR)))
6663 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
6665 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6666 *strict_overflow_p = true;
6667 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6668 fold_convert (ctype,
6669 const_binop (TRUNC_DIV_EXPR,
6672 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
6674 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6675 *strict_overflow_p = true;
6676 return fold_build2 (code, ctype, fold_convert (ctype, op0),
6677 fold_convert (ctype,
6678 const_binop (TRUNC_DIV_EXPR,
6691 /* Return a node which has the indicated constant VALUE (either 0 or
6692 1), and is of the indicated TYPE. */
6695 constant_boolean_node (int value, tree type)
6697 if (type == integer_type_node)
6698 return value ? integer_one_node : integer_zero_node;
6699 else if (type == boolean_type_node)
6700 return value ? boolean_true_node : boolean_false_node;
6702 return build_int_cst (type, value);
6706 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
6707 Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
6708 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
6709 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
6710 COND is the first argument to CODE; otherwise (as in the example
6711 given here), it is the second argument. TYPE is the type of the
6712 original expression. Return NULL_TREE if no simplification is
6716 fold_binary_op_with_conditional_arg (location_t loc,
6717 enum tree_code code,
6718 tree type, tree op0, tree op1,
6719 tree cond, tree arg, int cond_first_p)
6721 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
6722 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
6723 tree test, true_value, false_value;
6724 tree lhs = NULL_TREE;
6725 tree rhs = NULL_TREE;
6727 /* This transformation is only worthwhile if we don't have to wrap
6728 arg in a SAVE_EXPR, and the operation can be simplified on at least
6729 one of the branches once its pushed inside the COND_EXPR. */
6730 if (!TREE_CONSTANT (arg))
6733 if (TREE_CODE (cond) == COND_EXPR)
6735 test = TREE_OPERAND (cond, 0);
6736 true_value = TREE_OPERAND (cond, 1);
6737 false_value = TREE_OPERAND (cond, 2);
6738 /* If this operand throws an expression, then it does not make
6739 sense to try to perform a logical or arithmetic operation
6741 if (VOID_TYPE_P (TREE_TYPE (true_value)))
6743 if (VOID_TYPE_P (TREE_TYPE (false_value)))
6748 tree testtype = TREE_TYPE (cond);
6750 true_value = constant_boolean_node (true, testtype);
6751 false_value = constant_boolean_node (false, testtype);
6754 arg = fold_convert_loc (loc, arg_type, arg);
6757 true_value = fold_convert_loc (loc, cond_type, true_value);
6759 lhs = fold_build2_loc (loc, code, type, true_value, arg);
6761 lhs = fold_build2_loc (loc, code, type, arg, true_value);
6765 false_value = fold_convert_loc (loc, cond_type, false_value);
6767 rhs = fold_build2_loc (loc, code, type, false_value, arg);
6769 rhs = fold_build2_loc (loc, code, type, arg, false_value);
6772 test = fold_build3_loc (loc, COND_EXPR, type, test, lhs, rhs);
6773 return fold_convert_loc (loc, type, test);
6777 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6779 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6780 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6781 ADDEND is the same as X.
6783 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6784 and finite. The problematic cases are when X is zero, and its mode
6785 has signed zeros. In the case of rounding towards -infinity,
6786 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6787 modes, X + 0 is not the same as X because -0 + 0 is 0. */
6790 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
6792 if (!real_zerop (addend))
6795 /* Don't allow the fold with -fsignaling-nans. */
6796 if (HONOR_SNANS (TYPE_MODE (type)))
6799 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6800 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
6803 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6804 if (TREE_CODE (addend) == REAL_CST
6805 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6808 /* The mode has signed zeros, and we have to honor their sign.
6809 In this situation, there is only one case we can return true for.
6810 X - 0 is the same as X unless rounding towards -infinity is
6812 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
6815 /* Subroutine of fold() that checks comparisons of built-in math
6816 functions against real constants.
6818 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
6819 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
6820 is the type of the result and ARG0 and ARG1 are the operands of the
6821 comparison. ARG1 must be a TREE_REAL_CST.
6823 The function returns the constant folded tree if a simplification
6824 can be made, and NULL_TREE otherwise. */
6827 fold_mathfn_compare (location_t loc,
6828 enum built_in_function fcode, enum tree_code code,
6829 tree type, tree arg0, tree arg1)
6833 if (BUILTIN_SQRT_P (fcode))
6835 tree arg = CALL_EXPR_ARG (arg0, 0);
6836 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
6838 c = TREE_REAL_CST (arg1);
6839 if (REAL_VALUE_NEGATIVE (c))
6841 /* sqrt(x) < y is always false, if y is negative. */
6842 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
6843 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6845 /* sqrt(x) > y is always true, if y is negative and we
6846 don't care about NaNs, i.e. negative values of x. */
6847 if (code == NE_EXPR || !HONOR_NANS (mode))
6848 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6850 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
6851 return fold_build2_loc (loc, GE_EXPR, type, arg,
6852 build_real (TREE_TYPE (arg), dconst0));
6854 else if (code == GT_EXPR || code == GE_EXPR)
6858 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6859 real_convert (&c2, mode, &c2);
6861 if (REAL_VALUE_ISINF (c2))
6863 /* sqrt(x) > y is x == +Inf, when y is very large. */
6864 if (HONOR_INFINITIES (mode))
6865 return fold_build2_loc (loc, EQ_EXPR, type, arg,
6866 build_real (TREE_TYPE (arg), c2));
6868 /* sqrt(x) > y is always false, when y is very large
6869 and we don't care about infinities. */
6870 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6873 /* sqrt(x) > c is the same as x > c*c. */
6874 return fold_build2_loc (loc, code, type, arg,
6875 build_real (TREE_TYPE (arg), c2));
6877 else if (code == LT_EXPR || code == LE_EXPR)
6881 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6882 real_convert (&c2, mode, &c2);
6884 if (REAL_VALUE_ISINF (c2))
6886 /* sqrt(x) < y is always true, when y is a very large
6887 value and we don't care about NaNs or Infinities. */
6888 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
6889 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6891 /* sqrt(x) < y is x != +Inf when y is very large and we
6892 don't care about NaNs. */
6893 if (! HONOR_NANS (mode))
6894 return fold_build2_loc (loc, NE_EXPR, type, arg,
6895 build_real (TREE_TYPE (arg), c2));
6897 /* sqrt(x) < y is x >= 0 when y is very large and we
6898 don't care about Infinities. */
6899 if (! HONOR_INFINITIES (mode))
6900 return fold_build2_loc (loc, GE_EXPR, type, arg,
6901 build_real (TREE_TYPE (arg), dconst0));
6903 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
6904 if (lang_hooks.decls.global_bindings_p () != 0
6905 || CONTAINS_PLACEHOLDER_P (arg))
6908 arg = save_expr (arg);
6909 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6910 fold_build2_loc (loc, GE_EXPR, type, arg,
6911 build_real (TREE_TYPE (arg),
6913 fold_build2_loc (loc, NE_EXPR, type, arg,
6914 build_real (TREE_TYPE (arg),
6918 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
6919 if (! HONOR_NANS (mode))
6920 return fold_build2_loc (loc, code, type, arg,
6921 build_real (TREE_TYPE (arg), c2));
6923 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
6924 if (lang_hooks.decls.global_bindings_p () == 0
6925 && ! CONTAINS_PLACEHOLDER_P (arg))
6927 arg = save_expr (arg);
6928 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6929 fold_build2_loc (loc, GE_EXPR, type, arg,
6930 build_real (TREE_TYPE (arg),
6932 fold_build2_loc (loc, code, type, arg,
6933 build_real (TREE_TYPE (arg),
6942 /* Subroutine of fold() that optimizes comparisons against Infinities,
6943 either +Inf or -Inf.
6945 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6946 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6947 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6949 The function returns the constant folded tree if a simplification
6950 can be made, and NULL_TREE otherwise. */
6953 fold_inf_compare (location_t loc, enum tree_code code, tree type,
6954 tree arg0, tree arg1)
6956 enum machine_mode mode;
6957 REAL_VALUE_TYPE max;
6961 mode = TYPE_MODE (TREE_TYPE (arg0));
6963 /* For negative infinity swap the sense of the comparison. */
6964 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
6966 code = swap_tree_comparison (code);
6971 /* x > +Inf is always false, if with ignore sNANs. */
6972 if (HONOR_SNANS (mode))
6974 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6977 /* x <= +Inf is always true, if we don't case about NaNs. */
6978 if (! HONOR_NANS (mode))
6979 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6981 /* x <= +Inf is the same as x == x, i.e. isfinite(x). */
6982 if (lang_hooks.decls.global_bindings_p () == 0
6983 && ! CONTAINS_PLACEHOLDER_P (arg0))
6985 arg0 = save_expr (arg0);
6986 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg0);
6992 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6993 real_maxval (&max, neg, mode);
6994 return fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6995 arg0, build_real (TREE_TYPE (arg0), max));
6998 /* x < +Inf is always equal to x <= DBL_MAX. */
6999 real_maxval (&max, neg, mode);
7000 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
7001 arg0, build_real (TREE_TYPE (arg0), max));
7004 /* x != +Inf is always equal to !(x > DBL_MAX). */
7005 real_maxval (&max, neg, mode);
7006 if (! HONOR_NANS (mode))
7007 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
7008 arg0, build_real (TREE_TYPE (arg0), max));
7010 temp = fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
7011 arg0, build_real (TREE_TYPE (arg0), max));
7012 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type, temp);
7021 /* Subroutine of fold() that optimizes comparisons of a division by
7022 a nonzero integer constant against an integer constant, i.e.
7025 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
7026 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
7027 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
7029 The function returns the constant folded tree if a simplification
7030 can be made, and NULL_TREE otherwise. */
7033 fold_div_compare (location_t loc,
7034 enum tree_code code, tree type, tree arg0, tree arg1)
7036 tree prod, tmp, hi, lo;
7037 tree arg00 = TREE_OPERAND (arg0, 0);
7038 tree arg01 = TREE_OPERAND (arg0, 1);
7039 unsigned HOST_WIDE_INT lpart;
7040 HOST_WIDE_INT hpart;
7041 bool unsigned_p = TYPE_UNSIGNED (TREE_TYPE (arg0));
7045 /* We have to do this the hard way to detect unsigned overflow.
7046 prod = int_const_binop (MULT_EXPR, arg01, arg1, 0); */
7047 overflow = mul_double_with_sign (TREE_INT_CST_LOW (arg01),
7048 TREE_INT_CST_HIGH (arg01),
7049 TREE_INT_CST_LOW (arg1),
7050 TREE_INT_CST_HIGH (arg1),
7051 &lpart, &hpart, unsigned_p);
7052 prod = force_fit_type_double (TREE_TYPE (arg00), lpart, hpart,
7054 neg_overflow = false;
7058 tmp = int_const_binop (MINUS_EXPR, arg01,
7059 build_int_cst (TREE_TYPE (arg01), 1), 0);
7062 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0). */
7063 overflow = add_double_with_sign (TREE_INT_CST_LOW (prod),
7064 TREE_INT_CST_HIGH (prod),
7065 TREE_INT_CST_LOW (tmp),
7066 TREE_INT_CST_HIGH (tmp),
7067 &lpart, &hpart, unsigned_p);
7068 hi = force_fit_type_double (TREE_TYPE (arg00), lpart, hpart,
7069 -1, overflow | TREE_OVERFLOW (prod));
7071 else if (tree_int_cst_sgn (arg01) >= 0)
7073 tmp = int_const_binop (MINUS_EXPR, arg01,
7074 build_int_cst (TREE_TYPE (arg01), 1), 0);
7075 switch (tree_int_cst_sgn (arg1))
7078 neg_overflow = true;
7079 lo = int_const_binop (MINUS_EXPR, prod, tmp, 0);
7084 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
7089 hi = int_const_binop (PLUS_EXPR, prod, tmp, 0);
7099 /* A negative divisor reverses the relational operators. */
7100 code = swap_tree_comparison (code);
7102 tmp = int_const_binop (PLUS_EXPR, arg01,
7103 build_int_cst (TREE_TYPE (arg01), 1), 0);
7104 switch (tree_int_cst_sgn (arg1))
7107 hi = int_const_binop (MINUS_EXPR, prod, tmp, 0);
7112 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
7117 neg_overflow = true;
7118 lo = int_const_binop (PLUS_EXPR, prod, tmp, 0);
7130 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
7131 return omit_one_operand_loc (loc, type, integer_zero_node, arg00);
7132 if (TREE_OVERFLOW (hi))
7133 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
7134 if (TREE_OVERFLOW (lo))
7135 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
7136 return build_range_check (loc, type, arg00, 1, lo, hi);
7139 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
7140 return omit_one_operand_loc (loc, type, integer_one_node, arg00);
7141 if (TREE_OVERFLOW (hi))
7142 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
7143 if (TREE_OVERFLOW (lo))
7144 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
7145 return build_range_check (loc, type, arg00, 0, lo, hi);
7148 if (TREE_OVERFLOW (lo))
7150 tmp = neg_overflow ? integer_zero_node : integer_one_node;
7151 return omit_one_operand_loc (loc, type, tmp, arg00);
7153 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
7156 if (TREE_OVERFLOW (hi))
7158 tmp = neg_overflow ? integer_zero_node : integer_one_node;
7159 return omit_one_operand_loc (loc, type, tmp, arg00);
7161 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
7164 if (TREE_OVERFLOW (hi))
7166 tmp = neg_overflow ? integer_one_node : integer_zero_node;
7167 return omit_one_operand_loc (loc, type, tmp, arg00);
7169 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
7172 if (TREE_OVERFLOW (lo))
7174 tmp = neg_overflow ? integer_one_node : integer_zero_node;
7175 return omit_one_operand_loc (loc, type, tmp, arg00);
7177 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
7187 /* If CODE with arguments ARG0 and ARG1 represents a single bit
7188 equality/inequality test, then return a simplified form of the test
7189 using a sign testing. Otherwise return NULL. TYPE is the desired
7193 fold_single_bit_test_into_sign_test (location_t loc,
7194 enum tree_code code, tree arg0, tree arg1,
7197 /* If this is testing a single bit, we can optimize the test. */
7198 if ((code == NE_EXPR || code == EQ_EXPR)
7199 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
7200 && integer_pow2p (TREE_OPERAND (arg0, 1)))
7202 /* If we have (A & C) != 0 where C is the sign bit of A, convert
7203 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
7204 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
7206 if (arg00 != NULL_TREE
7207 /* This is only a win if casting to a signed type is cheap,
7208 i.e. when arg00's type is not a partial mode. */
7209 && TYPE_PRECISION (TREE_TYPE (arg00))
7210 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
7212 tree stype = signed_type_for (TREE_TYPE (arg00));
7213 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
7215 fold_convert_loc (loc, stype, arg00),
7216 build_int_cst (stype, 0));
7223 /* If CODE with arguments ARG0 and ARG1 represents a single bit
7224 equality/inequality test, then return a simplified form of
7225 the test using shifts and logical operations. Otherwise return
7226 NULL. TYPE is the desired result type. */
7229 fold_single_bit_test (location_t loc, enum tree_code code,
7230 tree arg0, tree arg1, tree result_type)
7232 /* If this is testing a single bit, we can optimize the test. */
7233 if ((code == NE_EXPR || code == EQ_EXPR)
7234 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
7235 && integer_pow2p (TREE_OPERAND (arg0, 1)))
7237 tree inner = TREE_OPERAND (arg0, 0);
7238 tree type = TREE_TYPE (arg0);
7239 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
7240 enum machine_mode operand_mode = TYPE_MODE (type);
7242 tree signed_type, unsigned_type, intermediate_type;
7245 /* First, see if we can fold the single bit test into a sign-bit
7247 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
7252 /* Otherwise we have (A & C) != 0 where C is a single bit,
7253 convert that into ((A >> C2) & 1). Where C2 = log2(C).
7254 Similarly for (A & C) == 0. */
7256 /* If INNER is a right shift of a constant and it plus BITNUM does
7257 not overflow, adjust BITNUM and INNER. */
7258 if (TREE_CODE (inner) == RSHIFT_EXPR
7259 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
7260 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
7261 && bitnum < TYPE_PRECISION (type)
7262 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
7263 bitnum - TYPE_PRECISION (type)))
7265 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
7266 inner = TREE_OPERAND (inner, 0);
7269 /* If we are going to be able to omit the AND below, we must do our
7270 operations as unsigned. If we must use the AND, we have a choice.
7271 Normally unsigned is faster, but for some machines signed is. */
7272 #ifdef LOAD_EXTEND_OP
7273 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
7274 && !flag_syntax_only) ? 0 : 1;
7279 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
7280 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
7281 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
7282 inner = fold_convert_loc (loc, intermediate_type, inner);
7285 inner = build2 (RSHIFT_EXPR, intermediate_type,
7286 inner, size_int (bitnum));
7288 one = build_int_cst (intermediate_type, 1);
7290 if (code == EQ_EXPR)
7291 inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);
7293 /* Put the AND last so it can combine with more things. */
7294 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
7296 /* Make sure to return the proper type. */
7297 inner = fold_convert_loc (loc, result_type, inner);
7304 /* Check whether we are allowed to reorder operands arg0 and arg1,
7305 such that the evaluation of arg1 occurs before arg0. */
7308 reorder_operands_p (const_tree arg0, const_tree arg1)
7310 if (! flag_evaluation_order)
7312 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
7314 return ! TREE_SIDE_EFFECTS (arg0)
7315 && ! TREE_SIDE_EFFECTS (arg1);
7318 /* Test whether it is preferable two swap two operands, ARG0 and
7319 ARG1, for example because ARG0 is an integer constant and ARG1
7320 isn't. If REORDER is true, only recommend swapping if we can
7321 evaluate the operands in reverse order. */
7324 tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
7326 STRIP_SIGN_NOPS (arg0);
7327 STRIP_SIGN_NOPS (arg1);
7329 if (TREE_CODE (arg1) == INTEGER_CST)
7331 if (TREE_CODE (arg0) == INTEGER_CST)
7334 if (TREE_CODE (arg1) == REAL_CST)
7336 if (TREE_CODE (arg0) == REAL_CST)
7339 if (TREE_CODE (arg1) == FIXED_CST)
7341 if (TREE_CODE (arg0) == FIXED_CST)
7344 if (TREE_CODE (arg1) == COMPLEX_CST)
7346 if (TREE_CODE (arg0) == COMPLEX_CST)
7349 if (TREE_CONSTANT (arg1))
7351 if (TREE_CONSTANT (arg0))
7354 if (optimize_function_for_size_p (cfun))
7357 if (reorder && flag_evaluation_order
7358 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
7361 /* It is preferable to swap two SSA_NAME to ensure a canonical form
7362 for commutative and comparison operators. Ensuring a canonical
7363 form allows the optimizers to find additional redundancies without
7364 having to explicitly check for both orderings. */
7365 if (TREE_CODE (arg0) == SSA_NAME
7366 && TREE_CODE (arg1) == SSA_NAME
7367 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
7370 /* Put SSA_NAMEs last. */
7371 if (TREE_CODE (arg1) == SSA_NAME)
7373 if (TREE_CODE (arg0) == SSA_NAME)
7376 /* Put variables last. */
7385 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
7386 ARG0 is extended to a wider type. */
7389 fold_widened_comparison (location_t loc, enum tree_code code,
7390 tree type, tree arg0, tree arg1)
7392 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
7394 tree shorter_type, outer_type;
7398 if (arg0_unw == arg0)
7400 shorter_type = TREE_TYPE (arg0_unw);
7402 #ifdef HAVE_canonicalize_funcptr_for_compare
7403 /* Disable this optimization if we're casting a function pointer
7404 type on targets that require function pointer canonicalization. */
7405 if (HAVE_canonicalize_funcptr_for_compare
7406 && TREE_CODE (shorter_type) == POINTER_TYPE
7407 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
7411 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
7414 arg1_unw = get_unwidened (arg1, NULL_TREE);
7416 /* If possible, express the comparison in the shorter mode. */
7417 if ((code == EQ_EXPR || code == NE_EXPR
7418 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
7419 && (TREE_TYPE (arg1_unw) == shorter_type
7420 || ((TYPE_PRECISION (shorter_type)
7421 >= TYPE_PRECISION (TREE_TYPE (arg1_unw)))
7422 && (TYPE_UNSIGNED (shorter_type)
7423 == TYPE_UNSIGNED (TREE_TYPE (arg1_unw))))
7424 || (TREE_CODE (arg1_unw) == INTEGER_CST
7425 && (TREE_CODE (shorter_type) == INTEGER_TYPE
7426 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
7427 && int_fits_type_p (arg1_unw, shorter_type))))
7428 return fold_build2_loc (loc, code, type, arg0_unw,
7429 fold_convert_loc (loc, shorter_type, arg1_unw));
7431 if (TREE_CODE (arg1_unw) != INTEGER_CST
7432 || TREE_CODE (shorter_type) != INTEGER_TYPE
7433 || !int_fits_type_p (arg1_unw, shorter_type))
7436 /* If we are comparing with the integer that does not fit into the range
7437 of the shorter type, the result is known. */
7438 outer_type = TREE_TYPE (arg1_unw);
7439 min = lower_bound_in_type (outer_type, shorter_type);
7440 max = upper_bound_in_type (outer_type, shorter_type);
7442 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
7444 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
7451 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
7456 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
7462 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
7464 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
7469 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
7471 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
7480 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
7481 ARG0 just the signedness is changed. */
7484 fold_sign_changed_comparison (location_t loc, enum tree_code code, tree type,
7485 tree arg0, tree arg1)
7488 tree inner_type, outer_type;
7490 if (!CONVERT_EXPR_P (arg0))
7493 outer_type = TREE_TYPE (arg0);
7494 arg0_inner = TREE_OPERAND (arg0, 0);
7495 inner_type = TREE_TYPE (arg0_inner);
7497 #ifdef HAVE_canonicalize_funcptr_for_compare
7498 /* Disable this optimization if we're casting a function pointer
7499 type on targets that require function pointer canonicalization. */
7500 if (HAVE_canonicalize_funcptr_for_compare
7501 && TREE_CODE (inner_type) == POINTER_TYPE
7502 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
7506 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
7509 if (TREE_CODE (arg1) != INTEGER_CST
7510 && !(CONVERT_EXPR_P (arg1)
7511 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
7514 if ((TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
7515 || POINTER_TYPE_P (inner_type) != POINTER_TYPE_P (outer_type))
7520 if (TREE_CODE (arg1) == INTEGER_CST)
7521 arg1 = force_fit_type_double (inner_type, TREE_INT_CST_LOW (arg1),
7522 TREE_INT_CST_HIGH (arg1), 0,
7523 TREE_OVERFLOW (arg1));
7525 arg1 = fold_convert_loc (loc, inner_type, arg1);
7527 return fold_build2_loc (loc, code, type, arg0_inner, arg1);
7530 /* Tries to replace &a[idx] p+ s * delta with &a[idx + delta], if s is
7531 step of the array. Reconstructs s and delta in the case of s *
7532 delta being an integer constant (and thus already folded). ADDR is
7533 the address. MULT is the multiplicative expression. If the
7534 function succeeds, the new address expression is returned.
7535 Otherwise NULL_TREE is returned. LOC is the location of the
7536 resulting expression. */
7539 try_move_mult_to_index (location_t loc, tree addr, tree op1)
7541 tree s, delta, step;
7542 tree ref = TREE_OPERAND (addr, 0), pref;
7547 /* Strip the nops that might be added when converting op1 to sizetype. */
7550 /* Canonicalize op1 into a possibly non-constant delta
7551 and an INTEGER_CST s. */
7552 if (TREE_CODE (op1) == MULT_EXPR)
7554 tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);
7559 if (TREE_CODE (arg0) == INTEGER_CST)
7564 else if (TREE_CODE (arg1) == INTEGER_CST)
7572 else if (TREE_CODE (op1) == INTEGER_CST)
7579 /* Simulate we are delta * 1. */
7581 s = integer_one_node;
7584 for (;; ref = TREE_OPERAND (ref, 0))
7586 if (TREE_CODE (ref) == ARRAY_REF)
7590 /* Remember if this was a multi-dimensional array. */
7591 if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
7594 domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
7597 itype = TREE_TYPE (domain);
7599 step = array_ref_element_size (ref);
7600 if (TREE_CODE (step) != INTEGER_CST)
7605 if (! tree_int_cst_equal (step, s))
7610 /* Try if delta is a multiple of step. */
7611 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, op1, step);
7617 /* Only fold here if we can verify we do not overflow one
7618 dimension of a multi-dimensional array. */
7623 if (TREE_CODE (TREE_OPERAND (ref, 1)) != INTEGER_CST
7624 || !TYPE_MAX_VALUE (domain)
7625 || TREE_CODE (TYPE_MAX_VALUE (domain)) != INTEGER_CST)
7628 tmp = fold_binary_loc (loc, PLUS_EXPR, itype,
7629 fold_convert_loc (loc, itype,
7630 TREE_OPERAND (ref, 1)),
7631 fold_convert_loc (loc, itype, delta));
7633 || TREE_CODE (tmp) != INTEGER_CST
7634 || tree_int_cst_lt (TYPE_MAX_VALUE (domain), tmp))
7643 if (!handled_component_p (ref))
7647 /* We found the suitable array reference. So copy everything up to it,
7648 and replace the index. */
7650 pref = TREE_OPERAND (addr, 0);
7651 ret = copy_node (pref);
7652 SET_EXPR_LOCATION (ret, loc);
7657 pref = TREE_OPERAND (pref, 0);
7658 TREE_OPERAND (pos, 0) = copy_node (pref);
7659 pos = TREE_OPERAND (pos, 0);
7662 TREE_OPERAND (pos, 1) = fold_build2_loc (loc, PLUS_EXPR, itype,
7663 fold_convert_loc (loc, itype,
7664 TREE_OPERAND (pos, 1)),
7665 fold_convert_loc (loc, itype, delta));
7667 return fold_build1_loc (loc, ADDR_EXPR, TREE_TYPE (addr), ret);
7671 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
7672 means A >= Y && A != MAX, but in this case we know that
7673 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
7676 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
7678 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
7680 if (TREE_CODE (bound) == LT_EXPR)
7681 a = TREE_OPERAND (bound, 0);
7682 else if (TREE_CODE (bound) == GT_EXPR)
7683 a = TREE_OPERAND (bound, 1);
7687 typea = TREE_TYPE (a);
7688 if (!INTEGRAL_TYPE_P (typea)
7689 && !POINTER_TYPE_P (typea))
7692 if (TREE_CODE (ineq) == LT_EXPR)
7694 a1 = TREE_OPERAND (ineq, 1);
7695 y = TREE_OPERAND (ineq, 0);
7697 else if (TREE_CODE (ineq) == GT_EXPR)
7699 a1 = TREE_OPERAND (ineq, 0);
7700 y = TREE_OPERAND (ineq, 1);
7705 if (TREE_TYPE (a1) != typea)
7708 if (POINTER_TYPE_P (typea))
7710 /* Convert the pointer types into integer before taking the difference. */
7711 tree ta = fold_convert_loc (loc, ssizetype, a);
7712 tree ta1 = fold_convert_loc (loc, ssizetype, a1);
7713 diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
7716 diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
7718 if (!diff || !integer_onep (diff))
7721 return fold_build2_loc (loc, GE_EXPR, type, a, y);
7724 /* Fold a sum or difference of at least one multiplication.
7725 Returns the folded tree or NULL if no simplification could be made. */
7728 fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
7729 tree arg0, tree arg1)
7731 tree arg00, arg01, arg10, arg11;
7732 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
7734 /* (A * C) +- (B * C) -> (A+-B) * C.
7735 (A * C) +- A -> A * (C+-1).
7736 We are most concerned about the case where C is a constant,
7737 but other combinations show up during loop reduction. Since
7738 it is not difficult, try all four possibilities. */
7740 if (TREE_CODE (arg0) == MULT_EXPR)
7742 arg00 = TREE_OPERAND (arg0, 0);
7743 arg01 = TREE_OPERAND (arg0, 1);
7745 else if (TREE_CODE (arg0) == INTEGER_CST)
7747 arg00 = build_one_cst (type);
7752 /* We cannot generate constant 1 for fract. */
7753 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7756 arg01 = build_one_cst (type);
7758 if (TREE_CODE (arg1) == MULT_EXPR)
7760 arg10 = TREE_OPERAND (arg1, 0);
7761 arg11 = TREE_OPERAND (arg1, 1);
7763 else if (TREE_CODE (arg1) == INTEGER_CST)
7765 arg10 = build_one_cst (type);
7766 /* As we canonicalize A - 2 to A + -2 get rid of that sign for
7767 the purpose of this canonicalization. */
7768 if (TREE_INT_CST_HIGH (arg1) == -1
7769 && negate_expr_p (arg1)
7770 && code == PLUS_EXPR)
7772 arg11 = negate_expr (arg1);
7780 /* We cannot generate constant 1 for fract. */
7781 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7784 arg11 = build_one_cst (type);
7788 if (operand_equal_p (arg01, arg11, 0))
7789 same = arg01, alt0 = arg00, alt1 = arg10;
7790 else if (operand_equal_p (arg00, arg10, 0))
7791 same = arg00, alt0 = arg01, alt1 = arg11;
7792 else if (operand_equal_p (arg00, arg11, 0))
7793 same = arg00, alt0 = arg01, alt1 = arg10;
7794 else if (operand_equal_p (arg01, arg10, 0))
7795 same = arg01, alt0 = arg00, alt1 = arg11;
7797 /* No identical multiplicands; see if we can find a common
7798 power-of-two factor in non-power-of-two multiplies. This
7799 can help in multi-dimensional array access. */
7800 else if (host_integerp (arg01, 0)
7801 && host_integerp (arg11, 0))
7803 HOST_WIDE_INT int01, int11, tmp;
7806 int01 = TREE_INT_CST_LOW (arg01);
7807 int11 = TREE_INT_CST_LOW (arg11);
7809 /* Move min of absolute values to int11. */
7810 if ((int01 >= 0 ? int01 : -int01)
7811 < (int11 >= 0 ? int11 : -int11))
7813 tmp = int01, int01 = int11, int11 = tmp;
7814 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7821 if (exact_log2 (abs (int11)) > 0 && int01 % int11 == 0
7822 /* The remainder should not be a constant, otherwise we
7823 end up folding i * 4 + 2 to (i * 2 + 1) * 2 which has
7824 increased the number of multiplications necessary. */
7825 && TREE_CODE (arg10) != INTEGER_CST)
7827 alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
7828 build_int_cst (TREE_TYPE (arg00),
7833 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7838 return fold_build2_loc (loc, MULT_EXPR, type,
7839 fold_build2_loc (loc, code, type,
7840 fold_convert_loc (loc, type, alt0),
7841 fold_convert_loc (loc, type, alt1)),
7842 fold_convert_loc (loc, type, same));
7847 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7848 specified by EXPR into the buffer PTR of length LEN bytes.
7849 Return the number of bytes placed in the buffer, or zero
7853 native_encode_int (const_tree expr, unsigned char *ptr, int len)
7855 tree type = TREE_TYPE (expr);
7856 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7857 int byte, offset, word, words;
7858 unsigned char value;
7860 if (total_bytes > len)
7862 words = total_bytes / UNITS_PER_WORD;
7864 for (byte = 0; byte < total_bytes; byte++)
7866 int bitpos = byte * BITS_PER_UNIT;
7867 if (bitpos < HOST_BITS_PER_WIDE_INT)
7868 value = (unsigned char) (TREE_INT_CST_LOW (expr) >> bitpos);
7870 value = (unsigned char) (TREE_INT_CST_HIGH (expr)
7871 >> (bitpos - HOST_BITS_PER_WIDE_INT));
7873 if (total_bytes > UNITS_PER_WORD)
7875 word = byte / UNITS_PER_WORD;
7876 if (WORDS_BIG_ENDIAN)
7877 word = (words - 1) - word;
7878 offset = word * UNITS_PER_WORD;
7879 if (BYTES_BIG_ENDIAN)
7880 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7882 offset += byte % UNITS_PER_WORD;
7885 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7886 ptr[offset] = value;
7892 /* Subroutine of native_encode_expr. Encode the REAL_CST
7893 specified by EXPR into the buffer PTR of length LEN bytes.
7894 Return the number of bytes placed in the buffer, or zero
7898 native_encode_real (const_tree expr, unsigned char *ptr, int len)
7900 tree type = TREE_TYPE (expr);
7901 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7902 int byte, offset, word, words, bitpos;
7903 unsigned char value;
7905 /* There are always 32 bits in each long, no matter the size of
7906 the hosts long. We handle floating point representations with
7910 if (total_bytes > len)
7912 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7914 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7916 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7917 bitpos += BITS_PER_UNIT)
7919 byte = (bitpos / BITS_PER_UNIT) & 3;
7920 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7922 if (UNITS_PER_WORD < 4)
7924 word = byte / UNITS_PER_WORD;
7925 if (WORDS_BIG_ENDIAN)
7926 word = (words - 1) - word;
7927 offset = word * UNITS_PER_WORD;
7928 if (BYTES_BIG_ENDIAN)
7929 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7931 offset += byte % UNITS_PER_WORD;
7934 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7935 ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)] = value;
7940 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7941 specified by EXPR into the buffer PTR of length LEN bytes.
7942 Return the number of bytes placed in the buffer, or zero
7946 native_encode_complex (const_tree expr, unsigned char *ptr, int len)
7951 part = TREE_REALPART (expr);
7952 rsize = native_encode_expr (part, ptr, len);
7955 part = TREE_IMAGPART (expr);
7956 isize = native_encode_expr (part, ptr+rsize, len-rsize);
7959 return rsize + isize;
7963 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7964 specified by EXPR into the buffer PTR of length LEN bytes.
7965 Return the number of bytes placed in the buffer, or zero
7969 native_encode_vector (const_tree expr, unsigned char *ptr, int len)
7971 int i, size, offset, count;
7972 tree itype, elem, elements;
7975 elements = TREE_VECTOR_CST_ELTS (expr);
7976 count = TYPE_VECTOR_SUBPARTS (TREE_TYPE (expr));
7977 itype = TREE_TYPE (TREE_TYPE (expr));
7978 size = GET_MODE_SIZE (TYPE_MODE (itype));
7979 for (i = 0; i < count; i++)
7983 elem = TREE_VALUE (elements);
7984 elements = TREE_CHAIN (elements);
7991 if (native_encode_expr (elem, ptr+offset, len-offset) != size)
7996 if (offset + size > len)
7998 memset (ptr+offset, 0, size);
8006 /* Subroutine of native_encode_expr. Encode the STRING_CST
8007 specified by EXPR into the buffer PTR of length LEN bytes.
8008 Return the number of bytes placed in the buffer, or zero
8012 native_encode_string (const_tree expr, unsigned char *ptr, int len)
8014 tree type = TREE_TYPE (expr);
8015 HOST_WIDE_INT total_bytes;
8017 if (TREE_CODE (type) != ARRAY_TYPE
8018 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
8019 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT
8020 || !host_integerp (TYPE_SIZE_UNIT (type), 0))
8022 total_bytes = tree_low_cst (TYPE_SIZE_UNIT (type), 0);
8023 if (total_bytes > len)
8025 if (TREE_STRING_LENGTH (expr) < total_bytes)
8027 memcpy (ptr, TREE_STRING_POINTER (expr), TREE_STRING_LENGTH (expr));
8028 memset (ptr + TREE_STRING_LENGTH (expr), 0,
8029 total_bytes - TREE_STRING_LENGTH (expr));
8032 memcpy (ptr, TREE_STRING_POINTER (expr), total_bytes);
8037 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
8038 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
8039 buffer PTR of length LEN bytes. Return the number of bytes
8040 placed in the buffer, or zero upon failure. */
8043 native_encode_expr (const_tree expr, unsigned char *ptr, int len)
8045 switch (TREE_CODE (expr))
8048 return native_encode_int (expr, ptr, len);
8051 return native_encode_real (expr, ptr, len);
8054 return native_encode_complex (expr, ptr, len);
8057 return native_encode_vector (expr, ptr, len);
8060 return native_encode_string (expr, ptr, len);
8068 /* Subroutine of native_interpret_expr. Interpret the contents of
8069 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
8070 If the buffer cannot be interpreted, return NULL_TREE. */
8073 native_interpret_int (tree type, const unsigned char *ptr, int len)
8075 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
8076 int byte, offset, word, words;
8077 unsigned char value;
8078 unsigned int HOST_WIDE_INT lo = 0;
8079 HOST_WIDE_INT hi = 0;
8081 if (total_bytes > len)
8083 if (total_bytes * BITS_PER_UNIT > 2 * HOST_BITS_PER_WIDE_INT)
8085 words = total_bytes / UNITS_PER_WORD;
8087 for (byte = 0; byte < total_bytes; byte++)
8089 int bitpos = byte * BITS_PER_UNIT;
8090 if (total_bytes > UNITS_PER_WORD)
8092 word = byte / UNITS_PER_WORD;
8093 if (WORDS_BIG_ENDIAN)
8094 word = (words - 1) - word;
8095 offset = word * UNITS_PER_WORD;
8096 if (BYTES_BIG_ENDIAN)
8097 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
8099 offset += byte % UNITS_PER_WORD;
8102 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
8103 value = ptr[offset];
8105 if (bitpos < HOST_BITS_PER_WIDE_INT)
8106 lo |= (unsigned HOST_WIDE_INT) value << bitpos;
8108 hi |= (unsigned HOST_WIDE_INT) value
8109 << (bitpos - HOST_BITS_PER_WIDE_INT);
8112 return build_int_cst_wide_type (type, lo, hi);
8116 /* Subroutine of native_interpret_expr. Interpret the contents of
8117 the buffer PTR of length LEN as a REAL_CST of type TYPE.
8118 If the buffer cannot be interpreted, return NULL_TREE. */
8121 native_interpret_real (tree type, const unsigned char *ptr, int len)
8123 enum machine_mode mode = TYPE_MODE (type);
8124 int total_bytes = GET_MODE_SIZE (mode);
8125 int byte, offset, word, words, bitpos;
8126 unsigned char value;
8127 /* There are always 32 bits in each long, no matter the size of
8128 the hosts long. We handle floating point representations with
8133 total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
8134 if (total_bytes > len || total_bytes > 24)
8136 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
8138 memset (tmp, 0, sizeof (tmp));
8139 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
8140 bitpos += BITS_PER_UNIT)
8142 byte = (bitpos / BITS_PER_UNIT) & 3;
8143 if (UNITS_PER_WORD < 4)
8145 word = byte / UNITS_PER_WORD;
8146 if (WORDS_BIG_ENDIAN)
8147 word = (words - 1) - word;
8148 offset = word * UNITS_PER_WORD;
8149 if (BYTES_BIG_ENDIAN)
8150 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
8152 offset += byte % UNITS_PER_WORD;
8155 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
8156 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
8158 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
8161 real_from_target (&r, tmp, mode);
8162 return build_real (type, r);
8166 /* Subroutine of native_interpret_expr. Interpret the contents of
8167 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
8168 If the buffer cannot be interpreted, return NULL_TREE. */
8171 native_interpret_complex (tree type, const unsigned char *ptr, int len)
8173 tree etype, rpart, ipart;
8176 etype = TREE_TYPE (type);
8177 size = GET_MODE_SIZE (TYPE_MODE (etype));
8180 rpart = native_interpret_expr (etype, ptr, size);
8183 ipart = native_interpret_expr (etype, ptr+size, size);
8186 return build_complex (type, rpart, ipart);
8190 /* Subroutine of native_interpret_expr. Interpret the contents of
8191 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
8192 If the buffer cannot be interpreted, return NULL_TREE. */
8195 native_interpret_vector (tree type, const unsigned char *ptr, int len)
8197 tree etype, elem, elements;
8200 etype = TREE_TYPE (type);
8201 size = GET_MODE_SIZE (TYPE_MODE (etype));
8202 count = TYPE_VECTOR_SUBPARTS (type);
8203 if (size * count > len)
8206 elements = NULL_TREE;
8207 for (i = count - 1; i >= 0; i--)
8209 elem = native_interpret_expr (etype, ptr+(i*size), size);
8212 elements = tree_cons (NULL_TREE, elem, elements);
8214 return build_vector (type, elements);
8218 /* Subroutine of fold_view_convert_expr. Interpret the contents of
8219 the buffer PTR of length LEN as a constant of type TYPE. For
8220 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
8221 we return a REAL_CST, etc... If the buffer cannot be interpreted,
8222 return NULL_TREE. */
8225 native_interpret_expr (tree type, const unsigned char *ptr, int len)
8227 switch (TREE_CODE (type))
8232 return native_interpret_int (type, ptr, len);
8235 return native_interpret_real (type, ptr, len);
8238 return native_interpret_complex (type, ptr, len);
8241 return native_interpret_vector (type, ptr, len);
8249 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
8250 TYPE at compile-time. If we're unable to perform the conversion
8251 return NULL_TREE. */
8254 fold_view_convert_expr (tree type, tree expr)
8256 /* We support up to 512-bit values (for V8DFmode). */
8257 unsigned char buffer[64];
8260 /* Check that the host and target are sane. */
8261 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
8264 len = native_encode_expr (expr, buffer, sizeof (buffer));
8268 return native_interpret_expr (type, buffer, len);
8271 /* Build an expression for the address of T. Folds away INDIRECT_REF
8272 to avoid confusing the gimplify process. */
8275 build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
8277 /* The size of the object is not relevant when talking about its address. */
8278 if (TREE_CODE (t) == WITH_SIZE_EXPR)
8279 t = TREE_OPERAND (t, 0);
8281 /* Note: doesn't apply to ALIGN_INDIRECT_REF */
8282 if (TREE_CODE (t) == INDIRECT_REF
8283 || TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
8285 t = TREE_OPERAND (t, 0);
8287 if (TREE_TYPE (t) != ptrtype)
8289 t = build1 (NOP_EXPR, ptrtype, t);
8290 SET_EXPR_LOCATION (t, loc);
8293 else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
8295 t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
8297 if (TREE_TYPE (t) != ptrtype)
8298 t = fold_convert_loc (loc, ptrtype, t);
8302 t = build1 (ADDR_EXPR, ptrtype, t);
8303 SET_EXPR_LOCATION (t, loc);
8309 /* Build an expression for the address of T. */
8312 build_fold_addr_expr_loc (location_t loc, tree t)
8314 tree ptrtype = build_pointer_type (TREE_TYPE (t));
8316 return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
8319 /* Fold a unary expression of code CODE and type TYPE with operand
8320 OP0. Return the folded expression if folding is successful.
8321 Otherwise, return NULL_TREE. */
8324 fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
8328 enum tree_code_class kind = TREE_CODE_CLASS (code);
8330 gcc_assert (IS_EXPR_CODE_CLASS (kind)
8331 && TREE_CODE_LENGTH (code) == 1);
8336 if (CONVERT_EXPR_CODE_P (code)
8337 || code == FLOAT_EXPR || code == ABS_EXPR)
8339 /* Don't use STRIP_NOPS, because signedness of argument type
8341 STRIP_SIGN_NOPS (arg0);
8345 /* Strip any conversions that don't change the mode. This
8346 is safe for every expression, except for a comparison
8347 expression because its signedness is derived from its
8350 Note that this is done as an internal manipulation within
8351 the constant folder, in order to find the simplest
8352 representation of the arguments so that their form can be
8353 studied. In any cases, the appropriate type conversions
8354 should be put back in the tree that will get out of the
8360 if (TREE_CODE_CLASS (code) == tcc_unary)
8362 if (TREE_CODE (arg0) == COMPOUND_EXPR)
8363 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
8364 fold_build1_loc (loc, code, type,
8365 fold_convert_loc (loc, TREE_TYPE (op0),
8366 TREE_OPERAND (arg0, 1))));
8367 else if (TREE_CODE (arg0) == COND_EXPR)
8369 tree arg01 = TREE_OPERAND (arg0, 1);
8370 tree arg02 = TREE_OPERAND (arg0, 2);
8371 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
8372 arg01 = fold_build1_loc (loc, code, type,
8373 fold_convert_loc (loc,
8374 TREE_TYPE (op0), arg01));
8375 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
8376 arg02 = fold_build1_loc (loc, code, type,
8377 fold_convert_loc (loc,
8378 TREE_TYPE (op0), arg02));
8379 tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
8382 /* If this was a conversion, and all we did was to move into
8383 inside the COND_EXPR, bring it back out. But leave it if
8384 it is a conversion from integer to integer and the
8385 result precision is no wider than a word since such a
8386 conversion is cheap and may be optimized away by combine,
8387 while it couldn't if it were outside the COND_EXPR. Then return
8388 so we don't get into an infinite recursion loop taking the
8389 conversion out and then back in. */
8391 if ((CONVERT_EXPR_CODE_P (code)
8392 || code == NON_LVALUE_EXPR)
8393 && TREE_CODE (tem) == COND_EXPR
8394 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
8395 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
8396 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
8397 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
8398 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
8399 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
8400 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
8402 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
8403 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
8404 || flag_syntax_only))
8406 tem = build1 (code, type,
8408 TREE_TYPE (TREE_OPERAND
8409 (TREE_OPERAND (tem, 1), 0)),
8410 TREE_OPERAND (tem, 0),
8411 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
8412 TREE_OPERAND (TREE_OPERAND (tem, 2), 0)));
8413 SET_EXPR_LOCATION (tem, loc);
8417 else if (COMPARISON_CLASS_P (arg0))
8419 if (TREE_CODE (type) == BOOLEAN_TYPE)
8421 arg0 = copy_node (arg0);
8422 TREE_TYPE (arg0) = type;
8425 else if (TREE_CODE (type) != INTEGER_TYPE)
8426 return fold_build3_loc (loc, COND_EXPR, type, arg0,
8427 fold_build1_loc (loc, code, type,
8429 fold_build1_loc (loc, code, type,
8430 integer_zero_node));
8437 /* Re-association barriers around constants and other re-association
8438 barriers can be removed. */
8439 if (CONSTANT_CLASS_P (op0)
8440 || TREE_CODE (op0) == PAREN_EXPR)
8441 return fold_convert_loc (loc, type, op0);
8446 case FIX_TRUNC_EXPR:
8447 if (TREE_TYPE (op0) == type)
8450 /* If we have (type) (a CMP b) and type is an integral type, return
8451 new expression involving the new type. */
8452 if (COMPARISON_CLASS_P (op0) && INTEGRAL_TYPE_P (type))
8453 return fold_build2_loc (loc, TREE_CODE (op0), type, TREE_OPERAND (op0, 0),
8454 TREE_OPERAND (op0, 1));
8456 /* Handle cases of two conversions in a row. */
8457 if (CONVERT_EXPR_P (op0))
8459 tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
8460 tree inter_type = TREE_TYPE (op0);
8461 int inside_int = INTEGRAL_TYPE_P (inside_type);
8462 int inside_ptr = POINTER_TYPE_P (inside_type);
8463 int inside_float = FLOAT_TYPE_P (inside_type);
8464 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
8465 unsigned int inside_prec = TYPE_PRECISION (inside_type);
8466 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
8467 int inter_int = INTEGRAL_TYPE_P (inter_type);
8468 int inter_ptr = POINTER_TYPE_P (inter_type);
8469 int inter_float = FLOAT_TYPE_P (inter_type);
8470 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
8471 unsigned int inter_prec = TYPE_PRECISION (inter_type);
8472 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
8473 int final_int = INTEGRAL_TYPE_P (type);
8474 int final_ptr = POINTER_TYPE_P (type);
8475 int final_float = FLOAT_TYPE_P (type);
8476 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
8477 unsigned int final_prec = TYPE_PRECISION (type);
8478 int final_unsignedp = TYPE_UNSIGNED (type);
8480 /* In addition to the cases of two conversions in a row
8481 handled below, if we are converting something to its own
8482 type via an object of identical or wider precision, neither
8483 conversion is needed. */
8484 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
8485 && (((inter_int || inter_ptr) && final_int)
8486 || (inter_float && final_float))
8487 && inter_prec >= final_prec)
8488 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
8490 /* Likewise, if the intermediate and initial types are either both
8491 float or both integer, we don't need the middle conversion if the
8492 former is wider than the latter and doesn't change the signedness
8493 (for integers). Avoid this if the final type is a pointer since
8494 then we sometimes need the middle conversion. Likewise if the
8495 final type has a precision not equal to the size of its mode. */
8496 if (((inter_int && inside_int)
8497 || (inter_float && inside_float)
8498 || (inter_vec && inside_vec))
8499 && inter_prec >= inside_prec
8500 && (inter_float || inter_vec
8501 || inter_unsignedp == inside_unsignedp)
8502 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
8503 && TYPE_MODE (type) == TYPE_MODE (inter_type))
8505 && (! final_vec || inter_prec == inside_prec))
8506 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
8508 /* If we have a sign-extension of a zero-extended value, we can
8509 replace that by a single zero-extension. */
8510 if (inside_int && inter_int && final_int
8511 && inside_prec < inter_prec && inter_prec < final_prec
8512 && inside_unsignedp && !inter_unsignedp)
8513 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
8515 /* Two conversions in a row are not needed unless:
8516 - some conversion is floating-point (overstrict for now), or
8517 - some conversion is a vector (overstrict for now), or
8518 - the intermediate type is narrower than both initial and
8520 - the intermediate type and innermost type differ in signedness,
8521 and the outermost type is wider than the intermediate, or
8522 - the initial type is a pointer type and the precisions of the
8523 intermediate and final types differ, or
8524 - the final type is a pointer type and the precisions of the
8525 initial and intermediate types differ. */
8526 if (! inside_float && ! inter_float && ! final_float
8527 && ! inside_vec && ! inter_vec && ! final_vec
8528 && (inter_prec >= inside_prec || inter_prec >= final_prec)
8529 && ! (inside_int && inter_int
8530 && inter_unsignedp != inside_unsignedp
8531 && inter_prec < final_prec)
8532 && ((inter_unsignedp && inter_prec > inside_prec)
8533 == (final_unsignedp && final_prec > inter_prec))
8534 && ! (inside_ptr && inter_prec != final_prec)
8535 && ! (final_ptr && inside_prec != inter_prec)
8536 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
8537 && TYPE_MODE (type) == TYPE_MODE (inter_type)))
8538 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
8541 /* Handle (T *)&A.B.C for A being of type T and B and C
8542 living at offset zero. This occurs frequently in
8543 C++ upcasting and then accessing the base. */
8544 if (TREE_CODE (op0) == ADDR_EXPR
8545 && POINTER_TYPE_P (type)
8546 && handled_component_p (TREE_OPERAND (op0, 0)))
8548 HOST_WIDE_INT bitsize, bitpos;
8550 enum machine_mode mode;
8551 int unsignedp, volatilep;
8552 tree base = TREE_OPERAND (op0, 0);
8553 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
8554 &mode, &unsignedp, &volatilep, false);
8555 /* If the reference was to a (constant) zero offset, we can use
8556 the address of the base if it has the same base type
8557 as the result type. */
8558 if (! offset && bitpos == 0
8559 && TYPE_MAIN_VARIANT (TREE_TYPE (type))
8560 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
8561 return fold_convert_loc (loc, type,
8562 build_fold_addr_expr_loc (loc, base));
8565 if (TREE_CODE (op0) == MODIFY_EXPR
8566 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
8567 /* Detect assigning a bitfield. */
8568 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
8570 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
8572 /* Don't leave an assignment inside a conversion
8573 unless assigning a bitfield. */
8574 tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
8575 /* First do the assignment, then return converted constant. */
8576 tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
8577 TREE_NO_WARNING (tem) = 1;
8578 TREE_USED (tem) = 1;
8579 SET_EXPR_LOCATION (tem, loc);
8583 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
8584 constants (if x has signed type, the sign bit cannot be set
8585 in c). This folds extension into the BIT_AND_EXPR.
8586 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
8587 very likely don't have maximal range for their precision and this
8588 transformation effectively doesn't preserve non-maximal ranges. */
8589 if (TREE_CODE (type) == INTEGER_TYPE
8590 && TREE_CODE (op0) == BIT_AND_EXPR
8591 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
8593 tree and_expr = op0;
8594 tree and0 = TREE_OPERAND (and_expr, 0);
8595 tree and1 = TREE_OPERAND (and_expr, 1);
8598 if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
8599 || (TYPE_PRECISION (type)
8600 <= TYPE_PRECISION (TREE_TYPE (and_expr))))
8602 else if (TYPE_PRECISION (TREE_TYPE (and1))
8603 <= HOST_BITS_PER_WIDE_INT
8604 && host_integerp (and1, 1))
8606 unsigned HOST_WIDE_INT cst;
8608 cst = tree_low_cst (and1, 1);
8609 cst &= (HOST_WIDE_INT) -1
8610 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
8611 change = (cst == 0);
8612 #ifdef LOAD_EXTEND_OP
8614 && !flag_syntax_only
8615 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
8618 tree uns = unsigned_type_for (TREE_TYPE (and0));
8619 and0 = fold_convert_loc (loc, uns, and0);
8620 and1 = fold_convert_loc (loc, uns, and1);
8626 tem = force_fit_type_double (type, TREE_INT_CST_LOW (and1),
8627 TREE_INT_CST_HIGH (and1), 0,
8628 TREE_OVERFLOW (and1));
8629 return fold_build2_loc (loc, BIT_AND_EXPR, type,
8630 fold_convert_loc (loc, type, and0), tem);
8634 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
8635 when one of the new casts will fold away. Conservatively we assume
8636 that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST. */
8637 if (POINTER_TYPE_P (type)
8638 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
8639 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8640 || TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
8641 || TREE_CODE (TREE_OPERAND (arg0, 1)) == NOP_EXPR))
8643 tree arg00 = TREE_OPERAND (arg0, 0);
8644 tree arg01 = TREE_OPERAND (arg0, 1);
8646 return fold_build2_loc (loc,
8647 TREE_CODE (arg0), type,
8648 fold_convert_loc (loc, type, arg00),
8649 fold_convert_loc (loc, sizetype, arg01));
8652 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
8653 of the same precision, and X is an integer type not narrower than
8654 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
8655 if (INTEGRAL_TYPE_P (type)
8656 && TREE_CODE (op0) == BIT_NOT_EXPR
8657 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
8658 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
8659 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
8661 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
8662 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
8663 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
8664 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
8665 fold_convert_loc (loc, type, tem));
8668 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
8669 type of X and Y (integer types only). */
8670 if (INTEGRAL_TYPE_P (type)
8671 && TREE_CODE (op0) == MULT_EXPR
8672 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
8673 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
8675 /* Be careful not to introduce new overflows. */
8677 if (TYPE_OVERFLOW_WRAPS (type))
8680 mult_type = unsigned_type_for (type);
8682 if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
8684 tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
8685 fold_convert_loc (loc, mult_type,
8686 TREE_OPERAND (op0, 0)),
8687 fold_convert_loc (loc, mult_type,
8688 TREE_OPERAND (op0, 1)));
8689 return fold_convert_loc (loc, type, tem);
8693 tem = fold_convert_const (code, type, op0);
8694 return tem ? tem : NULL_TREE;
8696 case ADDR_SPACE_CONVERT_EXPR:
8697 if (integer_zerop (arg0))
8698 return fold_convert_const (code, type, arg0);
8701 case FIXED_CONVERT_EXPR:
8702 tem = fold_convert_const (code, type, arg0);
8703 return tem ? tem : NULL_TREE;
8705 case VIEW_CONVERT_EXPR:
8706 if (TREE_TYPE (op0) == type)
8708 if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
8709 return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
8710 type, TREE_OPERAND (op0, 0));
8712 /* For integral conversions with the same precision or pointer
8713 conversions use a NOP_EXPR instead. */
8714 if ((INTEGRAL_TYPE_P (type)
8715 || POINTER_TYPE_P (type))
8716 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
8717 || POINTER_TYPE_P (TREE_TYPE (op0)))
8718 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
8719 return fold_convert_loc (loc, type, op0);
8721 /* Strip inner integral conversions that do not change the precision. */
8722 if (CONVERT_EXPR_P (op0)
8723 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
8724 || POINTER_TYPE_P (TREE_TYPE (op0)))
8725 && (INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0)))
8726 || POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0))))
8727 && (TYPE_PRECISION (TREE_TYPE (op0))
8728 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op0, 0)))))
8729 return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
8730 type, TREE_OPERAND (op0, 0));
8732 return fold_view_convert_expr (type, op0);
8735 tem = fold_negate_expr (loc, arg0);
8737 return fold_convert_loc (loc, type, tem);
8741 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
8742 return fold_abs_const (arg0, type);
8743 else if (TREE_CODE (arg0) == NEGATE_EXPR)
8744 return fold_build1_loc (loc, ABS_EXPR, type, TREE_OPERAND (arg0, 0));
8745 /* Convert fabs((double)float) into (double)fabsf(float). */
8746 else if (TREE_CODE (arg0) == NOP_EXPR
8747 && TREE_CODE (type) == REAL_TYPE)
8749 tree targ0 = strip_float_extensions (arg0);
8751 return fold_convert_loc (loc, type,
8752 fold_build1_loc (loc, ABS_EXPR,
8756 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
8757 else if (TREE_CODE (arg0) == ABS_EXPR)
8759 else if (tree_expr_nonnegative_p (arg0))
8762 /* Strip sign ops from argument. */
8763 if (TREE_CODE (type) == REAL_TYPE)
8765 tem = fold_strip_sign_ops (arg0);
8767 return fold_build1_loc (loc, ABS_EXPR, type,
8768 fold_convert_loc (loc, type, tem));
8773 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8774 return fold_convert_loc (loc, type, arg0);
8775 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8777 tree itype = TREE_TYPE (type);
8778 tree rpart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 0));
8779 tree ipart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 1));
8780 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart,
8781 negate_expr (ipart));
8783 if (TREE_CODE (arg0) == COMPLEX_CST)
8785 tree itype = TREE_TYPE (type);
8786 tree rpart = fold_convert_loc (loc, itype, TREE_REALPART (arg0));
8787 tree ipart = fold_convert_loc (loc, itype, TREE_IMAGPART (arg0));
8788 return build_complex (type, rpart, negate_expr (ipart));
8790 if (TREE_CODE (arg0) == CONJ_EXPR)
8791 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8795 if (TREE_CODE (arg0) == INTEGER_CST)
8796 return fold_not_const (arg0, type);
8797 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
8798 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8799 /* Convert ~ (-A) to A - 1. */
8800 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
8801 return fold_build2_loc (loc, MINUS_EXPR, type,
8802 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0)),
8803 build_int_cst (type, 1));
8804 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
8805 else if (INTEGRAL_TYPE_P (type)
8806 && ((TREE_CODE (arg0) == MINUS_EXPR
8807 && integer_onep (TREE_OPERAND (arg0, 1)))
8808 || (TREE_CODE (arg0) == PLUS_EXPR
8809 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
8810 return fold_build1_loc (loc, NEGATE_EXPR, type,
8811 fold_convert_loc (loc, type,
8812 TREE_OPERAND (arg0, 0)));
8813 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
8814 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8815 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8816 fold_convert_loc (loc, type,
8817 TREE_OPERAND (arg0, 0)))))
8818 return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
8819 fold_convert_loc (loc, type,
8820 TREE_OPERAND (arg0, 1)));
8821 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8822 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8823 fold_convert_loc (loc, type,
8824 TREE_OPERAND (arg0, 1)))))
8825 return fold_build2_loc (loc, BIT_XOR_EXPR, type,
8826 fold_convert_loc (loc, type,
8827 TREE_OPERAND (arg0, 0)), tem);
8828 /* Perform BIT_NOT_EXPR on each element individually. */
8829 else if (TREE_CODE (arg0) == VECTOR_CST)
8831 tree elements = TREE_VECTOR_CST_ELTS (arg0), elem, list = NULL_TREE;
8832 int count = TYPE_VECTOR_SUBPARTS (type), i;
8834 for (i = 0; i < count; i++)
8838 elem = TREE_VALUE (elements);
8839 elem = fold_unary_loc (loc, BIT_NOT_EXPR, TREE_TYPE (type), elem);
8840 if (elem == NULL_TREE)
8842 elements = TREE_CHAIN (elements);
8845 elem = build_int_cst (TREE_TYPE (type), -1);
8846 list = tree_cons (NULL_TREE, elem, list);
8849 return build_vector (type, nreverse (list));
8854 case TRUTH_NOT_EXPR:
8855 /* The argument to invert_truthvalue must have Boolean type. */
8856 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
8857 arg0 = fold_convert_loc (loc, boolean_type_node, arg0);
8859 /* Note that the operand of this must be an int
8860 and its values must be 0 or 1.
8861 ("true" is a fixed value perhaps depending on the language,
8862 but we don't handle values other than 1 correctly yet.) */
8863 tem = fold_truth_not_expr (loc, arg0);
8866 return fold_convert_loc (loc, type, tem);
8869 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8870 return fold_convert_loc (loc, type, arg0);
8871 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8872 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
8873 TREE_OPERAND (arg0, 1));
8874 if (TREE_CODE (arg0) == COMPLEX_CST)
8875 return fold_convert_loc (loc, type, TREE_REALPART (arg0));
8876 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8878 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8879 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8880 fold_build1_loc (loc, REALPART_EXPR, itype,
8881 TREE_OPERAND (arg0, 0)),
8882 fold_build1_loc (loc, REALPART_EXPR, itype,
8883 TREE_OPERAND (arg0, 1)));
8884 return fold_convert_loc (loc, type, tem);
8886 if (TREE_CODE (arg0) == CONJ_EXPR)
8888 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8889 tem = fold_build1_loc (loc, REALPART_EXPR, itype,
8890 TREE_OPERAND (arg0, 0));
8891 return fold_convert_loc (loc, type, tem);
8893 if (TREE_CODE (arg0) == CALL_EXPR)
8895 tree fn = get_callee_fndecl (arg0);
8896 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8897 switch (DECL_FUNCTION_CODE (fn))
8899 CASE_FLT_FN (BUILT_IN_CEXPI):
8900 fn = mathfn_built_in (type, BUILT_IN_COS);
8902 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8912 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8913 return fold_convert_loc (loc, type, integer_zero_node);
8914 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8915 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 1),
8916 TREE_OPERAND (arg0, 0));
8917 if (TREE_CODE (arg0) == COMPLEX_CST)
8918 return fold_convert_loc (loc, type, TREE_IMAGPART (arg0));
8919 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8921 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8922 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8923 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8924 TREE_OPERAND (arg0, 0)),
8925 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8926 TREE_OPERAND (arg0, 1)));
8927 return fold_convert_loc (loc, type, tem);
8929 if (TREE_CODE (arg0) == CONJ_EXPR)
8931 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8932 tem = fold_build1_loc (loc, IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
8933 return fold_convert_loc (loc, type, negate_expr (tem));
8935 if (TREE_CODE (arg0) == CALL_EXPR)
8937 tree fn = get_callee_fndecl (arg0);
8938 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8939 switch (DECL_FUNCTION_CODE (fn))
8941 CASE_FLT_FN (BUILT_IN_CEXPI):
8942 fn = mathfn_built_in (type, BUILT_IN_SIN);
8944 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8954 /* Fold *&X to X if X is an lvalue. */
8955 if (TREE_CODE (op0) == ADDR_EXPR)
8957 tree op00 = TREE_OPERAND (op0, 0);
8958 if ((TREE_CODE (op00) == VAR_DECL
8959 || TREE_CODE (op00) == PARM_DECL
8960 || TREE_CODE (op00) == RESULT_DECL)
8961 && !TREE_READONLY (op00))
8968 } /* switch (code) */
8972 /* If the operation was a conversion do _not_ mark a resulting constant
8973 with TREE_OVERFLOW if the original constant was not. These conversions
8974 have implementation defined behavior and retaining the TREE_OVERFLOW
8975 flag here would confuse later passes such as VRP. */
8977 fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
8978 tree type, tree op0)
8980 tree res = fold_unary_loc (loc, code, type, op0);
8982 && TREE_CODE (res) == INTEGER_CST
8983 && TREE_CODE (op0) == INTEGER_CST
8984 && CONVERT_EXPR_CODE_P (code))
8985 TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
8990 /* Fold a binary expression of code CODE and type TYPE with operands
8991 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
8992 Return the folded expression if folding is successful. Otherwise,
8993 return NULL_TREE. */
8996 fold_minmax (location_t loc, enum tree_code code, tree type, tree op0, tree op1)
8998 enum tree_code compl_code;
9000 if (code == MIN_EXPR)
9001 compl_code = MAX_EXPR;
9002 else if (code == MAX_EXPR)
9003 compl_code = MIN_EXPR;
9007 /* MIN (MAX (a, b), b) == b. */
9008 if (TREE_CODE (op0) == compl_code
9009 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
9010 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 0));
9012 /* MIN (MAX (b, a), b) == b. */
9013 if (TREE_CODE (op0) == compl_code
9014 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
9015 && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
9016 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 1));
9018 /* MIN (a, MAX (a, b)) == a. */
9019 if (TREE_CODE (op1) == compl_code
9020 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
9021 && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
9022 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 1));
9024 /* MIN (a, MAX (b, a)) == a. */
9025 if (TREE_CODE (op1) == compl_code
9026 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
9027 && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
9028 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 0));
9033 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
9034 by changing CODE to reduce the magnitude of constants involved in
9035 ARG0 of the comparison.
9036 Returns a canonicalized comparison tree if a simplification was
9037 possible, otherwise returns NULL_TREE.
9038 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
9039 valid if signed overflow is undefined. */
9042 maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
9043 tree arg0, tree arg1,
9044 bool *strict_overflow_p)
9046 enum tree_code code0 = TREE_CODE (arg0);
9047 tree t, cst0 = NULL_TREE;
9051 /* Match A +- CST code arg1 and CST code arg1. We can change the
9052 first form only if overflow is undefined. */
9053 if (!((TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9054 /* In principle pointers also have undefined overflow behavior,
9055 but that causes problems elsewhere. */
9056 && !POINTER_TYPE_P (TREE_TYPE (arg0))
9057 && (code0 == MINUS_EXPR
9058 || code0 == PLUS_EXPR)
9059 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9060 || code0 == INTEGER_CST))
9063 /* Identify the constant in arg0 and its sign. */
9064 if (code0 == INTEGER_CST)
9067 cst0 = TREE_OPERAND (arg0, 1);
9068 sgn0 = tree_int_cst_sgn (cst0);
9070 /* Overflowed constants and zero will cause problems. */
9071 if (integer_zerop (cst0)
9072 || TREE_OVERFLOW (cst0))
9075 /* See if we can reduce the magnitude of the constant in
9076 arg0 by changing the comparison code. */
9077 if (code0 == INTEGER_CST)
9079 /* CST <= arg1 -> CST-1 < arg1. */
9080 if (code == LE_EXPR && sgn0 == 1)
9082 /* -CST < arg1 -> -CST-1 <= arg1. */
9083 else if (code == LT_EXPR && sgn0 == -1)
9085 /* CST > arg1 -> CST-1 >= arg1. */
9086 else if (code == GT_EXPR && sgn0 == 1)
9088 /* -CST >= arg1 -> -CST-1 > arg1. */
9089 else if (code == GE_EXPR && sgn0 == -1)
9093 /* arg1 code' CST' might be more canonical. */
9098 /* A - CST < arg1 -> A - CST-1 <= arg1. */
9100 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
9102 /* A + CST > arg1 -> A + CST-1 >= arg1. */
9103 else if (code == GT_EXPR
9104 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
9106 /* A + CST <= arg1 -> A + CST-1 < arg1. */
9107 else if (code == LE_EXPR
9108 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
9110 /* A - CST >= arg1 -> A - CST-1 > arg1. */
9111 else if (code == GE_EXPR
9112 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
9116 *strict_overflow_p = true;
9119 /* Now build the constant reduced in magnitude. But not if that
9120 would produce one outside of its types range. */
9121 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
9123 && TYPE_MIN_VALUE (TREE_TYPE (cst0))
9124 && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
9126 && TYPE_MAX_VALUE (TREE_TYPE (cst0))
9127 && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
9128 /* We cannot swap the comparison here as that would cause us to
9129 endlessly recurse. */
9132 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
9133 cst0, build_int_cst (TREE_TYPE (cst0), 1), 0);
9134 if (code0 != INTEGER_CST)
9135 t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
9137 /* If swapping might yield to a more canonical form, do so. */
9139 return fold_build2_loc (loc, swap_tree_comparison (code), type, arg1, t);
9141 return fold_build2_loc (loc, code, type, t, arg1);
9144 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
9145 overflow further. Try to decrease the magnitude of constants involved
9146 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
9147 and put sole constants at the second argument position.
9148 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
9151 maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
9152 tree arg0, tree arg1)
9155 bool strict_overflow_p;
9156 const char * const warnmsg = G_("assuming signed overflow does not occur "
9157 "when reducing constant in comparison");
9159 /* Try canonicalization by simplifying arg0. */
9160 strict_overflow_p = false;
9161 t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
9162 &strict_overflow_p);
9165 if (strict_overflow_p)
9166 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
9170 /* Try canonicalization by simplifying arg1 using the swapped
9172 code = swap_tree_comparison (code);
9173 strict_overflow_p = false;
9174 t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
9175 &strict_overflow_p);
9176 if (t && strict_overflow_p)
9177 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
9181 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
9182 space. This is used to avoid issuing overflow warnings for
9183 expressions like &p->x which can not wrap. */
9186 pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
9188 unsigned HOST_WIDE_INT offset_low, total_low;
9189 HOST_WIDE_INT size, offset_high, total_high;
9191 if (!POINTER_TYPE_P (TREE_TYPE (base)))
9197 if (offset == NULL_TREE)
9202 else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
9206 offset_low = TREE_INT_CST_LOW (offset);
9207 offset_high = TREE_INT_CST_HIGH (offset);
9210 if (add_double_with_sign (offset_low, offset_high,
9211 bitpos / BITS_PER_UNIT, 0,
9212 &total_low, &total_high,
9216 if (total_high != 0)
9219 size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
9223 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
9225 if (TREE_CODE (base) == ADDR_EXPR)
9227 HOST_WIDE_INT base_size;
9229 base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
9230 if (base_size > 0 && size < base_size)
9234 return total_low > (unsigned HOST_WIDE_INT) size;
9237 /* Subroutine of fold_binary. This routine performs all of the
9238 transformations that are common to the equality/inequality
9239 operators (EQ_EXPR and NE_EXPR) and the ordering operators
9240 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
9241 fold_binary should call fold_binary. Fold a comparison with
9242 tree code CODE and type TYPE with operands OP0 and OP1. Return
9243 the folded comparison or NULL_TREE. */
9246 fold_comparison (location_t loc, enum tree_code code, tree type,
9249 tree arg0, arg1, tem;
9254 STRIP_SIGN_NOPS (arg0);
9255 STRIP_SIGN_NOPS (arg1);
9257 tem = fold_relational_const (code, type, arg0, arg1);
9258 if (tem != NULL_TREE)
9261 /* If one arg is a real or integer constant, put it last. */
9262 if (tree_swap_operands_p (arg0, arg1, true))
9263 return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
9265 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1. */
9266 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
9267 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9268 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9269 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
9270 && (TREE_CODE (arg1) == INTEGER_CST
9271 && !TREE_OVERFLOW (arg1)))
9273 tree const1 = TREE_OPERAND (arg0, 1);
9275 tree variable = TREE_OPERAND (arg0, 0);
9278 lhs_add = TREE_CODE (arg0) != PLUS_EXPR;
9280 lhs = fold_build2_loc (loc, lhs_add ? PLUS_EXPR : MINUS_EXPR,
9281 TREE_TYPE (arg1), const2, const1);
9283 /* If the constant operation overflowed this can be
9284 simplified as a comparison against INT_MAX/INT_MIN. */
9285 if (TREE_CODE (lhs) == INTEGER_CST
9286 && TREE_OVERFLOW (lhs))
9288 int const1_sgn = tree_int_cst_sgn (const1);
9289 enum tree_code code2 = code;
9291 /* Get the sign of the constant on the lhs if the
9292 operation were VARIABLE + CONST1. */
9293 if (TREE_CODE (arg0) == MINUS_EXPR)
9294 const1_sgn = -const1_sgn;
9296 /* The sign of the constant determines if we overflowed
9297 INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
9298 Canonicalize to the INT_MIN overflow by swapping the comparison
9300 if (const1_sgn == -1)
9301 code2 = swap_tree_comparison (code);
9303 /* We now can look at the canonicalized case
9304 VARIABLE + 1 CODE2 INT_MIN
9305 and decide on the result. */
9306 if (code2 == LT_EXPR
9308 || code2 == EQ_EXPR)
9309 return omit_one_operand_loc (loc, type, boolean_false_node, variable);
9310 else if (code2 == NE_EXPR
9312 || code2 == GT_EXPR)
9313 return omit_one_operand_loc (loc, type, boolean_true_node, variable);
9316 if (TREE_CODE (lhs) == TREE_CODE (arg1)
9317 && (TREE_CODE (lhs) != INTEGER_CST
9318 || !TREE_OVERFLOW (lhs)))
9320 fold_overflow_warning (("assuming signed overflow does not occur "
9321 "when changing X +- C1 cmp C2 to "
9323 WARN_STRICT_OVERFLOW_COMPARISON);
9324 return fold_build2_loc (loc, code, type, variable, lhs);
9328 /* For comparisons of pointers we can decompose it to a compile time
9329 comparison of the base objects and the offsets into the object.
9330 This requires at least one operand being an ADDR_EXPR or a
9331 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
9332 if (POINTER_TYPE_P (TREE_TYPE (arg0))
9333 && (TREE_CODE (arg0) == ADDR_EXPR
9334 || TREE_CODE (arg1) == ADDR_EXPR
9335 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
9336 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
9338 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
9339 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
9340 enum machine_mode mode;
9341 int volatilep, unsignedp;
9342 bool indirect_base0 = false, indirect_base1 = false;
9344 /* Get base and offset for the access. Strip ADDR_EXPR for
9345 get_inner_reference, but put it back by stripping INDIRECT_REF
9346 off the base object if possible. indirect_baseN will be true
9347 if baseN is not an address but refers to the object itself. */
9349 if (TREE_CODE (arg0) == ADDR_EXPR)
9351 base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
9352 &bitsize, &bitpos0, &offset0, &mode,
9353 &unsignedp, &volatilep, false);
9354 if (TREE_CODE (base0) == INDIRECT_REF)
9355 base0 = TREE_OPERAND (base0, 0);
9357 indirect_base0 = true;
9359 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
9361 base0 = TREE_OPERAND (arg0, 0);
9362 offset0 = TREE_OPERAND (arg0, 1);
9366 if (TREE_CODE (arg1) == ADDR_EXPR)
9368 base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
9369 &bitsize, &bitpos1, &offset1, &mode,
9370 &unsignedp, &volatilep, false);
9371 if (TREE_CODE (base1) == INDIRECT_REF)
9372 base1 = TREE_OPERAND (base1, 0);
9374 indirect_base1 = true;
9376 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
9378 base1 = TREE_OPERAND (arg1, 0);
9379 offset1 = TREE_OPERAND (arg1, 1);
9382 /* If we have equivalent bases we might be able to simplify. */
9383 if (indirect_base0 == indirect_base1
9384 && operand_equal_p (base0, base1, 0))
9386 /* We can fold this expression to a constant if the non-constant
9387 offset parts are equal. */
9388 if ((offset0 == offset1
9389 || (offset0 && offset1
9390 && operand_equal_p (offset0, offset1, 0)))
9393 || POINTER_TYPE_OVERFLOW_UNDEFINED))
9398 && bitpos0 != bitpos1
9399 && (pointer_may_wrap_p (base0, offset0, bitpos0)
9400 || pointer_may_wrap_p (base1, offset1, bitpos1)))
9401 fold_overflow_warning (("assuming pointer wraparound does not "
9402 "occur when comparing P +- C1 with "
9404 WARN_STRICT_OVERFLOW_CONDITIONAL);
9409 return constant_boolean_node (bitpos0 == bitpos1, type);
9411 return constant_boolean_node (bitpos0 != bitpos1, type);
9413 return constant_boolean_node (bitpos0 < bitpos1, type);
9415 return constant_boolean_node (bitpos0 <= bitpos1, type);
9417 return constant_boolean_node (bitpos0 >= bitpos1, type);
9419 return constant_boolean_node (bitpos0 > bitpos1, type);
9423 /* We can simplify the comparison to a comparison of the variable
9424 offset parts if the constant offset parts are equal.
9425 Be careful to use signed size type here because otherwise we
9426 mess with array offsets in the wrong way. This is possible
9427 because pointer arithmetic is restricted to retain within an
9428 object and overflow on pointer differences is undefined as of
9429 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
9430 else if (bitpos0 == bitpos1
9431 && ((code == EQ_EXPR || code == NE_EXPR)
9432 || POINTER_TYPE_OVERFLOW_UNDEFINED))
9434 tree signed_size_type_node;
9435 signed_size_type_node = signed_type_for (size_type_node);
9437 /* By converting to signed size type we cover middle-end pointer
9438 arithmetic which operates on unsigned pointer types of size
9439 type size and ARRAY_REF offsets which are properly sign or
9440 zero extended from their type in case it is narrower than
9442 if (offset0 == NULL_TREE)
9443 offset0 = build_int_cst (signed_size_type_node, 0);
9445 offset0 = fold_convert_loc (loc, signed_size_type_node,
9447 if (offset1 == NULL_TREE)
9448 offset1 = build_int_cst (signed_size_type_node, 0);
9450 offset1 = fold_convert_loc (loc, signed_size_type_node,
9455 && (pointer_may_wrap_p (base0, offset0, bitpos0)
9456 || pointer_may_wrap_p (base1, offset1, bitpos1)))
9457 fold_overflow_warning (("assuming pointer wraparound does not "
9458 "occur when comparing P +- C1 with "
9460 WARN_STRICT_OVERFLOW_COMPARISON);
9462 return fold_build2_loc (loc, code, type, offset0, offset1);
9465 /* For non-equal bases we can simplify if they are addresses
9466 of local binding decls or constants. */
9467 else if (indirect_base0 && indirect_base1
9468 /* We know that !operand_equal_p (base0, base1, 0)
9469 because the if condition was false. But make
9470 sure two decls are not the same. */
9472 && TREE_CODE (arg0) == ADDR_EXPR
9473 && TREE_CODE (arg1) == ADDR_EXPR
9474 && (((TREE_CODE (base0) == VAR_DECL
9475 || TREE_CODE (base0) == PARM_DECL)
9476 && (targetm.binds_local_p (base0)
9477 || CONSTANT_CLASS_P (base1)))
9478 || CONSTANT_CLASS_P (base0))
9479 && (((TREE_CODE (base1) == VAR_DECL
9480 || TREE_CODE (base1) == PARM_DECL)
9481 && (targetm.binds_local_p (base1)
9482 || CONSTANT_CLASS_P (base0)))
9483 || CONSTANT_CLASS_P (base1)))
9485 if (code == EQ_EXPR)
9486 return omit_two_operands_loc (loc, type, boolean_false_node,
9488 else if (code == NE_EXPR)
9489 return omit_two_operands_loc (loc, type, boolean_true_node,
9492 /* For equal offsets we can simplify to a comparison of the
9494 else if (bitpos0 == bitpos1
9496 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
9498 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
9499 && ((offset0 == offset1)
9500 || (offset0 && offset1
9501 && operand_equal_p (offset0, offset1, 0))))
9504 base0 = build_fold_addr_expr_loc (loc, base0);
9506 base1 = build_fold_addr_expr_loc (loc, base1);
9507 return fold_build2_loc (loc, code, type, base0, base1);
9511 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
9512 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
9513 the resulting offset is smaller in absolute value than the
9515 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9516 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
9517 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9518 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9519 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
9520 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9521 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
9523 tree const1 = TREE_OPERAND (arg0, 1);
9524 tree const2 = TREE_OPERAND (arg1, 1);
9525 tree variable1 = TREE_OPERAND (arg0, 0);
9526 tree variable2 = TREE_OPERAND (arg1, 0);
9528 const char * const warnmsg = G_("assuming signed overflow does not "
9529 "occur when combining constants around "
9532 /* Put the constant on the side where it doesn't overflow and is
9533 of lower absolute value than before. */
9534 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9535 ? MINUS_EXPR : PLUS_EXPR,
9537 if (!TREE_OVERFLOW (cst)
9538 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2))
9540 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9541 return fold_build2_loc (loc, code, type,
9543 fold_build2_loc (loc,
9544 TREE_CODE (arg1), TREE_TYPE (arg1),
9548 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9549 ? MINUS_EXPR : PLUS_EXPR,
9551 if (!TREE_OVERFLOW (cst)
9552 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1))
9554 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9555 return fold_build2_loc (loc, code, type,
9556 fold_build2_loc (loc, TREE_CODE (arg0), TREE_TYPE (arg0),
9562 /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
9563 signed arithmetic case. That form is created by the compiler
9564 often enough for folding it to be of value. One example is in
9565 computing loop trip counts after Operator Strength Reduction. */
9566 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9567 && TREE_CODE (arg0) == MULT_EXPR
9568 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9569 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9570 && integer_zerop (arg1))
9572 tree const1 = TREE_OPERAND (arg0, 1);
9573 tree const2 = arg1; /* zero */
9574 tree variable1 = TREE_OPERAND (arg0, 0);
9575 enum tree_code cmp_code = code;
9577 gcc_assert (!integer_zerop (const1));
9579 fold_overflow_warning (("assuming signed overflow does not occur when "
9580 "eliminating multiplication in comparison "
9582 WARN_STRICT_OVERFLOW_COMPARISON);
9584 /* If const1 is negative we swap the sense of the comparison. */
9585 if (tree_int_cst_sgn (const1) < 0)
9586 cmp_code = swap_tree_comparison (cmp_code);
9588 return fold_build2_loc (loc, cmp_code, type, variable1, const2);
9591 tem = maybe_canonicalize_comparison (loc, code, type, op0, op1);
9595 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
9597 tree targ0 = strip_float_extensions (arg0);
9598 tree targ1 = strip_float_extensions (arg1);
9599 tree newtype = TREE_TYPE (targ0);
9601 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
9602 newtype = TREE_TYPE (targ1);
9604 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
9605 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
9606 return fold_build2_loc (loc, code, type,
9607 fold_convert_loc (loc, newtype, targ0),
9608 fold_convert_loc (loc, newtype, targ1));
9610 /* (-a) CMP (-b) -> b CMP a */
9611 if (TREE_CODE (arg0) == NEGATE_EXPR
9612 && TREE_CODE (arg1) == NEGATE_EXPR)
9613 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg1, 0),
9614 TREE_OPERAND (arg0, 0));
9616 if (TREE_CODE (arg1) == REAL_CST)
9618 REAL_VALUE_TYPE cst;
9619 cst = TREE_REAL_CST (arg1);
9621 /* (-a) CMP CST -> a swap(CMP) (-CST) */
9622 if (TREE_CODE (arg0) == NEGATE_EXPR)
9623 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9624 TREE_OPERAND (arg0, 0),
9625 build_real (TREE_TYPE (arg1),
9626 REAL_VALUE_NEGATE (cst)));
9628 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
9629 /* a CMP (-0) -> a CMP 0 */
9630 if (REAL_VALUE_MINUS_ZERO (cst))
9631 return fold_build2_loc (loc, code, type, arg0,
9632 build_real (TREE_TYPE (arg1), dconst0));
9634 /* x != NaN is always true, other ops are always false. */
9635 if (REAL_VALUE_ISNAN (cst)
9636 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
9638 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
9639 return omit_one_operand_loc (loc, type, tem, arg0);
9642 /* Fold comparisons against infinity. */
9643 if (REAL_VALUE_ISINF (cst)
9644 && MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1))))
9646 tem = fold_inf_compare (loc, code, type, arg0, arg1);
9647 if (tem != NULL_TREE)
9652 /* If this is a comparison of a real constant with a PLUS_EXPR
9653 or a MINUS_EXPR of a real constant, we can convert it into a
9654 comparison with a revised real constant as long as no overflow
9655 occurs when unsafe_math_optimizations are enabled. */
9656 if (flag_unsafe_math_optimizations
9657 && TREE_CODE (arg1) == REAL_CST
9658 && (TREE_CODE (arg0) == PLUS_EXPR
9659 || TREE_CODE (arg0) == MINUS_EXPR)
9660 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
9661 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
9662 ? MINUS_EXPR : PLUS_EXPR,
9663 arg1, TREE_OPERAND (arg0, 1), 0))
9664 && !TREE_OVERFLOW (tem))
9665 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
9667 /* Likewise, we can simplify a comparison of a real constant with
9668 a MINUS_EXPR whose first operand is also a real constant, i.e.
9669 (c1 - x) < c2 becomes x > c1-c2. Reordering is allowed on
9670 floating-point types only if -fassociative-math is set. */
9671 if (flag_associative_math
9672 && TREE_CODE (arg1) == REAL_CST
9673 && TREE_CODE (arg0) == MINUS_EXPR
9674 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
9675 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
9677 && !TREE_OVERFLOW (tem))
9678 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9679 TREE_OPERAND (arg0, 1), tem);
9681 /* Fold comparisons against built-in math functions. */
9682 if (TREE_CODE (arg1) == REAL_CST
9683 && flag_unsafe_math_optimizations
9684 && ! flag_errno_math)
9686 enum built_in_function fcode = builtin_mathfn_code (arg0);
9688 if (fcode != END_BUILTINS)
9690 tem = fold_mathfn_compare (loc, fcode, code, type, arg0, arg1);
9691 if (tem != NULL_TREE)
9697 if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
9698 && CONVERT_EXPR_P (arg0))
9700 /* If we are widening one operand of an integer comparison,
9701 see if the other operand is similarly being widened. Perhaps we
9702 can do the comparison in the narrower type. */
9703 tem = fold_widened_comparison (loc, code, type, arg0, arg1);
9707 /* Or if we are changing signedness. */
9708 tem = fold_sign_changed_comparison (loc, code, type, arg0, arg1);
9713 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
9714 constant, we can simplify it. */
9715 if (TREE_CODE (arg1) == INTEGER_CST
9716 && (TREE_CODE (arg0) == MIN_EXPR
9717 || TREE_CODE (arg0) == MAX_EXPR)
9718 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9720 tem = optimize_minmax_comparison (loc, code, type, op0, op1);
9725 /* Simplify comparison of something with itself. (For IEEE
9726 floating-point, we can only do some of these simplifications.) */
9727 if (operand_equal_p (arg0, arg1, 0))
9732 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9733 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9734 return constant_boolean_node (1, type);
9739 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9740 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9741 return constant_boolean_node (1, type);
9742 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg1);
9745 /* For NE, we can only do this simplification if integer
9746 or we don't honor IEEE floating point NaNs. */
9747 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
9748 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9750 /* ... fall through ... */
9753 return constant_boolean_node (0, type);
9759 /* If we are comparing an expression that just has comparisons
9760 of two integer values, arithmetic expressions of those comparisons,
9761 and constants, we can simplify it. There are only three cases
9762 to check: the two values can either be equal, the first can be
9763 greater, or the second can be greater. Fold the expression for
9764 those three values. Since each value must be 0 or 1, we have
9765 eight possibilities, each of which corresponds to the constant 0
9766 or 1 or one of the six possible comparisons.
9768 This handles common cases like (a > b) == 0 but also handles
9769 expressions like ((x > y) - (y > x)) > 0, which supposedly
9770 occur in macroized code. */
9772 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
9774 tree cval1 = 0, cval2 = 0;
9777 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
9778 /* Don't handle degenerate cases here; they should already
9779 have been handled anyway. */
9780 && cval1 != 0 && cval2 != 0
9781 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
9782 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
9783 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
9784 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
9785 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
9786 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
9787 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
9789 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
9790 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
9792 /* We can't just pass T to eval_subst in case cval1 or cval2
9793 was the same as ARG1. */
9796 = fold_build2_loc (loc, code, type,
9797 eval_subst (loc, arg0, cval1, maxval,
9801 = fold_build2_loc (loc, code, type,
9802 eval_subst (loc, arg0, cval1, maxval,
9806 = fold_build2_loc (loc, code, type,
9807 eval_subst (loc, arg0, cval1, minval,
9811 /* All three of these results should be 0 or 1. Confirm they are.
9812 Then use those values to select the proper code to use. */
9814 if (TREE_CODE (high_result) == INTEGER_CST
9815 && TREE_CODE (equal_result) == INTEGER_CST
9816 && TREE_CODE (low_result) == INTEGER_CST)
9818 /* Make a 3-bit mask with the high-order bit being the
9819 value for `>', the next for '=', and the low for '<'. */
9820 switch ((integer_onep (high_result) * 4)
9821 + (integer_onep (equal_result) * 2)
9822 + integer_onep (low_result))
9826 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
9847 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
9852 tem = save_expr (build2 (code, type, cval1, cval2));
9853 SET_EXPR_LOCATION (tem, loc);
9856 return fold_build2_loc (loc, code, type, cval1, cval2);
9861 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
9862 into a single range test. */
9863 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
9864 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
9865 && TREE_CODE (arg1) == INTEGER_CST
9866 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9867 && !integer_zerop (TREE_OPERAND (arg0, 1))
9868 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9869 && !TREE_OVERFLOW (arg1))
9871 tem = fold_div_compare (loc, code, type, arg0, arg1);
9872 if (tem != NULL_TREE)
9876 /* Fold ~X op ~Y as Y op X. */
9877 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9878 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9880 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9881 return fold_build2_loc (loc, code, type,
9882 fold_convert_loc (loc, cmp_type,
9883 TREE_OPERAND (arg1, 0)),
9884 TREE_OPERAND (arg0, 0));
9887 /* Fold ~X op C as X op' ~C, where op' is the swapped comparison. */
9888 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9889 && TREE_CODE (arg1) == INTEGER_CST)
9891 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9892 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9893 TREE_OPERAND (arg0, 0),
9894 fold_build1_loc (loc, BIT_NOT_EXPR, cmp_type,
9895 fold_convert_loc (loc, cmp_type, arg1)));
9902 /* Subroutine of fold_binary. Optimize complex multiplications of the
9903 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
9904 argument EXPR represents the expression "z" of type TYPE. */
9907 fold_mult_zconjz (location_t loc, tree type, tree expr)
9909 tree itype = TREE_TYPE (type);
9910 tree rpart, ipart, tem;
9912 if (TREE_CODE (expr) == COMPLEX_EXPR)
9914 rpart = TREE_OPERAND (expr, 0);
9915 ipart = TREE_OPERAND (expr, 1);
9917 else if (TREE_CODE (expr) == COMPLEX_CST)
9919 rpart = TREE_REALPART (expr);
9920 ipart = TREE_IMAGPART (expr);
9924 expr = save_expr (expr);
9925 rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
9926 ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
9929 rpart = save_expr (rpart);
9930 ipart = save_expr (ipart);
9931 tem = fold_build2_loc (loc, PLUS_EXPR, itype,
9932 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
9933 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
9934 return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
9935 fold_convert_loc (loc, itype, integer_zero_node));
9939 /* Subroutine of fold_binary. If P is the value of EXPR, computes
9940 power-of-two M and (arbitrary) N such that M divides (P-N). This condition
9941 guarantees that P and N have the same least significant log2(M) bits.
9942 N is not otherwise constrained. In particular, N is not normalized to
9943 0 <= N < M as is common. In general, the precise value of P is unknown.
9944 M is chosen as large as possible such that constant N can be determined.
9946 Returns M and sets *RESIDUE to N.
9948 If ALLOW_FUNC_ALIGN is true, do take functions' DECL_ALIGN_UNIT into
9949 account. This is not always possible due to PR 35705.
9952 static unsigned HOST_WIDE_INT
9953 get_pointer_modulus_and_residue (tree expr, unsigned HOST_WIDE_INT *residue,
9954 bool allow_func_align)
9956 enum tree_code code;
9960 code = TREE_CODE (expr);
9961 if (code == ADDR_EXPR)
9963 expr = TREE_OPERAND (expr, 0);
9964 if (handled_component_p (expr))
9966 HOST_WIDE_INT bitsize, bitpos;
9968 enum machine_mode mode;
9969 int unsignedp, volatilep;
9971 expr = get_inner_reference (expr, &bitsize, &bitpos, &offset,
9972 &mode, &unsignedp, &volatilep, false);
9973 *residue = bitpos / BITS_PER_UNIT;
9976 if (TREE_CODE (offset) == INTEGER_CST)
9977 *residue += TREE_INT_CST_LOW (offset);
9979 /* We don't handle more complicated offset expressions. */
9985 && (allow_func_align || TREE_CODE (expr) != FUNCTION_DECL))
9986 return DECL_ALIGN_UNIT (expr);
9988 else if (code == POINTER_PLUS_EXPR)
9991 unsigned HOST_WIDE_INT modulus;
9992 enum tree_code inner_code;
9994 op0 = TREE_OPERAND (expr, 0);
9996 modulus = get_pointer_modulus_and_residue (op0, residue,
9999 op1 = TREE_OPERAND (expr, 1);
10001 inner_code = TREE_CODE (op1);
10002 if (inner_code == INTEGER_CST)
10004 *residue += TREE_INT_CST_LOW (op1);
10007 else if (inner_code == MULT_EXPR)
10009 op1 = TREE_OPERAND (op1, 1);
10010 if (TREE_CODE (op1) == INTEGER_CST)
10012 unsigned HOST_WIDE_INT align;
10014 /* Compute the greatest power-of-2 divisor of op1. */
10015 align = TREE_INT_CST_LOW (op1);
10018 /* If align is non-zero and less than *modulus, replace
10019 *modulus with align., If align is 0, then either op1 is 0
10020 or the greatest power-of-2 divisor of op1 doesn't fit in an
10021 unsigned HOST_WIDE_INT. In either case, no additional
10022 constraint is imposed. */
10024 modulus = MIN (modulus, align);
10031 /* If we get here, we were unable to determine anything useful about the
10037 /* Fold a binary expression of code CODE and type TYPE with operands
10038 OP0 and OP1. LOC is the location of the resulting expression.
10039 Return the folded expression if folding is successful. Otherwise,
10040 return NULL_TREE. */
10043 fold_binary_loc (location_t loc,
10044 enum tree_code code, tree type, tree op0, tree op1)
10046 enum tree_code_class kind = TREE_CODE_CLASS (code);
10047 tree arg0, arg1, tem;
10048 tree t1 = NULL_TREE;
10049 bool strict_overflow_p;
10051 gcc_assert (IS_EXPR_CODE_CLASS (kind)
10052 && TREE_CODE_LENGTH (code) == 2
10053 && op0 != NULL_TREE
10054 && op1 != NULL_TREE);
10059 /* Strip any conversions that don't change the mode. This is
10060 safe for every expression, except for a comparison expression
10061 because its signedness is derived from its operands. So, in
10062 the latter case, only strip conversions that don't change the
10063 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
10066 Note that this is done as an internal manipulation within the
10067 constant folder, in order to find the simplest representation
10068 of the arguments so that their form can be studied. In any
10069 cases, the appropriate type conversions should be put back in
10070 the tree that will get out of the constant folder. */
10072 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
10074 STRIP_SIGN_NOPS (arg0);
10075 STRIP_SIGN_NOPS (arg1);
10083 /* Note that TREE_CONSTANT isn't enough: static var addresses are
10084 constant but we can't do arithmetic on them. */
10085 if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
10086 || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
10087 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == FIXED_CST)
10088 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == INTEGER_CST)
10089 || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
10090 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST))
10092 if (kind == tcc_binary)
10094 /* Make sure type and arg0 have the same saturating flag. */
10095 gcc_assert (TYPE_SATURATING (type)
10096 == TYPE_SATURATING (TREE_TYPE (arg0)));
10097 tem = const_binop (code, arg0, arg1, 0);
10099 else if (kind == tcc_comparison)
10100 tem = fold_relational_const (code, type, arg0, arg1);
10104 if (tem != NULL_TREE)
10106 if (TREE_TYPE (tem) != type)
10107 tem = fold_convert_loc (loc, type, tem);
10112 /* If this is a commutative operation, and ARG0 is a constant, move it
10113 to ARG1 to reduce the number of tests below. */
10114 if (commutative_tree_code (code)
10115 && tree_swap_operands_p (arg0, arg1, true))
10116 return fold_build2_loc (loc, code, type, op1, op0);
10118 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
10120 First check for cases where an arithmetic operation is applied to a
10121 compound, conditional, or comparison operation. Push the arithmetic
10122 operation inside the compound or conditional to see if any folding
10123 can then be done. Convert comparison to conditional for this purpose.
10124 The also optimizes non-constant cases that used to be done in
10127 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
10128 one of the operands is a comparison and the other is a comparison, a
10129 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
10130 code below would make the expression more complex. Change it to a
10131 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
10132 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
10134 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
10135 || code == EQ_EXPR || code == NE_EXPR)
10136 && ((truth_value_p (TREE_CODE (arg0))
10137 && (truth_value_p (TREE_CODE (arg1))
10138 || (TREE_CODE (arg1) == BIT_AND_EXPR
10139 && integer_onep (TREE_OPERAND (arg1, 1)))))
10140 || (truth_value_p (TREE_CODE (arg1))
10141 && (truth_value_p (TREE_CODE (arg0))
10142 || (TREE_CODE (arg0) == BIT_AND_EXPR
10143 && integer_onep (TREE_OPERAND (arg0, 1)))))))
10145 tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
10146 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
10149 fold_convert_loc (loc, boolean_type_node, arg0),
10150 fold_convert_loc (loc, boolean_type_node, arg1));
10152 if (code == EQ_EXPR)
10153 tem = invert_truthvalue_loc (loc, tem);
10155 return fold_convert_loc (loc, type, tem);
10158 if (TREE_CODE_CLASS (code) == tcc_binary
10159 || TREE_CODE_CLASS (code) == tcc_comparison)
10161 if (TREE_CODE (arg0) == COMPOUND_EXPR)
10163 tem = fold_build2_loc (loc, code, type,
10164 fold_convert_loc (loc, TREE_TYPE (op0),
10165 TREE_OPERAND (arg0, 1)), op1);
10166 tem = build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0), tem);
10167 goto fold_binary_exit;
10169 if (TREE_CODE (arg1) == COMPOUND_EXPR
10170 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10172 tem = fold_build2_loc (loc, code, type, op0,
10173 fold_convert_loc (loc, TREE_TYPE (op1),
10174 TREE_OPERAND (arg1, 1)));
10175 tem = build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0), tem);
10176 goto fold_binary_exit;
10179 if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
10181 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
10183 /*cond_first_p=*/1);
10184 if (tem != NULL_TREE)
10188 if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1))
10190 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
10192 /*cond_first_p=*/0);
10193 if (tem != NULL_TREE)
10200 case POINTER_PLUS_EXPR:
10201 /* 0 +p index -> (type)index */
10202 if (integer_zerop (arg0))
10203 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10205 /* PTR +p 0 -> PTR */
10206 if (integer_zerop (arg1))
10207 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10209 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
10210 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10211 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
10212 return fold_convert_loc (loc, type,
10213 fold_build2_loc (loc, PLUS_EXPR, sizetype,
10214 fold_convert_loc (loc, sizetype,
10216 fold_convert_loc (loc, sizetype,
10219 /* index +p PTR -> PTR +p index */
10220 if (POINTER_TYPE_P (TREE_TYPE (arg1))
10221 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
10222 return fold_build2_loc (loc, POINTER_PLUS_EXPR, type,
10223 fold_convert_loc (loc, type, arg1),
10224 fold_convert_loc (loc, sizetype, arg0));
10226 /* (PTR +p B) +p A -> PTR +p (B + A) */
10227 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10230 tree arg01 = fold_convert_loc (loc, sizetype, TREE_OPERAND (arg0, 1));
10231 tree arg00 = TREE_OPERAND (arg0, 0);
10232 inner = fold_build2_loc (loc, PLUS_EXPR, sizetype,
10233 arg01, fold_convert_loc (loc, sizetype, arg1));
10234 return fold_convert_loc (loc, type,
10235 fold_build2_loc (loc, POINTER_PLUS_EXPR,
10240 /* PTR_CST +p CST -> CST1 */
10241 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
10242 return fold_build2_loc (loc, PLUS_EXPR, type, arg0,
10243 fold_convert_loc (loc, type, arg1));
10245 /* Try replacing &a[i1] +p c * i2 with &a[i1 + i2], if c is step
10246 of the array. Loop optimizer sometimes produce this type of
10248 if (TREE_CODE (arg0) == ADDR_EXPR)
10250 tem = try_move_mult_to_index (loc, arg0,
10251 fold_convert_loc (loc, sizetype, arg1));
10253 return fold_convert_loc (loc, type, tem);
10259 /* A + (-B) -> A - B */
10260 if (TREE_CODE (arg1) == NEGATE_EXPR)
10261 return fold_build2_loc (loc, MINUS_EXPR, type,
10262 fold_convert_loc (loc, type, arg0),
10263 fold_convert_loc (loc, type,
10264 TREE_OPERAND (arg1, 0)));
10265 /* (-A) + B -> B - A */
10266 if (TREE_CODE (arg0) == NEGATE_EXPR
10267 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
10268 return fold_build2_loc (loc, MINUS_EXPR, type,
10269 fold_convert_loc (loc, type, arg1),
10270 fold_convert_loc (loc, type,
10271 TREE_OPERAND (arg0, 0)));
10273 if (INTEGRAL_TYPE_P (type))
10275 /* Convert ~A + 1 to -A. */
10276 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10277 && integer_onep (arg1))
10278 return fold_build1_loc (loc, NEGATE_EXPR, type,
10279 fold_convert_loc (loc, type,
10280 TREE_OPERAND (arg0, 0)));
10282 /* ~X + X is -1. */
10283 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10284 && !TYPE_OVERFLOW_TRAPS (type))
10286 tree tem = TREE_OPERAND (arg0, 0);
10289 if (operand_equal_p (tem, arg1, 0))
10291 t1 = build_int_cst_type (type, -1);
10292 return omit_one_operand_loc (loc, type, t1, arg1);
10296 /* X + ~X is -1. */
10297 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10298 && !TYPE_OVERFLOW_TRAPS (type))
10300 tree tem = TREE_OPERAND (arg1, 0);
10303 if (operand_equal_p (arg0, tem, 0))
10305 t1 = build_int_cst_type (type, -1);
10306 return omit_one_operand_loc (loc, type, t1, arg0);
10310 /* X + (X / CST) * -CST is X % CST. */
10311 if (TREE_CODE (arg1) == MULT_EXPR
10312 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10313 && operand_equal_p (arg0,
10314 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
10316 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
10317 tree cst1 = TREE_OPERAND (arg1, 1);
10318 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
10320 if (sum && integer_zerop (sum))
10321 return fold_convert_loc (loc, type,
10322 fold_build2_loc (loc, TRUNC_MOD_EXPR,
10323 TREE_TYPE (arg0), arg0,
10328 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the
10329 same or one. Make sure type is not saturating.
10330 fold_plusminus_mult_expr will re-associate. */
10331 if ((TREE_CODE (arg0) == MULT_EXPR
10332 || TREE_CODE (arg1) == MULT_EXPR)
10333 && !TYPE_SATURATING (type)
10334 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10336 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10341 if (! FLOAT_TYPE_P (type))
10343 if (integer_zerop (arg1))
10344 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10346 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
10347 with a constant, and the two constants have no bits in common,
10348 we should treat this as a BIT_IOR_EXPR since this may produce more
10349 simplifications. */
10350 if (TREE_CODE (arg0) == BIT_AND_EXPR
10351 && TREE_CODE (arg1) == BIT_AND_EXPR
10352 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10353 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
10354 && integer_zerop (const_binop (BIT_AND_EXPR,
10355 TREE_OPERAND (arg0, 1),
10356 TREE_OPERAND (arg1, 1), 0)))
10358 code = BIT_IOR_EXPR;
10362 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
10363 (plus (plus (mult) (mult)) (foo)) so that we can
10364 take advantage of the factoring cases below. */
10365 if (((TREE_CODE (arg0) == PLUS_EXPR
10366 || TREE_CODE (arg0) == MINUS_EXPR)
10367 && TREE_CODE (arg1) == MULT_EXPR)
10368 || ((TREE_CODE (arg1) == PLUS_EXPR
10369 || TREE_CODE (arg1) == MINUS_EXPR)
10370 && TREE_CODE (arg0) == MULT_EXPR))
10372 tree parg0, parg1, parg, marg;
10373 enum tree_code pcode;
10375 if (TREE_CODE (arg1) == MULT_EXPR)
10376 parg = arg0, marg = arg1;
10378 parg = arg1, marg = arg0;
10379 pcode = TREE_CODE (parg);
10380 parg0 = TREE_OPERAND (parg, 0);
10381 parg1 = TREE_OPERAND (parg, 1);
10382 STRIP_NOPS (parg0);
10383 STRIP_NOPS (parg1);
10385 if (TREE_CODE (parg0) == MULT_EXPR
10386 && TREE_CODE (parg1) != MULT_EXPR)
10387 return fold_build2_loc (loc, pcode, type,
10388 fold_build2_loc (loc, PLUS_EXPR, type,
10389 fold_convert_loc (loc, type,
10391 fold_convert_loc (loc, type,
10393 fold_convert_loc (loc, type, parg1));
10394 if (TREE_CODE (parg0) != MULT_EXPR
10395 && TREE_CODE (parg1) == MULT_EXPR)
10397 fold_build2_loc (loc, PLUS_EXPR, type,
10398 fold_convert_loc (loc, type, parg0),
10399 fold_build2_loc (loc, pcode, type,
10400 fold_convert_loc (loc, type, marg),
10401 fold_convert_loc (loc, type,
10407 /* See if ARG1 is zero and X + ARG1 reduces to X. */
10408 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
10409 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10411 /* Likewise if the operands are reversed. */
10412 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10413 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10415 /* Convert X + -C into X - C. */
10416 if (TREE_CODE (arg1) == REAL_CST
10417 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
10419 tem = fold_negate_const (arg1, type);
10420 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
10421 return fold_build2_loc (loc, MINUS_EXPR, type,
10422 fold_convert_loc (loc, type, arg0),
10423 fold_convert_loc (loc, type, tem));
10426 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
10427 to __complex__ ( x, y ). This is not the same for SNaNs or
10428 if signed zeros are involved. */
10429 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10430 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10431 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10433 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10434 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10435 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10436 bool arg0rz = false, arg0iz = false;
10437 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10438 || (arg0i && (arg0iz = real_zerop (arg0i))))
10440 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10441 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10442 if (arg0rz && arg1i && real_zerop (arg1i))
10444 tree rp = arg1r ? arg1r
10445 : build1 (REALPART_EXPR, rtype, arg1);
10446 tree ip = arg0i ? arg0i
10447 : build1 (IMAGPART_EXPR, rtype, arg0);
10448 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10450 else if (arg0iz && arg1r && real_zerop (arg1r))
10452 tree rp = arg0r ? arg0r
10453 : build1 (REALPART_EXPR, rtype, arg0);
10454 tree ip = arg1i ? arg1i
10455 : build1 (IMAGPART_EXPR, rtype, arg1);
10456 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10461 if (flag_unsafe_math_optimizations
10462 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10463 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10464 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10467 /* Convert x+x into x*2.0. */
10468 if (operand_equal_p (arg0, arg1, 0)
10469 && SCALAR_FLOAT_TYPE_P (type))
10470 return fold_build2_loc (loc, MULT_EXPR, type, arg0,
10471 build_real (type, dconst2));
10473 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
10474 We associate floats only if the user has specified
10475 -fassociative-math. */
10476 if (flag_associative_math
10477 && TREE_CODE (arg1) == PLUS_EXPR
10478 && TREE_CODE (arg0) != MULT_EXPR)
10480 tree tree10 = TREE_OPERAND (arg1, 0);
10481 tree tree11 = TREE_OPERAND (arg1, 1);
10482 if (TREE_CODE (tree11) == MULT_EXPR
10483 && TREE_CODE (tree10) == MULT_EXPR)
10486 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
10487 return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
10490 /* Convert (b*c + d*e) + a into b*c + (d*e +a).
10491 We associate floats only if the user has specified
10492 -fassociative-math. */
10493 if (flag_associative_math
10494 && TREE_CODE (arg0) == PLUS_EXPR
10495 && TREE_CODE (arg1) != MULT_EXPR)
10497 tree tree00 = TREE_OPERAND (arg0, 0);
10498 tree tree01 = TREE_OPERAND (arg0, 1);
10499 if (TREE_CODE (tree01) == MULT_EXPR
10500 && TREE_CODE (tree00) == MULT_EXPR)
10503 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
10504 return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
10510 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
10511 is a rotate of A by C1 bits. */
10512 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
10513 is a rotate of A by B bits. */
10515 enum tree_code code0, code1;
10517 code0 = TREE_CODE (arg0);
10518 code1 = TREE_CODE (arg1);
10519 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
10520 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
10521 && operand_equal_p (TREE_OPERAND (arg0, 0),
10522 TREE_OPERAND (arg1, 0), 0)
10523 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
10524 TYPE_UNSIGNED (rtype))
10525 /* Only create rotates in complete modes. Other cases are not
10526 expanded properly. */
10527 && TYPE_PRECISION (rtype) == GET_MODE_PRECISION (TYPE_MODE (rtype)))
10529 tree tree01, tree11;
10530 enum tree_code code01, code11;
10532 tree01 = TREE_OPERAND (arg0, 1);
10533 tree11 = TREE_OPERAND (arg1, 1);
10534 STRIP_NOPS (tree01);
10535 STRIP_NOPS (tree11);
10536 code01 = TREE_CODE (tree01);
10537 code11 = TREE_CODE (tree11);
10538 if (code01 == INTEGER_CST
10539 && code11 == INTEGER_CST
10540 && TREE_INT_CST_HIGH (tree01) == 0
10541 && TREE_INT_CST_HIGH (tree11) == 0
10542 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
10543 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
10545 tem = build2 (LROTATE_EXPR,
10546 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10547 TREE_OPERAND (arg0, 0),
10548 code0 == LSHIFT_EXPR
10549 ? tree01 : tree11);
10550 SET_EXPR_LOCATION (tem, loc);
10551 return fold_convert_loc (loc, type, tem);
10553 else if (code11 == MINUS_EXPR)
10555 tree tree110, tree111;
10556 tree110 = TREE_OPERAND (tree11, 0);
10557 tree111 = TREE_OPERAND (tree11, 1);
10558 STRIP_NOPS (tree110);
10559 STRIP_NOPS (tree111);
10560 if (TREE_CODE (tree110) == INTEGER_CST
10561 && 0 == compare_tree_int (tree110,
10563 (TREE_TYPE (TREE_OPERAND
10565 && operand_equal_p (tree01, tree111, 0))
10567 fold_convert_loc (loc, type,
10568 build2 ((code0 == LSHIFT_EXPR
10571 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10572 TREE_OPERAND (arg0, 0), tree01));
10574 else if (code01 == MINUS_EXPR)
10576 tree tree010, tree011;
10577 tree010 = TREE_OPERAND (tree01, 0);
10578 tree011 = TREE_OPERAND (tree01, 1);
10579 STRIP_NOPS (tree010);
10580 STRIP_NOPS (tree011);
10581 if (TREE_CODE (tree010) == INTEGER_CST
10582 && 0 == compare_tree_int (tree010,
10584 (TREE_TYPE (TREE_OPERAND
10586 && operand_equal_p (tree11, tree011, 0))
10587 return fold_convert_loc
10589 build2 ((code0 != LSHIFT_EXPR
10592 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10593 TREE_OPERAND (arg0, 0), tree11));
10599 /* In most languages, can't associate operations on floats through
10600 parentheses. Rather than remember where the parentheses were, we
10601 don't associate floats at all, unless the user has specified
10602 -fassociative-math.
10603 And, we need to make sure type is not saturating. */
10605 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
10606 && !TYPE_SATURATING (type))
10608 tree var0, con0, lit0, minus_lit0;
10609 tree var1, con1, lit1, minus_lit1;
10612 /* Split both trees into variables, constants, and literals. Then
10613 associate each group together, the constants with literals,
10614 then the result with variables. This increases the chances of
10615 literals being recombined later and of generating relocatable
10616 expressions for the sum of a constant and literal. */
10617 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
10618 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
10619 code == MINUS_EXPR);
10621 /* With undefined overflow we can only associate constants
10622 with one variable. */
10623 if (((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
10624 || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
10630 if (TREE_CODE (tmp0) == NEGATE_EXPR)
10631 tmp0 = TREE_OPERAND (tmp0, 0);
10632 if (TREE_CODE (tmp1) == NEGATE_EXPR)
10633 tmp1 = TREE_OPERAND (tmp1, 0);
10634 /* The only case we can still associate with two variables
10635 is if they are the same, modulo negation. */
10636 if (!operand_equal_p (tmp0, tmp1, 0))
10640 /* Only do something if we found more than two objects. Otherwise,
10641 nothing has changed and we risk infinite recursion. */
10643 && (2 < ((var0 != 0) + (var1 != 0)
10644 + (con0 != 0) + (con1 != 0)
10645 + (lit0 != 0) + (lit1 != 0)
10646 + (minus_lit0 != 0) + (minus_lit1 != 0))))
10648 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
10649 if (code == MINUS_EXPR)
10652 var0 = associate_trees (loc, var0, var1, code, type);
10653 con0 = associate_trees (loc, con0, con1, code, type);
10654 lit0 = associate_trees (loc, lit0, lit1, code, type);
10655 minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1, code, type);
10657 /* Preserve the MINUS_EXPR if the negative part of the literal is
10658 greater than the positive part. Otherwise, the multiplicative
10659 folding code (i.e extract_muldiv) may be fooled in case
10660 unsigned constants are subtracted, like in the following
10661 example: ((X*2 + 4) - 8U)/2. */
10662 if (minus_lit0 && lit0)
10664 if (TREE_CODE (lit0) == INTEGER_CST
10665 && TREE_CODE (minus_lit0) == INTEGER_CST
10666 && tree_int_cst_lt (lit0, minus_lit0))
10668 minus_lit0 = associate_trees (loc, minus_lit0, lit0,
10674 lit0 = associate_trees (loc, lit0, minus_lit0,
10683 fold_convert_loc (loc, type,
10684 associate_trees (loc, var0, minus_lit0,
10685 MINUS_EXPR, type));
10688 con0 = associate_trees (loc, con0, minus_lit0,
10691 fold_convert_loc (loc, type,
10692 associate_trees (loc, var0, con0,
10697 con0 = associate_trees (loc, con0, lit0, code, type);
10699 fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
10707 /* Pointer simplifications for subtraction, simple reassociations. */
10708 if (POINTER_TYPE_P (TREE_TYPE (arg1)) && POINTER_TYPE_P (TREE_TYPE (arg0)))
10710 /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
10711 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
10712 && TREE_CODE (arg1) == POINTER_PLUS_EXPR)
10714 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10715 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10716 tree arg10 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10717 tree arg11 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10718 return fold_build2_loc (loc, PLUS_EXPR, type,
10719 fold_build2_loc (loc, MINUS_EXPR, type,
10721 fold_build2_loc (loc, MINUS_EXPR, type,
10724 /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1 simplifies. */
10725 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10727 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10728 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10729 tree tmp = fold_binary_loc (loc, MINUS_EXPR, type, arg00,
10730 fold_convert_loc (loc, type, arg1));
10732 return fold_build2_loc (loc, PLUS_EXPR, type, tmp, arg01);
10735 /* A - (-B) -> A + B */
10736 if (TREE_CODE (arg1) == NEGATE_EXPR)
10737 return fold_build2_loc (loc, PLUS_EXPR, type, op0,
10738 fold_convert_loc (loc, type,
10739 TREE_OPERAND (arg1, 0)));
10740 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
10741 if (TREE_CODE (arg0) == NEGATE_EXPR
10742 && (FLOAT_TYPE_P (type)
10743 || INTEGRAL_TYPE_P (type))
10744 && negate_expr_p (arg1)
10745 && reorder_operands_p (arg0, arg1))
10746 return fold_build2_loc (loc, MINUS_EXPR, type,
10747 fold_convert_loc (loc, type,
10748 negate_expr (arg1)),
10749 fold_convert_loc (loc, type,
10750 TREE_OPERAND (arg0, 0)));
10751 /* Convert -A - 1 to ~A. */
10752 if (INTEGRAL_TYPE_P (type)
10753 && TREE_CODE (arg0) == NEGATE_EXPR
10754 && integer_onep (arg1)
10755 && !TYPE_OVERFLOW_TRAPS (type))
10756 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
10757 fold_convert_loc (loc, type,
10758 TREE_OPERAND (arg0, 0)));
10760 /* Convert -1 - A to ~A. */
10761 if (INTEGRAL_TYPE_P (type)
10762 && integer_all_onesp (arg0))
10763 return fold_build1_loc (loc, BIT_NOT_EXPR, type, op1);
10766 /* X - (X / CST) * CST is X % CST. */
10767 if (INTEGRAL_TYPE_P (type)
10768 && TREE_CODE (arg1) == MULT_EXPR
10769 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10770 && operand_equal_p (arg0,
10771 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0)
10772 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg1, 0), 1),
10773 TREE_OPERAND (arg1, 1), 0))
10775 fold_convert_loc (loc, type,
10776 fold_build2_loc (loc, TRUNC_MOD_EXPR, TREE_TYPE (arg0),
10777 arg0, TREE_OPERAND (arg1, 1)));
10779 if (! FLOAT_TYPE_P (type))
10781 if (integer_zerop (arg0))
10782 return negate_expr (fold_convert_loc (loc, type, arg1));
10783 if (integer_zerop (arg1))
10784 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10786 /* Fold A - (A & B) into ~B & A. */
10787 if (!TREE_SIDE_EFFECTS (arg0)
10788 && TREE_CODE (arg1) == BIT_AND_EXPR)
10790 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
10792 tree arg10 = fold_convert_loc (loc, type,
10793 TREE_OPERAND (arg1, 0));
10794 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10795 fold_build1_loc (loc, BIT_NOT_EXPR,
10797 fold_convert_loc (loc, type, arg0));
10799 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10801 tree arg11 = fold_convert_loc (loc,
10802 type, TREE_OPERAND (arg1, 1));
10803 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10804 fold_build1_loc (loc, BIT_NOT_EXPR,
10806 fold_convert_loc (loc, type, arg0));
10810 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
10811 any power of 2 minus 1. */
10812 if (TREE_CODE (arg0) == BIT_AND_EXPR
10813 && TREE_CODE (arg1) == BIT_AND_EXPR
10814 && operand_equal_p (TREE_OPERAND (arg0, 0),
10815 TREE_OPERAND (arg1, 0), 0))
10817 tree mask0 = TREE_OPERAND (arg0, 1);
10818 tree mask1 = TREE_OPERAND (arg1, 1);
10819 tree tem = fold_build1_loc (loc, BIT_NOT_EXPR, type, mask0);
10821 if (operand_equal_p (tem, mask1, 0))
10823 tem = fold_build2_loc (loc, BIT_XOR_EXPR, type,
10824 TREE_OPERAND (arg0, 0), mask1);
10825 return fold_build2_loc (loc, MINUS_EXPR, type, tem, mask1);
10830 /* See if ARG1 is zero and X - ARG1 reduces to X. */
10831 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
10832 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10834 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
10835 ARG0 is zero and X + ARG0 reduces to X, since that would mean
10836 (-ARG1 + ARG0) reduces to -ARG1. */
10837 else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10838 return negate_expr (fold_convert_loc (loc, type, arg1));
10840 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
10841 __complex__ ( x, -y ). This is not the same for SNaNs or if
10842 signed zeros are involved. */
10843 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10844 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10845 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10847 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10848 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10849 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10850 bool arg0rz = false, arg0iz = false;
10851 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10852 || (arg0i && (arg0iz = real_zerop (arg0i))))
10854 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10855 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10856 if (arg0rz && arg1i && real_zerop (arg1i))
10858 tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10860 : build1 (REALPART_EXPR, rtype, arg1));
10861 tree ip = arg0i ? arg0i
10862 : build1 (IMAGPART_EXPR, rtype, arg0);
10863 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10865 else if (arg0iz && arg1r && real_zerop (arg1r))
10867 tree rp = arg0r ? arg0r
10868 : build1 (REALPART_EXPR, rtype, arg0);
10869 tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10871 : build1 (IMAGPART_EXPR, rtype, arg1));
10872 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10877 /* Fold &x - &x. This can happen from &x.foo - &x.
10878 This is unsafe for certain floats even in non-IEEE formats.
10879 In IEEE, it is unsafe because it does wrong for NaNs.
10880 Also note that operand_equal_p is always false if an operand
10883 if ((!FLOAT_TYPE_P (type) || !HONOR_NANS (TYPE_MODE (type)))
10884 && operand_equal_p (arg0, arg1, 0))
10885 return fold_convert_loc (loc, type, integer_zero_node);
10887 /* A - B -> A + (-B) if B is easily negatable. */
10888 if (negate_expr_p (arg1)
10889 && ((FLOAT_TYPE_P (type)
10890 /* Avoid this transformation if B is a positive REAL_CST. */
10891 && (TREE_CODE (arg1) != REAL_CST
10892 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
10893 || INTEGRAL_TYPE_P (type)))
10894 return fold_build2_loc (loc, PLUS_EXPR, type,
10895 fold_convert_loc (loc, type, arg0),
10896 fold_convert_loc (loc, type,
10897 negate_expr (arg1)));
10899 /* Try folding difference of addresses. */
10901 HOST_WIDE_INT diff;
10903 if ((TREE_CODE (arg0) == ADDR_EXPR
10904 || TREE_CODE (arg1) == ADDR_EXPR)
10905 && ptr_difference_const (arg0, arg1, &diff))
10906 return build_int_cst_type (type, diff);
10909 /* Fold &a[i] - &a[j] to i-j. */
10910 if (TREE_CODE (arg0) == ADDR_EXPR
10911 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
10912 && TREE_CODE (arg1) == ADDR_EXPR
10913 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
10915 tree aref0 = TREE_OPERAND (arg0, 0);
10916 tree aref1 = TREE_OPERAND (arg1, 0);
10917 if (operand_equal_p (TREE_OPERAND (aref0, 0),
10918 TREE_OPERAND (aref1, 0), 0))
10920 tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
10921 tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
10922 tree esz = array_ref_element_size (aref0);
10923 tree diff = build2 (MINUS_EXPR, type, op0, op1);
10924 return fold_build2_loc (loc, MULT_EXPR, type, diff,
10925 fold_convert_loc (loc, type, esz));
10930 if (FLOAT_TYPE_P (type)
10931 && flag_unsafe_math_optimizations
10932 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10933 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10934 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10937 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the
10938 same or one. Make sure type is not saturating.
10939 fold_plusminus_mult_expr will re-associate. */
10940 if ((TREE_CODE (arg0) == MULT_EXPR
10941 || TREE_CODE (arg1) == MULT_EXPR)
10942 && !TYPE_SATURATING (type)
10943 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10945 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10953 /* (-A) * (-B) -> A * B */
10954 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10955 return fold_build2_loc (loc, MULT_EXPR, type,
10956 fold_convert_loc (loc, type,
10957 TREE_OPERAND (arg0, 0)),
10958 fold_convert_loc (loc, type,
10959 negate_expr (arg1)));
10960 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10961 return fold_build2_loc (loc, MULT_EXPR, type,
10962 fold_convert_loc (loc, type,
10963 negate_expr (arg0)),
10964 fold_convert_loc (loc, type,
10965 TREE_OPERAND (arg1, 0)));
10967 if (! FLOAT_TYPE_P (type))
10969 if (integer_zerop (arg1))
10970 return omit_one_operand_loc (loc, type, arg1, arg0);
10971 if (integer_onep (arg1))
10972 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10973 /* Transform x * -1 into -x. Make sure to do the negation
10974 on the original operand with conversions not stripped
10975 because we can only strip non-sign-changing conversions. */
10976 if (integer_all_onesp (arg1))
10977 return fold_convert_loc (loc, type, negate_expr (op0));
10978 /* Transform x * -C into -x * C if x is easily negatable. */
10979 if (TREE_CODE (arg1) == INTEGER_CST
10980 && tree_int_cst_sgn (arg1) == -1
10981 && negate_expr_p (arg0)
10982 && (tem = negate_expr (arg1)) != arg1
10983 && !TREE_OVERFLOW (tem))
10984 return fold_build2_loc (loc, MULT_EXPR, type,
10985 fold_convert_loc (loc, type,
10986 negate_expr (arg0)),
10989 /* (a * (1 << b)) is (a << b) */
10990 if (TREE_CODE (arg1) == LSHIFT_EXPR
10991 && integer_onep (TREE_OPERAND (arg1, 0)))
10992 return fold_build2_loc (loc, LSHIFT_EXPR, type, op0,
10993 TREE_OPERAND (arg1, 1));
10994 if (TREE_CODE (arg0) == LSHIFT_EXPR
10995 && integer_onep (TREE_OPERAND (arg0, 0)))
10996 return fold_build2_loc (loc, LSHIFT_EXPR, type, op1,
10997 TREE_OPERAND (arg0, 1));
10999 /* (A + A) * C -> A * 2 * C */
11000 if (TREE_CODE (arg0) == PLUS_EXPR
11001 && TREE_CODE (arg1) == INTEGER_CST
11002 && operand_equal_p (TREE_OPERAND (arg0, 0),
11003 TREE_OPERAND (arg0, 1), 0))
11004 return fold_build2_loc (loc, MULT_EXPR, type,
11005 omit_one_operand_loc (loc, type,
11006 TREE_OPERAND (arg0, 0),
11007 TREE_OPERAND (arg0, 1)),
11008 fold_build2_loc (loc, MULT_EXPR, type,
11009 build_int_cst (type, 2) , arg1));
11011 strict_overflow_p = false;
11012 if (TREE_CODE (arg1) == INTEGER_CST
11013 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11014 &strict_overflow_p)))
11016 if (strict_overflow_p)
11017 fold_overflow_warning (("assuming signed overflow does not "
11018 "occur when simplifying "
11020 WARN_STRICT_OVERFLOW_MISC);
11021 return fold_convert_loc (loc, type, tem);
11024 /* Optimize z * conj(z) for integer complex numbers. */
11025 if (TREE_CODE (arg0) == CONJ_EXPR
11026 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11027 return fold_mult_zconjz (loc, type, arg1);
11028 if (TREE_CODE (arg1) == CONJ_EXPR
11029 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11030 return fold_mult_zconjz (loc, type, arg0);
11034 /* Maybe fold x * 0 to 0. The expressions aren't the same
11035 when x is NaN, since x * 0 is also NaN. Nor are they the
11036 same in modes with signed zeros, since multiplying a
11037 negative value by 0 gives -0, not +0. */
11038 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
11039 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
11040 && real_zerop (arg1))
11041 return omit_one_operand_loc (loc, type, arg1, arg0);
11042 /* In IEEE floating point, x*1 is not equivalent to x for snans.
11043 Likewise for complex arithmetic with signed zeros. */
11044 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11045 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
11046 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
11047 && real_onep (arg1))
11048 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11050 /* Transform x * -1.0 into -x. */
11051 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11052 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
11053 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
11054 && real_minus_onep (arg1))
11055 return fold_convert_loc (loc, type, negate_expr (arg0));
11057 /* Convert (C1/X)*C2 into (C1*C2)/X. This transformation may change
11058 the result for floating point types due to rounding so it is applied
11059 only if -fassociative-math was specify. */
11060 if (flag_associative_math
11061 && TREE_CODE (arg0) == RDIV_EXPR
11062 && TREE_CODE (arg1) == REAL_CST
11063 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
11065 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
11068 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
11069 TREE_OPERAND (arg0, 1));
11072 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
11073 if (operand_equal_p (arg0, arg1, 0))
11075 tree tem = fold_strip_sign_ops (arg0);
11076 if (tem != NULL_TREE)
11078 tem = fold_convert_loc (loc, type, tem);
11079 return fold_build2_loc (loc, MULT_EXPR, type, tem, tem);
11083 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
11084 This is not the same for NaNs or if signed zeros are
11086 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
11087 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
11088 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
11089 && TREE_CODE (arg1) == COMPLEX_CST
11090 && real_zerop (TREE_REALPART (arg1)))
11092 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
11093 if (real_onep (TREE_IMAGPART (arg1)))
11095 fold_build2_loc (loc, COMPLEX_EXPR, type,
11096 negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
11098 fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
11099 else if (real_minus_onep (TREE_IMAGPART (arg1)))
11101 fold_build2_loc (loc, COMPLEX_EXPR, type,
11102 fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
11103 negate_expr (fold_build1_loc (loc, REALPART_EXPR,
11107 /* Optimize z * conj(z) for floating point complex numbers.
11108 Guarded by flag_unsafe_math_optimizations as non-finite
11109 imaginary components don't produce scalar results. */
11110 if (flag_unsafe_math_optimizations
11111 && TREE_CODE (arg0) == CONJ_EXPR
11112 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11113 return fold_mult_zconjz (loc, type, arg1);
11114 if (flag_unsafe_math_optimizations
11115 && TREE_CODE (arg1) == CONJ_EXPR
11116 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11117 return fold_mult_zconjz (loc, type, arg0);
11119 if (flag_unsafe_math_optimizations)
11121 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
11122 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
11124 /* Optimizations of root(...)*root(...). */
11125 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
11128 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11129 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11131 /* Optimize sqrt(x)*sqrt(x) as x. */
11132 if (BUILTIN_SQRT_P (fcode0)
11133 && operand_equal_p (arg00, arg10, 0)
11134 && ! HONOR_SNANS (TYPE_MODE (type)))
11137 /* Optimize root(x)*root(y) as root(x*y). */
11138 rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11139 arg = fold_build2_loc (loc, MULT_EXPR, type, arg00, arg10);
11140 return build_call_expr_loc (loc, rootfn, 1, arg);
11143 /* Optimize expN(x)*expN(y) as expN(x+y). */
11144 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
11146 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11147 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
11148 CALL_EXPR_ARG (arg0, 0),
11149 CALL_EXPR_ARG (arg1, 0));
11150 return build_call_expr_loc (loc, expfn, 1, arg);
11153 /* Optimizations of pow(...)*pow(...). */
11154 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
11155 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
11156 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
11158 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11159 tree arg01 = CALL_EXPR_ARG (arg0, 1);
11160 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11161 tree arg11 = CALL_EXPR_ARG (arg1, 1);
11163 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
11164 if (operand_equal_p (arg01, arg11, 0))
11166 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11167 tree arg = fold_build2_loc (loc, MULT_EXPR, type,
11169 return build_call_expr_loc (loc, powfn, 2, arg, arg01);
11172 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
11173 if (operand_equal_p (arg00, arg10, 0))
11175 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11176 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
11178 return build_call_expr_loc (loc, powfn, 2, arg00, arg);
11182 /* Optimize tan(x)*cos(x) as sin(x). */
11183 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
11184 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
11185 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
11186 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
11187 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
11188 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
11189 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11190 CALL_EXPR_ARG (arg1, 0), 0))
11192 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
11194 if (sinfn != NULL_TREE)
11195 return build_call_expr_loc (loc, sinfn, 1,
11196 CALL_EXPR_ARG (arg0, 0));
11199 /* Optimize x*pow(x,c) as pow(x,c+1). */
11200 if (fcode1 == BUILT_IN_POW
11201 || fcode1 == BUILT_IN_POWF
11202 || fcode1 == BUILT_IN_POWL)
11204 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11205 tree arg11 = CALL_EXPR_ARG (arg1, 1);
11206 if (TREE_CODE (arg11) == REAL_CST
11207 && !TREE_OVERFLOW (arg11)
11208 && operand_equal_p (arg0, arg10, 0))
11210 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11214 c = TREE_REAL_CST (arg11);
11215 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
11216 arg = build_real (type, c);
11217 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
11221 /* Optimize pow(x,c)*x as pow(x,c+1). */
11222 if (fcode0 == BUILT_IN_POW
11223 || fcode0 == BUILT_IN_POWF
11224 || fcode0 == BUILT_IN_POWL)
11226 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11227 tree arg01 = CALL_EXPR_ARG (arg0, 1);
11228 if (TREE_CODE (arg01) == REAL_CST
11229 && !TREE_OVERFLOW (arg01)
11230 && operand_equal_p (arg1, arg00, 0))
11232 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11236 c = TREE_REAL_CST (arg01);
11237 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
11238 arg = build_real (type, c);
11239 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
11243 /* Optimize x*x as pow(x,2.0), which is expanded as x*x. */
11244 if (optimize_function_for_speed_p (cfun)
11245 && operand_equal_p (arg0, arg1, 0))
11247 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
11251 tree arg = build_real (type, dconst2);
11252 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
11261 if (integer_all_onesp (arg1))
11262 return omit_one_operand_loc (loc, type, arg1, arg0);
11263 if (integer_zerop (arg1))
11264 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11265 if (operand_equal_p (arg0, arg1, 0))
11266 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11268 /* ~X | X is -1. */
11269 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11270 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11272 t1 = fold_convert_loc (loc, type, integer_zero_node);
11273 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11274 return omit_one_operand_loc (loc, type, t1, arg1);
11277 /* X | ~X is -1. */
11278 if (TREE_CODE (arg1) == BIT_NOT_EXPR
11279 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11281 t1 = fold_convert_loc (loc, type, integer_zero_node);
11282 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11283 return omit_one_operand_loc (loc, type, t1, arg0);
11286 /* Canonicalize (X & C1) | C2. */
11287 if (TREE_CODE (arg0) == BIT_AND_EXPR
11288 && TREE_CODE (arg1) == INTEGER_CST
11289 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11291 unsigned HOST_WIDE_INT hi1, lo1, hi2, lo2, hi3, lo3, mlo, mhi;
11292 int width = TYPE_PRECISION (type), w;
11293 hi1 = TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1));
11294 lo1 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
11295 hi2 = TREE_INT_CST_HIGH (arg1);
11296 lo2 = TREE_INT_CST_LOW (arg1);
11298 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
11299 if ((hi1 & hi2) == hi1 && (lo1 & lo2) == lo1)
11300 return omit_one_operand_loc (loc, type, arg1,
11301 TREE_OPERAND (arg0, 0));
11303 if (width > HOST_BITS_PER_WIDE_INT)
11305 mhi = (unsigned HOST_WIDE_INT) -1
11306 >> (2 * HOST_BITS_PER_WIDE_INT - width);
11312 mlo = (unsigned HOST_WIDE_INT) -1
11313 >> (HOST_BITS_PER_WIDE_INT - width);
11316 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
11317 if ((~(hi1 | hi2) & mhi) == 0 && (~(lo1 | lo2) & mlo) == 0)
11318 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
11319 TREE_OPERAND (arg0, 0), arg1);
11321 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
11322 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
11323 mode which allows further optimizations. */
11330 for (w = BITS_PER_UNIT;
11331 w <= width && w <= HOST_BITS_PER_WIDE_INT;
11334 unsigned HOST_WIDE_INT mask
11335 = (unsigned HOST_WIDE_INT) -1 >> (HOST_BITS_PER_WIDE_INT - w);
11336 if (((lo1 | lo2) & mask) == mask
11337 && (lo1 & ~mask) == 0 && hi1 == 0)
11344 if (hi3 != hi1 || lo3 != lo1)
11345 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
11346 fold_build2_loc (loc, BIT_AND_EXPR, type,
11347 TREE_OPERAND (arg0, 0),
11348 build_int_cst_wide (type,
11353 /* (X & Y) | Y is (X, Y). */
11354 if (TREE_CODE (arg0) == BIT_AND_EXPR
11355 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11356 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
11357 /* (X & Y) | X is (Y, X). */
11358 if (TREE_CODE (arg0) == BIT_AND_EXPR
11359 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11360 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11361 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
11362 /* X | (X & Y) is (Y, X). */
11363 if (TREE_CODE (arg1) == BIT_AND_EXPR
11364 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
11365 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
11366 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
11367 /* X | (Y & X) is (Y, X). */
11368 if (TREE_CODE (arg1) == BIT_AND_EXPR
11369 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11370 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11371 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
11373 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
11374 if (t1 != NULL_TREE)
11377 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
11379 This results in more efficient code for machines without a NAND
11380 instruction. Combine will canonicalize to the first form
11381 which will allow use of NAND instructions provided by the
11382 backend if they exist. */
11383 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11384 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11387 fold_build1_loc (loc, BIT_NOT_EXPR, type,
11388 build2 (BIT_AND_EXPR, type,
11389 fold_convert_loc (loc, type,
11390 TREE_OPERAND (arg0, 0)),
11391 fold_convert_loc (loc, type,
11392 TREE_OPERAND (arg1, 0))));
11395 /* See if this can be simplified into a rotate first. If that
11396 is unsuccessful continue in the association code. */
11400 if (integer_zerop (arg1))
11401 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11402 if (integer_all_onesp (arg1))
11403 return fold_build1_loc (loc, BIT_NOT_EXPR, type, op0);
11404 if (operand_equal_p (arg0, arg1, 0))
11405 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11407 /* ~X ^ X is -1. */
11408 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11409 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11411 t1 = fold_convert_loc (loc, type, integer_zero_node);
11412 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11413 return omit_one_operand_loc (loc, type, t1, arg1);
11416 /* X ^ ~X is -1. */
11417 if (TREE_CODE (arg1) == BIT_NOT_EXPR
11418 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11420 t1 = fold_convert_loc (loc, type, integer_zero_node);
11421 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11422 return omit_one_operand_loc (loc, type, t1, arg0);
11425 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
11426 with a constant, and the two constants have no bits in common,
11427 we should treat this as a BIT_IOR_EXPR since this may produce more
11428 simplifications. */
11429 if (TREE_CODE (arg0) == BIT_AND_EXPR
11430 && TREE_CODE (arg1) == BIT_AND_EXPR
11431 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11432 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
11433 && integer_zerop (const_binop (BIT_AND_EXPR,
11434 TREE_OPERAND (arg0, 1),
11435 TREE_OPERAND (arg1, 1), 0)))
11437 code = BIT_IOR_EXPR;
11441 /* (X | Y) ^ X -> Y & ~ X*/
11442 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11443 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11445 tree t2 = TREE_OPERAND (arg0, 1);
11446 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
11448 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11449 fold_convert_loc (loc, type, t2),
11450 fold_convert_loc (loc, type, t1));
11454 /* (Y | X) ^ X -> Y & ~ X*/
11455 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11456 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11458 tree t2 = TREE_OPERAND (arg0, 0);
11459 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
11461 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11462 fold_convert_loc (loc, type, t2),
11463 fold_convert_loc (loc, type, t1));
11467 /* X ^ (X | Y) -> Y & ~ X*/
11468 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11469 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
11471 tree t2 = TREE_OPERAND (arg1, 1);
11472 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
11474 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11475 fold_convert_loc (loc, type, t2),
11476 fold_convert_loc (loc, type, t1));
11480 /* X ^ (Y | X) -> Y & ~ X*/
11481 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11482 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
11484 tree t2 = TREE_OPERAND (arg1, 0);
11485 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
11487 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11488 fold_convert_loc (loc, type, t2),
11489 fold_convert_loc (loc, type, t1));
11493 /* Convert ~X ^ ~Y to X ^ Y. */
11494 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11495 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11496 return fold_build2_loc (loc, code, type,
11497 fold_convert_loc (loc, type,
11498 TREE_OPERAND (arg0, 0)),
11499 fold_convert_loc (loc, type,
11500 TREE_OPERAND (arg1, 0)));
11502 /* Convert ~X ^ C to X ^ ~C. */
11503 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11504 && TREE_CODE (arg1) == INTEGER_CST)
11505 return fold_build2_loc (loc, code, type,
11506 fold_convert_loc (loc, type,
11507 TREE_OPERAND (arg0, 0)),
11508 fold_build1_loc (loc, BIT_NOT_EXPR, type, arg1));
11510 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
11511 if (TREE_CODE (arg0) == BIT_AND_EXPR
11512 && integer_onep (TREE_OPERAND (arg0, 1))
11513 && integer_onep (arg1))
11514 return fold_build2_loc (loc, EQ_EXPR, type, arg0,
11515 build_int_cst (TREE_TYPE (arg0), 0));
11517 /* Fold (X & Y) ^ Y as ~X & Y. */
11518 if (TREE_CODE (arg0) == BIT_AND_EXPR
11519 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11521 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11522 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11523 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11524 fold_convert_loc (loc, type, arg1));
11526 /* Fold (X & Y) ^ X as ~Y & X. */
11527 if (TREE_CODE (arg0) == BIT_AND_EXPR
11528 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11529 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11531 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11532 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11533 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11534 fold_convert_loc (loc, type, arg1));
11536 /* Fold X ^ (X & Y) as X & ~Y. */
11537 if (TREE_CODE (arg1) == BIT_AND_EXPR
11538 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11540 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11541 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11542 fold_convert_loc (loc, type, arg0),
11543 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11545 /* Fold X ^ (Y & X) as ~Y & X. */
11546 if (TREE_CODE (arg1) == BIT_AND_EXPR
11547 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11548 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11550 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11551 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11552 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11553 fold_convert_loc (loc, type, arg0));
11556 /* See if this can be simplified into a rotate first. If that
11557 is unsuccessful continue in the association code. */
11561 if (integer_all_onesp (arg1))
11562 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11563 if (integer_zerop (arg1))
11564 return omit_one_operand_loc (loc, type, arg1, arg0);
11565 if (operand_equal_p (arg0, arg1, 0))
11566 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11568 /* ~X & X is always zero. */
11569 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11570 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11571 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
11573 /* X & ~X is always zero. */
11574 if (TREE_CODE (arg1) == BIT_NOT_EXPR
11575 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11576 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11578 /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2). */
11579 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11580 && TREE_CODE (arg1) == INTEGER_CST
11581 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11583 tree tmp1 = fold_convert_loc (loc, type, arg1);
11584 tree tmp2 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11585 tree tmp3 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11586 tmp2 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp2, tmp1);
11587 tmp3 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp3, tmp1);
11589 fold_convert_loc (loc, type,
11590 fold_build2_loc (loc, BIT_IOR_EXPR,
11591 type, tmp2, tmp3));
11594 /* (X | Y) & Y is (X, Y). */
11595 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11596 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11597 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
11598 /* (X | Y) & X is (Y, X). */
11599 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11600 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11601 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11602 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
11603 /* X & (X | Y) is (Y, X). */
11604 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11605 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
11606 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
11607 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
11608 /* X & (Y | X) is (Y, X). */
11609 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11610 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11611 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11612 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
11614 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
11615 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11616 && integer_onep (TREE_OPERAND (arg0, 1))
11617 && integer_onep (arg1))
11619 tem = TREE_OPERAND (arg0, 0);
11620 return fold_build2_loc (loc, EQ_EXPR, type,
11621 fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem), tem,
11622 build_int_cst (TREE_TYPE (tem), 1)),
11623 build_int_cst (TREE_TYPE (tem), 0));
11625 /* Fold ~X & 1 as (X & 1) == 0. */
11626 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11627 && integer_onep (arg1))
11629 tem = TREE_OPERAND (arg0, 0);
11630 return fold_build2_loc (loc, EQ_EXPR, type,
11631 fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem), tem,
11632 build_int_cst (TREE_TYPE (tem), 1)),
11633 build_int_cst (TREE_TYPE (tem), 0));
11636 /* Fold (X ^ Y) & Y as ~X & Y. */
11637 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11638 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11640 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11641 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11642 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11643 fold_convert_loc (loc, type, arg1));
11645 /* Fold (X ^ Y) & X as ~Y & X. */
11646 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11647 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11648 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11650 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11651 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11652 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11653 fold_convert_loc (loc, type, arg1));
11655 /* Fold X & (X ^ Y) as X & ~Y. */
11656 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11657 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11659 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11660 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11661 fold_convert_loc (loc, type, arg0),
11662 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11664 /* Fold X & (Y ^ X) as ~Y & X. */
11665 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11666 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11667 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11669 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11670 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11671 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11672 fold_convert_loc (loc, type, arg0));
11675 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
11676 if (t1 != NULL_TREE)
11678 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
11679 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
11680 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
11683 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
11685 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
11686 && (~TREE_INT_CST_LOW (arg1)
11687 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
11689 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11692 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
11694 This results in more efficient code for machines without a NOR
11695 instruction. Combine will canonicalize to the first form
11696 which will allow use of NOR instructions provided by the
11697 backend if they exist. */
11698 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11699 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11701 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
11702 build2 (BIT_IOR_EXPR, type,
11703 fold_convert_loc (loc, type,
11704 TREE_OPERAND (arg0, 0)),
11705 fold_convert_loc (loc, type,
11706 TREE_OPERAND (arg1, 0))));
11709 /* If arg0 is derived from the address of an object or function, we may
11710 be able to fold this expression using the object or function's
11712 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && host_integerp (arg1, 1))
11714 unsigned HOST_WIDE_INT modulus, residue;
11715 unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (arg1);
11717 modulus = get_pointer_modulus_and_residue (arg0, &residue,
11718 integer_onep (arg1));
11720 /* This works because modulus is a power of 2. If this weren't the
11721 case, we'd have to replace it by its greatest power-of-2
11722 divisor: modulus & -modulus. */
11724 return build_int_cst (type, residue & low);
11727 /* Fold (X << C1) & C2 into (X << C1) & (C2 | ((1 << C1) - 1))
11728 (X >> C1) & C2 into (X >> C1) & (C2 | ~((type) -1 >> C1))
11729 if the new mask might be further optimized. */
11730 if ((TREE_CODE (arg0) == LSHIFT_EXPR
11731 || TREE_CODE (arg0) == RSHIFT_EXPR)
11732 && host_integerp (TREE_OPERAND (arg0, 1), 1)
11733 && host_integerp (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)))
11734 && tree_low_cst (TREE_OPERAND (arg0, 1), 1)
11735 < TYPE_PRECISION (TREE_TYPE (arg0))
11736 && TYPE_PRECISION (TREE_TYPE (arg0)) <= HOST_BITS_PER_WIDE_INT
11737 && tree_low_cst (TREE_OPERAND (arg0, 1), 1) > 0)
11739 unsigned int shiftc = tree_low_cst (TREE_OPERAND (arg0, 1), 1);
11740 unsigned HOST_WIDE_INT mask
11741 = tree_low_cst (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)));
11742 unsigned HOST_WIDE_INT newmask, zerobits = 0;
11743 tree shift_type = TREE_TYPE (arg0);
11745 if (TREE_CODE (arg0) == LSHIFT_EXPR)
11746 zerobits = ((((unsigned HOST_WIDE_INT) 1) << shiftc) - 1);
11747 else if (TREE_CODE (arg0) == RSHIFT_EXPR
11748 && TYPE_PRECISION (TREE_TYPE (arg0))
11749 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg0))))
11751 unsigned int prec = TYPE_PRECISION (TREE_TYPE (arg0));
11752 tree arg00 = TREE_OPERAND (arg0, 0);
11753 /* See if more bits can be proven as zero because of
11755 if (TREE_CODE (arg00) == NOP_EXPR
11756 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg00, 0))))
11758 tree inner_type = TREE_TYPE (TREE_OPERAND (arg00, 0));
11759 if (TYPE_PRECISION (inner_type)
11760 == GET_MODE_BITSIZE (TYPE_MODE (inner_type))
11761 && TYPE_PRECISION (inner_type) < prec)
11763 prec = TYPE_PRECISION (inner_type);
11764 /* See if we can shorten the right shift. */
11766 shift_type = inner_type;
11769 zerobits = ~(unsigned HOST_WIDE_INT) 0;
11770 zerobits >>= HOST_BITS_PER_WIDE_INT - shiftc;
11771 zerobits <<= prec - shiftc;
11772 /* For arithmetic shift if sign bit could be set, zerobits
11773 can contain actually sign bits, so no transformation is
11774 possible, unless MASK masks them all away. In that
11775 case the shift needs to be converted into logical shift. */
11776 if (!TYPE_UNSIGNED (TREE_TYPE (arg0))
11777 && prec == TYPE_PRECISION (TREE_TYPE (arg0)))
11779 if ((mask & zerobits) == 0)
11780 shift_type = unsigned_type_for (TREE_TYPE (arg0));
11786 /* ((X << 16) & 0xff00) is (X, 0). */
11787 if ((mask & zerobits) == mask)
11788 return omit_one_operand_loc (loc, type,
11789 build_int_cst (type, 0), arg0);
11791 newmask = mask | zerobits;
11792 if (newmask != mask && (newmask & (newmask + 1)) == 0)
11796 /* Only do the transformation if NEWMASK is some integer
11798 for (prec = BITS_PER_UNIT;
11799 prec < HOST_BITS_PER_WIDE_INT; prec <<= 1)
11800 if (newmask == (((unsigned HOST_WIDE_INT) 1) << prec) - 1)
11802 if (prec < HOST_BITS_PER_WIDE_INT
11803 || newmask == ~(unsigned HOST_WIDE_INT) 0)
11807 if (shift_type != TREE_TYPE (arg0))
11809 tem = fold_build2_loc (loc, TREE_CODE (arg0), shift_type,
11810 fold_convert_loc (loc, shift_type,
11811 TREE_OPERAND (arg0, 0)),
11812 TREE_OPERAND (arg0, 1));
11813 tem = fold_convert_loc (loc, type, tem);
11817 newmaskt = build_int_cst_type (TREE_TYPE (op1), newmask);
11818 if (!tree_int_cst_equal (newmaskt, arg1))
11819 return fold_build2_loc (loc, BIT_AND_EXPR, type, tem, newmaskt);
11827 /* Don't touch a floating-point divide by zero unless the mode
11828 of the constant can represent infinity. */
11829 if (TREE_CODE (arg1) == REAL_CST
11830 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
11831 && real_zerop (arg1))
11834 /* Optimize A / A to 1.0 if we don't care about
11835 NaNs or Infinities. Skip the transformation
11836 for non-real operands. */
11837 if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
11838 && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
11839 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
11840 && operand_equal_p (arg0, arg1, 0))
11842 tree r = build_real (TREE_TYPE (arg0), dconst1);
11844 return omit_two_operands_loc (loc, type, r, arg0, arg1);
11847 /* The complex version of the above A / A optimization. */
11848 if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
11849 && operand_equal_p (arg0, arg1, 0))
11851 tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
11852 if (! HONOR_NANS (TYPE_MODE (elem_type))
11853 && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
11855 tree r = build_real (elem_type, dconst1);
11856 /* omit_two_operands will call fold_convert for us. */
11857 return omit_two_operands_loc (loc, type, r, arg0, arg1);
11861 /* (-A) / (-B) -> A / B */
11862 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
11863 return fold_build2_loc (loc, RDIV_EXPR, type,
11864 TREE_OPERAND (arg0, 0),
11865 negate_expr (arg1));
11866 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
11867 return fold_build2_loc (loc, RDIV_EXPR, type,
11868 negate_expr (arg0),
11869 TREE_OPERAND (arg1, 0));
11871 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
11872 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11873 && real_onep (arg1))
11874 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11876 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
11877 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11878 && real_minus_onep (arg1))
11879 return non_lvalue_loc (loc, fold_convert_loc (loc, type,
11880 negate_expr (arg0)));
11882 /* If ARG1 is a constant, we can convert this to a multiply by the
11883 reciprocal. This does not have the same rounding properties,
11884 so only do this if -freciprocal-math. We can actually
11885 always safely do it if ARG1 is a power of two, but it's hard to
11886 tell if it is or not in a portable manner. */
11887 if (TREE_CODE (arg1) == REAL_CST)
11889 if (flag_reciprocal_math
11890 && 0 != (tem = const_binop (code, build_real (type, dconst1),
11892 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tem);
11893 /* Find the reciprocal if optimizing and the result is exact. */
11897 r = TREE_REAL_CST (arg1);
11898 if (exact_real_inverse (TYPE_MODE(TREE_TYPE(arg0)), &r))
11900 tem = build_real (type, r);
11901 return fold_build2_loc (loc, MULT_EXPR, type,
11902 fold_convert_loc (loc, type, arg0), tem);
11906 /* Convert A/B/C to A/(B*C). */
11907 if (flag_reciprocal_math
11908 && TREE_CODE (arg0) == RDIV_EXPR)
11909 return fold_build2_loc (loc, RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
11910 fold_build2_loc (loc, MULT_EXPR, type,
11911 TREE_OPERAND (arg0, 1), arg1));
11913 /* Convert A/(B/C) to (A/B)*C. */
11914 if (flag_reciprocal_math
11915 && TREE_CODE (arg1) == RDIV_EXPR)
11916 return fold_build2_loc (loc, MULT_EXPR, type,
11917 fold_build2_loc (loc, RDIV_EXPR, type, arg0,
11918 TREE_OPERAND (arg1, 0)),
11919 TREE_OPERAND (arg1, 1));
11921 /* Convert C1/(X*C2) into (C1/C2)/X. */
11922 if (flag_reciprocal_math
11923 && TREE_CODE (arg1) == MULT_EXPR
11924 && TREE_CODE (arg0) == REAL_CST
11925 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
11927 tree tem = const_binop (RDIV_EXPR, arg0,
11928 TREE_OPERAND (arg1, 1), 0);
11930 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
11931 TREE_OPERAND (arg1, 0));
11934 if (flag_unsafe_math_optimizations)
11936 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
11937 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
11939 /* Optimize sin(x)/cos(x) as tan(x). */
11940 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
11941 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
11942 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
11943 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11944 CALL_EXPR_ARG (arg1, 0), 0))
11946 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11948 if (tanfn != NULL_TREE)
11949 return build_call_expr_loc (loc, tanfn, 1, CALL_EXPR_ARG (arg0, 0));
11952 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
11953 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
11954 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
11955 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
11956 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11957 CALL_EXPR_ARG (arg1, 0), 0))
11959 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11961 if (tanfn != NULL_TREE)
11963 tree tmp = build_call_expr_loc (loc, tanfn, 1,
11964 CALL_EXPR_ARG (arg0, 0));
11965 return fold_build2_loc (loc, RDIV_EXPR, type,
11966 build_real (type, dconst1), tmp);
11970 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
11971 NaNs or Infinities. */
11972 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
11973 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
11974 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
11976 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11977 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11979 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
11980 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
11981 && operand_equal_p (arg00, arg01, 0))
11983 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11985 if (cosfn != NULL_TREE)
11986 return build_call_expr_loc (loc, cosfn, 1, arg00);
11990 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
11991 NaNs or Infinities. */
11992 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
11993 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
11994 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
11996 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11997 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11999 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
12000 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
12001 && operand_equal_p (arg00, arg01, 0))
12003 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
12005 if (cosfn != NULL_TREE)
12007 tree tmp = build_call_expr_loc (loc, cosfn, 1, arg00);
12008 return fold_build2_loc (loc, RDIV_EXPR, type,
12009 build_real (type, dconst1),
12015 /* Optimize pow(x,c)/x as pow(x,c-1). */
12016 if (fcode0 == BUILT_IN_POW
12017 || fcode0 == BUILT_IN_POWF
12018 || fcode0 == BUILT_IN_POWL)
12020 tree arg00 = CALL_EXPR_ARG (arg0, 0);
12021 tree arg01 = CALL_EXPR_ARG (arg0, 1);
12022 if (TREE_CODE (arg01) == REAL_CST
12023 && !TREE_OVERFLOW (arg01)
12024 && operand_equal_p (arg1, arg00, 0))
12026 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
12030 c = TREE_REAL_CST (arg01);
12031 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
12032 arg = build_real (type, c);
12033 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
12037 /* Optimize a/root(b/c) into a*root(c/b). */
12038 if (BUILTIN_ROOT_P (fcode1))
12040 tree rootarg = CALL_EXPR_ARG (arg1, 0);
12042 if (TREE_CODE (rootarg) == RDIV_EXPR)
12044 tree rootfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
12045 tree b = TREE_OPERAND (rootarg, 0);
12046 tree c = TREE_OPERAND (rootarg, 1);
12048 tree tmp = fold_build2_loc (loc, RDIV_EXPR, type, c, b);
12050 tmp = build_call_expr_loc (loc, rootfn, 1, tmp);
12051 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tmp);
12055 /* Optimize x/expN(y) into x*expN(-y). */
12056 if (BUILTIN_EXPONENT_P (fcode1))
12058 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
12059 tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
12060 arg1 = build_call_expr_loc (loc,
12062 fold_convert_loc (loc, type, arg));
12063 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
12066 /* Optimize x/pow(y,z) into x*pow(y,-z). */
12067 if (fcode1 == BUILT_IN_POW
12068 || fcode1 == BUILT_IN_POWF
12069 || fcode1 == BUILT_IN_POWL)
12071 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
12072 tree arg10 = CALL_EXPR_ARG (arg1, 0);
12073 tree arg11 = CALL_EXPR_ARG (arg1, 1);
12074 tree neg11 = fold_convert_loc (loc, type,
12075 negate_expr (arg11));
12076 arg1 = build_call_expr_loc (loc, powfn, 2, arg10, neg11);
12077 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
12082 case TRUNC_DIV_EXPR:
12083 case FLOOR_DIV_EXPR:
12084 /* Simplify A / (B << N) where A and B are positive and B is
12085 a power of 2, to A >> (N + log2(B)). */
12086 strict_overflow_p = false;
12087 if (TREE_CODE (arg1) == LSHIFT_EXPR
12088 && (TYPE_UNSIGNED (type)
12089 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
12091 tree sval = TREE_OPERAND (arg1, 0);
12092 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
12094 tree sh_cnt = TREE_OPERAND (arg1, 1);
12095 unsigned long pow2 = exact_log2 (TREE_INT_CST_LOW (sval));
12097 if (strict_overflow_p)
12098 fold_overflow_warning (("assuming signed overflow does not "
12099 "occur when simplifying A / (B << N)"),
12100 WARN_STRICT_OVERFLOW_MISC);
12102 sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
12103 sh_cnt, build_int_cst (NULL_TREE, pow2));
12104 return fold_build2_loc (loc, RSHIFT_EXPR, type,
12105 fold_convert_loc (loc, type, arg0), sh_cnt);
12109 /* For unsigned integral types, FLOOR_DIV_EXPR is the same as
12110 TRUNC_DIV_EXPR. Rewrite into the latter in this case. */
12111 if (INTEGRAL_TYPE_P (type)
12112 && TYPE_UNSIGNED (type)
12113 && code == FLOOR_DIV_EXPR)
12114 return fold_build2_loc (loc, TRUNC_DIV_EXPR, type, op0, op1);
12118 case ROUND_DIV_EXPR:
12119 case CEIL_DIV_EXPR:
12120 case EXACT_DIV_EXPR:
12121 if (integer_onep (arg1))
12122 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12123 if (integer_zerop (arg1))
12125 /* X / -1 is -X. */
12126 if (!TYPE_UNSIGNED (type)
12127 && TREE_CODE (arg1) == INTEGER_CST
12128 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
12129 && TREE_INT_CST_HIGH (arg1) == -1)
12130 return fold_convert_loc (loc, type, negate_expr (arg0));
12132 /* Convert -A / -B to A / B when the type is signed and overflow is
12134 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
12135 && TREE_CODE (arg0) == NEGATE_EXPR
12136 && negate_expr_p (arg1))
12138 if (INTEGRAL_TYPE_P (type))
12139 fold_overflow_warning (("assuming signed overflow does not occur "
12140 "when distributing negation across "
12142 WARN_STRICT_OVERFLOW_MISC);
12143 return fold_build2_loc (loc, code, type,
12144 fold_convert_loc (loc, type,
12145 TREE_OPERAND (arg0, 0)),
12146 fold_convert_loc (loc, type,
12147 negate_expr (arg1)));
12149 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
12150 && TREE_CODE (arg1) == NEGATE_EXPR
12151 && negate_expr_p (arg0))
12153 if (INTEGRAL_TYPE_P (type))
12154 fold_overflow_warning (("assuming signed overflow does not occur "
12155 "when distributing negation across "
12157 WARN_STRICT_OVERFLOW_MISC);
12158 return fold_build2_loc (loc, code, type,
12159 fold_convert_loc (loc, type,
12160 negate_expr (arg0)),
12161 fold_convert_loc (loc, type,
12162 TREE_OPERAND (arg1, 0)));
12165 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
12166 operation, EXACT_DIV_EXPR.
12168 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
12169 At one time others generated faster code, it's not clear if they do
12170 after the last round to changes to the DIV code in expmed.c. */
12171 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
12172 && multiple_of_p (type, arg0, arg1))
12173 return fold_build2_loc (loc, EXACT_DIV_EXPR, type, arg0, arg1);
12175 strict_overflow_p = false;
12176 if (TREE_CODE (arg1) == INTEGER_CST
12177 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
12178 &strict_overflow_p)))
12180 if (strict_overflow_p)
12181 fold_overflow_warning (("assuming signed overflow does not occur "
12182 "when simplifying division"),
12183 WARN_STRICT_OVERFLOW_MISC);
12184 return fold_convert_loc (loc, type, tem);
12189 case CEIL_MOD_EXPR:
12190 case FLOOR_MOD_EXPR:
12191 case ROUND_MOD_EXPR:
12192 case TRUNC_MOD_EXPR:
12193 /* X % 1 is always zero, but be sure to preserve any side
12195 if (integer_onep (arg1))
12196 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12198 /* X % 0, return X % 0 unchanged so that we can get the
12199 proper warnings and errors. */
12200 if (integer_zerop (arg1))
12203 /* 0 % X is always zero, but be sure to preserve any side
12204 effects in X. Place this after checking for X == 0. */
12205 if (integer_zerop (arg0))
12206 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
12208 /* X % -1 is zero. */
12209 if (!TYPE_UNSIGNED (type)
12210 && TREE_CODE (arg1) == INTEGER_CST
12211 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
12212 && TREE_INT_CST_HIGH (arg1) == -1)
12213 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12215 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
12216 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
12217 strict_overflow_p = false;
12218 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
12219 && (TYPE_UNSIGNED (type)
12220 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
12223 /* Also optimize A % (C << N) where C is a power of 2,
12224 to A & ((C << N) - 1). */
12225 if (TREE_CODE (arg1) == LSHIFT_EXPR)
12226 c = TREE_OPERAND (arg1, 0);
12228 if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
12230 tree mask = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (arg1), arg1,
12231 build_int_cst (TREE_TYPE (arg1), 1));
12232 if (strict_overflow_p)
12233 fold_overflow_warning (("assuming signed overflow does not "
12234 "occur when simplifying "
12235 "X % (power of two)"),
12236 WARN_STRICT_OVERFLOW_MISC);
12237 return fold_build2_loc (loc, BIT_AND_EXPR, type,
12238 fold_convert_loc (loc, type, arg0),
12239 fold_convert_loc (loc, type, mask));
12243 /* X % -C is the same as X % C. */
12244 if (code == TRUNC_MOD_EXPR
12245 && !TYPE_UNSIGNED (type)
12246 && TREE_CODE (arg1) == INTEGER_CST
12247 && !TREE_OVERFLOW (arg1)
12248 && TREE_INT_CST_HIGH (arg1) < 0
12249 && !TYPE_OVERFLOW_TRAPS (type)
12250 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
12251 && !sign_bit_p (arg1, arg1))
12252 return fold_build2_loc (loc, code, type,
12253 fold_convert_loc (loc, type, arg0),
12254 fold_convert_loc (loc, type,
12255 negate_expr (arg1)));
12257 /* X % -Y is the same as X % Y. */
12258 if (code == TRUNC_MOD_EXPR
12259 && !TYPE_UNSIGNED (type)
12260 && TREE_CODE (arg1) == NEGATE_EXPR
12261 && !TYPE_OVERFLOW_TRAPS (type))
12262 return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, arg0),
12263 fold_convert_loc (loc, type,
12264 TREE_OPERAND (arg1, 0)));
12266 if (TREE_CODE (arg1) == INTEGER_CST
12267 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
12268 &strict_overflow_p)))
12270 if (strict_overflow_p)
12271 fold_overflow_warning (("assuming signed overflow does not occur "
12272 "when simplifying modulus"),
12273 WARN_STRICT_OVERFLOW_MISC);
12274 return fold_convert_loc (loc, type, tem);
12281 if (integer_all_onesp (arg0))
12282 return omit_one_operand_loc (loc, type, arg0, arg1);
12286 /* Optimize -1 >> x for arithmetic right shifts. */
12287 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type)
12288 && tree_expr_nonnegative_p (arg1))
12289 return omit_one_operand_loc (loc, type, arg0, arg1);
12290 /* ... fall through ... */
12294 if (integer_zerop (arg1))
12295 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12296 if (integer_zerop (arg0))
12297 return omit_one_operand_loc (loc, type, arg0, arg1);
12299 /* Since negative shift count is not well-defined,
12300 don't try to compute it in the compiler. */
12301 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
12304 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
12305 if (TREE_CODE (op0) == code && host_integerp (arg1, false)
12306 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
12307 && host_integerp (TREE_OPERAND (arg0, 1), false)
12308 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
12310 HOST_WIDE_INT low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
12311 + TREE_INT_CST_LOW (arg1));
12313 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
12314 being well defined. */
12315 if (low >= TYPE_PRECISION (type))
12317 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
12318 low = low % TYPE_PRECISION (type);
12319 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
12320 return omit_one_operand_loc (loc, type, build_int_cst (type, 0),
12321 TREE_OPERAND (arg0, 0));
12323 low = TYPE_PRECISION (type) - 1;
12326 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12327 build_int_cst (type, low));
12330 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
12331 into x & ((unsigned)-1 >> c) for unsigned types. */
12332 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
12333 || (TYPE_UNSIGNED (type)
12334 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
12335 && host_integerp (arg1, false)
12336 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
12337 && host_integerp (TREE_OPERAND (arg0, 1), false)
12338 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
12340 HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
12341 HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
12347 arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12349 lshift = build_int_cst (type, -1);
12350 lshift = int_const_binop (code, lshift, arg1, 0);
12352 return fold_build2_loc (loc, BIT_AND_EXPR, type, arg00, lshift);
12356 /* Rewrite an LROTATE_EXPR by a constant into an
12357 RROTATE_EXPR by a new constant. */
12358 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
12360 tree tem = build_int_cst (TREE_TYPE (arg1),
12361 TYPE_PRECISION (type));
12362 tem = const_binop (MINUS_EXPR, tem, arg1, 0);
12363 return fold_build2_loc (loc, RROTATE_EXPR, type, op0, tem);
12366 /* If we have a rotate of a bit operation with the rotate count and
12367 the second operand of the bit operation both constant,
12368 permute the two operations. */
12369 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12370 && (TREE_CODE (arg0) == BIT_AND_EXPR
12371 || TREE_CODE (arg0) == BIT_IOR_EXPR
12372 || TREE_CODE (arg0) == BIT_XOR_EXPR)
12373 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12374 return fold_build2_loc (loc, TREE_CODE (arg0), type,
12375 fold_build2_loc (loc, code, type,
12376 TREE_OPERAND (arg0, 0), arg1),
12377 fold_build2_loc (loc, code, type,
12378 TREE_OPERAND (arg0, 1), arg1));
12380 /* Two consecutive rotates adding up to the precision of the
12381 type can be ignored. */
12382 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12383 && TREE_CODE (arg0) == RROTATE_EXPR
12384 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12385 && TREE_INT_CST_HIGH (arg1) == 0
12386 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
12387 && ((TREE_INT_CST_LOW (arg1)
12388 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
12389 == (unsigned int) TYPE_PRECISION (type)))
12390 return TREE_OPERAND (arg0, 0);
12392 /* Fold (X & C2) << C1 into (X << C1) & (C2 << C1)
12393 (X & C2) >> C1 into (X >> C1) & (C2 >> C1)
12394 if the latter can be further optimized. */
12395 if ((code == LSHIFT_EXPR || code == RSHIFT_EXPR)
12396 && TREE_CODE (arg0) == BIT_AND_EXPR
12397 && TREE_CODE (arg1) == INTEGER_CST
12398 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12400 tree mask = fold_build2_loc (loc, code, type,
12401 fold_convert_loc (loc, type,
12402 TREE_OPERAND (arg0, 1)),
12404 tree shift = fold_build2_loc (loc, code, type,
12405 fold_convert_loc (loc, type,
12406 TREE_OPERAND (arg0, 0)),
12408 tem = fold_binary_loc (loc, BIT_AND_EXPR, type, shift, mask);
12416 if (operand_equal_p (arg0, arg1, 0))
12417 return omit_one_operand_loc (loc, type, arg0, arg1);
12418 if (INTEGRAL_TYPE_P (type)
12419 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
12420 return omit_one_operand_loc (loc, type, arg1, arg0);
12421 tem = fold_minmax (loc, MIN_EXPR, type, arg0, arg1);
12427 if (operand_equal_p (arg0, arg1, 0))
12428 return omit_one_operand_loc (loc, type, arg0, arg1);
12429 if (INTEGRAL_TYPE_P (type)
12430 && TYPE_MAX_VALUE (type)
12431 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
12432 return omit_one_operand_loc (loc, type, arg1, arg0);
12433 tem = fold_minmax (loc, MAX_EXPR, type, arg0, arg1);
12438 case TRUTH_ANDIF_EXPR:
12439 /* Note that the operands of this must be ints
12440 and their values must be 0 or 1.
12441 ("true" is a fixed value perhaps depending on the language.) */
12442 /* If first arg is constant zero, return it. */
12443 if (integer_zerop (arg0))
12444 return fold_convert_loc (loc, type, arg0);
12445 case TRUTH_AND_EXPR:
12446 /* If either arg is constant true, drop it. */
12447 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12448 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12449 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
12450 /* Preserve sequence points. */
12451 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12452 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12453 /* If second arg is constant zero, result is zero, but first arg
12454 must be evaluated. */
12455 if (integer_zerop (arg1))
12456 return omit_one_operand_loc (loc, type, arg1, arg0);
12457 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
12458 case will be handled here. */
12459 if (integer_zerop (arg0))
12460 return omit_one_operand_loc (loc, type, arg0, arg1);
12462 /* !X && X is always false. */
12463 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12464 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12465 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
12466 /* X && !X is always false. */
12467 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12468 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12469 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12471 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
12472 means A >= Y && A != MAX, but in this case we know that
12475 if (!TREE_SIDE_EFFECTS (arg0)
12476 && !TREE_SIDE_EFFECTS (arg1))
12478 tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
12479 if (tem && !operand_equal_p (tem, arg0, 0))
12480 return fold_build2_loc (loc, code, type, tem, arg1);
12482 tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
12483 if (tem && !operand_equal_p (tem, arg1, 0))
12484 return fold_build2_loc (loc, code, type, arg0, tem);
12488 /* We only do these simplifications if we are optimizing. */
12492 /* Check for things like (A || B) && (A || C). We can convert this
12493 to A || (B && C). Note that either operator can be any of the four
12494 truth and/or operations and the transformation will still be
12495 valid. Also note that we only care about order for the
12496 ANDIF and ORIF operators. If B contains side effects, this
12497 might change the truth-value of A. */
12498 if (TREE_CODE (arg0) == TREE_CODE (arg1)
12499 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
12500 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
12501 || TREE_CODE (arg0) == TRUTH_AND_EXPR
12502 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
12503 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
12505 tree a00 = TREE_OPERAND (arg0, 0);
12506 tree a01 = TREE_OPERAND (arg0, 1);
12507 tree a10 = TREE_OPERAND (arg1, 0);
12508 tree a11 = TREE_OPERAND (arg1, 1);
12509 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
12510 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
12511 && (code == TRUTH_AND_EXPR
12512 || code == TRUTH_OR_EXPR));
12514 if (operand_equal_p (a00, a10, 0))
12515 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
12516 fold_build2_loc (loc, code, type, a01, a11));
12517 else if (commutative && operand_equal_p (a00, a11, 0))
12518 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
12519 fold_build2_loc (loc, code, type, a01, a10));
12520 else if (commutative && operand_equal_p (a01, a10, 0))
12521 return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
12522 fold_build2_loc (loc, code, type, a00, a11));
12524 /* This case if tricky because we must either have commutative
12525 operators or else A10 must not have side-effects. */
12527 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
12528 && operand_equal_p (a01, a11, 0))
12529 return fold_build2_loc (loc, TREE_CODE (arg0), type,
12530 fold_build2_loc (loc, code, type, a00, a10),
12534 /* See if we can build a range comparison. */
12535 if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
12538 /* Check for the possibility of merging component references. If our
12539 lhs is another similar operation, try to merge its rhs with our
12540 rhs. Then try to merge our lhs and rhs. */
12541 if (TREE_CODE (arg0) == code
12542 && 0 != (tem = fold_truthop (loc, code, type,
12543 TREE_OPERAND (arg0, 1), arg1)))
12544 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
12546 if ((tem = fold_truthop (loc, code, type, arg0, arg1)) != 0)
12551 case TRUTH_ORIF_EXPR:
12552 /* Note that the operands of this must be ints
12553 and their values must be 0 or true.
12554 ("true" is a fixed value perhaps depending on the language.) */
12555 /* If first arg is constant true, return it. */
12556 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12557 return fold_convert_loc (loc, type, arg0);
12558 case TRUTH_OR_EXPR:
12559 /* If either arg is constant zero, drop it. */
12560 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
12561 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12562 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
12563 /* Preserve sequence points. */
12564 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12565 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12566 /* If second arg is constant true, result is true, but we must
12567 evaluate first arg. */
12568 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
12569 return omit_one_operand_loc (loc, type, arg1, arg0);
12570 /* Likewise for first arg, but note this only occurs here for
12572 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12573 return omit_one_operand_loc (loc, type, arg0, arg1);
12575 /* !X || X is always true. */
12576 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12577 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12578 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12579 /* X || !X is always true. */
12580 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12581 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12582 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12586 case TRUTH_XOR_EXPR:
12587 /* If the second arg is constant zero, drop it. */
12588 if (integer_zerop (arg1))
12589 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12590 /* If the second arg is constant true, this is a logical inversion. */
12591 if (integer_onep (arg1))
12593 /* Only call invert_truthvalue if operand is a truth value. */
12594 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
12595 tem = fold_build1_loc (loc, TRUTH_NOT_EXPR, TREE_TYPE (arg0), arg0);
12597 tem = invert_truthvalue_loc (loc, arg0);
12598 return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
12600 /* Identical arguments cancel to zero. */
12601 if (operand_equal_p (arg0, arg1, 0))
12602 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12604 /* !X ^ X is always true. */
12605 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12606 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12607 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12609 /* X ^ !X is always true. */
12610 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12611 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12612 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12618 tem = fold_comparison (loc, code, type, op0, op1);
12619 if (tem != NULL_TREE)
12622 /* bool_var != 0 becomes bool_var. */
12623 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12624 && code == NE_EXPR)
12625 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12627 /* bool_var == 1 becomes bool_var. */
12628 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12629 && code == EQ_EXPR)
12630 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12632 /* bool_var != 1 becomes !bool_var. */
12633 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12634 && code == NE_EXPR)
12635 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
12636 fold_convert_loc (loc, type, arg0));
12638 /* bool_var == 0 becomes !bool_var. */
12639 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12640 && code == EQ_EXPR)
12641 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
12642 fold_convert_loc (loc, type, arg0));
12644 /* !exp != 0 becomes !exp */
12645 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
12646 && code == NE_EXPR)
12647 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12649 /* If this is an equality comparison of the address of two non-weak,
12650 unaliased symbols neither of which are extern (since we do not
12651 have access to attributes for externs), then we know the result. */
12652 if (TREE_CODE (arg0) == ADDR_EXPR
12653 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
12654 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
12655 && ! lookup_attribute ("alias",
12656 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
12657 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
12658 && TREE_CODE (arg1) == ADDR_EXPR
12659 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
12660 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
12661 && ! lookup_attribute ("alias",
12662 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
12663 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
12665 /* We know that we're looking at the address of two
12666 non-weak, unaliased, static _DECL nodes.
12668 It is both wasteful and incorrect to call operand_equal_p
12669 to compare the two ADDR_EXPR nodes. It is wasteful in that
12670 all we need to do is test pointer equality for the arguments
12671 to the two ADDR_EXPR nodes. It is incorrect to use
12672 operand_equal_p as that function is NOT equivalent to a
12673 C equality test. It can in fact return false for two
12674 objects which would test as equal using the C equality
12676 bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
12677 return constant_boolean_node (equal
12678 ? code == EQ_EXPR : code != EQ_EXPR,
12682 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
12683 a MINUS_EXPR of a constant, we can convert it into a comparison with
12684 a revised constant as long as no overflow occurs. */
12685 if (TREE_CODE (arg1) == INTEGER_CST
12686 && (TREE_CODE (arg0) == PLUS_EXPR
12687 || TREE_CODE (arg0) == MINUS_EXPR)
12688 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12689 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
12690 ? MINUS_EXPR : PLUS_EXPR,
12691 fold_convert_loc (loc, TREE_TYPE (arg0),
12693 TREE_OPERAND (arg0, 1), 0))
12694 && !TREE_OVERFLOW (tem))
12695 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
12697 /* Similarly for a NEGATE_EXPR. */
12698 if (TREE_CODE (arg0) == NEGATE_EXPR
12699 && TREE_CODE (arg1) == INTEGER_CST
12700 && 0 != (tem = negate_expr (arg1))
12701 && TREE_CODE (tem) == INTEGER_CST
12702 && !TREE_OVERFLOW (tem))
12703 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
12705 /* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2). */
12706 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12707 && TREE_CODE (arg1) == INTEGER_CST
12708 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12709 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12710 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg0),
12711 fold_convert_loc (loc,
12714 TREE_OPERAND (arg0, 1)));
12716 /* Transform comparisons of the form X +- Y CMP X to Y CMP 0. */
12717 if ((TREE_CODE (arg0) == PLUS_EXPR
12718 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
12719 || TREE_CODE (arg0) == MINUS_EXPR)
12720 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12721 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
12722 || POINTER_TYPE_P (TREE_TYPE (arg0))))
12724 tree val = TREE_OPERAND (arg0, 1);
12725 return omit_two_operands_loc (loc, type,
12726 fold_build2_loc (loc, code, type,
12728 build_int_cst (TREE_TYPE (val),
12730 TREE_OPERAND (arg0, 0), arg1);
12733 /* Transform comparisons of the form C - X CMP X if C % 2 == 1. */
12734 if (TREE_CODE (arg0) == MINUS_EXPR
12735 && TREE_CODE (TREE_OPERAND (arg0, 0)) == INTEGER_CST
12736 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0)
12737 && (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 0)) & 1) == 1)
12739 return omit_two_operands_loc (loc, type,
12741 ? boolean_true_node : boolean_false_node,
12742 TREE_OPERAND (arg0, 1), arg1);
12745 /* If we have X - Y == 0, we can convert that to X == Y and similarly
12746 for !=. Don't do this for ordered comparisons due to overflow. */
12747 if (TREE_CODE (arg0) == MINUS_EXPR
12748 && integer_zerop (arg1))
12749 return fold_build2_loc (loc, code, type,
12750 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
12752 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
12753 if (TREE_CODE (arg0) == ABS_EXPR
12754 && (integer_zerop (arg1) || real_zerop (arg1)))
12755 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), arg1);
12757 /* If this is an EQ or NE comparison with zero and ARG0 is
12758 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
12759 two operations, but the latter can be done in one less insn
12760 on machines that have only two-operand insns or on which a
12761 constant cannot be the first operand. */
12762 if (TREE_CODE (arg0) == BIT_AND_EXPR
12763 && integer_zerop (arg1))
12765 tree arg00 = TREE_OPERAND (arg0, 0);
12766 tree arg01 = TREE_OPERAND (arg0, 1);
12767 if (TREE_CODE (arg00) == LSHIFT_EXPR
12768 && integer_onep (TREE_OPERAND (arg00, 0)))
12770 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
12771 arg01, TREE_OPERAND (arg00, 1));
12772 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12773 build_int_cst (TREE_TYPE (arg0), 1));
12774 return fold_build2_loc (loc, code, type,
12775 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12778 else if (TREE_CODE (arg01) == LSHIFT_EXPR
12779 && integer_onep (TREE_OPERAND (arg01, 0)))
12781 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
12782 arg00, TREE_OPERAND (arg01, 1));
12783 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12784 build_int_cst (TREE_TYPE (arg0), 1));
12785 return fold_build2_loc (loc, code, type,
12786 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12791 /* If this is an NE or EQ comparison of zero against the result of a
12792 signed MOD operation whose second operand is a power of 2, make
12793 the MOD operation unsigned since it is simpler and equivalent. */
12794 if (integer_zerop (arg1)
12795 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
12796 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
12797 || TREE_CODE (arg0) == CEIL_MOD_EXPR
12798 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
12799 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
12800 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12802 tree newtype = unsigned_type_for (TREE_TYPE (arg0));
12803 tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
12804 fold_convert_loc (loc, newtype,
12805 TREE_OPERAND (arg0, 0)),
12806 fold_convert_loc (loc, newtype,
12807 TREE_OPERAND (arg0, 1)));
12809 return fold_build2_loc (loc, code, type, newmod,
12810 fold_convert_loc (loc, newtype, arg1));
12813 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
12814 C1 is a valid shift constant, and C2 is a power of two, i.e.
12816 if (TREE_CODE (arg0) == BIT_AND_EXPR
12817 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
12818 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
12820 && integer_pow2p (TREE_OPERAND (arg0, 1))
12821 && integer_zerop (arg1))
12823 tree itype = TREE_TYPE (arg0);
12824 unsigned HOST_WIDE_INT prec = TYPE_PRECISION (itype);
12825 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
12827 /* Check for a valid shift count. */
12828 if (TREE_INT_CST_HIGH (arg001) == 0
12829 && TREE_INT_CST_LOW (arg001) < prec)
12831 tree arg01 = TREE_OPERAND (arg0, 1);
12832 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12833 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
12834 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
12835 can be rewritten as (X & (C2 << C1)) != 0. */
12836 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
12838 tem = fold_build2_loc (loc, LSHIFT_EXPR, itype, arg01, arg001);
12839 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, arg000, tem);
12840 return fold_build2_loc (loc, code, type, tem, arg1);
12842 /* Otherwise, for signed (arithmetic) shifts,
12843 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
12844 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
12845 else if (!TYPE_UNSIGNED (itype))
12846 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
12847 arg000, build_int_cst (itype, 0));
12848 /* Otherwise, of unsigned (logical) shifts,
12849 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
12850 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
12852 return omit_one_operand_loc (loc, type,
12853 code == EQ_EXPR ? integer_one_node
12854 : integer_zero_node,
12859 /* If this is an NE comparison of zero with an AND of one, remove the
12860 comparison since the AND will give the correct value. */
12861 if (code == NE_EXPR
12862 && integer_zerop (arg1)
12863 && TREE_CODE (arg0) == BIT_AND_EXPR
12864 && integer_onep (TREE_OPERAND (arg0, 1)))
12865 return fold_convert_loc (loc, type, arg0);
12867 /* If we have (A & C) == C where C is a power of 2, convert this into
12868 (A & C) != 0. Similarly for NE_EXPR. */
12869 if (TREE_CODE (arg0) == BIT_AND_EXPR
12870 && integer_pow2p (TREE_OPERAND (arg0, 1))
12871 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12872 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12873 arg0, fold_convert_loc (loc, TREE_TYPE (arg0),
12874 integer_zero_node));
12876 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
12877 bit, then fold the expression into A < 0 or A >= 0. */
12878 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1, type);
12882 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
12883 Similarly for NE_EXPR. */
12884 if (TREE_CODE (arg0) == BIT_AND_EXPR
12885 && TREE_CODE (arg1) == INTEGER_CST
12886 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12888 tree notc = fold_build1_loc (loc, BIT_NOT_EXPR,
12889 TREE_TYPE (TREE_OPERAND (arg0, 1)),
12890 TREE_OPERAND (arg0, 1));
12891 tree dandnotc = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12893 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12894 if (integer_nonzerop (dandnotc))
12895 return omit_one_operand_loc (loc, type, rslt, arg0);
12898 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
12899 Similarly for NE_EXPR. */
12900 if (TREE_CODE (arg0) == BIT_IOR_EXPR
12901 && TREE_CODE (arg1) == INTEGER_CST
12902 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12904 tree notd = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
12905 tree candnotd = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12906 TREE_OPERAND (arg0, 1), notd);
12907 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12908 if (integer_nonzerop (candnotd))
12909 return omit_one_operand_loc (loc, type, rslt, arg0);
12912 /* If this is a comparison of a field, we may be able to simplify it. */
12913 if ((TREE_CODE (arg0) == COMPONENT_REF
12914 || TREE_CODE (arg0) == BIT_FIELD_REF)
12915 /* Handle the constant case even without -O
12916 to make sure the warnings are given. */
12917 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
12919 t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
12924 /* Optimize comparisons of strlen vs zero to a compare of the
12925 first character of the string vs zero. To wit,
12926 strlen(ptr) == 0 => *ptr == 0
12927 strlen(ptr) != 0 => *ptr != 0
12928 Other cases should reduce to one of these two (or a constant)
12929 due to the return value of strlen being unsigned. */
12930 if (TREE_CODE (arg0) == CALL_EXPR
12931 && integer_zerop (arg1))
12933 tree fndecl = get_callee_fndecl (arg0);
12936 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
12937 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
12938 && call_expr_nargs (arg0) == 1
12939 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
12941 tree iref = build_fold_indirect_ref_loc (loc,
12942 CALL_EXPR_ARG (arg0, 0));
12943 return fold_build2_loc (loc, code, type, iref,
12944 build_int_cst (TREE_TYPE (iref), 0));
12948 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
12949 of X. Similarly fold (X >> C) == 0 into X >= 0. */
12950 if (TREE_CODE (arg0) == RSHIFT_EXPR
12951 && integer_zerop (arg1)
12952 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12954 tree arg00 = TREE_OPERAND (arg0, 0);
12955 tree arg01 = TREE_OPERAND (arg0, 1);
12956 tree itype = TREE_TYPE (arg00);
12957 if (TREE_INT_CST_HIGH (arg01) == 0
12958 && TREE_INT_CST_LOW (arg01)
12959 == (unsigned HOST_WIDE_INT) (TYPE_PRECISION (itype) - 1))
12961 if (TYPE_UNSIGNED (itype))
12963 itype = signed_type_for (itype);
12964 arg00 = fold_convert_loc (loc, itype, arg00);
12966 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
12967 type, arg00, build_int_cst (itype, 0));
12971 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
12972 if (integer_zerop (arg1)
12973 && TREE_CODE (arg0) == BIT_XOR_EXPR)
12974 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12975 TREE_OPERAND (arg0, 1));
12977 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
12978 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12979 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12980 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12981 build_int_cst (TREE_TYPE (arg1), 0));
12982 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
12983 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12984 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12985 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
12986 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 1),
12987 build_int_cst (TREE_TYPE (arg1), 0));
12989 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
12990 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12991 && TREE_CODE (arg1) == INTEGER_CST
12992 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12993 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12994 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg1),
12995 TREE_OPERAND (arg0, 1), arg1));
12997 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
12998 (X & C) == 0 when C is a single bit. */
12999 if (TREE_CODE (arg0) == BIT_AND_EXPR
13000 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
13001 && integer_zerop (arg1)
13002 && integer_pow2p (TREE_OPERAND (arg0, 1)))
13004 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
13005 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
13006 TREE_OPERAND (arg0, 1));
13007 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
13011 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
13012 constant C is a power of two, i.e. a single bit. */
13013 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13014 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
13015 && integer_zerop (arg1)
13016 && integer_pow2p (TREE_OPERAND (arg0, 1))
13017 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13018 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
13020 tree arg00 = TREE_OPERAND (arg0, 0);
13021 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
13022 arg00, build_int_cst (TREE_TYPE (arg00), 0));
13025 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
13026 when is C is a power of two, i.e. a single bit. */
13027 if (TREE_CODE (arg0) == BIT_AND_EXPR
13028 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
13029 && integer_zerop (arg1)
13030 && integer_pow2p (TREE_OPERAND (arg0, 1))
13031 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13032 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
13034 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
13035 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
13036 arg000, TREE_OPERAND (arg0, 1));
13037 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
13038 tem, build_int_cst (TREE_TYPE (tem), 0));
13041 if (integer_zerop (arg1)
13042 && tree_expr_nonzero_p (arg0))
13044 tree res = constant_boolean_node (code==NE_EXPR, type);
13045 return omit_one_operand_loc (loc, type, res, arg0);
13048 /* Fold -X op -Y as X op Y, where op is eq/ne. */
13049 if (TREE_CODE (arg0) == NEGATE_EXPR
13050 && TREE_CODE (arg1) == NEGATE_EXPR)
13051 return fold_build2_loc (loc, code, type,
13052 TREE_OPERAND (arg0, 0),
13053 TREE_OPERAND (arg1, 0));
13055 /* Fold (X & C) op (Y & C) as (X ^ Y) & C op 0", and symmetries. */
13056 if (TREE_CODE (arg0) == BIT_AND_EXPR
13057 && TREE_CODE (arg1) == BIT_AND_EXPR)
13059 tree arg00 = TREE_OPERAND (arg0, 0);
13060 tree arg01 = TREE_OPERAND (arg0, 1);
13061 tree arg10 = TREE_OPERAND (arg1, 0);
13062 tree arg11 = TREE_OPERAND (arg1, 1);
13063 tree itype = TREE_TYPE (arg0);
13065 if (operand_equal_p (arg01, arg11, 0))
13066 return fold_build2_loc (loc, code, type,
13067 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13068 fold_build2_loc (loc,
13069 BIT_XOR_EXPR, itype,
13072 build_int_cst (itype, 0));
13074 if (operand_equal_p (arg01, arg10, 0))
13075 return fold_build2_loc (loc, code, type,
13076 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13077 fold_build2_loc (loc,
13078 BIT_XOR_EXPR, itype,
13081 build_int_cst (itype, 0));
13083 if (operand_equal_p (arg00, arg11, 0))
13084 return fold_build2_loc (loc, code, type,
13085 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13086 fold_build2_loc (loc,
13087 BIT_XOR_EXPR, itype,
13090 build_int_cst (itype, 0));
13092 if (operand_equal_p (arg00, arg10, 0))
13093 return fold_build2_loc (loc, code, type,
13094 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13095 fold_build2_loc (loc,
13096 BIT_XOR_EXPR, itype,
13099 build_int_cst (itype, 0));
13102 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13103 && TREE_CODE (arg1) == BIT_XOR_EXPR)
13105 tree arg00 = TREE_OPERAND (arg0, 0);
13106 tree arg01 = TREE_OPERAND (arg0, 1);
13107 tree arg10 = TREE_OPERAND (arg1, 0);
13108 tree arg11 = TREE_OPERAND (arg1, 1);
13109 tree itype = TREE_TYPE (arg0);
13111 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
13112 operand_equal_p guarantees no side-effects so we don't need
13113 to use omit_one_operand on Z. */
13114 if (operand_equal_p (arg01, arg11, 0))
13115 return fold_build2_loc (loc, code, type, arg00, arg10);
13116 if (operand_equal_p (arg01, arg10, 0))
13117 return fold_build2_loc (loc, code, type, arg00, arg11);
13118 if (operand_equal_p (arg00, arg11, 0))
13119 return fold_build2_loc (loc, code, type, arg01, arg10);
13120 if (operand_equal_p (arg00, arg10, 0))
13121 return fold_build2_loc (loc, code, type, arg01, arg11);
13123 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
13124 if (TREE_CODE (arg01) == INTEGER_CST
13125 && TREE_CODE (arg11) == INTEGER_CST)
13126 return fold_build2_loc (loc, code, type,
13127 fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00,
13128 fold_build2_loc (loc,
13129 BIT_XOR_EXPR, itype,
13134 /* Attempt to simplify equality/inequality comparisons of complex
13135 values. Only lower the comparison if the result is known or
13136 can be simplified to a single scalar comparison. */
13137 if ((TREE_CODE (arg0) == COMPLEX_EXPR
13138 || TREE_CODE (arg0) == COMPLEX_CST)
13139 && (TREE_CODE (arg1) == COMPLEX_EXPR
13140 || TREE_CODE (arg1) == COMPLEX_CST))
13142 tree real0, imag0, real1, imag1;
13145 if (TREE_CODE (arg0) == COMPLEX_EXPR)
13147 real0 = TREE_OPERAND (arg0, 0);
13148 imag0 = TREE_OPERAND (arg0, 1);
13152 real0 = TREE_REALPART (arg0);
13153 imag0 = TREE_IMAGPART (arg0);
13156 if (TREE_CODE (arg1) == COMPLEX_EXPR)
13158 real1 = TREE_OPERAND (arg1, 0);
13159 imag1 = TREE_OPERAND (arg1, 1);
13163 real1 = TREE_REALPART (arg1);
13164 imag1 = TREE_IMAGPART (arg1);
13167 rcond = fold_binary_loc (loc, code, type, real0, real1);
13168 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
13170 if (integer_zerop (rcond))
13172 if (code == EQ_EXPR)
13173 return omit_two_operands_loc (loc, type, boolean_false_node,
13175 return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
13179 if (code == NE_EXPR)
13180 return omit_two_operands_loc (loc, type, boolean_true_node,
13182 return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
13186 icond = fold_binary_loc (loc, code, type, imag0, imag1);
13187 if (icond && TREE_CODE (icond) == INTEGER_CST)
13189 if (integer_zerop (icond))
13191 if (code == EQ_EXPR)
13192 return omit_two_operands_loc (loc, type, boolean_false_node,
13194 return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
13198 if (code == NE_EXPR)
13199 return omit_two_operands_loc (loc, type, boolean_true_node,
13201 return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
13212 tem = fold_comparison (loc, code, type, op0, op1);
13213 if (tem != NULL_TREE)
13216 /* Transform comparisons of the form X +- C CMP X. */
13217 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
13218 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
13219 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
13220 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
13221 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
13222 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
13224 tree arg01 = TREE_OPERAND (arg0, 1);
13225 enum tree_code code0 = TREE_CODE (arg0);
13228 if (TREE_CODE (arg01) == REAL_CST)
13229 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
13231 is_positive = tree_int_cst_sgn (arg01);
13233 /* (X - c) > X becomes false. */
13234 if (code == GT_EXPR
13235 && ((code0 == MINUS_EXPR && is_positive >= 0)
13236 || (code0 == PLUS_EXPR && is_positive <= 0)))
13238 if (TREE_CODE (arg01) == INTEGER_CST
13239 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13240 fold_overflow_warning (("assuming signed overflow does not "
13241 "occur when assuming that (X - c) > X "
13242 "is always false"),
13243 WARN_STRICT_OVERFLOW_ALL);
13244 return constant_boolean_node (0, type);
13247 /* Likewise (X + c) < X becomes false. */
13248 if (code == LT_EXPR
13249 && ((code0 == PLUS_EXPR && is_positive >= 0)
13250 || (code0 == MINUS_EXPR && is_positive <= 0)))
13252 if (TREE_CODE (arg01) == INTEGER_CST
13253 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13254 fold_overflow_warning (("assuming signed overflow does not "
13255 "occur when assuming that "
13256 "(X + c) < X is always false"),
13257 WARN_STRICT_OVERFLOW_ALL);
13258 return constant_boolean_node (0, type);
13261 /* Convert (X - c) <= X to true. */
13262 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
13264 && ((code0 == MINUS_EXPR && is_positive >= 0)
13265 || (code0 == PLUS_EXPR && is_positive <= 0)))
13267 if (TREE_CODE (arg01) == INTEGER_CST
13268 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13269 fold_overflow_warning (("assuming signed overflow does not "
13270 "occur when assuming that "
13271 "(X - c) <= X is always true"),
13272 WARN_STRICT_OVERFLOW_ALL);
13273 return constant_boolean_node (1, type);
13276 /* Convert (X + c) >= X to true. */
13277 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
13279 && ((code0 == PLUS_EXPR && is_positive >= 0)
13280 || (code0 == MINUS_EXPR && is_positive <= 0)))
13282 if (TREE_CODE (arg01) == INTEGER_CST
13283 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13284 fold_overflow_warning (("assuming signed overflow does not "
13285 "occur when assuming that "
13286 "(X + c) >= X is always true"),
13287 WARN_STRICT_OVERFLOW_ALL);
13288 return constant_boolean_node (1, type);
13291 if (TREE_CODE (arg01) == INTEGER_CST)
13293 /* Convert X + c > X and X - c < X to true for integers. */
13294 if (code == GT_EXPR
13295 && ((code0 == PLUS_EXPR && is_positive > 0)
13296 || (code0 == MINUS_EXPR && is_positive < 0)))
13298 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13299 fold_overflow_warning (("assuming signed overflow does "
13300 "not occur when assuming that "
13301 "(X + c) > X is always true"),
13302 WARN_STRICT_OVERFLOW_ALL);
13303 return constant_boolean_node (1, type);
13306 if (code == LT_EXPR
13307 && ((code0 == MINUS_EXPR && is_positive > 0)
13308 || (code0 == PLUS_EXPR && is_positive < 0)))
13310 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13311 fold_overflow_warning (("assuming signed overflow does "
13312 "not occur when assuming that "
13313 "(X - c) < X is always true"),
13314 WARN_STRICT_OVERFLOW_ALL);
13315 return constant_boolean_node (1, type);
13318 /* Convert X + c <= X and X - c >= X to false for integers. */
13319 if (code == LE_EXPR
13320 && ((code0 == PLUS_EXPR && is_positive > 0)
13321 || (code0 == MINUS_EXPR && is_positive < 0)))
13323 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13324 fold_overflow_warning (("assuming signed overflow does "
13325 "not occur when assuming that "
13326 "(X + c) <= X is always false"),
13327 WARN_STRICT_OVERFLOW_ALL);
13328 return constant_boolean_node (0, type);
13331 if (code == GE_EXPR
13332 && ((code0 == MINUS_EXPR && is_positive > 0)
13333 || (code0 == PLUS_EXPR && is_positive < 0)))
13335 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13336 fold_overflow_warning (("assuming signed overflow does "
13337 "not occur when assuming that "
13338 "(X - c) >= X is always false"),
13339 WARN_STRICT_OVERFLOW_ALL);
13340 return constant_boolean_node (0, type);
13345 /* Comparisons with the highest or lowest possible integer of
13346 the specified precision will have known values. */
13348 tree arg1_type = TREE_TYPE (arg1);
13349 unsigned int width = TYPE_PRECISION (arg1_type);
13351 if (TREE_CODE (arg1) == INTEGER_CST
13352 && width <= 2 * HOST_BITS_PER_WIDE_INT
13353 && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
13355 HOST_WIDE_INT signed_max_hi;
13356 unsigned HOST_WIDE_INT signed_max_lo;
13357 unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;
13359 if (width <= HOST_BITS_PER_WIDE_INT)
13361 signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
13366 if (TYPE_UNSIGNED (arg1_type))
13368 max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
13374 max_lo = signed_max_lo;
13375 min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
13381 width -= HOST_BITS_PER_WIDE_INT;
13382 signed_max_lo = -1;
13383 signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
13388 if (TYPE_UNSIGNED (arg1_type))
13390 max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
13395 max_hi = signed_max_hi;
13396 min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
13400 if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
13401 && TREE_INT_CST_LOW (arg1) == max_lo)
13405 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13408 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
13411 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13414 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
13416 /* The GE_EXPR and LT_EXPR cases above are not normally
13417 reached because of previous transformations. */
13422 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
13424 && TREE_INT_CST_LOW (arg1) == max_lo - 1)
13428 arg1 = const_binop (PLUS_EXPR, arg1,
13429 build_int_cst (TREE_TYPE (arg1), 1), 0);
13430 return fold_build2_loc (loc, EQ_EXPR, type,
13431 fold_convert_loc (loc,
13432 TREE_TYPE (arg1), arg0),
13435 arg1 = const_binop (PLUS_EXPR, arg1,
13436 build_int_cst (TREE_TYPE (arg1), 1), 0);
13437 return fold_build2_loc (loc, NE_EXPR, type,
13438 fold_convert_loc (loc, TREE_TYPE (arg1),
13444 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
13446 && TREE_INT_CST_LOW (arg1) == min_lo)
13450 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13453 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
13456 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13459 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
13464 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
13466 && TREE_INT_CST_LOW (arg1) == min_lo + 1)
13470 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
13471 return fold_build2_loc (loc, NE_EXPR, type,
13472 fold_convert_loc (loc,
13473 TREE_TYPE (arg1), arg0),
13476 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
13477 return fold_build2_loc (loc, EQ_EXPR, type,
13478 fold_convert_loc (loc, TREE_TYPE (arg1),
13485 else if (TREE_INT_CST_HIGH (arg1) == signed_max_hi
13486 && TREE_INT_CST_LOW (arg1) == signed_max_lo
13487 && TYPE_UNSIGNED (arg1_type)
13488 /* We will flip the signedness of the comparison operator
13489 associated with the mode of arg1, so the sign bit is
13490 specified by this mode. Check that arg1 is the signed
13491 max associated with this sign bit. */
13492 && width == GET_MODE_BITSIZE (TYPE_MODE (arg1_type))
13493 /* signed_type does not work on pointer types. */
13494 && INTEGRAL_TYPE_P (arg1_type))
13496 /* The following case also applies to X < signed_max+1
13497 and X >= signed_max+1 because previous transformations. */
13498 if (code == LE_EXPR || code == GT_EXPR)
13501 st = signed_type_for (TREE_TYPE (arg1));
13502 return fold_build2_loc (loc,
13503 code == LE_EXPR ? GE_EXPR : LT_EXPR,
13504 type, fold_convert_loc (loc, st, arg0),
13505 build_int_cst (st, 0));
13511 /* If we are comparing an ABS_EXPR with a constant, we can
13512 convert all the cases into explicit comparisons, but they may
13513 well not be faster than doing the ABS and one comparison.
13514 But ABS (X) <= C is a range comparison, which becomes a subtraction
13515 and a comparison, and is probably faster. */
13516 if (code == LE_EXPR
13517 && TREE_CODE (arg1) == INTEGER_CST
13518 && TREE_CODE (arg0) == ABS_EXPR
13519 && ! TREE_SIDE_EFFECTS (arg0)
13520 && (0 != (tem = negate_expr (arg1)))
13521 && TREE_CODE (tem) == INTEGER_CST
13522 && !TREE_OVERFLOW (tem))
13523 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
13524 build2 (GE_EXPR, type,
13525 TREE_OPERAND (arg0, 0), tem),
13526 build2 (LE_EXPR, type,
13527 TREE_OPERAND (arg0, 0), arg1));
13529 /* Convert ABS_EXPR<x> >= 0 to true. */
13530 strict_overflow_p = false;
13531 if (code == GE_EXPR
13532 && (integer_zerop (arg1)
13533 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
13534 && real_zerop (arg1)))
13535 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13537 if (strict_overflow_p)
13538 fold_overflow_warning (("assuming signed overflow does not occur "
13539 "when simplifying comparison of "
13540 "absolute value and zero"),
13541 WARN_STRICT_OVERFLOW_CONDITIONAL);
13542 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13545 /* Convert ABS_EXPR<x> < 0 to false. */
13546 strict_overflow_p = false;
13547 if (code == LT_EXPR
13548 && (integer_zerop (arg1) || real_zerop (arg1))
13549 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13551 if (strict_overflow_p)
13552 fold_overflow_warning (("assuming signed overflow does not occur "
13553 "when simplifying comparison of "
13554 "absolute value and zero"),
13555 WARN_STRICT_OVERFLOW_CONDITIONAL);
13556 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13559 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
13560 and similarly for >= into !=. */
13561 if ((code == LT_EXPR || code == GE_EXPR)
13562 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13563 && TREE_CODE (arg1) == LSHIFT_EXPR
13564 && integer_onep (TREE_OPERAND (arg1, 0)))
13566 tem = build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13567 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13568 TREE_OPERAND (arg1, 1)),
13569 build_int_cst (TREE_TYPE (arg0), 0));
13570 goto fold_binary_exit;
13573 if ((code == LT_EXPR || code == GE_EXPR)
13574 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13575 && CONVERT_EXPR_P (arg1)
13576 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
13577 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
13579 tem = build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13580 fold_convert_loc (loc, TREE_TYPE (arg0),
13581 build2 (RSHIFT_EXPR,
13582 TREE_TYPE (arg0), arg0,
13583 TREE_OPERAND (TREE_OPERAND (arg1, 0),
13585 build_int_cst (TREE_TYPE (arg0), 0));
13586 goto fold_binary_exit;
13591 case UNORDERED_EXPR:
13599 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
13601 t1 = fold_relational_const (code, type, arg0, arg1);
13602 if (t1 != NULL_TREE)
13606 /* If the first operand is NaN, the result is constant. */
13607 if (TREE_CODE (arg0) == REAL_CST
13608 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
13609 && (code != LTGT_EXPR || ! flag_trapping_math))
13611 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13612 ? integer_zero_node
13613 : integer_one_node;
13614 return omit_one_operand_loc (loc, type, t1, arg1);
13617 /* If the second operand is NaN, the result is constant. */
13618 if (TREE_CODE (arg1) == REAL_CST
13619 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
13620 && (code != LTGT_EXPR || ! flag_trapping_math))
13622 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13623 ? integer_zero_node
13624 : integer_one_node;
13625 return omit_one_operand_loc (loc, type, t1, arg0);
13628 /* Simplify unordered comparison of something with itself. */
13629 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
13630 && operand_equal_p (arg0, arg1, 0))
13631 return constant_boolean_node (1, type);
13633 if (code == LTGT_EXPR
13634 && !flag_trapping_math
13635 && operand_equal_p (arg0, arg1, 0))
13636 return constant_boolean_node (0, type);
13638 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
13640 tree targ0 = strip_float_extensions (arg0);
13641 tree targ1 = strip_float_extensions (arg1);
13642 tree newtype = TREE_TYPE (targ0);
13644 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
13645 newtype = TREE_TYPE (targ1);
13647 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
13648 return fold_build2_loc (loc, code, type,
13649 fold_convert_loc (loc, newtype, targ0),
13650 fold_convert_loc (loc, newtype, targ1));
13655 case COMPOUND_EXPR:
13656 /* When pedantic, a compound expression can be neither an lvalue
13657 nor an integer constant expression. */
13658 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
13660 /* Don't let (0, 0) be null pointer constant. */
13661 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
13662 : fold_convert_loc (loc, type, arg1);
13663 return pedantic_non_lvalue_loc (loc, tem);
13666 if ((TREE_CODE (arg0) == REAL_CST
13667 && TREE_CODE (arg1) == REAL_CST)
13668 || (TREE_CODE (arg0) == INTEGER_CST
13669 && TREE_CODE (arg1) == INTEGER_CST))
13670 return build_complex (type, arg0, arg1);
13674 /* An ASSERT_EXPR should never be passed to fold_binary. */
13675 gcc_unreachable ();
13679 } /* switch (code) */
13681 protected_set_expr_location (tem, loc);
13685 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
13686 a LABEL_EXPR; otherwise return NULL_TREE. Do not check the subtrees
13690 contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
13692 switch (TREE_CODE (*tp))
13698 *walk_subtrees = 0;
13700 /* ... fall through ... */
13707 /* Return whether the sub-tree ST contains a label which is accessible from
13708 outside the sub-tree. */
13711 contains_label_p (tree st)
13714 (walk_tree_without_duplicates (&st, contains_label_1 , NULL) != NULL_TREE);
13717 /* Fold a ternary expression of code CODE and type TYPE with operands
13718 OP0, OP1, and OP2. Return the folded expression if folding is
13719 successful. Otherwise, return NULL_TREE. */
13722 fold_ternary_loc (location_t loc, enum tree_code code, tree type,
13723 tree op0, tree op1, tree op2)
13726 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
13727 enum tree_code_class kind = TREE_CODE_CLASS (code);
13729 gcc_assert (IS_EXPR_CODE_CLASS (kind)
13730 && TREE_CODE_LENGTH (code) == 3);
13732 /* Strip any conversions that don't change the mode. This is safe
13733 for every expression, except for a comparison expression because
13734 its signedness is derived from its operands. So, in the latter
13735 case, only strip conversions that don't change the signedness.
13737 Note that this is done as an internal manipulation within the
13738 constant folder, in order to find the simplest representation of
13739 the arguments so that their form can be studied. In any cases,
13740 the appropriate type conversions should be put back in the tree
13741 that will get out of the constant folder. */
13756 case COMPONENT_REF:
13757 if (TREE_CODE (arg0) == CONSTRUCTOR
13758 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
13760 unsigned HOST_WIDE_INT idx;
13762 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
13769 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
13770 so all simple results must be passed through pedantic_non_lvalue. */
13771 if (TREE_CODE (arg0) == INTEGER_CST)
13773 tree unused_op = integer_zerop (arg0) ? op1 : op2;
13774 tem = integer_zerop (arg0) ? op2 : op1;
13775 /* Only optimize constant conditions when the selected branch
13776 has the same type as the COND_EXPR. This avoids optimizing
13777 away "c ? x : throw", where the throw has a void type.
13778 Avoid throwing away that operand which contains label. */
13779 if ((!TREE_SIDE_EFFECTS (unused_op)
13780 || !contains_label_p (unused_op))
13781 && (! VOID_TYPE_P (TREE_TYPE (tem))
13782 || VOID_TYPE_P (type)))
13783 return pedantic_non_lvalue_loc (loc, tem);
13786 if (operand_equal_p (arg1, op2, 0))
13787 return pedantic_omit_one_operand_loc (loc, type, arg1, arg0);
13789 /* If we have A op B ? A : C, we may be able to convert this to a
13790 simpler expression, depending on the operation and the values
13791 of B and C. Signed zeros prevent all of these transformations,
13792 for reasons given above each one.
13794 Also try swapping the arguments and inverting the conditional. */
13795 if (COMPARISON_CLASS_P (arg0)
13796 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13797 arg1, TREE_OPERAND (arg0, 1))
13798 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
13800 tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
13805 if (COMPARISON_CLASS_P (arg0)
13806 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13808 TREE_OPERAND (arg0, 1))
13809 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
13811 tem = fold_truth_not_expr (loc, arg0);
13812 if (tem && COMPARISON_CLASS_P (tem))
13814 tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
13820 /* If the second operand is simpler than the third, swap them
13821 since that produces better jump optimization results. */
13822 if (truth_value_p (TREE_CODE (arg0))
13823 && tree_swap_operands_p (op1, op2, false))
13825 /* See if this can be inverted. If it can't, possibly because
13826 it was a floating-point inequality comparison, don't do
13828 tem = fold_truth_not_expr (loc, arg0);
13830 return fold_build3_loc (loc, code, type, tem, op2, op1);
13833 /* Convert A ? 1 : 0 to simply A. */
13834 if (integer_onep (op1)
13835 && integer_zerop (op2)
13836 /* If we try to convert OP0 to our type, the
13837 call to fold will try to move the conversion inside
13838 a COND, which will recurse. In that case, the COND_EXPR
13839 is probably the best choice, so leave it alone. */
13840 && type == TREE_TYPE (arg0))
13841 return pedantic_non_lvalue_loc (loc, arg0);
13843 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
13844 over COND_EXPR in cases such as floating point comparisons. */
13845 if (integer_zerop (op1)
13846 && integer_onep (op2)
13847 && truth_value_p (TREE_CODE (arg0)))
13848 return pedantic_non_lvalue_loc (loc,
13849 fold_convert_loc (loc, type,
13850 invert_truthvalue_loc (loc,
13853 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
13854 if (TREE_CODE (arg0) == LT_EXPR
13855 && integer_zerop (TREE_OPERAND (arg0, 1))
13856 && integer_zerop (op2)
13857 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
13859 /* sign_bit_p only checks ARG1 bits within A's precision.
13860 If <sign bit of A> has wider type than A, bits outside
13861 of A's precision in <sign bit of A> need to be checked.
13862 If they are all 0, this optimization needs to be done
13863 in unsigned A's type, if they are all 1 in signed A's type,
13864 otherwise this can't be done. */
13865 if (TYPE_PRECISION (TREE_TYPE (tem))
13866 < TYPE_PRECISION (TREE_TYPE (arg1))
13867 && TYPE_PRECISION (TREE_TYPE (tem))
13868 < TYPE_PRECISION (type))
13870 unsigned HOST_WIDE_INT mask_lo;
13871 HOST_WIDE_INT mask_hi;
13872 int inner_width, outer_width;
13875 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
13876 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
13877 if (outer_width > TYPE_PRECISION (type))
13878 outer_width = TYPE_PRECISION (type);
13880 if (outer_width > HOST_BITS_PER_WIDE_INT)
13882 mask_hi = ((unsigned HOST_WIDE_INT) -1
13883 >> (2 * HOST_BITS_PER_WIDE_INT - outer_width));
13889 mask_lo = ((unsigned HOST_WIDE_INT) -1
13890 >> (HOST_BITS_PER_WIDE_INT - outer_width));
13892 if (inner_width > HOST_BITS_PER_WIDE_INT)
13894 mask_hi &= ~((unsigned HOST_WIDE_INT) -1
13895 >> (HOST_BITS_PER_WIDE_INT - inner_width));
13899 mask_lo &= ~((unsigned HOST_WIDE_INT) -1
13900 >> (HOST_BITS_PER_WIDE_INT - inner_width));
13902 if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == mask_hi
13903 && (TREE_INT_CST_LOW (arg1) & mask_lo) == mask_lo)
13905 tem_type = signed_type_for (TREE_TYPE (tem));
13906 tem = fold_convert_loc (loc, tem_type, tem);
13908 else if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == 0
13909 && (TREE_INT_CST_LOW (arg1) & mask_lo) == 0)
13911 tem_type = unsigned_type_for (TREE_TYPE (tem));
13912 tem = fold_convert_loc (loc, tem_type, tem);
13920 fold_convert_loc (loc, type,
13921 fold_build2_loc (loc, BIT_AND_EXPR,
13922 TREE_TYPE (tem), tem,
13923 fold_convert_loc (loc,
13928 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
13929 already handled above. */
13930 if (TREE_CODE (arg0) == BIT_AND_EXPR
13931 && integer_onep (TREE_OPERAND (arg0, 1))
13932 && integer_zerop (op2)
13933 && integer_pow2p (arg1))
13935 tree tem = TREE_OPERAND (arg0, 0);
13937 if (TREE_CODE (tem) == RSHIFT_EXPR
13938 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
13939 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
13940 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
13941 return fold_build2_loc (loc, BIT_AND_EXPR, type,
13942 TREE_OPERAND (tem, 0), arg1);
13945 /* A & N ? N : 0 is simply A & N if N is a power of two. This
13946 is probably obsolete because the first operand should be a
13947 truth value (that's why we have the two cases above), but let's
13948 leave it in until we can confirm this for all front-ends. */
13949 if (integer_zerop (op2)
13950 && TREE_CODE (arg0) == NE_EXPR
13951 && integer_zerop (TREE_OPERAND (arg0, 1))
13952 && integer_pow2p (arg1)
13953 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
13954 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13955 arg1, OEP_ONLY_CONST))
13956 return pedantic_non_lvalue_loc (loc,
13957 fold_convert_loc (loc, type,
13958 TREE_OPERAND (arg0, 0)));
13960 /* Convert A ? B : 0 into A && B if A and B are truth values. */
13961 if (integer_zerop (op2)
13962 && truth_value_p (TREE_CODE (arg0))
13963 && truth_value_p (TREE_CODE (arg1)))
13964 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
13965 fold_convert_loc (loc, type, arg0),
13968 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
13969 if (integer_onep (op2)
13970 && truth_value_p (TREE_CODE (arg0))
13971 && truth_value_p (TREE_CODE (arg1)))
13973 /* Only perform transformation if ARG0 is easily inverted. */
13974 tem = fold_truth_not_expr (loc, arg0);
13976 return fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
13977 fold_convert_loc (loc, type, tem),
13981 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
13982 if (integer_zerop (arg1)
13983 && truth_value_p (TREE_CODE (arg0))
13984 && truth_value_p (TREE_CODE (op2)))
13986 /* Only perform transformation if ARG0 is easily inverted. */
13987 tem = fold_truth_not_expr (loc, arg0);
13989 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
13990 fold_convert_loc (loc, type, tem),
13994 /* Convert A ? 1 : B into A || B if A and B are truth values. */
13995 if (integer_onep (arg1)
13996 && truth_value_p (TREE_CODE (arg0))
13997 && truth_value_p (TREE_CODE (op2)))
13998 return fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
13999 fold_convert_loc (loc, type, arg0),
14005 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
14006 of fold_ternary on them. */
14007 gcc_unreachable ();
14009 case BIT_FIELD_REF:
14010 if ((TREE_CODE (arg0) == VECTOR_CST
14011 || (TREE_CODE (arg0) == CONSTRUCTOR && TREE_CONSTANT (arg0)))
14012 && type == TREE_TYPE (TREE_TYPE (arg0)))
14014 unsigned HOST_WIDE_INT width = tree_low_cst (arg1, 1);
14015 unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);
14018 && simple_cst_equal (arg1, TYPE_SIZE (type)) == 1
14019 && (idx % width) == 0
14020 && (idx = idx / width)
14021 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
14023 tree elements = NULL_TREE;
14025 if (TREE_CODE (arg0) == VECTOR_CST)
14026 elements = TREE_VECTOR_CST_ELTS (arg0);
14029 unsigned HOST_WIDE_INT idx;
14032 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (arg0), idx, value)
14033 elements = tree_cons (NULL_TREE, value, elements);
14035 while (idx-- > 0 && elements)
14036 elements = TREE_CHAIN (elements);
14038 return TREE_VALUE (elements);
14040 return fold_convert_loc (loc, type, integer_zero_node);
14044 /* A bit-field-ref that referenced the full argument can be stripped. */
14045 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
14046 && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_low_cst (arg1, 1)
14047 && integer_zerop (op2))
14048 return fold_convert_loc (loc, type, arg0);
14054 } /* switch (code) */
14057 /* Perform constant folding and related simplification of EXPR.
14058 The related simplifications include x*1 => x, x*0 => 0, etc.,
14059 and application of the associative law.
14060 NOP_EXPR conversions may be removed freely (as long as we
14061 are careful not to change the type of the overall expression).
14062 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
14063 but we can constant-fold them if they have constant operands. */
14065 #ifdef ENABLE_FOLD_CHECKING
14066 # define fold(x) fold_1 (x)
14067 static tree fold_1 (tree);
14073 const tree t = expr;
14074 enum tree_code code = TREE_CODE (t);
14075 enum tree_code_class kind = TREE_CODE_CLASS (code);
14077 location_t loc = EXPR_LOCATION (expr);
14079 /* Return right away if a constant. */
14080 if (kind == tcc_constant)
14083 /* CALL_EXPR-like objects with variable numbers of operands are
14084 treated specially. */
14085 if (kind == tcc_vl_exp)
14087 if (code == CALL_EXPR)
14089 tem = fold_call_expr (loc, expr, false);
14090 return tem ? tem : expr;
14095 if (IS_EXPR_CODE_CLASS (kind))
14097 tree type = TREE_TYPE (t);
14098 tree op0, op1, op2;
14100 switch (TREE_CODE_LENGTH (code))
14103 op0 = TREE_OPERAND (t, 0);
14104 tem = fold_unary_loc (loc, code, type, op0);
14105 return tem ? tem : expr;
14107 op0 = TREE_OPERAND (t, 0);
14108 op1 = TREE_OPERAND (t, 1);
14109 tem = fold_binary_loc (loc, code, type, op0, op1);
14110 return tem ? tem : expr;
14112 op0 = TREE_OPERAND (t, 0);
14113 op1 = TREE_OPERAND (t, 1);
14114 op2 = TREE_OPERAND (t, 2);
14115 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
14116 return tem ? tem : expr;
14126 tree op0 = TREE_OPERAND (t, 0);
14127 tree op1 = TREE_OPERAND (t, 1);
14129 if (TREE_CODE (op1) == INTEGER_CST
14130 && TREE_CODE (op0) == CONSTRUCTOR
14131 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
14133 VEC(constructor_elt,gc) *elts = CONSTRUCTOR_ELTS (op0);
14134 unsigned HOST_WIDE_INT end = VEC_length (constructor_elt, elts);
14135 unsigned HOST_WIDE_INT begin = 0;
14137 /* Find a matching index by means of a binary search. */
14138 while (begin != end)
14140 unsigned HOST_WIDE_INT middle = (begin + end) / 2;
14141 tree index = VEC_index (constructor_elt, elts, middle)->index;
14143 if (TREE_CODE (index) == INTEGER_CST
14144 && tree_int_cst_lt (index, op1))
14145 begin = middle + 1;
14146 else if (TREE_CODE (index) == INTEGER_CST
14147 && tree_int_cst_lt (op1, index))
14149 else if (TREE_CODE (index) == RANGE_EXPR
14150 && tree_int_cst_lt (TREE_OPERAND (index, 1), op1))
14151 begin = middle + 1;
14152 else if (TREE_CODE (index) == RANGE_EXPR
14153 && tree_int_cst_lt (op1, TREE_OPERAND (index, 0)))
14156 return VEC_index (constructor_elt, elts, middle)->value;
14164 return fold (DECL_INITIAL (t));
14168 } /* switch (code) */
14171 #ifdef ENABLE_FOLD_CHECKING
14174 static void fold_checksum_tree (const_tree, struct md5_ctx *, htab_t);
14175 static void fold_check_failed (const_tree, const_tree);
14176 void print_fold_checksum (const_tree);
14178 /* When --enable-checking=fold, compute a digest of expr before
14179 and after actual fold call to see if fold did not accidentally
14180 change original expr. */
14186 struct md5_ctx ctx;
14187 unsigned char checksum_before[16], checksum_after[16];
14190 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14191 md5_init_ctx (&ctx);
14192 fold_checksum_tree (expr, &ctx, ht);
14193 md5_finish_ctx (&ctx, checksum_before);
14196 ret = fold_1 (expr);
14198 md5_init_ctx (&ctx);
14199 fold_checksum_tree (expr, &ctx, ht);
14200 md5_finish_ctx (&ctx, checksum_after);
14203 if (memcmp (checksum_before, checksum_after, 16))
14204 fold_check_failed (expr, ret);
14210 print_fold_checksum (const_tree expr)
14212 struct md5_ctx ctx;
14213 unsigned char checksum[16], cnt;
14216 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14217 md5_init_ctx (&ctx);
14218 fold_checksum_tree (expr, &ctx, ht);
14219 md5_finish_ctx (&ctx, checksum);
14221 for (cnt = 0; cnt < 16; ++cnt)
14222 fprintf (stderr, "%02x", checksum[cnt]);
14223 putc ('\n', stderr);
14227 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
14229 internal_error ("fold check: original tree changed by fold");
14233 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx, htab_t ht)
14236 enum tree_code code;
14237 union tree_node buf;
14242 gcc_assert ((sizeof (struct tree_exp) + 5 * sizeof (tree)
14243 <= sizeof (struct tree_function_decl))
14244 && sizeof (struct tree_type) <= sizeof (struct tree_function_decl));
14247 slot = (const void **) htab_find_slot (ht, expr, INSERT);
14251 code = TREE_CODE (expr);
14252 if (TREE_CODE_CLASS (code) == tcc_declaration
14253 && DECL_ASSEMBLER_NAME_SET_P (expr))
14255 /* Allow DECL_ASSEMBLER_NAME to be modified. */
14256 memcpy ((char *) &buf, expr, tree_size (expr));
14257 SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
14258 expr = (tree) &buf;
14260 else if (TREE_CODE_CLASS (code) == tcc_type
14261 && (TYPE_POINTER_TO (expr)
14262 || TYPE_REFERENCE_TO (expr)
14263 || TYPE_CACHED_VALUES_P (expr)
14264 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
14265 || TYPE_NEXT_VARIANT (expr)))
14267 /* Allow these fields to be modified. */
14269 memcpy ((char *) &buf, expr, tree_size (expr));
14270 expr = tmp = (tree) &buf;
14271 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
14272 TYPE_POINTER_TO (tmp) = NULL;
14273 TYPE_REFERENCE_TO (tmp) = NULL;
14274 TYPE_NEXT_VARIANT (tmp) = NULL;
14275 if (TYPE_CACHED_VALUES_P (tmp))
14277 TYPE_CACHED_VALUES_P (tmp) = 0;
14278 TYPE_CACHED_VALUES (tmp) = NULL;
14281 md5_process_bytes (expr, tree_size (expr), ctx);
14282 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
14283 if (TREE_CODE_CLASS (code) != tcc_type
14284 && TREE_CODE_CLASS (code) != tcc_declaration
14285 && code != TREE_LIST
14286 && code != SSA_NAME)
14287 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
14288 switch (TREE_CODE_CLASS (code))
14294 md5_process_bytes (TREE_STRING_POINTER (expr),
14295 TREE_STRING_LENGTH (expr), ctx);
14298 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
14299 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
14302 fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
14308 case tcc_exceptional:
14312 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
14313 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
14314 expr = TREE_CHAIN (expr);
14315 goto recursive_label;
14318 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
14319 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
14325 case tcc_expression:
14326 case tcc_reference:
14327 case tcc_comparison:
14330 case tcc_statement:
14332 len = TREE_OPERAND_LENGTH (expr);
14333 for (i = 0; i < len; ++i)
14334 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
14336 case tcc_declaration:
14337 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
14338 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
14339 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
14341 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
14342 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
14343 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
14344 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
14345 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
14347 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
14348 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
14350 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
14352 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
14353 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
14354 fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
14358 if (TREE_CODE (expr) == ENUMERAL_TYPE)
14359 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
14360 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
14361 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
14362 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
14363 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
14364 if (INTEGRAL_TYPE_P (expr)
14365 || SCALAR_FLOAT_TYPE_P (expr))
14367 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
14368 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
14370 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
14371 if (TREE_CODE (expr) == RECORD_TYPE
14372 || TREE_CODE (expr) == UNION_TYPE
14373 || TREE_CODE (expr) == QUAL_UNION_TYPE)
14374 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
14375 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
14382 /* Helper function for outputting the checksum of a tree T. When
14383 debugging with gdb, you can "define mynext" to be "next" followed
14384 by "call debug_fold_checksum (op0)", then just trace down till the
14388 debug_fold_checksum (const_tree t)
14391 unsigned char checksum[16];
14392 struct md5_ctx ctx;
14393 htab_t ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14395 md5_init_ctx (&ctx);
14396 fold_checksum_tree (t, &ctx, ht);
14397 md5_finish_ctx (&ctx, checksum);
14400 for (i = 0; i < 16; i++)
14401 fprintf (stderr, "%d ", checksum[i]);
14403 fprintf (stderr, "\n");
14408 /* Fold a unary tree expression with code CODE of type TYPE with an
14409 operand OP0. LOC is the location of the resulting expression.
14410 Return a folded expression if successful. Otherwise, return a tree
14411 expression with code CODE of type TYPE with an operand OP0. */
14414 fold_build1_stat_loc (location_t loc,
14415 enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
14418 #ifdef ENABLE_FOLD_CHECKING
14419 unsigned char checksum_before[16], checksum_after[16];
14420 struct md5_ctx ctx;
14423 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14424 md5_init_ctx (&ctx);
14425 fold_checksum_tree (op0, &ctx, ht);
14426 md5_finish_ctx (&ctx, checksum_before);
14430 tem = fold_unary_loc (loc, code, type, op0);
14433 tem = build1_stat (code, type, op0 PASS_MEM_STAT);
14434 SET_EXPR_LOCATION (tem, loc);
14437 #ifdef ENABLE_FOLD_CHECKING
14438 md5_init_ctx (&ctx);
14439 fold_checksum_tree (op0, &ctx, ht);
14440 md5_finish_ctx (&ctx, checksum_after);
14443 if (memcmp (checksum_before, checksum_after, 16))
14444 fold_check_failed (op0, tem);
14449 /* Fold a binary tree expression with code CODE of type TYPE with
14450 operands OP0 and OP1. LOC is the location of the resulting
14451 expression. Return a folded expression if successful. Otherwise,
14452 return a tree expression with code CODE of type TYPE with operands
14456 fold_build2_stat_loc (location_t loc,
14457 enum tree_code code, tree type, tree op0, tree op1
14461 #ifdef ENABLE_FOLD_CHECKING
14462 unsigned char checksum_before_op0[16],
14463 checksum_before_op1[16],
14464 checksum_after_op0[16],
14465 checksum_after_op1[16];
14466 struct md5_ctx ctx;
14469 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14470 md5_init_ctx (&ctx);
14471 fold_checksum_tree (op0, &ctx, ht);
14472 md5_finish_ctx (&ctx, checksum_before_op0);
14475 md5_init_ctx (&ctx);
14476 fold_checksum_tree (op1, &ctx, ht);
14477 md5_finish_ctx (&ctx, checksum_before_op1);
14481 tem = fold_binary_loc (loc, code, type, op0, op1);
14484 tem = build2_stat (code, type, op0, op1 PASS_MEM_STAT);
14485 SET_EXPR_LOCATION (tem, loc);
14488 #ifdef ENABLE_FOLD_CHECKING
14489 md5_init_ctx (&ctx);
14490 fold_checksum_tree (op0, &ctx, ht);
14491 md5_finish_ctx (&ctx, checksum_after_op0);
14494 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
14495 fold_check_failed (op0, tem);
14497 md5_init_ctx (&ctx);
14498 fold_checksum_tree (op1, &ctx, ht);
14499 md5_finish_ctx (&ctx, checksum_after_op1);
14502 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14503 fold_check_failed (op1, tem);
14508 /* Fold a ternary tree expression with code CODE of type TYPE with
14509 operands OP0, OP1, and OP2. Return a folded expression if
14510 successful. Otherwise, return a tree expression with code CODE of
14511 type TYPE with operands OP0, OP1, and OP2. */
14514 fold_build3_stat_loc (location_t loc, enum tree_code code, tree type,
14515 tree op0, tree op1, tree op2 MEM_STAT_DECL)
14518 #ifdef ENABLE_FOLD_CHECKING
14519 unsigned char checksum_before_op0[16],
14520 checksum_before_op1[16],
14521 checksum_before_op2[16],
14522 checksum_after_op0[16],
14523 checksum_after_op1[16],
14524 checksum_after_op2[16];
14525 struct md5_ctx ctx;
14528 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14529 md5_init_ctx (&ctx);
14530 fold_checksum_tree (op0, &ctx, ht);
14531 md5_finish_ctx (&ctx, checksum_before_op0);
14534 md5_init_ctx (&ctx);
14535 fold_checksum_tree (op1, &ctx, ht);
14536 md5_finish_ctx (&ctx, checksum_before_op1);
14539 md5_init_ctx (&ctx);
14540 fold_checksum_tree (op2, &ctx, ht);
14541 md5_finish_ctx (&ctx, checksum_before_op2);
14545 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
14546 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
14549 tem = build3_stat (code, type, op0, op1, op2 PASS_MEM_STAT);
14550 SET_EXPR_LOCATION (tem, loc);
14553 #ifdef ENABLE_FOLD_CHECKING
14554 md5_init_ctx (&ctx);
14555 fold_checksum_tree (op0, &ctx, ht);
14556 md5_finish_ctx (&ctx, checksum_after_op0);
14559 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
14560 fold_check_failed (op0, tem);
14562 md5_init_ctx (&ctx);
14563 fold_checksum_tree (op1, &ctx, ht);
14564 md5_finish_ctx (&ctx, checksum_after_op1);
14567 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14568 fold_check_failed (op1, tem);
14570 md5_init_ctx (&ctx);
14571 fold_checksum_tree (op2, &ctx, ht);
14572 md5_finish_ctx (&ctx, checksum_after_op2);
14575 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
14576 fold_check_failed (op2, tem);
14581 /* Fold a CALL_EXPR expression of type TYPE with operands FN and NARGS
14582 arguments in ARGARRAY, and a null static chain.
14583 Return a folded expression if successful. Otherwise, return a CALL_EXPR
14584 of type TYPE from the given operands as constructed by build_call_array. */
14587 fold_build_call_array_loc (location_t loc, tree type, tree fn,
14588 int nargs, tree *argarray)
14591 #ifdef ENABLE_FOLD_CHECKING
14592 unsigned char checksum_before_fn[16],
14593 checksum_before_arglist[16],
14594 checksum_after_fn[16],
14595 checksum_after_arglist[16];
14596 struct md5_ctx ctx;
14600 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14601 md5_init_ctx (&ctx);
14602 fold_checksum_tree (fn, &ctx, ht);
14603 md5_finish_ctx (&ctx, checksum_before_fn);
14606 md5_init_ctx (&ctx);
14607 for (i = 0; i < nargs; i++)
14608 fold_checksum_tree (argarray[i], &ctx, ht);
14609 md5_finish_ctx (&ctx, checksum_before_arglist);
14613 tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
14615 #ifdef ENABLE_FOLD_CHECKING
14616 md5_init_ctx (&ctx);
14617 fold_checksum_tree (fn, &ctx, ht);
14618 md5_finish_ctx (&ctx, checksum_after_fn);
14621 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
14622 fold_check_failed (fn, tem);
14624 md5_init_ctx (&ctx);
14625 for (i = 0; i < nargs; i++)
14626 fold_checksum_tree (argarray[i], &ctx, ht);
14627 md5_finish_ctx (&ctx, checksum_after_arglist);
14630 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
14631 fold_check_failed (NULL_TREE, tem);
14636 /* Perform constant folding and related simplification of initializer
14637 expression EXPR. These behave identically to "fold_buildN" but ignore
14638 potential run-time traps and exceptions that fold must preserve. */
14640 #define START_FOLD_INIT \
14641 int saved_signaling_nans = flag_signaling_nans;\
14642 int saved_trapping_math = flag_trapping_math;\
14643 int saved_rounding_math = flag_rounding_math;\
14644 int saved_trapv = flag_trapv;\
14645 int saved_folding_initializer = folding_initializer;\
14646 flag_signaling_nans = 0;\
14647 flag_trapping_math = 0;\
14648 flag_rounding_math = 0;\
14650 folding_initializer = 1;
14652 #define END_FOLD_INIT \
14653 flag_signaling_nans = saved_signaling_nans;\
14654 flag_trapping_math = saved_trapping_math;\
14655 flag_rounding_math = saved_rounding_math;\
14656 flag_trapv = saved_trapv;\
14657 folding_initializer = saved_folding_initializer;
14660 fold_build1_initializer_loc (location_t loc, enum tree_code code,
14661 tree type, tree op)
14666 result = fold_build1_loc (loc, code, type, op);
14673 fold_build2_initializer_loc (location_t loc, enum tree_code code,
14674 tree type, tree op0, tree op1)
14679 result = fold_build2_loc (loc, code, type, op0, op1);
14686 fold_build3_initializer_loc (location_t loc, enum tree_code code,
14687 tree type, tree op0, tree op1, tree op2)
14692 result = fold_build3_loc (loc, code, type, op0, op1, op2);
14699 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
14700 int nargs, tree *argarray)
14705 result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
14711 #undef START_FOLD_INIT
14712 #undef END_FOLD_INIT
14714 /* Determine if first argument is a multiple of second argument. Return 0 if
14715 it is not, or we cannot easily determined it to be.
14717 An example of the sort of thing we care about (at this point; this routine
14718 could surely be made more general, and expanded to do what the *_DIV_EXPR's
14719 fold cases do now) is discovering that
14721 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14727 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
14729 This code also handles discovering that
14731 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14733 is a multiple of 8 so we don't have to worry about dealing with a
14734 possible remainder.
14736 Note that we *look* inside a SAVE_EXPR only to determine how it was
14737 calculated; it is not safe for fold to do much of anything else with the
14738 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
14739 at run time. For example, the latter example above *cannot* be implemented
14740 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
14741 evaluation time of the original SAVE_EXPR is not necessarily the same at
14742 the time the new expression is evaluated. The only optimization of this
14743 sort that would be valid is changing
14745 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
14749 SAVE_EXPR (I) * SAVE_EXPR (J)
14751 (where the same SAVE_EXPR (J) is used in the original and the
14752 transformed version). */
14755 multiple_of_p (tree type, const_tree top, const_tree bottom)
14757 if (operand_equal_p (top, bottom, 0))
14760 if (TREE_CODE (type) != INTEGER_TYPE)
14763 switch (TREE_CODE (top))
14766 /* Bitwise and provides a power of two multiple. If the mask is
14767 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
14768 if (!integer_pow2p (bottom))
14773 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
14774 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
14778 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
14779 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
14782 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
14786 op1 = TREE_OPERAND (top, 1);
14787 /* const_binop may not detect overflow correctly,
14788 so check for it explicitly here. */
14789 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
14790 > TREE_INT_CST_LOW (op1)
14791 && TREE_INT_CST_HIGH (op1) == 0
14792 && 0 != (t1 = fold_convert (type,
14793 const_binop (LSHIFT_EXPR,
14796 && !TREE_OVERFLOW (t1))
14797 return multiple_of_p (type, t1, bottom);
14802 /* Can't handle conversions from non-integral or wider integral type. */
14803 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
14804 || (TYPE_PRECISION (type)
14805 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
14808 /* .. fall through ... */
14811 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
14814 if (TREE_CODE (bottom) != INTEGER_CST
14815 || integer_zerop (bottom)
14816 || (TYPE_UNSIGNED (type)
14817 && (tree_int_cst_sgn (top) < 0
14818 || tree_int_cst_sgn (bottom) < 0)))
14820 return integer_zerop (int_const_binop (TRUNC_MOD_EXPR,
14828 /* Return true if CODE or TYPE is known to be non-negative. */
14831 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
14833 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
14834 && truth_value_p (code))
14835 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
14836 have a signed:1 type (where the value is -1 and 0). */
14841 /* Return true if (CODE OP0) is known to be non-negative. If the return
14842 value is based on the assumption that signed overflow is undefined,
14843 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14844 *STRICT_OVERFLOW_P. */
14847 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
14848 bool *strict_overflow_p)
14850 if (TYPE_UNSIGNED (type))
14856 /* We can't return 1 if flag_wrapv is set because
14857 ABS_EXPR<INT_MIN> = INT_MIN. */
14858 if (!INTEGRAL_TYPE_P (type))
14860 if (TYPE_OVERFLOW_UNDEFINED (type))
14862 *strict_overflow_p = true;
14867 case NON_LVALUE_EXPR:
14869 case FIX_TRUNC_EXPR:
14870 return tree_expr_nonnegative_warnv_p (op0,
14871 strict_overflow_p);
14875 tree inner_type = TREE_TYPE (op0);
14876 tree outer_type = type;
14878 if (TREE_CODE (outer_type) == REAL_TYPE)
14880 if (TREE_CODE (inner_type) == REAL_TYPE)
14881 return tree_expr_nonnegative_warnv_p (op0,
14882 strict_overflow_p);
14883 if (TREE_CODE (inner_type) == INTEGER_TYPE)
14885 if (TYPE_UNSIGNED (inner_type))
14887 return tree_expr_nonnegative_warnv_p (op0,
14888 strict_overflow_p);
14891 else if (TREE_CODE (outer_type) == INTEGER_TYPE)
14893 if (TREE_CODE (inner_type) == REAL_TYPE)
14894 return tree_expr_nonnegative_warnv_p (op0,
14895 strict_overflow_p);
14896 if (TREE_CODE (inner_type) == INTEGER_TYPE)
14897 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
14898 && TYPE_UNSIGNED (inner_type);
14904 return tree_simple_nonnegative_warnv_p (code, type);
14907 /* We don't know sign of `t', so be conservative and return false. */
14911 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
14912 value is based on the assumption that signed overflow is undefined,
14913 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14914 *STRICT_OVERFLOW_P. */
14917 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
14918 tree op1, bool *strict_overflow_p)
14920 if (TYPE_UNSIGNED (type))
14925 case POINTER_PLUS_EXPR:
14927 if (FLOAT_TYPE_P (type))
14928 return (tree_expr_nonnegative_warnv_p (op0,
14930 && tree_expr_nonnegative_warnv_p (op1,
14931 strict_overflow_p));
14933 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
14934 both unsigned and at least 2 bits shorter than the result. */
14935 if (TREE_CODE (type) == INTEGER_TYPE
14936 && TREE_CODE (op0) == NOP_EXPR
14937 && TREE_CODE (op1) == NOP_EXPR)
14939 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
14940 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
14941 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
14942 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
14944 unsigned int prec = MAX (TYPE_PRECISION (inner1),
14945 TYPE_PRECISION (inner2)) + 1;
14946 return prec < TYPE_PRECISION (type);
14952 if (FLOAT_TYPE_P (type))
14954 /* x * x for floating point x is always non-negative. */
14955 if (operand_equal_p (op0, op1, 0))
14957 return (tree_expr_nonnegative_warnv_p (op0,
14959 && tree_expr_nonnegative_warnv_p (op1,
14960 strict_overflow_p));
14963 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
14964 both unsigned and their total bits is shorter than the result. */
14965 if (TREE_CODE (type) == INTEGER_TYPE
14966 && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
14967 && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
14969 tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
14970 ? TREE_TYPE (TREE_OPERAND (op0, 0))
14972 tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
14973 ? TREE_TYPE (TREE_OPERAND (op1, 0))
14976 bool unsigned0 = TYPE_UNSIGNED (inner0);
14977 bool unsigned1 = TYPE_UNSIGNED (inner1);
14979 if (TREE_CODE (op0) == INTEGER_CST)
14980 unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
14982 if (TREE_CODE (op1) == INTEGER_CST)
14983 unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
14985 if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
14986 && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
14988 unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
14989 ? tree_int_cst_min_precision (op0, /*unsignedp=*/true)
14990 : TYPE_PRECISION (inner0);
14992 unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
14993 ? tree_int_cst_min_precision (op1, /*unsignedp=*/true)
14994 : TYPE_PRECISION (inner1);
14996 return precision0 + precision1 < TYPE_PRECISION (type);
15003 return (tree_expr_nonnegative_warnv_p (op0,
15005 || tree_expr_nonnegative_warnv_p (op1,
15006 strict_overflow_p));
15012 case TRUNC_DIV_EXPR:
15013 case CEIL_DIV_EXPR:
15014 case FLOOR_DIV_EXPR:
15015 case ROUND_DIV_EXPR:
15016 return (tree_expr_nonnegative_warnv_p (op0,
15018 && tree_expr_nonnegative_warnv_p (op1,
15019 strict_overflow_p));
15021 case TRUNC_MOD_EXPR:
15022 case CEIL_MOD_EXPR:
15023 case FLOOR_MOD_EXPR:
15024 case ROUND_MOD_EXPR:
15025 return tree_expr_nonnegative_warnv_p (op0,
15026 strict_overflow_p);
15028 return tree_simple_nonnegative_warnv_p (code, type);
15031 /* We don't know sign of `t', so be conservative and return false. */
15035 /* Return true if T is known to be non-negative. If the return
15036 value is based on the assumption that signed overflow is undefined,
15037 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15038 *STRICT_OVERFLOW_P. */
15041 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15043 if (TYPE_UNSIGNED (TREE_TYPE (t)))
15046 switch (TREE_CODE (t))
15049 return tree_int_cst_sgn (t) >= 0;
15052 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
15055 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
15058 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15060 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
15061 strict_overflow_p));
15063 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
15066 /* We don't know sign of `t', so be conservative and return false. */
15070 /* Return true if T is known to be non-negative. If the return
15071 value is based on the assumption that signed overflow is undefined,
15072 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15073 *STRICT_OVERFLOW_P. */
15076 tree_call_nonnegative_warnv_p (tree type, tree fndecl,
15077 tree arg0, tree arg1, bool *strict_overflow_p)
15079 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
15080 switch (DECL_FUNCTION_CODE (fndecl))
15082 CASE_FLT_FN (BUILT_IN_ACOS):
15083 CASE_FLT_FN (BUILT_IN_ACOSH):
15084 CASE_FLT_FN (BUILT_IN_CABS):
15085 CASE_FLT_FN (BUILT_IN_COSH):
15086 CASE_FLT_FN (BUILT_IN_ERFC):
15087 CASE_FLT_FN (BUILT_IN_EXP):
15088 CASE_FLT_FN (BUILT_IN_EXP10):
15089 CASE_FLT_FN (BUILT_IN_EXP2):
15090 CASE_FLT_FN (BUILT_IN_FABS):
15091 CASE_FLT_FN (BUILT_IN_FDIM):
15092 CASE_FLT_FN (BUILT_IN_HYPOT):
15093 CASE_FLT_FN (BUILT_IN_POW10):
15094 CASE_INT_FN (BUILT_IN_FFS):
15095 CASE_INT_FN (BUILT_IN_PARITY):
15096 CASE_INT_FN (BUILT_IN_POPCOUNT):
15097 case BUILT_IN_BSWAP32:
15098 case BUILT_IN_BSWAP64:
15102 CASE_FLT_FN (BUILT_IN_SQRT):
15103 /* sqrt(-0.0) is -0.0. */
15104 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
15106 return tree_expr_nonnegative_warnv_p (arg0,
15107 strict_overflow_p);
15109 CASE_FLT_FN (BUILT_IN_ASINH):
15110 CASE_FLT_FN (BUILT_IN_ATAN):
15111 CASE_FLT_FN (BUILT_IN_ATANH):
15112 CASE_FLT_FN (BUILT_IN_CBRT):
15113 CASE_FLT_FN (BUILT_IN_CEIL):
15114 CASE_FLT_FN (BUILT_IN_ERF):
15115 CASE_FLT_FN (BUILT_IN_EXPM1):
15116 CASE_FLT_FN (BUILT_IN_FLOOR):
15117 CASE_FLT_FN (BUILT_IN_FMOD):
15118 CASE_FLT_FN (BUILT_IN_FREXP):
15119 CASE_FLT_FN (BUILT_IN_LCEIL):
15120 CASE_FLT_FN (BUILT_IN_LDEXP):
15121 CASE_FLT_FN (BUILT_IN_LFLOOR):
15122 CASE_FLT_FN (BUILT_IN_LLCEIL):
15123 CASE_FLT_FN (BUILT_IN_LLFLOOR):
15124 CASE_FLT_FN (BUILT_IN_LLRINT):
15125 CASE_FLT_FN (BUILT_IN_LLROUND):
15126 CASE_FLT_FN (BUILT_IN_LRINT):
15127 CASE_FLT_FN (BUILT_IN_LROUND):
15128 CASE_FLT_FN (BUILT_IN_MODF):
15129 CASE_FLT_FN (BUILT_IN_NEARBYINT):
15130 CASE_FLT_FN (BUILT_IN_RINT):
15131 CASE_FLT_FN (BUILT_IN_ROUND):
15132 CASE_FLT_FN (BUILT_IN_SCALB):
15133 CASE_FLT_FN (BUILT_IN_SCALBLN):
15134 CASE_FLT_FN (BUILT_IN_SCALBN):
15135 CASE_FLT_FN (BUILT_IN_SIGNBIT):
15136 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
15137 CASE_FLT_FN (BUILT_IN_SINH):
15138 CASE_FLT_FN (BUILT_IN_TANH):
15139 CASE_FLT_FN (BUILT_IN_TRUNC):
15140 /* True if the 1st argument is nonnegative. */
15141 return tree_expr_nonnegative_warnv_p (arg0,
15142 strict_overflow_p);
15144 CASE_FLT_FN (BUILT_IN_FMAX):
15145 /* True if the 1st OR 2nd arguments are nonnegative. */
15146 return (tree_expr_nonnegative_warnv_p (arg0,
15148 || (tree_expr_nonnegative_warnv_p (arg1,
15149 strict_overflow_p)));
15151 CASE_FLT_FN (BUILT_IN_FMIN):
15152 /* True if the 1st AND 2nd arguments are nonnegative. */
15153 return (tree_expr_nonnegative_warnv_p (arg0,
15155 && (tree_expr_nonnegative_warnv_p (arg1,
15156 strict_overflow_p)));
15158 CASE_FLT_FN (BUILT_IN_COPYSIGN):
15159 /* True if the 2nd argument is nonnegative. */
15160 return tree_expr_nonnegative_warnv_p (arg1,
15161 strict_overflow_p);
15163 CASE_FLT_FN (BUILT_IN_POWI):
15164 /* True if the 1st argument is nonnegative or the second
15165 argument is an even integer. */
15166 if (TREE_CODE (arg1) == INTEGER_CST
15167 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
15169 return tree_expr_nonnegative_warnv_p (arg0,
15170 strict_overflow_p);
15172 CASE_FLT_FN (BUILT_IN_POW):
15173 /* True if the 1st argument is nonnegative or the second
15174 argument is an even integer valued real. */
15175 if (TREE_CODE (arg1) == REAL_CST)
15180 c = TREE_REAL_CST (arg1);
15181 n = real_to_integer (&c);
15184 REAL_VALUE_TYPE cint;
15185 real_from_integer (&cint, VOIDmode, n,
15186 n < 0 ? -1 : 0, 0);
15187 if (real_identical (&c, &cint))
15191 return tree_expr_nonnegative_warnv_p (arg0,
15192 strict_overflow_p);
15197 return tree_simple_nonnegative_warnv_p (CALL_EXPR,
15201 /* Return true if T is known to be non-negative. If the return
15202 value is based on the assumption that signed overflow is undefined,
15203 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15204 *STRICT_OVERFLOW_P. */
15207 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15209 enum tree_code code = TREE_CODE (t);
15210 if (TYPE_UNSIGNED (TREE_TYPE (t)))
15217 tree temp = TARGET_EXPR_SLOT (t);
15218 t = TARGET_EXPR_INITIAL (t);
15220 /* If the initializer is non-void, then it's a normal expression
15221 that will be assigned to the slot. */
15222 if (!VOID_TYPE_P (t))
15223 return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);
15225 /* Otherwise, the initializer sets the slot in some way. One common
15226 way is an assignment statement at the end of the initializer. */
15229 if (TREE_CODE (t) == BIND_EXPR)
15230 t = expr_last (BIND_EXPR_BODY (t));
15231 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
15232 || TREE_CODE (t) == TRY_CATCH_EXPR)
15233 t = expr_last (TREE_OPERAND (t, 0));
15234 else if (TREE_CODE (t) == STATEMENT_LIST)
15239 if (TREE_CODE (t) == MODIFY_EXPR
15240 && TREE_OPERAND (t, 0) == temp)
15241 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15242 strict_overflow_p);
15249 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
15250 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
15252 return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
15253 get_callee_fndecl (t),
15256 strict_overflow_p);
15258 case COMPOUND_EXPR:
15260 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15261 strict_overflow_p);
15263 return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
15264 strict_overflow_p);
15266 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
15267 strict_overflow_p);
15270 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
15274 /* We don't know sign of `t', so be conservative and return false. */
15278 /* Return true if T is known to be non-negative. If the return
15279 value is based on the assumption that signed overflow is undefined,
15280 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15281 *STRICT_OVERFLOW_P. */
15284 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15286 enum tree_code code;
15287 if (t == error_mark_node)
15290 code = TREE_CODE (t);
15291 switch (TREE_CODE_CLASS (code))
15294 case tcc_comparison:
15295 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15297 TREE_OPERAND (t, 0),
15298 TREE_OPERAND (t, 1),
15299 strict_overflow_p);
15302 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15304 TREE_OPERAND (t, 0),
15305 strict_overflow_p);
15308 case tcc_declaration:
15309 case tcc_reference:
15310 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
15318 case TRUTH_AND_EXPR:
15319 case TRUTH_OR_EXPR:
15320 case TRUTH_XOR_EXPR:
15321 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15323 TREE_OPERAND (t, 0),
15324 TREE_OPERAND (t, 1),
15325 strict_overflow_p);
15326 case TRUTH_NOT_EXPR:
15327 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15329 TREE_OPERAND (t, 0),
15330 strict_overflow_p);
15337 case WITH_SIZE_EXPR:
15339 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
15342 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p);
15346 /* Return true if `t' is known to be non-negative. Handle warnings
15347 about undefined signed overflow. */
15350 tree_expr_nonnegative_p (tree t)
15352 bool ret, strict_overflow_p;
15354 strict_overflow_p = false;
15355 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
15356 if (strict_overflow_p)
15357 fold_overflow_warning (("assuming signed overflow does not occur when "
15358 "determining that expression is always "
15360 WARN_STRICT_OVERFLOW_MISC);
15365 /* Return true when (CODE OP0) is an address and is known to be nonzero.
15366 For floating point we further ensure that T is not denormal.
15367 Similar logic is present in nonzero_address in rtlanal.h.
15369 If the return value is based on the assumption that signed overflow
15370 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15371 change *STRICT_OVERFLOW_P. */
15374 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
15375 bool *strict_overflow_p)
15380 return tree_expr_nonzero_warnv_p (op0,
15381 strict_overflow_p);
15385 tree inner_type = TREE_TYPE (op0);
15386 tree outer_type = type;
15388 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
15389 && tree_expr_nonzero_warnv_p (op0,
15390 strict_overflow_p));
15394 case NON_LVALUE_EXPR:
15395 return tree_expr_nonzero_warnv_p (op0,
15396 strict_overflow_p);
15405 /* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
15406 For floating point we further ensure that T is not denormal.
15407 Similar logic is present in nonzero_address in rtlanal.h.
15409 If the return value is based on the assumption that signed overflow
15410 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15411 change *STRICT_OVERFLOW_P. */
15414 tree_binary_nonzero_warnv_p (enum tree_code code,
15417 tree op1, bool *strict_overflow_p)
15419 bool sub_strict_overflow_p;
15422 case POINTER_PLUS_EXPR:
15424 if (TYPE_OVERFLOW_UNDEFINED (type))
15426 /* With the presence of negative values it is hard
15427 to say something. */
15428 sub_strict_overflow_p = false;
15429 if (!tree_expr_nonnegative_warnv_p (op0,
15430 &sub_strict_overflow_p)
15431 || !tree_expr_nonnegative_warnv_p (op1,
15432 &sub_strict_overflow_p))
15434 /* One of operands must be positive and the other non-negative. */
15435 /* We don't set *STRICT_OVERFLOW_P here: even if this value
15436 overflows, on a twos-complement machine the sum of two
15437 nonnegative numbers can never be zero. */
15438 return (tree_expr_nonzero_warnv_p (op0,
15440 || tree_expr_nonzero_warnv_p (op1,
15441 strict_overflow_p));
15446 if (TYPE_OVERFLOW_UNDEFINED (type))
15448 if (tree_expr_nonzero_warnv_p (op0,
15450 && tree_expr_nonzero_warnv_p (op1,
15451 strict_overflow_p))
15453 *strict_overflow_p = true;
15460 sub_strict_overflow_p = false;
15461 if (tree_expr_nonzero_warnv_p (op0,
15462 &sub_strict_overflow_p)
15463 && tree_expr_nonzero_warnv_p (op1,
15464 &sub_strict_overflow_p))
15466 if (sub_strict_overflow_p)
15467 *strict_overflow_p = true;
15472 sub_strict_overflow_p = false;
15473 if (tree_expr_nonzero_warnv_p (op0,
15474 &sub_strict_overflow_p))
15476 if (sub_strict_overflow_p)
15477 *strict_overflow_p = true;
15479 /* When both operands are nonzero, then MAX must be too. */
15480 if (tree_expr_nonzero_warnv_p (op1,
15481 strict_overflow_p))
15484 /* MAX where operand 0 is positive is positive. */
15485 return tree_expr_nonnegative_warnv_p (op0,
15486 strict_overflow_p);
15488 /* MAX where operand 1 is positive is positive. */
15489 else if (tree_expr_nonzero_warnv_p (op1,
15490 &sub_strict_overflow_p)
15491 && tree_expr_nonnegative_warnv_p (op1,
15492 &sub_strict_overflow_p))
15494 if (sub_strict_overflow_p)
15495 *strict_overflow_p = true;
15501 return (tree_expr_nonzero_warnv_p (op1,
15503 || tree_expr_nonzero_warnv_p (op0,
15504 strict_overflow_p));
15513 /* Return true when T is an address and is known to be nonzero.
15514 For floating point we further ensure that T is not denormal.
15515 Similar logic is present in nonzero_address in rtlanal.h.
15517 If the return value is based on the assumption that signed overflow
15518 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15519 change *STRICT_OVERFLOW_P. */
15522 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
15524 bool sub_strict_overflow_p;
15525 switch (TREE_CODE (t))
15528 return !integer_zerop (t);
15532 tree base = get_base_address (TREE_OPERAND (t, 0));
15537 /* Weak declarations may link to NULL. Other things may also be NULL
15538 so protect with -fdelete-null-pointer-checks; but not variables
15539 allocated on the stack. */
15541 && (flag_delete_null_pointer_checks
15542 || (TREE_CODE (base) == VAR_DECL && !TREE_STATIC (base))))
15543 return !VAR_OR_FUNCTION_DECL_P (base) || !DECL_WEAK (base);
15545 /* Constants are never weak. */
15546 if (CONSTANT_CLASS_P (base))
15553 sub_strict_overflow_p = false;
15554 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
15555 &sub_strict_overflow_p)
15556 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
15557 &sub_strict_overflow_p))
15559 if (sub_strict_overflow_p)
15560 *strict_overflow_p = true;
15571 /* Return true when T is an address and is known to be nonzero.
15572 For floating point we further ensure that T is not denormal.
15573 Similar logic is present in nonzero_address in rtlanal.h.
15575 If the return value is based on the assumption that signed overflow
15576 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15577 change *STRICT_OVERFLOW_P. */
15580 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
15582 tree type = TREE_TYPE (t);
15583 enum tree_code code;
15585 /* Doing something useful for floating point would need more work. */
15586 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
15589 code = TREE_CODE (t);
15590 switch (TREE_CODE_CLASS (code))
15593 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
15594 strict_overflow_p);
15596 case tcc_comparison:
15597 return tree_binary_nonzero_warnv_p (code, type,
15598 TREE_OPERAND (t, 0),
15599 TREE_OPERAND (t, 1),
15600 strict_overflow_p);
15602 case tcc_declaration:
15603 case tcc_reference:
15604 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
15612 case TRUTH_NOT_EXPR:
15613 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
15614 strict_overflow_p);
15616 case TRUTH_AND_EXPR:
15617 case TRUTH_OR_EXPR:
15618 case TRUTH_XOR_EXPR:
15619 return tree_binary_nonzero_warnv_p (code, type,
15620 TREE_OPERAND (t, 0),
15621 TREE_OPERAND (t, 1),
15622 strict_overflow_p);
15629 case WITH_SIZE_EXPR:
15631 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
15633 case COMPOUND_EXPR:
15636 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
15637 strict_overflow_p);
15640 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
15641 strict_overflow_p);
15644 return alloca_call_p (t);
15652 /* Return true when T is an address and is known to be nonzero.
15653 Handle warnings about undefined signed overflow. */
15656 tree_expr_nonzero_p (tree t)
15658 bool ret, strict_overflow_p;
15660 strict_overflow_p = false;
15661 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
15662 if (strict_overflow_p)
15663 fold_overflow_warning (("assuming signed overflow does not occur when "
15664 "determining that expression is always "
15666 WARN_STRICT_OVERFLOW_MISC);
15670 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
15671 attempt to fold the expression to a constant without modifying TYPE,
15674 If the expression could be simplified to a constant, then return
15675 the constant. If the expression would not be simplified to a
15676 constant, then return NULL_TREE. */
15679 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
15681 tree tem = fold_binary (code, type, op0, op1);
15682 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
15685 /* Given the components of a unary expression CODE, TYPE and OP0,
15686 attempt to fold the expression to a constant without modifying
15689 If the expression could be simplified to a constant, then return
15690 the constant. If the expression would not be simplified to a
15691 constant, then return NULL_TREE. */
15694 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
15696 tree tem = fold_unary (code, type, op0);
15697 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
15700 /* If EXP represents referencing an element in a constant string
15701 (either via pointer arithmetic or array indexing), return the
15702 tree representing the value accessed, otherwise return NULL. */
15705 fold_read_from_constant_string (tree exp)
15707 if ((TREE_CODE (exp) == INDIRECT_REF
15708 || TREE_CODE (exp) == ARRAY_REF)
15709 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
15711 tree exp1 = TREE_OPERAND (exp, 0);
15714 location_t loc = EXPR_LOCATION (exp);
15716 if (TREE_CODE (exp) == INDIRECT_REF)
15717 string = string_constant (exp1, &index);
15720 tree low_bound = array_ref_low_bound (exp);
15721 index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));
15723 /* Optimize the special-case of a zero lower bound.
15725 We convert the low_bound to sizetype to avoid some problems
15726 with constant folding. (E.g. suppose the lower bound is 1,
15727 and its mode is QI. Without the conversion,l (ARRAY
15728 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
15729 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
15730 if (! integer_zerop (low_bound))
15731 index = size_diffop_loc (loc, index,
15732 fold_convert_loc (loc, sizetype, low_bound));
15738 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
15739 && TREE_CODE (string) == STRING_CST
15740 && TREE_CODE (index) == INTEGER_CST
15741 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
15742 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
15744 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
15745 return build_int_cst_type (TREE_TYPE (exp),
15746 (TREE_STRING_POINTER (string)
15747 [TREE_INT_CST_LOW (index)]));
15752 /* Return the tree for neg (ARG0) when ARG0 is known to be either
15753 an integer constant, real, or fixed-point constant.
15755 TYPE is the type of the result. */
15758 fold_negate_const (tree arg0, tree type)
15760 tree t = NULL_TREE;
15762 switch (TREE_CODE (arg0))
15766 unsigned HOST_WIDE_INT low;
15767 HOST_WIDE_INT high;
15768 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
15769 TREE_INT_CST_HIGH (arg0),
15771 t = force_fit_type_double (type, low, high, 1,
15772 (overflow | TREE_OVERFLOW (arg0))
15773 && !TYPE_UNSIGNED (type));
15778 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
15783 FIXED_VALUE_TYPE f;
15784 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
15785 &(TREE_FIXED_CST (arg0)), NULL,
15786 TYPE_SATURATING (type));
15787 t = build_fixed (type, f);
15788 /* Propagate overflow flags. */
15789 if (overflow_p | TREE_OVERFLOW (arg0))
15790 TREE_OVERFLOW (t) = 1;
15795 gcc_unreachable ();
15801 /* Return the tree for abs (ARG0) when ARG0 is known to be either
15802 an integer constant or real constant.
15804 TYPE is the type of the result. */
15807 fold_abs_const (tree arg0, tree type)
15809 tree t = NULL_TREE;
15811 switch (TREE_CODE (arg0))
15814 /* If the value is unsigned, then the absolute value is
15815 the same as the ordinary value. */
15816 if (TYPE_UNSIGNED (type))
15818 /* Similarly, if the value is non-negative. */
15819 else if (INT_CST_LT (integer_minus_one_node, arg0))
15821 /* If the value is negative, then the absolute value is
15825 unsigned HOST_WIDE_INT low;
15826 HOST_WIDE_INT high;
15827 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
15828 TREE_INT_CST_HIGH (arg0),
15830 t = force_fit_type_double (type, low, high, -1,
15831 overflow | TREE_OVERFLOW (arg0));
15836 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
15837 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
15843 gcc_unreachable ();
15849 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
15850 constant. TYPE is the type of the result. */
15853 fold_not_const (tree arg0, tree type)
15855 tree t = NULL_TREE;
15857 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
15859 t = force_fit_type_double (type, ~TREE_INT_CST_LOW (arg0),
15860 ~TREE_INT_CST_HIGH (arg0), 0,
15861 TREE_OVERFLOW (arg0));
15866 /* Given CODE, a relational operator, the target type, TYPE and two
15867 constant operands OP0 and OP1, return the result of the
15868 relational operation. If the result is not a compile time
15869 constant, then return NULL_TREE. */
15872 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
15874 int result, invert;
15876 /* From here on, the only cases we handle are when the result is
15877 known to be a constant. */
15879 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
15881 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
15882 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
15884 /* Handle the cases where either operand is a NaN. */
15885 if (real_isnan (c0) || real_isnan (c1))
15895 case UNORDERED_EXPR:
15909 if (flag_trapping_math)
15915 gcc_unreachable ();
15918 return constant_boolean_node (result, type);
15921 return constant_boolean_node (real_compare (code, c0, c1), type);
15924 if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
15926 const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
15927 const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
15928 return constant_boolean_node (fixed_compare (code, c0, c1), type);
15931 /* Handle equality/inequality of complex constants. */
15932 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
15934 tree rcond = fold_relational_const (code, type,
15935 TREE_REALPART (op0),
15936 TREE_REALPART (op1));
15937 tree icond = fold_relational_const (code, type,
15938 TREE_IMAGPART (op0),
15939 TREE_IMAGPART (op1));
15940 if (code == EQ_EXPR)
15941 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
15942 else if (code == NE_EXPR)
15943 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
15948 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
15950 To compute GT, swap the arguments and do LT.
15951 To compute GE, do LT and invert the result.
15952 To compute LE, swap the arguments, do LT and invert the result.
15953 To compute NE, do EQ and invert the result.
15955 Therefore, the code below must handle only EQ and LT. */
15957 if (code == LE_EXPR || code == GT_EXPR)
15962 code = swap_tree_comparison (code);
15965 /* Note that it is safe to invert for real values here because we
15966 have already handled the one case that it matters. */
15969 if (code == NE_EXPR || code == GE_EXPR)
15972 code = invert_tree_comparison (code, false);
15975 /* Compute a result for LT or EQ if args permit;
15976 Otherwise return T. */
15977 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
15979 if (code == EQ_EXPR)
15980 result = tree_int_cst_equal (op0, op1);
15981 else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
15982 result = INT_CST_LT_UNSIGNED (op0, op1);
15984 result = INT_CST_LT (op0, op1);
15991 return constant_boolean_node (result, type);
15994 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
15995 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
15999 fold_build_cleanup_point_expr (tree type, tree expr)
16001 /* If the expression does not have side effects then we don't have to wrap
16002 it with a cleanup point expression. */
16003 if (!TREE_SIDE_EFFECTS (expr))
16006 /* If the expression is a return, check to see if the expression inside the
16007 return has no side effects or the right hand side of the modify expression
16008 inside the return. If either don't have side effects set we don't need to
16009 wrap the expression in a cleanup point expression. Note we don't check the
16010 left hand side of the modify because it should always be a return decl. */
16011 if (TREE_CODE (expr) == RETURN_EXPR)
16013 tree op = TREE_OPERAND (expr, 0);
16014 if (!op || !TREE_SIDE_EFFECTS (op))
16016 op = TREE_OPERAND (op, 1);
16017 if (!TREE_SIDE_EFFECTS (op))
16021 return build1 (CLEANUP_POINT_EXPR, type, expr);
16024 /* Given a pointer value OP0 and a type TYPE, return a simplified version
16025 of an indirection through OP0, or NULL_TREE if no simplification is
16029 fold_indirect_ref_1 (location_t loc, tree type, tree op0)
16035 subtype = TREE_TYPE (sub);
16036 if (!POINTER_TYPE_P (subtype))
16039 if (TREE_CODE (sub) == ADDR_EXPR)
16041 tree op = TREE_OPERAND (sub, 0);
16042 tree optype = TREE_TYPE (op);
16043 /* *&CONST_DECL -> to the value of the const decl. */
16044 if (TREE_CODE (op) == CONST_DECL)
16045 return DECL_INITIAL (op);
16046 /* *&p => p; make sure to handle *&"str"[cst] here. */
16047 if (type == optype)
16049 tree fop = fold_read_from_constant_string (op);
16055 /* *(foo *)&fooarray => fooarray[0] */
16056 else if (TREE_CODE (optype) == ARRAY_TYPE
16057 && type == TREE_TYPE (optype))
16059 tree type_domain = TYPE_DOMAIN (optype);
16060 tree min_val = size_zero_node;
16061 if (type_domain && TYPE_MIN_VALUE (type_domain))
16062 min_val = TYPE_MIN_VALUE (type_domain);
16063 op0 = build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
16064 SET_EXPR_LOCATION (op0, loc);
16067 /* *(foo *)&complexfoo => __real__ complexfoo */
16068 else if (TREE_CODE (optype) == COMPLEX_TYPE
16069 && type == TREE_TYPE (optype))
16070 return fold_build1_loc (loc, REALPART_EXPR, type, op);
16071 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
16072 else if (TREE_CODE (optype) == VECTOR_TYPE
16073 && type == TREE_TYPE (optype))
16075 tree part_width = TYPE_SIZE (type);
16076 tree index = bitsize_int (0);
16077 return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width, index);
16081 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
16082 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
16083 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
16085 tree op00 = TREE_OPERAND (sub, 0);
16086 tree op01 = TREE_OPERAND (sub, 1);
16090 op00type = TREE_TYPE (op00);
16091 if (TREE_CODE (op00) == ADDR_EXPR
16092 && TREE_CODE (TREE_TYPE (op00type)) == VECTOR_TYPE
16093 && type == TREE_TYPE (TREE_TYPE (op00type)))
16095 HOST_WIDE_INT offset = tree_low_cst (op01, 0);
16096 tree part_width = TYPE_SIZE (type);
16097 unsigned HOST_WIDE_INT part_widthi = tree_low_cst (part_width, 0)/BITS_PER_UNIT;
16098 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
16099 tree index = bitsize_int (indexi);
16101 if (offset/part_widthi <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (op00type)))
16102 return fold_build3_loc (loc,
16103 BIT_FIELD_REF, type, TREE_OPERAND (op00, 0),
16104 part_width, index);
16110 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
16111 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
16112 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
16114 tree op00 = TREE_OPERAND (sub, 0);
16115 tree op01 = TREE_OPERAND (sub, 1);
16119 op00type = TREE_TYPE (op00);
16120 if (TREE_CODE (op00) == ADDR_EXPR
16121 && TREE_CODE (TREE_TYPE (op00type)) == COMPLEX_TYPE
16122 && type == TREE_TYPE (TREE_TYPE (op00type)))
16124 tree size = TYPE_SIZE_UNIT (type);
16125 if (tree_int_cst_equal (size, op01))
16126 return fold_build1_loc (loc, IMAGPART_EXPR, type,
16127 TREE_OPERAND (op00, 0));
16131 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
16132 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
16133 && type == TREE_TYPE (TREE_TYPE (subtype)))
16136 tree min_val = size_zero_node;
16137 sub = build_fold_indirect_ref_loc (loc, sub);
16138 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
16139 if (type_domain && TYPE_MIN_VALUE (type_domain))
16140 min_val = TYPE_MIN_VALUE (type_domain);
16141 op0 = build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
16142 SET_EXPR_LOCATION (op0, loc);
16149 /* Builds an expression for an indirection through T, simplifying some
16153 build_fold_indirect_ref_loc (location_t loc, tree t)
16155 tree type = TREE_TYPE (TREE_TYPE (t));
16156 tree sub = fold_indirect_ref_1 (loc, type, t);
16161 t = build1 (INDIRECT_REF, type, t);
16162 SET_EXPR_LOCATION (t, loc);
16166 /* Given an INDIRECT_REF T, return either T or a simplified version. */
16169 fold_indirect_ref_loc (location_t loc, tree t)
16171 tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));
16179 /* Strip non-trapping, non-side-effecting tree nodes from an expression
16180 whose result is ignored. The type of the returned tree need not be
16181 the same as the original expression. */
16184 fold_ignored_result (tree t)
16186 if (!TREE_SIDE_EFFECTS (t))
16187 return integer_zero_node;
16190 switch (TREE_CODE_CLASS (TREE_CODE (t)))
16193 t = TREE_OPERAND (t, 0);
16197 case tcc_comparison:
16198 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
16199 t = TREE_OPERAND (t, 0);
16200 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
16201 t = TREE_OPERAND (t, 1);
16206 case tcc_expression:
16207 switch (TREE_CODE (t))
16209 case COMPOUND_EXPR:
16210 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
16212 t = TREE_OPERAND (t, 0);
16216 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
16217 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
16219 t = TREE_OPERAND (t, 0);
16232 /* Return the value of VALUE, rounded up to a multiple of DIVISOR.
16233 This can only be applied to objects of a sizetype. */
16236 round_up_loc (location_t loc, tree value, int divisor)
16238 tree div = NULL_TREE;
16240 gcc_assert (divisor > 0);
16244 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
16245 have to do anything. Only do this when we are not given a const,
16246 because in that case, this check is more expensive than just
16248 if (TREE_CODE (value) != INTEGER_CST)
16250 div = build_int_cst (TREE_TYPE (value), divisor);
16252 if (multiple_of_p (TREE_TYPE (value), value, div))
16256 /* If divisor is a power of two, simplify this to bit manipulation. */
16257 if (divisor == (divisor & -divisor))
16259 if (TREE_CODE (value) == INTEGER_CST)
16261 unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (value);
16262 unsigned HOST_WIDE_INT high;
16265 if ((low & (divisor - 1)) == 0)
16268 overflow_p = TREE_OVERFLOW (value);
16269 high = TREE_INT_CST_HIGH (value);
16270 low &= ~(divisor - 1);
16279 return force_fit_type_double (TREE_TYPE (value), low, high,
16286 t = build_int_cst (TREE_TYPE (value), divisor - 1);
16287 value = size_binop_loc (loc, PLUS_EXPR, value, t);
16288 t = build_int_cst (TREE_TYPE (value), -divisor);
16289 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
16295 div = build_int_cst (TREE_TYPE (value), divisor);
16296 value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
16297 value = size_binop_loc (loc, MULT_EXPR, value, div);
16303 /* Likewise, but round down. */
16306 round_down_loc (location_t loc, tree value, int divisor)
16308 tree div = NULL_TREE;
16310 gcc_assert (divisor > 0);
16314 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
16315 have to do anything. Only do this when we are not given a const,
16316 because in that case, this check is more expensive than just
16318 if (TREE_CODE (value) != INTEGER_CST)
16320 div = build_int_cst (TREE_TYPE (value), divisor);
16322 if (multiple_of_p (TREE_TYPE (value), value, div))
16326 /* If divisor is a power of two, simplify this to bit manipulation. */
16327 if (divisor == (divisor & -divisor))
16331 t = build_int_cst (TREE_TYPE (value), -divisor);
16332 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
16337 div = build_int_cst (TREE_TYPE (value), divisor);
16338 value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
16339 value = size_binop_loc (loc, MULT_EXPR, value, div);
16345 /* Returns the pointer to the base of the object addressed by EXP and
16346 extracts the information about the offset of the access, storing it
16347 to PBITPOS and POFFSET. */
16350 split_address_to_core_and_offset (tree exp,
16351 HOST_WIDE_INT *pbitpos, tree *poffset)
16354 enum machine_mode mode;
16355 int unsignedp, volatilep;
16356 HOST_WIDE_INT bitsize;
16357 location_t loc = EXPR_LOCATION (exp);
16359 if (TREE_CODE (exp) == ADDR_EXPR)
16361 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
16362 poffset, &mode, &unsignedp, &volatilep,
16364 core = build_fold_addr_expr_loc (loc, core);
16370 *poffset = NULL_TREE;
16376 /* Returns true if addresses of E1 and E2 differ by a constant, false
16377 otherwise. If they do, E1 - E2 is stored in *DIFF. */
16380 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
16383 HOST_WIDE_INT bitpos1, bitpos2;
16384 tree toffset1, toffset2, tdiff, type;
16386 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
16387 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
16389 if (bitpos1 % BITS_PER_UNIT != 0
16390 || bitpos2 % BITS_PER_UNIT != 0
16391 || !operand_equal_p (core1, core2, 0))
16394 if (toffset1 && toffset2)
16396 type = TREE_TYPE (toffset1);
16397 if (type != TREE_TYPE (toffset2))
16398 toffset2 = fold_convert (type, toffset2);
16400 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
16401 if (!cst_and_fits_in_hwi (tdiff))
16404 *diff = int_cst_value (tdiff);
16406 else if (toffset1 || toffset2)
16408 /* If only one of the offsets is non-constant, the difference cannot
16415 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
16419 /* Simplify the floating point expression EXP when the sign of the
16420 result is not significant. Return NULL_TREE if no simplification
16424 fold_strip_sign_ops (tree exp)
16427 location_t loc = EXPR_LOCATION (exp);
16429 switch (TREE_CODE (exp))
16433 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
16434 return arg0 ? arg0 : TREE_OPERAND (exp, 0);
16438 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
16440 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
16441 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
16442 if (arg0 != NULL_TREE || arg1 != NULL_TREE)
16443 return fold_build2_loc (loc, TREE_CODE (exp), TREE_TYPE (exp),
16444 arg0 ? arg0 : TREE_OPERAND (exp, 0),
16445 arg1 ? arg1 : TREE_OPERAND (exp, 1));
16448 case COMPOUND_EXPR:
16449 arg0 = TREE_OPERAND (exp, 0);
16450 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
16452 return fold_build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (exp), arg0, arg1);
16456 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
16457 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
16459 return fold_build3_loc (loc,
16460 COND_EXPR, TREE_TYPE (exp), TREE_OPERAND (exp, 0),
16461 arg0 ? arg0 : TREE_OPERAND (exp, 1),
16462 arg1 ? arg1 : TREE_OPERAND (exp, 2));
16467 const enum built_in_function fcode = builtin_mathfn_code (exp);
16470 CASE_FLT_FN (BUILT_IN_COPYSIGN):
16471 /* Strip copysign function call, return the 1st argument. */
16472 arg0 = CALL_EXPR_ARG (exp, 0);
16473 arg1 = CALL_EXPR_ARG (exp, 1);
16474 return omit_one_operand_loc (loc, TREE_TYPE (exp), arg0, arg1);
16477 /* Strip sign ops from the argument of "odd" math functions. */
16478 if (negate_mathfn_p (fcode))
16480 arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
16482 return build_call_expr_loc (loc, get_callee_fndecl (exp), 1, arg0);