/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision etc. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */

/* The entry points in this file are fold, size_int_wide, size_binop
   and force_fit_type_double.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   force_fit_type_double takes a constant, an overflowable flag and a
   prior overflow indicator.  It forces the value to fit the type and
   sets TREE_OVERFLOW.

   Note: Since the folders get called on non-gimple code as well as
   gimple code, we need to handle GIMPLE tuples as well as their
   corresponding tree equivalents.  */

#include "coretypes.h"
#include "fixed-value.h"
#include "langhooks.h"

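/* Illustrative sketch only, not wired into anything: how a caller
   might use the size_binop/size_int entry points described above.
   The helper name below is hypothetical; size_binop folds eagerly
   when both operands are INTEGER_CSTs, so adding two constant sizes
   yields a constant.  */

static tree
example_fold_size_sum (tree a, tree b)
{
  /* e.g. size_binop (PLUS_EXPR, size_int (4), size_int (8)) produces
     an INTEGER_CST of value 12 with type `sizetype'.  */
  return size_binop (PLUS_EXPR, fold_convert (sizetype, a),
                     fold_convert (sizetype, b));
}
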
/* Nonzero if we are folding constants inside an initializer; zero
   otherwise.  */
int folding_initializer = 0;

/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *);
static bool negate_mathfn_p (enum built_in_function);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (tree, tree, enum tree_code, tree);
static tree const_binop (enum tree_code, tree, tree, int);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static tree combine_comparisons (enum tree_code, enum tree_code,
                                 enum tree_code, tree, tree, tree);
static int truth_value_p (enum tree_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (tree, tree, tree, tree, tree);
static tree pedantic_omit_one_operand (tree, tree, tree);
static tree distribute_bit_expr (enum tree_code, tree, tree, tree);
static tree decode_field_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
                                    enum machine_mode *, int *, int *,
                                    tree *, tree *);
static tree sign_bit_p (tree, const_tree);
static int simple_operand_p (const_tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
static tree make_range (tree, int *, tree *, tree *, bool *);
static tree build_range_check (tree, tree, int, tree, tree);
static int merge_ranges (int *, tree *, tree *, int, tree, tree, int, tree,
                         tree);
static tree fold_range_test (enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree fold_truthop (enum tree_code, tree, tree, tree);
static tree optimize_minmax_comparison (enum tree_code, tree, tree, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
static tree fold_binary_op_with_conditional_arg (enum tree_code, tree,
                                                 tree, tree,
                                                 tree, tree, int);
static tree fold_mathfn_compare (enum built_in_function, enum tree_code,
                                 tree, tree, tree);
static tree fold_inf_compare (enum tree_code, tree, tree, tree);
static tree fold_div_compare (enum tree_code, tree, tree, tree);
static bool reorder_operands_p (const_tree, const_tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);

/* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
   overflow.  Suppose A, B and SUM have the same respective signs as A1, B1,
   and SUM1.  Then this yields nonzero if overflow occurred during the
   addition.

   Overflow occurs if A and B have the same sign, but A and SUM differ in
   sign.  Use `^' to test whether signs differ, and `< 0' to isolate the
   sign.  */
#define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)

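/* Illustrative sketch only, for exposition: the same sign-bit test on
   plain `int' operands.  Overflow of a + b is flagged exactly when a
   and b agree in sign but the wrapped sum does not.  The unsigned
   detour makes the wrap-around well defined in C.  */

static int
example_sum_overflows (int a, int b)
{
  int sum = (int) ((unsigned int) a + (unsigned int) b);
  return OVERFLOW_SUM_SIGN (a, b, sum);
}
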
/* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
   We do that by representing the two-word integer in 4 words, with only
   HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
   number.  The value of the word is LOWPART + HIGHPART * BASE.  */

#define LOWPART(x) \
  ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
#define HIGHPART(x) \
  ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
#define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)

/* Unpack a two-word integer into 4 words.
   LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
   WORDS points to the array of HOST_WIDE_INTs.  */

static void
encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
{
  words[0] = LOWPART (low);
  words[1] = HIGHPART (low);
  words[2] = LOWPART (hi);
  words[3] = HIGHPART (hi);
}

/* Pack an array of 4 words into a two-word integer.
   WORDS points to the array of words.
   The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces.  */

static void
decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
        HOST_WIDE_INT *hi)
{
  *low = words[0] + words[1] * BASE;
  *hi = words[2] + words[3] * BASE;
}

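/* Illustrative sketch only: an encode/decode round-trip.  BASE is
   2^(HOST_BITS_PER_WIDE_INT/2), so each double-word value splits into
   four half-word digits stored least significant first.  */

static void
example_encode_decode_roundtrip (void)
{
  HOST_WIDE_INT words[4];
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT hi;

  /* BASE + 7 has digit 7 in words[0] and digit 1 in words[1];
     the high word 5 lands in words[2].  */
  encode (words, BASE + 7, 5);
  decode (words, &low, &hi);
  /* Here low == BASE + 7 and hi == 5 again.  */
}
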
/* Force the double-word integer L1, H1 to be within the range of the
   integer type TYPE.  Stores the properly truncated and sign-extended
   double-word integer in *LV, *HV.  Returns true if the operation
   overflows, that is, argument and result are different.  */

int
fit_double_type (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, const_tree type)
{
  unsigned HOST_WIDE_INT low0 = l1;
  HOST_WIDE_INT high0 = h1;
  unsigned int prec;
  int sign_extended_type;

  if (POINTER_TYPE_P (type)
      || TREE_CODE (type) == OFFSET_TYPE)
    prec = POINTER_SIZE;
  else
    prec = TYPE_PRECISION (type);

  /* Size types *are* sign extended.  */
  sign_extended_type = (!TYPE_UNSIGNED (type)
                        || (TREE_CODE (type) == INTEGER_TYPE
                            && TYPE_IS_SIZETYPE (type)));

  /* First clear all bits that are beyond the type's precision.  */
  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    h1 &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
  else
    {
      h1 = 0;
      if (prec < HOST_BITS_PER_WIDE_INT)
        l1 &= ~((HOST_WIDE_INT) (-1) << prec);
    }

  /* Then do sign extension if necessary.  */
  if (!sign_extended_type)
    /* No sign extension */;
  else if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    /* Correct width already.  */;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    {
      /* Sign extend top half? */
      if (h1 & ((unsigned HOST_WIDE_INT)1
                << (prec - HOST_BITS_PER_WIDE_INT - 1)))
        h1 |= (HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else if (prec == HOST_BITS_PER_WIDE_INT)
    {
      if ((HOST_WIDE_INT)l1 < 0)
        h1 = -1;
    }
  else
    {
      /* Sign extend bottom half? */
      if (l1 & ((unsigned HOST_WIDE_INT)1 << (prec - 1)))
        {
          h1 = -1;
          l1 |= (HOST_WIDE_INT)(-1) << prec;
        }
    }

  *lv = l1;
  *hv = h1;

  /* If the value didn't fit, signal overflow.  */
  return l1 != low0 || h1 != high0;
}

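/* Illustrative sketch only: the same truncate-then-sign-extend dance
   on a single word, for a hypothetical 8-bit signed type.  0x134
   truncates to 0x34 (sign bit clear, no extension, and overflow is
   reported); 0x89 sign-extends to all-ones above bit 7, i.e. -119.  */

static int
example_fit_8bit (unsigned int val, int *overflowed)
{
  unsigned int orig = val;
  val &= 0xff;                  /* clear bits beyond the precision */
  if (val & 0x80)               /* then sign extend if necessary */
    val |= ~0xffU;
  *overflowed = (val != orig);
  return (int) val;
}
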
/* We force the double-int HIGH:LOW to the range of the type TYPE by
   sign or zero extending it.  OVERFLOWABLE indicates whether we are
   interested in overflow of the value: when it is >0 we care only
   about signed overflow, when it is <0 we care about any overflow.
   OVERFLOWED indicates whether overflow has already occurred.  We
   force the value to be within range of TYPE (by setting to 0 or 1
   all the bits outside the type's range).  We set TREE_OVERFLOW if
        OVERFLOWED is nonzero,
        or OVERFLOWABLE is >0 and signed overflow occurs,
        or OVERFLOWABLE is <0 and any overflow occurs.
   We return a new tree node for the extended double-int.  The node
   is shared if no overflow flags are set.  */

tree
force_fit_type_double (tree type, unsigned HOST_WIDE_INT low,
                       HOST_WIDE_INT high, int overflowable,
                       bool overflowed)
{
  int sign_extended_type;
  bool overflow;

  /* Size types *are* sign extended.  */
  sign_extended_type = (!TYPE_UNSIGNED (type)
                        || (TREE_CODE (type) == INTEGER_TYPE
                            && TYPE_IS_SIZETYPE (type)));

  overflow = fit_double_type (low, high, &low, &high, type);

  /* If we need to set overflow flags, return a new unshared node.  */
  if (overflowed || overflow)
    {
      if (overflowed
          || overflowable < 0
          || (overflowable > 0 && sign_extended_type))
        {
          tree t = make_node (INTEGER_CST);
          TREE_INT_CST_LOW (t) = low;
          TREE_INT_CST_HIGH (t) = high;
          TREE_TYPE (t) = type;
          TREE_OVERFLOW (t) = 1;
          return t;
        }
    }

  /* Else build a shared node.  */
  return build_int_cst_wide (type, low, high);
}

/* Add two doubleword integers with doubleword result.
   Return nonzero if the operation overflows according to UNSIGNED_P.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
add_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                      unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
                      unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
                      bool unsigned_p)
{
  unsigned HOST_WIDE_INT l;
  HOST_WIDE_INT h;

  l = l1 + l2;
  h = h1 + h2 + (l < l1);

  *lv = l;
  *hv = h;

  if (unsigned_p)
    return (unsigned HOST_WIDE_INT) h < (unsigned HOST_WIDE_INT) h1;
  else
    return OVERFLOW_SUM_SIGN (h1, h2, h);
}

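/* Illustrative sketch only: the carry out of the low word is detected
   by the unsigned comparison (l < l1) used above.  With 8-bit words,
   0xff + 0x01 wraps to 0x00, and 0x00 < 0xff reports the carry that
   must be added into the high word.  */

static unsigned int
example_two_word_add_low (unsigned int l1, unsigned int l2,
                          unsigned int *carry_out)
{
  unsigned int l = l1 + l2;     /* may wrap */
  *carry_out = (l < l1);        /* wrapped iff the sum is below an addend */
  return l;
}
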
/* Negate a doubleword integer with doubleword result.
   Return nonzero if the operation overflows, assuming it's signed.
   The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
            unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  if (l1 == 0)
    {
      *lv = 0;
      *hv = - h1;
      return (*hv & h1) < 0;
    }
  else
    {
      *lv = -l1;
      *hv = ~h1;
      return 0;
    }
}

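/* Illustrative sketch only: two's complement negation of a double-word
   value negates the low word and complements the high word, adding the
   carry (i.e. ~h + 1 exactly when the low word was zero).  The special
   case above additionally reports nonzero when negating the most
   negative value, which has no positive counterpart.  */

static void
example_neg_two_words (unsigned int l, int h,
                       unsigned int *lv, int *hv)
{
  *lv = -l;
  *hv = ~h + (l == 0);          /* propagate the carry out of -l */
}
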
/* Multiply two doubleword integers with doubleword result.
   Return nonzero if the operation overflows according to UNSIGNED_P.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
mul_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                      unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
                      unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
                      bool unsigned_p)
{
  HOST_WIDE_INT arg1[4];
  HOST_WIDE_INT arg2[4];
  HOST_WIDE_INT prod[4 * 2];
  unsigned HOST_WIDE_INT carry;
  int i, j, k;
  unsigned HOST_WIDE_INT toplow, neglow;
  HOST_WIDE_INT tophigh, neghigh;

  encode (arg1, l1, h1);
  encode (arg2, l2, h2);

  memset (prod, 0, sizeof prod);

  for (i = 0; i < 4; i++)
    {
      carry = 0;
      for (j = 0; j < 4; j++)
        {
          k = i + j;
          /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000.  */
          carry += arg1[i] * arg2[j];
          /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF.  */
          carry += prod[k];
          prod[k] = LOWPART (carry);
          carry = HIGHPART (carry);
        }
      prod[i + 4] = carry;
    }

  decode (prod, lv, hv);
  decode (prod + 4, &toplow, &tophigh);

  /* Unsigned overflow is immediate.  */
  if (unsigned_p)
    return (toplow | tophigh) != 0;

  /* Check for signed overflow by calculating the signed representation of the
     top half of the result; it should agree with the low half's sign bit.  */
  if (h1 < 0)
    {
      neg_double (l2, h2, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  if (h2 < 0)
    {
      neg_double (l1, h1, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
}

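/* Illustrative sketch only: the same schoolbook scheme on 8-bit
   digits.  Each partial product of two 8-bit digits fits in 16 bits,
   so `carry' can accumulate a digit product plus a previous digit
   without overflowing, exactly the headroom the half-word loops above
   rely on.  */

static void
example_mul_digits (const unsigned char a[2], const unsigned char b[2],
                    unsigned char prod[4])
{
  int i, j;
  memset (prod, 0, 4);
  for (i = 0; i < 2; i++)
    {
      unsigned int carry = 0;
      for (j = 0; j < 2; j++)
        {
          carry += (unsigned int) a[i] * b[j] + prod[i + j];
          prod[i + j] = carry & 0xff;
          carry >>= 8;
        }
      prod[i + 2] = carry;
    }
}
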
/* Shift the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Shift right if COUNT is negative.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
               HOST_WIDE_INT count, unsigned int prec,
               unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
{
  unsigned HOST_WIDE_INT signmask;

  if (count < 0)
    {
      rshift_double (l1, h1, -count, prec, lv, hv, arith);
      return;
    }

  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
         ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
      *lv = 0;
    }
  else
    {
      *hv = (((unsigned HOST_WIDE_INT) h1 << count)
             | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
      *lv = l1 << count;
    }

  /* Sign extend all bits that are beyond the precision.  */

  signmask = -((prec > HOST_BITS_PER_WIDE_INT
                ? ((unsigned HOST_WIDE_INT) *hv
                   >> (prec - HOST_BITS_PER_WIDE_INT - 1))
                : (*lv >> (prec - 1))) & 1);

  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
      *lv |= signmask << prec;
    }
}

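/* Illustrative sketch only: the expression
   (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1) above fetches the
   bits of L1 that move into the high word without ever shifting by a
   full word width, which C leaves undefined.  When count == 0 the two
   shifts total the word width and yield 0, as required.  Assuming a
   32-bit unsigned int and 0 <= count < 32: */

static unsigned int
example_high_bits (unsigned int l1, int count)
{
  /* Equivalent in intent to l1 >> (32 - count), but well defined
     for count == 0.  */
  return l1 >> (32 - count - 1) >> 1;
}
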
/* Shift the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
               HOST_WIDE_INT count, unsigned int prec,
               unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
               int arith)
{
  unsigned HOST_WIDE_INT signmask;

  signmask = (arith
              ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
              : 0);

  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
         ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = 0;
      *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = (unsigned HOST_WIDE_INT) h1 >> count;
      *lv = ((l1 >> count)
             | ((unsigned HOST_WIDE_INT) h1 << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
    }

  /* Zero / sign extend all bits that are beyond the precision.  */

  if (count >= (HOST_WIDE_INT)prec)
    {
      *hv = signmask;
      *lv = signmask;
    }
  else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
      *lv |= signmask << (prec - count);
    }
}

/* Rotate the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Rotate right if COUNT is negative.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                HOST_WIDE_INT count, unsigned int prec,
                unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}

/* Rotate the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                HOST_WIDE_INT count, unsigned int prec,
                unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}

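/* Illustrative sketch only: a rotate is the OR of two opposite
   logical shifts, which is exactly what the two calls above compute
   for double-word values.  For a single 32-bit word: */

static unsigned int
example_rotate_left (unsigned int x, unsigned int count)
{
  count %= 32;
  if (count == 0)
    return x;                   /* avoid the undefined shift by 32 */
  return (x << count) | (x >> (32 - count));
}
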
/* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
   for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
   CODE is a tree code for a kind of division, one of
   TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
   or EXACT_DIV_EXPR.
   It controls how the quotient is rounded to an integer.
   Return nonzero if the operation overflows.
   UNS nonzero says do unsigned division.  */

int
div_and_round_double (enum tree_code code, int uns,
                      unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
                      HOST_WIDE_INT hnum_orig,
                      unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
                      HOST_WIDE_INT hden_orig,
                      unsigned HOST_WIDE_INT *lquo,
                      HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
                      HOST_WIDE_INT *hrem)
{
  int quo_neg = 0;
  HOST_WIDE_INT num[4 + 1];     /* extra element for scaling.  */
  HOST_WIDE_INT den[4], quo[4];
  int i, j;
  unsigned HOST_WIDE_INT work;
  unsigned HOST_WIDE_INT carry = 0;
  unsigned HOST_WIDE_INT lnum = lnum_orig;
  HOST_WIDE_INT hnum = hnum_orig;
  unsigned HOST_WIDE_INT lden = lden_orig;
  HOST_WIDE_INT hden = hden_orig;
  int overflow = 0;

  if (hden == 0 && lden == 0)
    overflow = 1, lden = 1;

  /* Calculate quotient sign and convert operands to unsigned.  */
  if (!uns)
    {
      if (hnum < 0)
        {
          quo_neg = ~ quo_neg;
          /* (minimum integer) / (-1) is the only overflow case.  */
          if (neg_double (lnum, hnum, &lnum, &hnum)
              && ((HOST_WIDE_INT) lden & hden) == -1)
            overflow = 1;
        }
      if (hden < 0)
        {
          quo_neg = ~ quo_neg;
          neg_double (lden, hden, &lden, &hden);
        }
    }

  if (hnum == 0 && hden == 0)
    {                           /* single precision */
      *hquo = *hrem = 0;
      /* This unsigned division rounds toward zero.  */
      *lquo = lnum / lden;
      goto finish_up;
    }

  if (hnum == 0)
    {                           /* trivial case: dividend < divisor */
      /* hden != 0 already checked.  */
      *hquo = *lquo = 0;
      *hrem = hnum;
      *lrem = lnum;
      goto finish_up;
    }

  memset (quo, 0, sizeof quo);

  memset (num, 0, sizeof num);  /* to zero 9th element */
  memset (den, 0, sizeof den);

  encode (num, lnum, hnum);
  encode (den, lden, hden);

  /* Special code for when the divisor < BASE.  */
  if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
    {
      /* hnum != 0 already checked.  */
      for (i = 4 - 1; i >= 0; i--)
        {
          work = num[i] + carry * BASE;
          quo[i] = work / lden;
          carry = work % lden;
        }
    }
  else
    {
      /* Full double precision division,
         with thanks to Don Knuth's "Seminumerical Algorithms".  */
      int num_hi_sig, den_hi_sig;
      unsigned HOST_WIDE_INT quo_est, scale;

      /* Find the highest nonzero divisor digit.  */
      for (i = 4 - 1;; i--)
        if (den[i] != 0)
          {
            den_hi_sig = i;
            break;
          }

      /* Ensure that the first digit of the divisor is at least BASE/2.
         This is required by the quotient digit estimation algorithm.  */

      scale = BASE / (den[den_hi_sig] + 1);
      if (scale > 1)
        {                       /* scale divisor and dividend */
          carry = 0;
          for (i = 0; i <= 4 - 1; i++)
            {
              work = (num[i] * scale) + carry;
              num[i] = LOWPART (work);
              carry = HIGHPART (work);
            }

          num[4] = carry;
          carry = 0;
          for (i = 0; i <= 4 - 1; i++)
            {
              work = (den[i] * scale) + carry;
              den[i] = LOWPART (work);
              carry = HIGHPART (work);
              if (den[i] != 0) den_hi_sig = i;
            }
        }

      num_hi_sig = 4;

      /* Main loop */
      for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
        {
          /* Guess the next quotient digit, quo_est, by dividing the first
             two remaining dividend digits by the high order quotient digit.
             quo_est is never low and is at most 2 high.  */
          unsigned HOST_WIDE_INT tmp;

          num_hi_sig = i + den_hi_sig + 1;
          work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
          if (num[num_hi_sig] != den[den_hi_sig])
            quo_est = work / den[den_hi_sig];
          else
            quo_est = BASE - 1;

          /* Refine quo_est so it's usually correct, and at most one high.  */
          tmp = work - quo_est * den[den_hi_sig];
          if (tmp < BASE
              && (den[den_hi_sig - 1] * quo_est
                  > (tmp * BASE + num[num_hi_sig - 2])))
            quo_est--;

          /* Try QUO_EST as the quotient digit, by multiplying the
             divisor by QUO_EST and subtracting from the remaining dividend.
             Keep in mind that QUO_EST is the I - 1st digit.  */

          carry = 0;
          for (j = 0; j <= den_hi_sig; j++)
            {
              work = quo_est * den[j] + carry;
              carry = HIGHPART (work);
              work = num[i + j] - LOWPART (work);
              num[i + j] = LOWPART (work);
              carry += HIGHPART (work) != 0;
            }

          /* If quo_est was high by one, then num[i] went negative and
             we need to correct things.  */
          if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
            {
              quo_est--;
              carry = 0;        /* add divisor back in */
              for (j = 0; j <= den_hi_sig; j++)
                {
                  work = num[i + j] + den[j] + carry;
                  carry = HIGHPART (work);
                  num[i + j] = LOWPART (work);
                }

              num[num_hi_sig] += carry;
            }

          /* Store the quotient digit.  */
          quo[i] = quo_est;
        }
    }

  decode (quo, lquo, hquo);

 finish_up:
  /* If result is negative, make it so.  */
  if (quo_neg)
    neg_double (*lquo, *hquo, lquo, hquo);

  /* Compute trial remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);

  switch (code)
    {
    case TRUNC_DIV_EXPR:
    case TRUNC_MOD_EXPR:        /* round toward zero */
    case EXACT_DIV_EXPR:        /* for this one, it shouldn't matter */
      return overflow;

    case FLOOR_DIV_EXPR:
    case FLOOR_MOD_EXPR:        /* round toward negative infinity */
      if (quo_neg && (*lrem != 0 || *hrem != 0))   /* ratio < 0 && rem != 0 */
        {
          /* quo = quo - 1;  */
          add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
                      lquo, hquo);
        }
      else
        return overflow;
      break;

    case CEIL_DIV_EXPR:
    case CEIL_MOD_EXPR:         /* round toward positive infinity */
      if (!quo_neg && (*lrem != 0 || *hrem != 0))  /* ratio > 0 && rem != 0 */
        {
          add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
                      lquo, hquo);
        }
      else
        return overflow;
      break;

    case ROUND_DIV_EXPR:
    case ROUND_MOD_EXPR:        /* round to closest integer */
      {
        unsigned HOST_WIDE_INT labs_rem = *lrem;
        HOST_WIDE_INT habs_rem = *hrem;
        unsigned HOST_WIDE_INT labs_den = lden, ltwice;
        HOST_WIDE_INT habs_den = hden, htwice;

        /* Get absolute values.  */
        if (*hrem < 0)
          neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
        if (hden < 0)
          neg_double (lden, hden, &labs_den, &habs_den);

        /* If (2 * abs (lrem) >= abs (lden)), adjust the quotient.  */
        mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
                    labs_rem, habs_rem, &ltwice, &htwice);

        if (((unsigned HOST_WIDE_INT) habs_den
             < (unsigned HOST_WIDE_INT) htwice)
            || (((unsigned HOST_WIDE_INT) habs_den
                 == (unsigned HOST_WIDE_INT) htwice)
                && (labs_den <= ltwice)))
          {
            if (*hquo < 0)
              /* quo = quo - 1;  */
              add_double (*lquo, *hquo,
                          (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
            else
              /* quo = quo + 1;  */
              add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
                          lquo, hquo);
          }
        else
          return overflow;
      }
      break;

    default:
      gcc_unreachable ();
    }

  /* Compute true remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
  return overflow;
}

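/* Illustrative sketch only: how the quotient adjustments above relate
   the rounding modes to a truncating division.  E.g. -7 / 2: trunc
   gives -3; floor subtracts 1 more (-4); ceil of 7 / 2 adds 1 (4);
   round moves away from zero when 2*|rem| >= |den|.  Assumes the C99
   rule that `/' truncates toward zero.  */

static long
example_round_quotient (enum tree_code code, long num, long den)
{
  long quo = num / den;
  long rem = num % den;

  if (rem != 0)
    switch (code)
      {
      case FLOOR_DIV_EXPR:
        if ((num < 0) != (den < 0))
          quo -= 1;
        break;
      case CEIL_DIV_EXPR:
        if ((num < 0) == (den < 0))
          quo += 1;
        break;
      case ROUND_DIV_EXPR:
        if (2 * (rem < 0 ? -rem : rem) >= (den < 0 ? -den : den))
          quo += ((num < 0) != (den < 0)) ? -1 : 1;
        break;
      default:
        break;
      }
  return quo;
}
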
/* If ARG2 divides ARG1 with zero remainder, carries out the division
   of type CODE and returns the quotient.
   Otherwise returns NULL_TREE.  */

tree
div_if_zero_remainder (enum tree_code code, const_tree arg1, const_tree arg2)
{
  unsigned HOST_WIDE_INT int1l, int2l;
  HOST_WIDE_INT int1h, int2h;
  unsigned HOST_WIDE_INT quol, reml;
  HOST_WIDE_INT quoh, remh;
  tree type = TREE_TYPE (arg1);
  int uns = TYPE_UNSIGNED (type);

  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  /* &obj[0] + -128 really should be compiled as &obj[-8] rather than
     &obj[some_exotic_number].  */
  if (POINTER_TYPE_P (type))
    {
      uns = false;
      type = signed_type_for (type);
      fit_double_type (int1l, int1h, &int1l, &int1h,
                       type);
    }
  else
    fit_double_type (int1l, int1h, &int1l, &int1h, type);
  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

  div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
                        &quol, &quoh, &reml, &remh);
  if (remh != 0 || reml != 0)
    return NULL_TREE;

  return build_int_cst_wide (type, quol, quoh);
}

/* This is nonzero if we should defer warnings about undefined
   overflow.  This facility exists because these warnings are a
   special case.  The code to estimate loop iterations does not want
   to issue any warnings, since it works with expressions which do not
   occur in user code.  Various bits of cleanup code call fold(), but
   only use the result if it has certain characteristics (e.g., is a
   constant); that code only wants to issue a warning if the result is
   used.  */

static int fold_deferring_overflow_warnings;

/* If a warning about undefined overflow is deferred, this is the
   warning.  Note that this may cause us to turn two warnings into
   one, but that is fine since it is sufficient to only give one
   warning per expression.  */

static const char* fold_deferred_overflow_warning;

/* If a warning about undefined overflow is deferred, this is the
   level at which the warning should be emitted.  */

static enum warn_strict_overflow_code fold_deferred_overflow_code;

/* Start deferring overflow warnings.  We could use a stack here to
   permit nested calls, but at present it is not necessary.  */

void
fold_defer_overflow_warnings (void)
{
  ++fold_deferring_overflow_warnings;
}

/* Stop deferring overflow warnings.  If there is a pending warning,
   and ISSUE is true, then issue the warning if appropriate.  STMT is
   the statement with which the warning should be associated (used for
   location information); STMT may be NULL.  CODE is the level of the
   warning--a warn_strict_overflow_code value.  This function will use
   the smaller of CODE and the deferred code when deciding whether to
   issue the warning.  CODE may be zero to mean to always use the
   deferred code.  */

void
fold_undefer_overflow_warnings (bool issue, const_gimple stmt, int code)
{
  const char *warnmsg;
  location_t locus;

  gcc_assert (fold_deferring_overflow_warnings > 0);
  --fold_deferring_overflow_warnings;
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning != NULL
          && code != 0
          && code < (int) fold_deferred_overflow_code)
        fold_deferred_overflow_code = code;
      return;
    }

  warnmsg = fold_deferred_overflow_warning;
  fold_deferred_overflow_warning = NULL;

  if (!issue || warnmsg == NULL)
    return;

  if (gimple_no_warning_p (stmt))
    return;

  /* Use the smallest code level when deciding to issue the
     warning.  */
  if (code == 0 || code > (int) fold_deferred_overflow_code)
    code = fold_deferred_overflow_code;

  if (!issue_strict_overflow_warning (code))
    return;

  if (stmt == NULL)
    locus = input_location;
  else
    locus = gimple_location (stmt);
  warning (OPT_Wstrict_overflow, "%H%s", &locus, warnmsg);
}

/* Stop deferring overflow warnings, ignoring any deferred
   warnings.  */

void
fold_undefer_and_ignore_overflow_warnings (void)
{
  fold_undefer_overflow_warnings (false, NULL, 0);
}

/* Whether we are deferring overflow warnings.  */

bool
fold_deferring_overflow_warnings_p (void)
{
  return fold_deferring_overflow_warnings > 0;
}

/* This is called when we fold something based on the fact that signed
   overflow is undefined.  */

static void
fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
{
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning == NULL
          || wc < fold_deferred_overflow_code)
        {
          fold_deferred_overflow_warning = gmsgid;
          fold_deferred_overflow_code = wc;
        }
    }
  else if (issue_strict_overflow_warning (wc))
    warning (OPT_Wstrict_overflow, gmsgid);
}

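/* Illustrative sketch only of the intended calling pattern for the
   machinery above: a caller that folds speculatively defers warnings,
   inspects the result, and only lets a warning escape when the folded
   value will actually be used.  The helper name is hypothetical.  */

static tree
example_fold_speculatively (tree expr)
{
  tree folded;

  fold_defer_overflow_warnings ();
  folded = fold (expr);
  /* Only warn if we keep the folded form; pass NULL for the statement
     and 0 to use the deferred warning's own severity level.  */
  fold_undefer_overflow_warnings (TREE_CONSTANT (folded), NULL, 0);
  return folded;
}
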
/* Return true if the built-in mathematical function specified by CODE
   is odd, i.e. -f(x) == f(-x).  */

static bool
negate_mathfn_p (enum built_in_function code)
{
  switch (code)
    {
    CASE_FLT_FN (BUILT_IN_ASIN):
    CASE_FLT_FN (BUILT_IN_ASINH):
    CASE_FLT_FN (BUILT_IN_ATAN):
    CASE_FLT_FN (BUILT_IN_ATANH):
    CASE_FLT_FN (BUILT_IN_CASIN):
    CASE_FLT_FN (BUILT_IN_CASINH):
    CASE_FLT_FN (BUILT_IN_CATAN):
    CASE_FLT_FN (BUILT_IN_CATANH):
    CASE_FLT_FN (BUILT_IN_CBRT):
    CASE_FLT_FN (BUILT_IN_CPROJ):
    CASE_FLT_FN (BUILT_IN_CSIN):
    CASE_FLT_FN (BUILT_IN_CSINH):
    CASE_FLT_FN (BUILT_IN_CTAN):
    CASE_FLT_FN (BUILT_IN_CTANH):
    CASE_FLT_FN (BUILT_IN_ERF):
    CASE_FLT_FN (BUILT_IN_LLROUND):
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_ROUND):
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_SINH):
    CASE_FLT_FN (BUILT_IN_TAN):
    CASE_FLT_FN (BUILT_IN_TANH):
    CASE_FLT_FN (BUILT_IN_TRUNC):
      return true;

    CASE_FLT_FN (BUILT_IN_LLRINT):
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_NEARBYINT):
    CASE_FLT_FN (BUILT_IN_RINT):
      return !flag_rounding_math;

    default:
      break;
    }
  return false;
}

/* Check whether we may negate an integer constant T without causing
   overflow.  */

static bool
may_negate_without_overflow_p (const_tree t)
{
  unsigned HOST_WIDE_INT val;
  unsigned int prec;
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  prec = TYPE_PRECISION (type);
  if (prec > HOST_BITS_PER_WIDE_INT)
    {
      if (TREE_INT_CST_LOW (t) != 0)
        return true;
      prec -= HOST_BITS_PER_WIDE_INT;
      val = TREE_INT_CST_HIGH (t);
    }
  else
    val = TREE_INT_CST_LOW (t);
  if (prec < HOST_BITS_PER_WIDE_INT)
    val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
  return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
}

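/* Illustrative sketch only: the check above guards the one signed
   value with no negation.  For 32-bit int that is 0x80000000
   (INT_MIN), since -INT_MIN wraps back to INT_MIN in two's complement
   arithmetic.  */

static int
example_may_negate_int32 (int v)
{
  return (unsigned int) v != 0x80000000u;
}
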
/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (TYPE_OVERFLOW_WRAPS (type))
        return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);
    case BIT_NOT_EXPR:
      return (INTEGRAL_TYPE_P (type)
              && TYPE_OVERFLOW_WRAPS (type));

    case FIXED_CST:
    case REAL_CST:
    case NEGATE_EXPR:
      return true;

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
             && negate_expr_p (TREE_IMAGPART (t));

    case COMPLEX_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0))
             && negate_expr_p (TREE_OPERAND (t, 1));

    case CONJ_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          || HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
        return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
          && reorder_operands_p (TREE_OPERAND (t, 0),
                                 TREE_OPERAND (t, 1)))
        return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
             && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
             && reorder_operands_p (TREE_OPERAND (t, 0),
                                    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
        return negate_expr_p (TREE_OPERAND (t, 1))
               || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  But if overflow is
         undefined, we can negate, because - (INT_MIN / 1) is an
         overflow.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (t))
          && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
        break;
      return negate_expr_p (TREE_OPERAND (t, 1))
             || negate_expr_p (TREE_OPERAND (t, 0));

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tree tem = strip_float_extensions (t);
          if (tem != t)
            return negate_expr_p (tem);
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
        return negate_expr_p (CALL_EXPR_ARG (t, 0));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            return true;
        }
      break;

    default:
      break;
    }
  return false;
}

/* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
   simplification is possible.
   If negate_expr_p would return true for T, NULL_TREE will never be
   returned.  */

static tree
fold_negate_expr (tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
        return fold_build2 (PLUS_EXPR, type, TREE_OPERAND (t, 0),
                            build_int_cst (type, 1));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
          || !TYPE_OVERFLOW_TRAPS (type))
        return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      /* Two's complement FP formats, such as c4x, may overflow.  */
      if (!TREE_OVERFLOW (tem) || !flag_trapping_math)
        return tem;
      break;

    case FIXED_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case COMPLEX_CST:
      {
        tree rpart = negate_expr (TREE_REALPART (t));
        tree ipart = negate_expr (TREE_IMAGPART (t));

        if ((TREE_CODE (rpart) == REAL_CST
             && TREE_CODE (ipart) == REAL_CST)
            || (TREE_CODE (rpart) == INTEGER_CST
                && TREE_CODE (ipart) == INTEGER_CST))
          return build_complex (type, rpart, ipart);
      }
      break;

    case COMPLEX_EXPR:
      if (negate_expr_p (t))
        return fold_build2 (COMPLEX_EXPR, type,
                            fold_negate_expr (TREE_OPERAND (t, 0)),
                            fold_negate_expr (TREE_OPERAND (t, 1)));
      break;

    case CONJ_EXPR:
      if (negate_expr_p (t))
        return fold_build1 (CONJ_EXPR, type,
                            fold_negate_expr (TREE_OPERAND (t, 0)));
      break;

    case NEGATE_EXPR:
      return TREE_OPERAND (t, 0);

    case PLUS_EXPR:
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
        {
          /* -(A + B) -> (-B) - A.  */
          if (negate_expr_p (TREE_OPERAND (t, 1))
              && reorder_operands_p (TREE_OPERAND (t, 0),
                                     TREE_OPERAND (t, 1)))
            {
              tem = negate_expr (TREE_OPERAND (t, 1));
              return fold_build2 (MINUS_EXPR, type,
                                  tem, TREE_OPERAND (t, 0));
            }

          /* -(A + B) -> (-A) - B.  */
          if (negate_expr_p (TREE_OPERAND (t, 0)))
            {
              tem = negate_expr (TREE_OPERAND (t, 0));
              return fold_build2 (MINUS_EXPR, type,
                                  tem, TREE_OPERAND (t, 1));
            }
        }
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
          && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
        return fold_build2 (MINUS_EXPR, type,
                            TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
        {
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            return fold_build2 (TREE_CODE (t), type,
                                TREE_OPERAND (t, 0), negate_expr (tem));
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            return fold_build2 (TREE_CODE (t), type,
                                negate_expr (tem), TREE_OPERAND (t, 1));
        }
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  But if overflow is
         undefined, we can negate, because - (INT_MIN / 1) is an
         overflow.  */
      if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
        {
          const char * const warnmsg = G_("assuming signed overflow does not "
                                          "occur when negating a division");
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            {
              if (INTEGRAL_TYPE_P (type)
                  && (TREE_CODE (tem) != INTEGER_CST
                      || integer_onep (tem)))
                fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
              return fold_build2 (TREE_CODE (t), type,
                                  TREE_OPERAND (t, 0), negate_expr (tem));
            }
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            {
              if (INTEGRAL_TYPE_P (type)
                  && (TREE_CODE (tem) != INTEGER_CST
                      || tree_int_cst_equal (tem, TYPE_MIN_VALUE (type))))
                fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
              return fold_build2 (TREE_CODE (t), type,
                                  negate_expr (tem), TREE_OPERAND (t, 1));
            }
        }
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tem = strip_float_extensions (t);
          if (tem != t && negate_expr_p (tem))
            return fold_convert (type, negate_expr (tem));
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
          && negate_expr_p (CALL_EXPR_ARG (t, 0)))
        {
          tree fndecl, arg;

          fndecl = get_callee_fndecl (t);
          arg = negate_expr (CALL_EXPR_ARG (t, 0));
          return build_call_expr (fndecl, 1, arg);
        }
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            {
              tree ntype = TYPE_UNSIGNED (type)
                           ? signed_type_for (type)
                           : unsigned_type_for (type);
              tree temp = fold_convert (ntype, TREE_OPERAND (t, 0));
              temp = fold_build2 (RSHIFT_EXPR, ntype, temp, op1);
              return fold_convert (type, temp);
            }
        }
      break;

    default:
      break;
    }

  return NULL_TREE;
}

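/* Illustrative sketch only: why -((int)x >> 31) equals
   (unsigned)x >> 31 when the shift count is precision - 1.  An
   arithmetic shift by 31 yields 0 or -1 depending on the sign bit;
   negating that gives 0 or 1, exactly the logical shift of the sign
   bit.  Assumes 32-bit int and arithmetic right shifts of negative
   values (implementation-defined in C, but what GCC does).  */

static int
example_negate_rshift (int x)
{
  return (int) ((unsigned int) x >> 31);        /* == -(x >> 31) */
}
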
/* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T cannot be
   negated in a simpler way.  Also allow for T to be NULL_TREE, in which case
   return NULL_TREE.  */

static tree
negate_expr (tree t)
{
  tree type, tem;

  if (t == NULL_TREE)
    return NULL_TREE;

  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  tem = fold_negate_expr (t);
  if (!tem)
    tem = build1 (NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert (type, tem);
}

/* Split a tree IN into a constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
            tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
      || TREE_CODE (in) == FIXED_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
           || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
               && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
               /* We can associate addition and subtraction together (even
                  though the C standard doesn't say so) for integers because
                  the value is not affected.  For reals, the value might be
                  affected, so we can't.  */
               && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
                   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
          || TREE_CODE (op0) == FIXED_CST)
        *litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
               || TREE_CODE (op1) == FIXED_CST)
        *litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
        *conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
        *conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
         decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
        var = in;
      else if (op0 != 0)
        var = op0;
      else
        var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
        *minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
        *conp = negate_expr (*conp);
      if (neg_var_p)
        var = negate_expr (var);
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
        *minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
        *litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}

/* Re-associate trees split by the above function.  T1 and T2 are either
   expressions to associate or null.  Return the new expression, if any.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
        {
          if (TREE_CODE (t1) == NEGATE_EXPR)
            return build2 (MINUS_EXPR, type, fold_convert (type, t2),
                           fold_convert (type, TREE_OPERAND (t1, 0)));
          else if (TREE_CODE (t2) == NEGATE_EXPR)
            return build2 (MINUS_EXPR, type, fold_convert (type, t1),
                           fold_convert (type, TREE_OPERAND (t2, 0)));
          else if (integer_zerop (t2))
            return fold_convert (type, t1);
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (t2))
            return fold_convert (type, t1);
        }

      return build2 (code, type, fold_convert (type, t1),
                     fold_convert (type, t2));
    }

  return fold_build2 (code, type, fold_convert (type, t1),
                      fold_convert (type, t2));
}

/* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
   for use in int_const_binop, size_binop and size_diffop.  */

static bool
int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
{
  if (TREE_CODE (type1) != INTEGER_TYPE && !POINTER_TYPE_P (type1))
    return false;
  if (TREE_CODE (type2) != INTEGER_TYPE && !POINTER_TYPE_P (type2))
    return false;

  switch (code)
    {
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      return true;

    default:
      break;
    }

  return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
         && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
         && TYPE_MODE (type1) == TYPE_MODE (type2);
}

/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.  Return NULL_TREE if we don't know how
   to evaluate CODE at compile-time.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

tree
int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2, int notrunc)
{
  unsigned HOST_WIDE_INT int1l, int2l;
  HOST_WIDE_INT int1h, int2h;
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT hi;
  unsigned HOST_WIDE_INT garbagel;
  HOST_WIDE_INT garbageh;
  tree t;
  tree type = TREE_TYPE (arg1);
  int uns = TYPE_UNSIGNED (type);
  int is_sizetype
    = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
  int overflow = 0;

  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

  switch (code)
    {
    case BIT_IOR_EXPR:
      low = int1l | int2l, hi = int1h | int2h;
      break;

    case BIT_XOR_EXPR:
      low = int1l ^ int2l, hi = int1h ^ int2h;
      break;

    case BIT_AND_EXPR:
      low = int1l & int2l, hi = int1h & int2h;
      break;

    case RSHIFT_EXPR:
      int2l = -int2l;
    case LSHIFT_EXPR:
      /* It's unclear from the C standard whether shifts can overflow.
         The following code ignores overflow; perhaps a C standard
         interpretation ruling is needed.  */
      lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
                     &low, &hi, !uns);
      break;

    case RROTATE_EXPR:
      int2l = - int2l;
    case LROTATE_EXPR:
      lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
                      &low, &hi);
      break;

    case PLUS_EXPR:
      overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case MINUS_EXPR:
      neg_double (int2l, int2h, &low, &hi);
      add_double (int1l, int1h, low, hi, &low, &hi);
      overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
      break;

    case MULT_EXPR:
      overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
          && !TREE_OVERFLOW (arg1)
          && !TREE_OVERFLOW (arg2)
          && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
        {
          if (code == CEIL_DIV_EXPR)
            int1l += int2l - 1;

          low = int1l / int2l, hi = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_DIV_EXPR:
      if (int2h == 0 && int2l == 0)
        return NULL_TREE;
      if (int2h == 0 && int2l == 1)
        {
          low = int1l, hi = int1h;
          break;
        }
      if (int1l == int2l && int1h == int2h
          && ! (int1l == 0 && int1h == 0))
        {
          low = 1, hi = 0;
          break;
        }
      overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
                                       &low, &hi, &garbagel, &garbageh);
      break;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
          && !TREE_OVERFLOW (arg1)
          && !TREE_OVERFLOW (arg2)
          && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
        {
          if (code == CEIL_MOD_EXPR)
            int1l += int2l - 1;
          low = int1l % int2l, hi = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_MOD_EXPR:
      if (int2h == 0 && int2l == 0)
        return NULL_TREE;
      overflow = div_and_round_double (code, uns,
                                       int1l, int1h, int2l, int2h,
                                       &garbagel, &garbageh, &low, &hi);
      break;

    case MIN_EXPR:
    case MAX_EXPR:
      if (uns)
        low = (((unsigned HOST_WIDE_INT) int1h
                < (unsigned HOST_WIDE_INT) int2h)
               || (((unsigned HOST_WIDE_INT) int1h
                    == (unsigned HOST_WIDE_INT) int2h)
                   && int1l < int2l));
      else
        low = (int1h < int2h
               || (int1h == int2h && int1l < int2l));

      if (low == (code == MIN_EXPR))
        low = int1l, hi = int1h;
      else
        low = int2l, hi = int2h;
      break;

    default:
      return NULL_TREE;
    }

  if (notrunc)
    {
      t = build_int_cst_wide (TREE_TYPE (arg1), low, hi);

      /* Propagate overflow flags ourselves.  */
      if (((!uns || is_sizetype) && overflow)
          | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
        {
          t = copy_node (t);
          TREE_OVERFLOW (t) = 1;
        }
    }
  else
    t = force_fit_type_double (TREE_TYPE (arg1), low, hi, 1,
                               ((!uns || is_sizetype) && overflow)
                               | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));

  return t;
}

/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.  Return zero if
   combining the constants is not allowed in the current operating mode.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
{
  /* Sanity check for the recursive cases.  */
  if (!arg1 || !arg2)
    return NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return int_const_binop (code, arg1, arg2, notrunc);

  if (TREE_CODE (arg1) == REAL_CST)
    {
      enum machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      /* The following codes are handled by real_arithmetic.  */
      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
        case MULT_EXPR:
        case RDIV_EXPR:
        case MIN_EXPR:
        case MAX_EXPR:
          break;

        default:
          return NULL_TREE;
        }

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
         either operand is a NaN.  */
      if (HONOR_SNANS (mode)
          && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
        return NULL_TREE;

      /* Don't perform operation if it would raise a division
         by zero exception.  */
      if (code == RDIV_EXPR
          && REAL_VALUES_EQUAL (d2, dconst0)
          && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
        return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
         for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
        return arg1;
      else if (REAL_VALUE_ISNAN (d2))
        return arg2;

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if
         the result has overflowed and flag_trapping_math.  */
      if (flag_trapping_math
          && MODE_HAS_INFINITIES (mode)
          && REAL_VALUE_ISINF (result)
          && !REAL_VALUE_ISINF (d1)
          && !REAL_VALUE_ISINF (d2))
        return NULL_TREE;

      /* Don't constant fold this floating point operation if the
         result may depend upon the run-time rounding mode and
         flag_rounding_math is set, or if GCC's software emulation
         is unable to accurately represent the result.  */
      if ((flag_rounding_math
           || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
          && (inexact || !real_identical (&result, &value)))
        return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      return t;
    }

  if (TREE_CODE (arg1) == FIXED_CST)
    {
      FIXED_VALUE_TYPE f1;
      FIXED_VALUE_TYPE f2;
      FIXED_VALUE_TYPE result;
      tree t, type;
      int sat_p;
      bool overflow_p;

      /* The following codes are handled by fixed_arithmetic.  */
      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
        case MULT_EXPR:
        case TRUNC_DIV_EXPR:
          f2 = TREE_FIXED_CST (arg2);
          break;

        case LSHIFT_EXPR:
        case RSHIFT_EXPR:
          f2.data.high = TREE_INT_CST_HIGH (arg2);
          f2.data.low = TREE_INT_CST_LOW (arg2);
          f2.mode = SImode;
          break;

        default:
          return NULL_TREE;
        }

      f1 = TREE_FIXED_CST (arg1);
      type = TREE_TYPE (arg1);
      sat_p = TYPE_SATURATING (type);
      overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
      t = build_fixed (type, result);
      /* Propagate overflow flags.  */
      if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
        {
          TREE_OVERFLOW (t) = 1;
          TREE_CONSTANT_OVERFLOW (t) = 1;
        }
      else if (TREE_CONSTANT_OVERFLOW (arg1) | TREE_CONSTANT_OVERFLOW (arg2))
        TREE_CONSTANT_OVERFLOW (t) = 1;
      return t;
    }

  if (TREE_CODE (arg1) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree real, imag;

      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
          real = const_binop (code, r1, r2, notrunc);
          imag = const_binop (code, i1, i2, notrunc);
          break;

        case MULT_EXPR:
          real = const_binop (MINUS_EXPR,
                              const_binop (MULT_EXPR, r1, r2, notrunc),
                              const_binop (MULT_EXPR, i1, i2, notrunc),
                              notrunc);
          imag = const_binop (PLUS_EXPR,
                              const_binop (MULT_EXPR, r1, i2, notrunc),
                              const_binop (MULT_EXPR, i1, r2, notrunc),
                              notrunc);
          break;

        case RDIV_EXPR:
          {
            tree magsquared
              = const_binop (PLUS_EXPR,
                             const_binop (MULT_EXPR, r2, r2, notrunc),
                             const_binop (MULT_EXPR, i2, i2, notrunc),
                             notrunc);
            tree t1
              = const_binop (PLUS_EXPR,
                             const_binop (MULT_EXPR, r1, r2, notrunc),
                             const_binop (MULT_EXPR, i1, i2, notrunc),
                             notrunc);
            tree t2
              = const_binop (MINUS_EXPR,
                             const_binop (MULT_EXPR, i1, r2, notrunc),
                             const_binop (MULT_EXPR, r1, i2, notrunc),
                             notrunc);

            if (INTEGRAL_TYPE_P (TREE_TYPE (r1)))
              code = TRUNC_DIV_EXPR;

            real = const_binop (code, t1, magsquared, notrunc);
            imag = const_binop (code, t2, magsquared, notrunc);
          }
          break;

        default:
          return NULL_TREE;
        }

      if (real && imag)
        return build_complex (type, real, imag);
    }

  return NULL_TREE;
}

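/* Illustrative sketch only: the RDIV_EXPR case above folds complex
   division by the textbook formula
   (a+bi)/(c+di) = ((ac+bd) + (bc-ad)i) / (c^2+d^2),
   written out here for host doubles.  */

static void
example_complex_div (double a, double b, double c, double d,
                     double *re, double *im)
{
  double magsquared = c * c + d * d;
  *re = (a * c + b * d) / magsquared;
  *im = (b * c - a * d) / magsquared;
}
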
/* Create a size type INT_CST node with NUMBER sign extended.  KIND
   indicates which particular sizetype to create.  */

tree
size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
{
  return build_int_cst (sizetype_tab[(int) kind], number);
}

/* Combine operands ARG0 and ARG1 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be equivalent integer types, ala int_binop_types_match_p.
   If the operands are constant, so is the result.  */

tree
size_binop (enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
                                       TREE_TYPE (arg1)));

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR)
        {
          if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
            return arg1;
          if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
            return arg0;
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
            return arg0;
        }
      else if (code == MULT_EXPR)
        {
          if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
            return arg1;
        }

      /* Handle general case of two integer constants.  */
      return int_const_binop (code, arg0, arg1, 0);
    }

  return fold_build2 (code, type, arg0, arg1);
}

/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in signed type corresponding to the type of the operands.  */

tree
size_diffop (tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
                                       TREE_TYPE (arg1)));

  /* If the type is already signed, just do the simple thing.  */
  if (!TYPE_UNSIGNED (type))
    return size_binop (MINUS_EXPR, arg0, arg1);

  if (type == sizetype)
    ctype = ssizetype;
  else if (type == bitsizetype)
    ctype = sbitsizetype;
  else
    ctype = signed_type_for (type);

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop (MINUS_EXPR, fold_convert (ctype, arg0),
                       fold_convert (ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return build_int_cst (ctype, 0);
  else if (tree_int_cst_lt (arg1, arg0))
    return fold_convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
  else
    return size_binop (MINUS_EXPR, build_int_cst (ctype, 0),
                       fold_convert (ctype, size_binop (MINUS_EXPR,
                                                        arg1, arg0)));
}

/* A subroutine of fold_convert_const handling conversions of an
   INTEGER_CST to another integer type.  */

static tree
fold_convert_const_int_from_int (tree type, const_tree arg1)
{
  tree t;

  /* Given an integer constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = force_fit_type_double (type, TREE_INT_CST_LOW (arg1),
                             TREE_INT_CST_HIGH (arg1),
                             /* Don't set the overflow when
                                converting from a pointer,  */
                             !POINTER_TYPE_P (TREE_TYPE (arg1))
                             /* or to a sizetype with same signedness
                                and the precision is unchanged.
                                ??? sizetype is always sign-extended,
                                but its signedness depends on the
                                frontend.  Thus we see spurious overflows
                                here if we do not check this.  */
                             && !((TYPE_PRECISION (TREE_TYPE (arg1))
                                   == TYPE_PRECISION (type))
                                  && (TYPE_UNSIGNED (TREE_TYPE (arg1))
                                      == TYPE_UNSIGNED (type))
                                  && ((TREE_CODE (TREE_TYPE (arg1)) == INTEGER_TYPE
                                       && TYPE_IS_SIZETYPE (TREE_TYPE (arg1)))
                                      || (TREE_CODE (type) == INTEGER_TYPE
                                          && TYPE_IS_SIZETYPE (type)))),
                             (TREE_INT_CST_HIGH (arg1) < 0
                              && (TYPE_UNSIGNED (type)
                                  < TYPE_UNSIGNED (TREE_TYPE (arg1))))
                             | TREE_OVERFLOW (arg1));

  return t;
}

/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to an integer type.  */

static tree
fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
{
  int overflow = 0;
  tree t;

  /* The following code implements the floating point to integer
     conversion rules required by the Java Language Specification,
     that IEEE NaNs are mapped to zero and values that overflow
     the target precision saturate, i.e. values greater than
     INT_MAX are mapped to INT_MAX, and values less than INT_MIN
     are mapped to INT_MIN.  These semantics are allowed by the
     C and C++ standards that simply state that the behavior of
     FP-to-integer conversion is unspecified upon overflow.  */

  HOST_WIDE_INT high, low;
  REAL_VALUE_TYPE r;
  REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);

  switch (code)
    {
    case FIX_TRUNC_EXPR:
      real_trunc (&r, VOIDmode, &x);
      break;

    default:
      gcc_unreachable ();
    }

  /* If R is NaN, return zero and show we have an overflow.  */
  if (REAL_VALUE_ISNAN (r))
    {
      overflow = 1;
      high = 0;
      low = 0;
    }

  /* See if R is less than the lower bound or greater than the
     upper bound.  */

  if (! overflow)
    {
      tree lt = TYPE_MIN_VALUE (type);
      REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
      if (REAL_VALUES_LESS (r, l))
        {
          overflow = 1;
          high = TREE_INT_CST_HIGH (lt);
          low = TREE_INT_CST_LOW (lt);
        }
    }

  if (! overflow)
    {
      tree ut = TYPE_MAX_VALUE (type);
      if (ut)
        {
          REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
          if (REAL_VALUES_LESS (u, r))
            {
              overflow = 1;
              high = TREE_INT_CST_HIGH (ut);
              low = TREE_INT_CST_LOW (ut);
            }
        }
    }

  if (! overflow)
    REAL_VALUE_TO_INT (&low, &high, r);

  t = force_fit_type_double (type, low, high, -1,
                             overflow | TREE_OVERFLOW (arg1));
  return t;
}

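/* Illustrative sketch only: the saturating double-to-int32 semantics
   described above (NaN maps to 0, out-of-range values clamp to the
   nearest representable bound), written out for a host `double'.  */

static int
example_saturating_dtoi (double r)
{
  if (r != r)                   /* NaN */
    return 0;
  if (r <= -2147483648.0)
    return -2147483647 - 1;     /* INT_MIN without overflowing a literal */
  if (r >= 2147483647.0)
    return 2147483647;          /* INT_MAX */
  return (int) r;               /* in range: truncate toward zero */
}
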
2223 /* A subroutine of fold_convert_const handling conversions of a
2224 FIXED_CST to an integer type. */
2227 fold_convert_const_int_from_fixed (tree type, const_tree arg1)
2230 double_int temp, temp_trunc;
2233 /* Right shift FIXED_CST to temp by fbit. */
2234 temp = TREE_FIXED_CST (arg1).data;
2235 mode = TREE_FIXED_CST (arg1).mode;
2236 if (GET_MODE_FBIT (mode) < 2 * HOST_BITS_PER_WIDE_INT)
2238 lshift_double (temp.low, temp.high,
2239 - GET_MODE_FBIT (mode), 2 * HOST_BITS_PER_WIDE_INT,
2240 &temp.low, &temp.high, SIGNED_FIXED_POINT_MODE_P (mode));
2242 /* Left shift temp to temp_trunc by fbit. */
2243 lshift_double (temp.low, temp.high,
2244 GET_MODE_FBIT (mode), 2 * HOST_BITS_PER_WIDE_INT,
2245 &temp_trunc.low, &temp_trunc.high,
2246 SIGNED_FIXED_POINT_MODE_P (mode));
2253 temp_trunc.high = 0;
2256 /* If FIXED_CST is negative, we need to round the value toward 0.
2257 We do this by adding 1 to temp when the fractional bits are nonzero. */
2258 if (SIGNED_FIXED_POINT_MODE_P (mode) && temp_trunc.high < 0
2259 && !double_int_equal_p (TREE_FIXED_CST (arg1).data, temp_trunc))
2264 temp = double_int_add (temp, one);
2267 /* Given a fixed-point constant, make new constant with new type,
2268 appropriately sign-extended or truncated. */
2269 t = force_fit_type_double (type, temp.low, temp.high, -1,
2271 && (TYPE_UNSIGNED (type)
2272 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
2273 | TREE_OVERFLOW (arg1));
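/* Editorial worked example (not in the original): the shift pair above
   extracts the integer part of the fixed-point value.  With 8 fractional
   bits (fbit = 8), the value -2.5 is stored as -640; the arithmetic right
   shift by 8 gives -3, and shifting back left gives -768, which differs
   from the original -640, so the fractional bits were nonzero and 1 is
   added to round toward zero, yielding -2.  */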
2278 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2279 to another floating point type. */
2282 fold_convert_const_real_from_real (tree type, const_tree arg1)
2284 REAL_VALUE_TYPE value;
2287 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
2288 t = build_real (type, value);
2290 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2294 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
2295 to a floating point type. */
2298 fold_convert_const_real_from_fixed (tree type, const_tree arg1)
2300 REAL_VALUE_TYPE value;
2303 real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
2304 t = build_real (type, value);
2306 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2307 TREE_CONSTANT_OVERFLOW (t)
2308 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
2312 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
2313 to another fixed-point type. */
2316 fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
2318 FIXED_VALUE_TYPE value;
2322 overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
2323 TYPE_SATURATING (type));
2324 t = build_fixed (type, value);
2326 /* Propagate overflow flags. */
2327 if (overflow_p | TREE_OVERFLOW (arg1))
2329 TREE_OVERFLOW (t) = 1;
2330 TREE_CONSTANT_OVERFLOW (t) = 1;
2332 else if (TREE_CONSTANT_OVERFLOW (arg1))
2333 TREE_CONSTANT_OVERFLOW (t) = 1;
2337 /* A subroutine of fold_convert_const handling conversions of an INTEGER_CST
2338 to a fixed-point type. */
2341 fold_convert_const_fixed_from_int (tree type, const_tree arg1)
2343 FIXED_VALUE_TYPE value;
2347 overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type),
2348 TREE_INT_CST (arg1),
2349 TYPE_UNSIGNED (TREE_TYPE (arg1)),
2350 TYPE_SATURATING (type));
2351 t = build_fixed (type, value);
2353 /* Propagate overflow flags. */
2354 if (overflow_p | TREE_OVERFLOW (arg1))
2356 TREE_OVERFLOW (t) = 1;
2357 TREE_CONSTANT_OVERFLOW (t) = 1;
2359 else if (TREE_CONSTANT_OVERFLOW (arg1))
2360 TREE_CONSTANT_OVERFLOW (t) = 1;
2364 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2365 to a fixed-point type. */
2368 fold_convert_const_fixed_from_real (tree type, const_tree arg1)
2370 FIXED_VALUE_TYPE value;
2374 overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
2375 &TREE_REAL_CST (arg1),
2376 TYPE_SATURATING (type));
2377 t = build_fixed (type, value);
2379 /* Propagate overflow flags. */
2380 if (overflow_p | TREE_OVERFLOW (arg1))
2382 TREE_OVERFLOW (t) = 1;
2383 TREE_CONSTANT_OVERFLOW (t) = 1;
2385 else if (TREE_CONSTANT_OVERFLOW (arg1))
2386 TREE_CONSTANT_OVERFLOW (t) = 1;
2390 /* Attempt to fold type conversion operation CODE of expression ARG1 to
2391 type TYPE. If no simplification can be done return NULL_TREE. */
2394 fold_convert_const (enum tree_code code, tree type, tree arg1)
2396 if (TREE_TYPE (arg1) == type)
2399 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
2400 || TREE_CODE (type) == OFFSET_TYPE)
2402 if (TREE_CODE (arg1) == INTEGER_CST)
2403 return fold_convert_const_int_from_int (type, arg1);
2404 else if (TREE_CODE (arg1) == REAL_CST)
2405 return fold_convert_const_int_from_real (code, type, arg1);
2406 else if (TREE_CODE (arg1) == FIXED_CST)
2407 return fold_convert_const_int_from_fixed (type, arg1);
2409 else if (TREE_CODE (type) == REAL_TYPE)
2411 if (TREE_CODE (arg1) == INTEGER_CST)
2412 return build_real_from_int_cst (type, arg1);
2413 else if (TREE_CODE (arg1) == REAL_CST)
2414 return fold_convert_const_real_from_real (type, arg1);
2415 else if (TREE_CODE (arg1) == FIXED_CST)
2416 return fold_convert_const_real_from_fixed (type, arg1);
2418 else if (TREE_CODE (type) == FIXED_POINT_TYPE)
2420 if (TREE_CODE (arg1) == FIXED_CST)
2421 return fold_convert_const_fixed_from_fixed (type, arg1);
2422 else if (TREE_CODE (arg1) == INTEGER_CST)
2423 return fold_convert_const_fixed_from_int (type, arg1);
2424 else if (TREE_CODE (arg1) == REAL_CST)
2425 return fold_convert_const_fixed_from_real (type, arg1);
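/* Editorial usage sketch (hedged; simplified from typical callers): the
   dispatch above only fires for constant operands, so callers generally
   look like

       tree tem = fold_convert_const (NOP_EXPR, type, arg);
       if (tem != NULL_TREE)
         return tem;   /* constant-folded */
       /* ... otherwise build the conversion expression ... */

   A NULL_TREE result just means no constant simplification applied.  */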
2430 /* Construct a vector of zero elements of vector type TYPE. */
2433 build_zero_vector (tree type)
2438 elem = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2439 units = TYPE_VECTOR_SUBPARTS (type);
2442 for (i = 0; i < units; i++)
2443 list = tree_cons (NULL_TREE, elem, list);
2444 return build_vector (type, list);
2447 /* Returns true if ARG is convertible to TYPE using a NOP_EXPR. */
2450 fold_convertible_p (const_tree type, const_tree arg)
2452 tree orig = TREE_TYPE (arg);
2457 if (TREE_CODE (arg) == ERROR_MARK
2458 || TREE_CODE (type) == ERROR_MARK
2459 || TREE_CODE (orig) == ERROR_MARK)
2462 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2465 switch (TREE_CODE (type))
2467 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2468 case POINTER_TYPE: case REFERENCE_TYPE:
2470 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2471 || TREE_CODE (orig) == OFFSET_TYPE)
2473 return (TREE_CODE (orig) == VECTOR_TYPE
2474 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2477 case FIXED_POINT_TYPE:
2481 return TREE_CODE (type) == TREE_CODE (orig);
2488 /* Convert expression ARG to type TYPE. Used by the middle-end for
2489 simple conversions in preference to calling the front-end's convert. */
2492 fold_convert (tree type, tree arg)
2494 tree orig = TREE_TYPE (arg);
2500 if (TREE_CODE (arg) == ERROR_MARK
2501 || TREE_CODE (type) == ERROR_MARK
2502 || TREE_CODE (orig) == ERROR_MARK)
2503 return error_mark_node;
2505 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2506 return fold_build1 (NOP_EXPR, type, arg);
2508 switch (TREE_CODE (type))
2510 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2511 case POINTER_TYPE: case REFERENCE_TYPE:
2513 if (TREE_CODE (arg) == INTEGER_CST)
2515 tem = fold_convert_const (NOP_EXPR, type, arg);
2516 if (tem != NULL_TREE)
2519 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2520 || TREE_CODE (orig) == OFFSET_TYPE)
2521 return fold_build1 (NOP_EXPR, type, arg);
2522 if (TREE_CODE (orig) == COMPLEX_TYPE)
2524 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2525 return fold_convert (type, tem);
2527 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
2528 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2529 return fold_build1 (NOP_EXPR, type, arg);
2532 if (TREE_CODE (arg) == INTEGER_CST)
2534 tem = fold_convert_const (FLOAT_EXPR, type, arg);
2535 if (tem != NULL_TREE)
2538 else if (TREE_CODE (arg) == REAL_CST)
2540 tem = fold_convert_const (NOP_EXPR, type, arg);
2541 if (tem != NULL_TREE)
2544 else if (TREE_CODE (arg) == FIXED_CST)
2546 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2547 if (tem != NULL_TREE)
2551 switch (TREE_CODE (orig))
2554 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2555 case POINTER_TYPE: case REFERENCE_TYPE:
2556 return fold_build1 (FLOAT_EXPR, type, arg);
2559 return fold_build1 (NOP_EXPR, type, arg);
2561 case FIXED_POINT_TYPE:
2562 return fold_build1 (FIXED_CONVERT_EXPR, type, arg);
2565 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2566 return fold_convert (type, tem);
2572 case FIXED_POINT_TYPE:
2573 if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
2574 || TREE_CODE (arg) == REAL_CST)
2576 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2577 if (tem != NULL_TREE)
2581 switch (TREE_CODE (orig))
2583 case FIXED_POINT_TYPE:
2588 return fold_build1 (FIXED_CONVERT_EXPR, type, arg);
2591 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2592 return fold_convert (type, tem);
2599 switch (TREE_CODE (orig))
2602 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2603 case POINTER_TYPE: case REFERENCE_TYPE:
2605 case FIXED_POINT_TYPE:
2606 return build2 (COMPLEX_EXPR, type,
2607 fold_convert (TREE_TYPE (type), arg),
2608 fold_convert (TREE_TYPE (type), integer_zero_node));
2613 if (TREE_CODE (arg) == COMPLEX_EXPR)
2615 rpart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 0));
2616 ipart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 1));
2617 return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
2620 arg = save_expr (arg);
2621 rpart = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2622 ipart = fold_build1 (IMAGPART_EXPR, TREE_TYPE (orig), arg);
2623 rpart = fold_convert (TREE_TYPE (type), rpart);
2624 ipart = fold_convert (TREE_TYPE (type), ipart);
2625 return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
2633 if (integer_zerop (arg))
2634 return build_zero_vector (type);
2635 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2636 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2637 || TREE_CODE (orig) == VECTOR_TYPE);
2638 return fold_build1 (VIEW_CONVERT_EXPR, type, arg);
2641 tem = fold_ignored_result (arg);
2642 if (TREE_CODE (tem) == MODIFY_EXPR)
2644 return fold_build1 (NOP_EXPR, type, tem);
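/* Editorial usage sketch (assumption; this mirrors how the middle-end
   typically calls fold_convert instead of the front-end's convert):

       tree widened = fold_convert (long_integer_type_node, expr);

   If EXPR is an INTEGER_CST this folds to a new constant immediately via
   fold_convert_const; otherwise a NOP_EXPR / FLOAT_EXPR /
   FIXED_CONVERT_EXPR of the appropriate flavor is built.  */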
2651 /* Return false if expr can be assumed not to be an lvalue, true otherwise. */
2655 maybe_lvalue_p (const_tree x)
2657 /* We only need to wrap lvalue tree codes. */
2658 switch (TREE_CODE (x))
2669 case ALIGN_INDIRECT_REF:
2670 case MISALIGNED_INDIRECT_REF:
2672 case ARRAY_RANGE_REF:
2678 case PREINCREMENT_EXPR:
2679 case PREDECREMENT_EXPR:
2681 case TRY_CATCH_EXPR:
2682 case WITH_CLEANUP_EXPR:
2693 /* Assume the worst for front-end tree codes. */
2694 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2702 /* Return an expr equal to X but certainly not valid as an lvalue. */
2707 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to us. */
2712 if (! maybe_lvalue_p (x))
2714 return build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
2717 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2718 Zero means allow extended lvalues. */
2720 int pedantic_lvalues;
2722 /* When pedantic, return an expr equal to X but certainly not valid as a
2723 pedantic lvalue. Otherwise, return X. */
2726 pedantic_non_lvalue (tree x)
2728 if (pedantic_lvalues)
2729 return non_lvalue (x);
2734 /* Given a tree comparison code, return the code that is the logical inverse
2735 of the given code. It is not safe to do this for floating-point
2736 comparisons, except for NE_EXPR and EQ_EXPR, so we receive a HONOR_NANS
2737 flag as well: if reversing the comparison is unsafe, return ERROR_MARK. */
2740 invert_tree_comparison (enum tree_code code, bool honor_nans)
2742 if (honor_nans && flag_trapping_math)
2752 return honor_nans ? UNLE_EXPR : LE_EXPR;
2754 return honor_nans ? UNLT_EXPR : LT_EXPR;
2756 return honor_nans ? UNGE_EXPR : GE_EXPR;
2758 return honor_nans ? UNGT_EXPR : GT_EXPR;
2772 return UNORDERED_EXPR;
2773 case UNORDERED_EXPR:
2774 return ORDERED_EXPR;
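/* Editorial sketch (not in the original): with NaNs honored, the inverse
   of an ordered comparison must become an unordered one, so that a NaN
   operand still makes exactly one of the pair true:

       !(a < b)  ->  a UNGE b    (honor_nans)
       !(a < b)  ->  a >= b      (!honor_nans)

   When both honor_nans and flag_trapping_math hold, the guard at the top
   of the function refuses the inversion entirely, since the unordered
   form could silently remove a trap on NaN.  */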
2780 /* Similar, but return the comparison that results if the operands are
2781 swapped. This is safe for floating-point. */
2784 swap_tree_comparison (enum tree_code code)
2791 case UNORDERED_EXPR:
2817 /* Convert a comparison tree code from an enum tree_code representation
2818 into a compcode bit-based encoding. This function is the inverse of
2819 compcode_to_comparison. */
2821 static enum comparison_code
2822 comparison_to_compcode (enum tree_code code)
2839 return COMPCODE_ORD;
2840 case UNORDERED_EXPR:
2841 return COMPCODE_UNORD;
2843 return COMPCODE_UNLT;
2845 return COMPCODE_UNEQ;
2847 return COMPCODE_UNLE;
2849 return COMPCODE_UNGT;
2851 return COMPCODE_LTGT;
2853 return COMPCODE_UNGE;
2859 /* Convert a compcode bit-based encoding of a comparison operator back
2860 to GCC's enum tree_code representation. This function is the
2861 inverse of comparison_to_compcode. */
2863 static enum tree_code
2864 compcode_to_comparison (enum comparison_code code)
2881 return ORDERED_EXPR;
2882 case COMPCODE_UNORD:
2883 return UNORDERED_EXPR;
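/* Editorial sketch (assumption about the encoding, consistent with the
   comparison_code enum): the compcode bits are LT = 1, EQ = 2, GT = 4,
   UNORD = 8, so lattice operations on comparisons become bit arithmetic:

       COMPCODE_LE == COMPCODE_LT | COMPCODE_EQ   (1|2 == 3)
       COMPCODE_NE == COMPCODE_LT | COMPCODE_GT   (1|4 == 5)

   which is what lets combine_comparisons below use plain & and |.  */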
2901 /* Return a tree for the comparison which is the combination of
2902 doing the AND or OR (depending on CODE) of the two operations LCODE
2903 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2904 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2905 if this makes the transformation invalid. */
2908 combine_comparisons (enum tree_code code, enum tree_code lcode,
2909 enum tree_code rcode, tree truth_type,
2910 tree ll_arg, tree lr_arg)
2912 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2913 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2914 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2915 enum comparison_code compcode;
2919 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2920 compcode = lcompcode & rcompcode;
2923 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2924 compcode = lcompcode | rcompcode;
2933 /* Eliminate unordered comparisons, as well as LTGT and ORD
2934 which are not used unless the mode has NaNs. */
2935 compcode &= ~COMPCODE_UNORD;
2936 if (compcode == COMPCODE_LTGT)
2937 compcode = COMPCODE_NE;
2938 else if (compcode == COMPCODE_ORD)
2939 compcode = COMPCODE_TRUE;
2941 else if (flag_trapping_math)
2943 /* Check that the original operation and the optimized ones will trap
2944 under the same condition. */
2945 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2946 && (lcompcode != COMPCODE_EQ)
2947 && (lcompcode != COMPCODE_ORD);
2948 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2949 && (rcompcode != COMPCODE_EQ)
2950 && (rcompcode != COMPCODE_ORD);
2951 bool trap = (compcode & COMPCODE_UNORD) == 0
2952 && (compcode != COMPCODE_EQ)
2953 && (compcode != COMPCODE_ORD);
2955 /* In a short-circuited boolean expression the LHS might be
2956 such that the RHS, if evaluated, will never trap. For
2957 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2958 if neither x nor y is NaN. (This is a mixed blessing: for
2959 example, the expression above will never trap, hence
2960 optimizing it to x < y would be invalid). */
2961 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2962 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2965 /* If the comparison was short-circuited, and only the RHS
2966 trapped, we may now generate a spurious trap. */
2968 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2971 /* If we changed the conditions that cause a trap, we lose. */
2972 if ((ltrap || rtrap) != trap)
2976 if (compcode == COMPCODE_TRUE)
2977 return constant_boolean_node (true, truth_type);
2978 else if (compcode == COMPCODE_FALSE)
2979 return constant_boolean_node (false, truth_type);
2981 return fold_build2 (compcode_to_comparison (compcode),
2982 truth_type, ll_arg, lr_arg);
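/* Editorial worked example (not in the original): for integral operands
   (no NaNs), combining

       (x <= y) && (x >= y)

   gives compcode = COMPCODE_LE & COMPCODE_GE = COMPCODE_EQ, so the whole
   expression folds to x == y; likewise (x < y) || (x == y) ORs to
   COMPCODE_LE and folds to x <= y.  */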
2985 /* Return nonzero if CODE is a tree code that represents a truth value. */
2988 truth_value_p (enum tree_code code)
2990 return (TREE_CODE_CLASS (code) == tcc_comparison
2991 || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
2992 || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
2993 || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
2996 /* Return nonzero if two operands (typically of the same tree node)
2997 are necessarily equal. If either argument has side-effects this
2998 function returns zero. FLAGS modifies behavior as follows:
3000 If OEP_ONLY_CONST is set, only return nonzero for constants.
3001 This function tests whether the operands are indistinguishable;
3002 it does not test whether they are equal using C's == operation.
3003 The distinction is important for IEEE floating point, because
3004 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
3005 (2) two NaNs may be indistinguishable, but NaN!=NaN.
3007 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
3008 even though it may hold multiple values during a function.
3009 This is because a GCC tree node guarantees that nothing else is
3010 executed between the evaluation of its "operands" (which may often
3011 be evaluated in arbitrary order). Hence if the operands themselves
3012 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
3013 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
3014 unset means assuming isochronic (or instantaneous) tree equivalence.
3015 Unless comparing arbitrary expression trees, such as from different
3016 statements, this flag can usually be left unset.
3018 If OEP_PURE_SAME is set, then pure functions with identical arguments
3019 are considered the same. It is used when the caller has other ways
3020 to ensure that global memory is unchanged in between. */
3023 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
3025 /* If either is ERROR_MARK, they aren't equal. */
3026 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK)
3029 /* Check equality of integer constants before bailing out due to
3030 precision differences. */
3031 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
3032 return tree_int_cst_equal (arg0, arg1);
3034 /* If both types don't have the same signedness, then we can't consider
3035 them equal. We must check this before the STRIP_NOPS calls
3036 because they may change the signedness of the arguments. As pointers
3037 strictly don't have a signedness, require either two pointers or
3038 two non-pointers as well. */
3039 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
3040 || POINTER_TYPE_P (TREE_TYPE (arg0)) != POINTER_TYPE_P (TREE_TYPE (arg1)))
3043 /* If both types don't have the same precision, then it is not safe to strip NOPs. */
3045 if (TYPE_PRECISION (TREE_TYPE (arg0)) != TYPE_PRECISION (TREE_TYPE (arg1)))
3051 /* In case both args are comparisons but with different comparison
3052 code, try to swap the comparison operands of one arg to produce
3053 a match and compare that variant. */
3054 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3055 && COMPARISON_CLASS_P (arg0)
3056 && COMPARISON_CLASS_P (arg1))
3058 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
3060 if (TREE_CODE (arg0) == swap_code)
3061 return operand_equal_p (TREE_OPERAND (arg0, 0),
3062 TREE_OPERAND (arg1, 1), flags)
3063 && operand_equal_p (TREE_OPERAND (arg0, 1),
3064 TREE_OPERAND (arg1, 0), flags);
3067 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3068 /* This is needed for conversions and for COMPONENT_REF.
3069 Might as well play it safe and always test this. */
3070 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
3071 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
3072 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
3075 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
3076 We don't care about side effects in that case because the SAVE_EXPR
3077 takes care of that for us. In all other cases, two expressions are
3078 equal if they have no side effects. If we have two identical
3079 expressions with side effects that should be treated the same due
3080 to the only side effects being identical SAVE_EXPR's, that will
3081 be detected in the recursive calls below. */
3082 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
3083 && (TREE_CODE (arg0) == SAVE_EXPR
3084 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
3087 /* Next handle constant cases, those for which we can return 1 even
3088 if ONLY_CONST is set. */
3089 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
3090 switch (TREE_CODE (arg0))
3093 return tree_int_cst_equal (arg0, arg1);
3096 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
3097 TREE_FIXED_CST (arg1));
3100 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
3101 TREE_REAL_CST (arg1)))
3105 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
3107 /* If we do not distinguish between signed and unsigned zero,
3108 consider them equal. */
3109 if (real_zerop (arg0) && real_zerop (arg1))
3118 v1 = TREE_VECTOR_CST_ELTS (arg0);
3119 v2 = TREE_VECTOR_CST_ELTS (arg1);
3122 if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
3125 v1 = TREE_CHAIN (v1);
3126 v2 = TREE_CHAIN (v2);
3133 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
3135 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
3139 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
3140 && ! memcmp (TREE_STRING_POINTER (arg0),
3141 TREE_STRING_POINTER (arg1),
3142 TREE_STRING_LENGTH (arg0)));
3145 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
3151 if (flags & OEP_ONLY_CONST)
3154 /* Define macros to test an operand from arg0 and arg1 for equality and a
3155 variant that allows null and views null as being different from any
3156 non-null value. In the latter case, if either is null, they both
3157 must be; otherwise, do the normal comparison. */
3158 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
3159 TREE_OPERAND (arg1, N), flags)
3161 #define OP_SAME_WITH_NULL(N) \
3162 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
3163 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
3165 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
3168 /* Two conversions are equal only if signedness and modes match. */
3169 switch (TREE_CODE (arg0))
3172 case FIX_TRUNC_EXPR:
3173 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
3174 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
3184 case tcc_comparison:
3186 if (OP_SAME (0) && OP_SAME (1))
3189 /* For commutative ops, allow the other order. */
3190 return (commutative_tree_code (TREE_CODE (arg0))
3191 && operand_equal_p (TREE_OPERAND (arg0, 0),
3192 TREE_OPERAND (arg1, 1), flags)
3193 && operand_equal_p (TREE_OPERAND (arg0, 1),
3194 TREE_OPERAND (arg1, 0), flags));
3197 /* If either of the pointer (or reference) expressions we are
3198 dereferencing contains a side effect, these cannot be equal. */
3199 if (TREE_SIDE_EFFECTS (arg0)
3200 || TREE_SIDE_EFFECTS (arg1))
3203 switch (TREE_CODE (arg0))
3206 case ALIGN_INDIRECT_REF:
3207 case MISALIGNED_INDIRECT_REF:
3213 case ARRAY_RANGE_REF:
3214 /* Operands 2 and 3 may be null.
3215 Compare the array index by value first if it is constant, as we
3216 may have different types but the same value here. */
3218 && (tree_int_cst_equal (TREE_OPERAND (arg0, 1),
3219 TREE_OPERAND (arg1, 1))
3221 && OP_SAME_WITH_NULL (2)
3222 && OP_SAME_WITH_NULL (3));
3225 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
3226 may be NULL when we're called to compare MEM_EXPRs. */
3227 return OP_SAME_WITH_NULL (0)
3229 && OP_SAME_WITH_NULL (2);
3232 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
3238 case tcc_expression:
3239 switch (TREE_CODE (arg0))
3242 case TRUTH_NOT_EXPR:
3245 case TRUTH_ANDIF_EXPR:
3246 case TRUTH_ORIF_EXPR:
3247 return OP_SAME (0) && OP_SAME (1);
3249 case TRUTH_AND_EXPR:
3251 case TRUTH_XOR_EXPR:
3252 if (OP_SAME (0) && OP_SAME (1))
3255 /* Otherwise take into account that this is a commutative operation. */
3256 return (operand_equal_p (TREE_OPERAND (arg0, 0),
3257 TREE_OPERAND (arg1, 1), flags)
3258 && operand_equal_p (TREE_OPERAND (arg0, 1),
3259 TREE_OPERAND (arg1, 0), flags));
3262 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
3269 switch (TREE_CODE (arg0))
3272 /* If the CALL_EXPRs call different functions, then they
3273 clearly cannot be equal. */
3274 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
3279 unsigned int cef = call_expr_flags (arg0);
3280 if (flags & OEP_PURE_SAME)
3281 cef &= ECF_CONST | ECF_PURE;
3288 /* Now see if all the arguments are the same. */
3290 const_call_expr_arg_iterator iter0, iter1;
3292 for (a0 = first_const_call_expr_arg (arg0, &iter0),
3293 a1 = first_const_call_expr_arg (arg1, &iter1);
3295 a0 = next_const_call_expr_arg (&iter0),
3296 a1 = next_const_call_expr_arg (&iter1))
3297 if (! operand_equal_p (a0, a1, flags))
3300 /* If we get here and both argument lists are exhausted
3301 then the CALL_EXPRs are equal. */
3302 return ! (a0 || a1);
3308 case tcc_declaration:
3309 /* Consider __builtin_sqrt equal to sqrt. */
3310 return (TREE_CODE (arg0) == FUNCTION_DECL
3311 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
3312 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
3313 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
3320 #undef OP_SAME_WITH_NULL
3323 /* Similar to operand_equal_p, but see if ARG0 might have been made by
3324 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
3326 When in doubt, return 0. */
3329 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
3331 int unsignedp1, unsignedpo;
3332 tree primarg0, primarg1, primother;
3333 unsigned int correct_width;
3335 if (operand_equal_p (arg0, arg1, 0))
3338 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
3339 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
3342 /* Discard any conversions that don't change the modes of ARG0 and ARG1
3343 and see if the inner values are the same. This removes any
3344 signedness comparison, which doesn't matter here. */
3345 primarg0 = arg0, primarg1 = arg1;
3346 STRIP_NOPS (primarg0);
3347 STRIP_NOPS (primarg1);
3348 if (operand_equal_p (primarg0, primarg1, 0))
3351 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
3352 actual comparison operand, ARG0.
3354 First throw away any conversions to wider types
3355 already present in the operands. */
3357 primarg1 = get_narrower (arg1, &unsignedp1);
3358 primother = get_narrower (other, &unsignedpo);
3360 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
3361 if (unsignedp1 == unsignedpo
3362 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
3363 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
3365 tree type = TREE_TYPE (arg0);
3367 /* Make sure shorter operand is extended the right way
3368 to match the longer operand. */
3369 primarg1 = fold_convert (signed_or_unsigned_type_for
3370 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
3372 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
3379 /* See if ARG is an expression that is either a comparison or is performing
3380 arithmetic on comparisons. The comparisons must only be comparing
3381 two different values, which will be stored in *CVAL1 and *CVAL2; if
3382 they are nonzero it means that some operands have already been found.
3383 No variables may be used anywhere else in the expression except in the
3384 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
3385 the expression and save_expr needs to be called with CVAL1 and CVAL2.
3387 If this is true, return 1. Otherwise, return zero. */
3390 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
3392 enum tree_code code = TREE_CODE (arg);
3393 enum tree_code_class tclass = TREE_CODE_CLASS (code);
3395 /* We can handle some of the tcc_expression cases here. */
3396 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
3398 else if (tclass == tcc_expression
3399 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
3400 || code == COMPOUND_EXPR))
3401 tclass = tcc_binary;
3403 else if (tclass == tcc_expression && code == SAVE_EXPR
3404 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
3406 /* If we've already found a CVAL1 or CVAL2, this expression is
3407 too complex to handle. */
3408 if (*cval1 || *cval2)
3418 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
3421 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
3422 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3423 cval1, cval2, save_p));
3428 case tcc_expression:
3429 if (code == COND_EXPR)
3430 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
3431 cval1, cval2, save_p)
3432 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3433 cval1, cval2, save_p)
3434 && twoval_comparison_p (TREE_OPERAND (arg, 2),
3435 cval1, cval2, save_p));
3438 case tcc_comparison:
3439 /* First see if we can handle the first operand, then the second. For
3440 the second operand, we know *CVAL1 can't be zero. It must be that
3441 one side of the comparison is each of the values; test for the
3442 case where this isn't true by failing if the two operands are the same. */
3445 if (operand_equal_p (TREE_OPERAND (arg, 0),
3446 TREE_OPERAND (arg, 1), 0))
3450 *cval1 = TREE_OPERAND (arg, 0);
3451 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
3453 else if (*cval2 == 0)
3454 *cval2 = TREE_OPERAND (arg, 0);
3455 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
3460 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
3462 else if (*cval2 == 0)
3463 *cval2 = TREE_OPERAND (arg, 1);
3464 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
3476 /* ARG is a tree that is known to contain just arithmetic operations and
3477 comparisons. Evaluate the operations in the tree substituting NEW0 for
3478 any occurrence of OLD0 as an operand of a comparison and likewise for NEW1 and OLD1. */
3482 eval_subst (tree arg, tree old0, tree new0, tree old1, tree new1)
3484 tree type = TREE_TYPE (arg);
3485 enum tree_code code = TREE_CODE (arg);
3486 enum tree_code_class tclass = TREE_CODE_CLASS (code);
3488 /* We can handle some of the tcc_expression cases here. */
3489 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
3491 else if (tclass == tcc_expression
3492 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
3493 tclass = tcc_binary;
3498 return fold_build1 (code, type,
3499 eval_subst (TREE_OPERAND (arg, 0),
3500 old0, new0, old1, new1));
3503 return fold_build2 (code, type,
3504 eval_subst (TREE_OPERAND (arg, 0),
3505 old0, new0, old1, new1),
3506 eval_subst (TREE_OPERAND (arg, 1),
3507 old0, new0, old1, new1));
3509 case tcc_expression:
3513 return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);
3516 return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);
3519 return fold_build3 (code, type,
3520 eval_subst (TREE_OPERAND (arg, 0),
3521 old0, new0, old1, new1),
3522 eval_subst (TREE_OPERAND (arg, 1),
3523 old0, new0, old1, new1),
3524 eval_subst (TREE_OPERAND (arg, 2),
3525 old0, new0, old1, new1));
3529 /* Fall through - ??? */
3531 case tcc_comparison:
3533 tree arg0 = TREE_OPERAND (arg, 0);
3534 tree arg1 = TREE_OPERAND (arg, 1);
3536 /* We need to check both for exact equality and tree equality. The
3537 former will be true if the operand has a side-effect. In that
3538 case, we know the operand occurred exactly once. */
3540 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
3542 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
3545 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
3547 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
3550 return fold_build2 (code, type, arg0, arg1);
3558 /* Return a tree for the case when the result of an expression is RESULT
3559 converted to TYPE and OMITTED was previously an operand of the expression
3560 but is now not needed (e.g., we folded OMITTED * 0).
3562 If OMITTED has side effects, we must evaluate it. Otherwise, just do
3563 the conversion of RESULT to TYPE. */
3566 omit_one_operand (tree type, tree result, tree omitted)
3568 tree t = fold_convert (type, result);
3570 /* If the resulting operand is an empty statement, just return the omitted
3571 statement cast to void. */
3572 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3573 return build1 (NOP_EXPR, void_type_node, fold_ignored_result (omitted));
3575 if (TREE_SIDE_EFFECTS (omitted))
3576 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
3578 return non_lvalue (t);
3581 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
3584 pedantic_omit_one_operand (tree type, tree result, tree omitted)
3586 tree t = fold_convert (type, result);
3588 /* If the resulting operand is an empty statement, just return the omitted
3589 statement cast to void. */
3590 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3591 return build1 (NOP_EXPR, void_type_node, fold_ignored_result (omitted));
3593 if (TREE_SIDE_EFFECTS (omitted))
3594 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
3596 return pedantic_non_lvalue (t);
3599 /* Return a tree for the case when the result of an expression is RESULT
3600 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3601 of the expression but are now not needed.
3603 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3604 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3605 evaluated before OMITTED2. Otherwise, if neither has side effects,
3606 just do the conversion of RESULT to TYPE. */
3609 omit_two_operands (tree type, tree result, tree omitted1, tree omitted2)
3611 tree t = fold_convert (type, result);
3613 if (TREE_SIDE_EFFECTS (omitted2))
3614 t = build2 (COMPOUND_EXPR, type, omitted2, t);
3615 if (TREE_SIDE_EFFECTS (omitted1))
3616 t = build2 (COMPOUND_EXPR, type, omitted1, t);
3618 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue (t) : t;
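/* Editorial sketch (hedged): omit_one_operand is what keeps folds such as
   "x * 0 -> 0" correct in the presence of side effects.  If the omitted
   operand is a call,

       f () * 0   ->   (f (), 0)

   i.e. a COMPOUND_EXPR that still evaluates f () but yields the folded
   constant; without side effects the result is just the constant.  */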
3622 /* Return a simplified tree node for the truth-negation of ARG. This
3623 never alters ARG itself. We assume that ARG is an operation that
3624 returns a truth value (0 or 1).
3626 FIXME: one would think we would fold the result, but it causes
3627 problems with the dominator optimizer. */
3630 fold_truth_not_expr (tree arg)
3632 tree type = TREE_TYPE (arg);
3633 enum tree_code code = TREE_CODE (arg);
3635 /* If this is a comparison, we can simply invert it, except for
3636 floating-point non-equality comparisons, in which case we just
3637 enclose a TRUTH_NOT_EXPR around what we have. */
3639 if (TREE_CODE_CLASS (code) == tcc_comparison)
3641 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3642 if (FLOAT_TYPE_P (op_type)
3643 && flag_trapping_math
3644 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3645 && code != NE_EXPR && code != EQ_EXPR)
3649 code = invert_tree_comparison (code,
3650 HONOR_NANS (TYPE_MODE (op_type)));
3651 if (code == ERROR_MARK)
3654 return build2 (code, type,
3655 TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
3662 return constant_boolean_node (integer_zerop (arg), type);
3664 case TRUTH_AND_EXPR:
3665 return build2 (TRUTH_OR_EXPR, type,
3666 invert_truthvalue (TREE_OPERAND (arg, 0)),
3667 invert_truthvalue (TREE_OPERAND (arg, 1)));
3670 return build2 (TRUTH_AND_EXPR, type,
3671 invert_truthvalue (TREE_OPERAND (arg, 0)),
3672 invert_truthvalue (TREE_OPERAND (arg, 1)));
3674 case TRUTH_XOR_EXPR:
3675 /* Here we can invert either operand. We invert the first operand
3676 unless the second operand is a TRUTH_NOT_EXPR in which case our
3677 result is the XOR of the first operand with the inside of the
3678 negation of the second operand. */
3680 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3681 return build2 (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3682 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3684 return build2 (TRUTH_XOR_EXPR, type,
3685 invert_truthvalue (TREE_OPERAND (arg, 0)),
3686 TREE_OPERAND (arg, 1));
3688 case TRUTH_ANDIF_EXPR:
3689 return build2 (TRUTH_ORIF_EXPR, type,
3690 invert_truthvalue (TREE_OPERAND (arg, 0)),
3691 invert_truthvalue (TREE_OPERAND (arg, 1)));
3693 case TRUTH_ORIF_EXPR:
3694 return build2 (TRUTH_ANDIF_EXPR, type,
3695 invert_truthvalue (TREE_OPERAND (arg, 0)),
3696 invert_truthvalue (TREE_OPERAND (arg, 1)));
3698 case TRUTH_NOT_EXPR:
3699 return TREE_OPERAND (arg, 0);
3703 tree arg1 = TREE_OPERAND (arg, 1);
3704 tree arg2 = TREE_OPERAND (arg, 2);
3705 /* A COND_EXPR may have a throw as one operand, which
3706 then has void type. Just leave void operands as they are. */
3708 return build3 (COND_EXPR, type, TREE_OPERAND (arg, 0),
3709 VOID_TYPE_P (TREE_TYPE (arg1))
3710 ? arg1 : invert_truthvalue (arg1),
3711 VOID_TYPE_P (TREE_TYPE (arg2))
3712 ? arg2 : invert_truthvalue (arg2));
3716 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
3717 invert_truthvalue (TREE_OPERAND (arg, 1)));
3719 case NON_LVALUE_EXPR:
3720 return invert_truthvalue (TREE_OPERAND (arg, 0));
3723 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3724 return build1 (TRUTH_NOT_EXPR, type, arg);
3728 return build1 (TREE_CODE (arg), type,
3729 invert_truthvalue (TREE_OPERAND (arg, 0)));
3732 if (!integer_onep (TREE_OPERAND (arg, 1)))
3734 return build2 (EQ_EXPR, type, arg,
3735 build_int_cst (type, 0));
3738 return build1 (TRUTH_NOT_EXPR, type, arg);
3740 case CLEANUP_POINT_EXPR:
3741 return build1 (CLEANUP_POINT_EXPR, type,
3742 invert_truthvalue (TREE_OPERAND (arg, 0)));
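/* Editorial sketch (not in the original): the TRUTH_*_EXPR cases above
   implement De Morgan's laws, pushing the negation inward:

       !(a && b)  ->  !a || !b
       !(a || b)  ->  !a && !b
       !(a ^ b)   ->  (!a) ^ b    (only one operand is inverted)

   while comparisons are inverted directly via invert_tree_comparison.  */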
3751 /* Return a simplified tree node for the truth-negation of ARG. This
3752 never alters ARG itself. We assume that ARG is an operation that
3753 returns a truth value (0 or 1).
3755 FIXME: one would think we would fold the result, but it causes
3756 problems with the dominator optimizer. */
3759 invert_truthvalue (tree arg)
3763 if (TREE_CODE (arg) == ERROR_MARK)
3766 tem = fold_truth_not_expr (arg);
3768 tem = build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg), arg);
3773 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3774 operands are another bit-wise operation with a common input. If so,
3775 distribute the bit operations to save an operation and possibly two if
3776 constants are involved. For example, convert
3777 (A | B) & (A | C) into A | (B & C)
3778 Further simplification will occur if B and C are constants.
3780 If this optimization cannot be done, 0 will be returned. */
3783 distribute_bit_expr (enum tree_code code, tree type, tree arg0, tree arg1)
3788 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3789 || TREE_CODE (arg0) == code
3790 || (TREE_CODE (arg0) != BIT_AND_EXPR
3791 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3794 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3796 common = TREE_OPERAND (arg0, 0);
3797 left = TREE_OPERAND (arg0, 1);
3798 right = TREE_OPERAND (arg1, 1);
3800 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3802 common = TREE_OPERAND (arg0, 0);
3803 left = TREE_OPERAND (arg0, 1);
3804 right = TREE_OPERAND (arg1, 0);
3806 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3808 common = TREE_OPERAND (arg0, 1);
3809 left = TREE_OPERAND (arg0, 0);
3810 right = TREE_OPERAND (arg1, 1);
3812 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3814 common = TREE_OPERAND (arg0, 1);
3815 left = TREE_OPERAND (arg0, 0);
3816 right = TREE_OPERAND (arg1, 0);
3821 return fold_build2 (TREE_CODE (arg0), type, common,
3822 fold_build2 (code, type, left, right));
3825 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3826 with code CODE. This optimization is unsafe. */
3828 distribute_real_division (enum tree_code code, tree type, tree arg0, tree arg1)
3830 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3831 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3833 /* (A / C) +- (B / C) -> (A +- B) / C. */
3835 && operand_equal_p (TREE_OPERAND (arg0, 1),
3836 TREE_OPERAND (arg1, 1), 0))
3837 return fold_build2 (mul0 ? MULT_EXPR : RDIV_EXPR, type,
3838 fold_build2 (code, type,
3839 TREE_OPERAND (arg0, 0),
3840 TREE_OPERAND (arg1, 0)),
3841 TREE_OPERAND (arg0, 1));
3843 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3844 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3845 TREE_OPERAND (arg1, 0), 0)
3846 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3847 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3849 REAL_VALUE_TYPE r0, r1;
3850 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3851 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3853 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3855 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3856 real_arithmetic (&r0, code, &r0, &r1);
3857 return fold_build2 (MULT_EXPR, type,
3858 TREE_OPERAND (arg0, 0),
3859 build_real (type, r0));
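/* Editorial worked example (assumption; this mirrors the two patterns
   above and is only valid under unsafe-math optimization, as the comment
   before the function notes):

       a/3.0 + a/6.0  ->  a * (1/3.0 + 1/6.0)  ==  a * 0.5

   The reciprocal constants are combined with real_arithmetic at compile
   time, which is why the transformation is unsafe: it can change rounding
   and floating-point exception behavior.  */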
3865 /* Subroutine for fold_truthop: decode a field reference.
3867 If EXP is a comparison reference, we return the innermost reference.
3869 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3870 set to the starting bit number.
3872 If the innermost field can be completely contained in a mode-sized
3873 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3875 *PVOLATILEP is set to 1 if any expression encountered is volatile;
3876 otherwise it is not changed.
3878 *PUNSIGNEDP is set to the signedness of the field.
3880 *PMASK is set to the mask used. This is either contained in a
3881 BIT_AND_EXPR or derived from the width of the field.
3883 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3885 Return 0 if this is not a component reference or is one that we can't
3886 do anything with. */
3889 decode_field_reference (tree exp, HOST_WIDE_INT *pbitsize,
3890 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3891 int *punsignedp, int *pvolatilep,
3892 tree *pmask, tree *pand_mask)
3894 tree outer_type = 0;
3896 tree mask, inner, offset;
3898 unsigned int precision;
3900 /* All the optimizations using this function assume integer fields.
3901 There are problems with FP fields since the type_for_size call
3902 below can fail for, e.g., XFmode. */
3903 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3906 /* We are interested in the bare arrangement of bits, so strip everything
3907 that doesn't affect the machine mode. However, record the type of the
3908 outermost expression if it may matter below. */
3909 if (CONVERT_EXPR_P (exp)
3910 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3911 outer_type = TREE_TYPE (exp);
3914 if (TREE_CODE (exp) == BIT_AND_EXPR)
3916 and_mask = TREE_OPERAND (exp, 1);
3917 exp = TREE_OPERAND (exp, 0);
3918 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3919 if (TREE_CODE (and_mask) != INTEGER_CST)
3923 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3924 punsignedp, pvolatilep, false);
3925 if ((inner == exp && and_mask == 0)
3926 || *pbitsize < 0 || offset != 0
3927 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3930 /* If the number of bits in the reference is the same as the bitsize of
3931 the outer type, then the outer type gives the signedness. Otherwise
3932 (in case of a small bitfield) the signedness is unchanged. */
3933 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3934 *punsignedp = TYPE_UNSIGNED (outer_type);
3936 /* Compute the mask to access the bitfield. */
3937 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3938 precision = TYPE_PRECISION (unsigned_type);
3940 mask = build_int_cst_type (unsigned_type, -1);
3942 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3943 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3945 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3947 mask = fold_build2 (BIT_AND_EXPR, unsigned_type,
3948 fold_convert (unsigned_type, and_mask), mask);
3951 *pand_mask = and_mask;
3955 /* Subroutine for fold: determine if VAL is the INTEGER_CST that
3956 represents the sign bit of EXP's type. If EXP represents a sign
3957 or zero extension, also test VAL against the unextended type.
3958 The return value is the (sub)expression whose sign bit is VAL,
3959 or NULL_TREE otherwise. */
3962 sign_bit_p (tree exp, const_tree val)
3964 unsigned HOST_WIDE_INT mask_lo, lo;
3965 HOST_WIDE_INT mask_hi, hi;
3969 /* Tree EXP must have an integral type. */
3970 t = TREE_TYPE (exp);
3971 if (! INTEGRAL_TYPE_P (t))
3974 /* Tree VAL must be an integer constant. */
3975 if (TREE_CODE (val) != INTEGER_CST
3976 || TREE_OVERFLOW (val))
3979 width = TYPE_PRECISION (t);
3980 if (width > HOST_BITS_PER_WIDE_INT)
3982 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3985 mask_hi = ((unsigned HOST_WIDE_INT) -1
3986 >> (2 * HOST_BITS_PER_WIDE_INT - width));
3992 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3995 mask_lo = ((unsigned HOST_WIDE_INT) -1
3996 >> (HOST_BITS_PER_WIDE_INT - width));
3999 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
4000 treat VAL as if it were unsigned. */
4001 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
4002 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
4005 /* Handle extension from a narrower type. */
4006 if (TREE_CODE (exp) == NOP_EXPR
4007 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
4008 return sign_bit_p (TREE_OPERAND (exp, 0), val);
4013 /* Subroutine for fold_truthop: determine if an operand is simple enough
4014 to be evaluated unconditionally. */
4017 simple_operand_p (const_tree exp)
4019 /* Strip any conversions that don't change the machine mode. */
4022 return (CONSTANT_CLASS_P (exp)
4023 || TREE_CODE (exp) == SSA_NAME
4025 && ! TREE_ADDRESSABLE (exp)
4026 && ! TREE_THIS_VOLATILE (exp)
4027 && ! DECL_NONLOCAL (exp)
4028 /* Don't regard global variables as simple. They may be
4029 allocated in ways unknown to the compiler (shared memory,
4030 #pragma weak, etc). */
4031 && ! TREE_PUBLIC (exp)
4032 && ! DECL_EXTERNAL (exp)
4033 /* Loading a static variable is unduly expensive, but global
4034 registers aren't expensive. */
4035 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
4038 /* The following functions are subroutines to fold_range_test and allow it to
4039 try to change a logical combination of comparisons into a range test.
4042 For example, X == 2 || X == 3 || X == 4 || X == 5
4046 is converted to (unsigned) (X - 2) <= 3
4048 We describe each set of comparisons as being either inside or outside
4049 a range, using a variable named like IN_P, and then describe the
4050 range with a lower and upper bound. If one of the bounds is omitted,
4051 it represents either the highest or lowest value of the type.
4053 In the comments below, we represent a range by two numbers in brackets
4054 preceded by a "+" to designate being inside that range, or a "-" to
4055 designate being outside that range, so the condition can be inverted by
4056 flipping the prefix. An omitted bound is represented by a "-". For
4057 example, "- [-, 10]" means being outside the range starting at the lowest
4058 possible value and ending at 10, in other words, being greater than 10.
4059 The range "+ [-, -]" is always true and hence the range "- [-, -]" is always false.
4062 We set up things so that the missing bounds are handled in a consistent
4063 manner so neither a missing bound nor "true" and "false" need to be
4064 handled using a special case. */
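/* Editorial worked example (not in the original): for the conversion
   mentioned above,

       X == 2 || X == 3 || X == 4 || X == 5

   is the range "+ [2, 5]".  Subtracting the low bound and viewing the
   result as unsigned makes every out-of-range value wrap around to
   something greater than 3, so the whole test collapses to the single
   comparison (unsigned) (X - 2) <= 3.  */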
4066 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
4067 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
4068 and UPPER1_P are nonzero if the respective argument is an upper bound
4069 and zero for a lower. TYPE, if nonzero, is the type of the result; it
4070 must be specified for a comparison. ARG1 will be converted to ARG0's
4071 type if both are specified. */
4074 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
4075 tree arg1, int upper1_p)
4081 /* If neither arg represents infinity, do the normal operation.
4082 Else, if not a comparison, return infinity. Else handle the special
4083 comparison rules. Note that most of the cases below won't occur, but
4084 are handled for consistency. */
4086 if (arg0 != 0 && arg1 != 0)
4088 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
4089 arg0, fold_convert (TREE_TYPE (arg0), arg1));
4091 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
4094 if (TREE_CODE_CLASS (code) != tcc_comparison)
4097 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
4098 for neither. In real maths, we cannot assume open ended ranges are
4099 the same. But, this is computer arithmetic, where numbers are finite.
4100 We can therefore make the transformation of any unbounded range with
4101 the value Z, Z being greater than any representable number. This permits
4102 us to treat unbounded ranges as equal. */
4103 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
4104 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
4108 result = sgn0 == sgn1;
4111 result = sgn0 != sgn1;
4114 result = sgn0 < sgn1;
4117 result = sgn0 <= sgn1;
4120 result = sgn0 > sgn1;
4123 result = sgn0 >= sgn1;
4129 return constant_boolean_node (result, type);
4132 /* Given EXP, a logical expression, set the range it is testing into
4133 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
4134 actually being tested. *PLOW and *PHIGH will be made of the same
4135 type as the returned expression. If EXP is not a comparison, we
4136 will most likely not be returning a useful value and range. Set
4137 *STRICT_OVERFLOW_P to true if the return value is only valid
4138 because signed overflow is undefined; otherwise, do not change
4139 *STRICT_OVERFLOW_P. */
4142 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
4143 bool *strict_overflow_p)
4145 enum tree_code code;
4146 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
4147 tree exp_type = NULL_TREE, arg0_type = NULL_TREE;
4149 tree low, high, n_low, n_high;
4151 /* Start with simply saying "EXP != 0" and then look at the code of EXP
4152 and see if we can refine the range. Some of the cases below may not
4153 happen, but it doesn't seem worth worrying about this. We "continue"
4154 the outer loop when we've changed something; otherwise we "break"
4155 the switch, which will "break" the while. */
4158 low = high = build_int_cst (TREE_TYPE (exp), 0);
4162 code = TREE_CODE (exp);
4163 exp_type = TREE_TYPE (exp);
4165 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
4167 if (TREE_OPERAND_LENGTH (exp) > 0)
4168 arg0 = TREE_OPERAND (exp, 0);
4169 if (TREE_CODE_CLASS (code) == tcc_comparison
4170 || TREE_CODE_CLASS (code) == tcc_unary
4171 || TREE_CODE_CLASS (code) == tcc_binary)
4172 arg0_type = TREE_TYPE (arg0);
4173 if (TREE_CODE_CLASS (code) == tcc_binary
4174 || TREE_CODE_CLASS (code) == tcc_comparison
4175 || (TREE_CODE_CLASS (code) == tcc_expression
4176 && TREE_OPERAND_LENGTH (exp) > 1))
4177 arg1 = TREE_OPERAND (exp, 1);
4182 case TRUTH_NOT_EXPR:
4183 in_p = ! in_p, exp = arg0;
4186 case EQ_EXPR: case NE_EXPR:
4187 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
4188 /* We can only do something if the range is testing for zero
4189 and if the second operand is an integer constant. Note that
4190 saying something is "in" the range we make is done by
4191 complementing IN_P since it will set in the initial case of
4192 being not equal to zero; "out" is leaving it alone. */
4193 if (low == 0 || high == 0
4194 || ! integer_zerop (low) || ! integer_zerop (high)
4195 || TREE_CODE (arg1) != INTEGER_CST)
4200 case NE_EXPR: /* - [c, c] */
4203 case EQ_EXPR: /* + [c, c] */
4204 in_p = ! in_p, low = high = arg1;
4206 case GT_EXPR: /* - [-, c] */
4207 low = 0, high = arg1;
4209 case GE_EXPR: /* + [c, -] */
4210 in_p = ! in_p, low = arg1, high = 0;
4212 case LT_EXPR: /* - [c, -] */
4213 low = arg1, high = 0;
4215 case LE_EXPR: /* + [-, c] */
4216 in_p = ! in_p, low = 0, high = arg1;
4222 /* If this is an unsigned comparison, we also know that EXP is
4223 greater than or equal to zero. We base the range tests we make
4224 on that fact, so we record it here so we can parse existing
4225 range tests. We test arg0_type since often the return type
4226 of, e.g. EQ_EXPR, is boolean. */
4227 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
4229 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4231 build_int_cst (arg0_type, 0),
4235 in_p = n_in_p, low = n_low, high = n_high;
4237 /* If the high bound is missing, but we have a nonzero low
4238 bound, reverse the range so it goes from zero to the low bound minus 1. */
4240 if (high == 0 && low && ! integer_zerop (low))
4243 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
4244 integer_one_node, 0);
4245 low = build_int_cst (arg0_type, 0);
4253 /* (-x) IN [a,b] -> x in [-b, -a] */
4254 n_low = range_binop (MINUS_EXPR, exp_type,
4255 build_int_cst (exp_type, 0),
4257 n_high = range_binop (MINUS_EXPR, exp_type,
4258 build_int_cst (exp_type, 0),
4260 low = n_low, high = n_high;
4266 exp = build2 (MINUS_EXPR, exp_type, negate_expr (arg0),
4267 build_int_cst (exp_type, 1));
4270 case PLUS_EXPR: case MINUS_EXPR:
4271 if (TREE_CODE (arg1) != INTEGER_CST)
4274 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
4275 move a constant to the other side. */
4276 if (!TYPE_UNSIGNED (arg0_type)
4277 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4280 /* If EXP is signed, any overflow in the computation is undefined,
4281 so we don't worry about it so long as our computations on
4282 the bounds don't overflow. For unsigned, overflow is defined
4283 and this is exactly the right thing. */
4284 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4285 arg0_type, low, 0, arg1, 0);
4286 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4287 arg0_type, high, 1, arg1, 0);
4288 if ((n_low != 0 && TREE_OVERFLOW (n_low))
4289 || (n_high != 0 && TREE_OVERFLOW (n_high)))
4292 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
4293 *strict_overflow_p = true;
4295 /* Check for an unsigned range which has wrapped around the maximum
4296 value thus making n_high < n_low, and normalize it. */
4297 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
4299 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
4300 integer_one_node, 0);
4301 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
4302 integer_one_node, 0);
4304 /* If the range is of the form +/- [ x+1, x ], we won't
4305 be able to normalize it. But then, it represents the
4306 whole range or the empty set, so make it +/- [ -, - ]. */
4308 if (tree_int_cst_equal (n_low, low)
4309 && tree_int_cst_equal (n_high, high))
4315 low = n_low, high = n_high;
4320 CASE_CONVERT: case NON_LVALUE_EXPR:
4321 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4324 if (! INTEGRAL_TYPE_P (arg0_type)
4325 || (low != 0 && ! int_fits_type_p (low, arg0_type))
4326 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4329 n_low = low, n_high = high;
4332 n_low = fold_convert (arg0_type, n_low);
4335 n_high = fold_convert (arg0_type, n_high);
4338 /* If we're converting arg0 from an unsigned type, to exp,
4339 a signed type, we will be doing the comparison as unsigned.
4340 The tests above have already verified that LOW and HIGH are both positive.
4343 So we have to ensure that we will handle large unsigned
4344 values the same way that the current signed bounds treat negative values. */
4347 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4351 /* For fixed-point modes, we need to pass the saturating flag
4352 as the 2nd parameter. */
4353 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
4354 equiv_type = lang_hooks.types.type_for_mode
4355 (TYPE_MODE (arg0_type),
4356 TYPE_SATURATING (arg0_type));
4358 equiv_type = lang_hooks.types.type_for_mode
4359 (TYPE_MODE (arg0_type), 1);
4361 /* A range without an upper bound is, naturally, unbounded.
4362 Since convert would have cropped a very large value, use
4363 the max value for the destination type. */
4365 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4366 : TYPE_MAX_VALUE (arg0_type);
4368 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4369 high_positive = fold_build2 (RSHIFT_EXPR, arg0_type,
4370 fold_convert (arg0_type,
4372 build_int_cst (arg0_type, 1));
4374 /* If the low bound is specified, "and" the range with the
4375 range for which the original unsigned value will be positive. */
4379 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4380 1, n_low, n_high, 1,
4381 fold_convert (arg0_type,
4386 in_p = (n_in_p == in_p);
4390 /* Otherwise, "or" the range with the range of the input
4391 that will be interpreted as negative. */
4392 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4393 0, n_low, n_high, 1,
4394 fold_convert (arg0_type,
4399 in_p = (in_p != n_in_p);
4404 low = n_low, high = n_high;
4414 /* If EXP is a constant, we can evaluate whether this is true or false. */
4415 if (TREE_CODE (exp) == INTEGER_CST)
4417 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4419 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4425 *pin_p = in_p, *plow = low, *phigh = high;
4429 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4430 type, TYPE, return an expression to test if EXP is in (or out of, depending
4431 on IN_P) the range. Return 0 if the test couldn't be created. */
4434 build_range_check (tree type, tree exp, int in_p, tree low, tree high)
4436 tree etype = TREE_TYPE (exp);
4439 #ifdef HAVE_canonicalize_funcptr_for_compare
4440 /* Disable this optimization for function pointer expressions
4441 on targets that require function pointer canonicalization. */
4442 if (HAVE_canonicalize_funcptr_for_compare
4443 && TREE_CODE (etype) == POINTER_TYPE
4444 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4450 value = build_range_check (type, exp, 1, low, high);
4452 return invert_truthvalue (value);
4457 if (low == 0 && high == 0)
4458 return build_int_cst (type, 1);
4461 return fold_build2 (LE_EXPR, type, exp,
4462 fold_convert (etype, high));
4465 return fold_build2 (GE_EXPR, type, exp,
4466 fold_convert (etype, low));
4468 if (operand_equal_p (low, high, 0))
4469 return fold_build2 (EQ_EXPR, type, exp,
4470 fold_convert (etype, low));
4472 if (integer_zerop (low))
4474 if (! TYPE_UNSIGNED (etype))
4476 etype = unsigned_type_for (etype);
4477 high = fold_convert (etype, high);
4478 exp = fold_convert (etype, exp);
4480 return build_range_check (type, exp, 1, 0, high);
4483 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4484 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4486 unsigned HOST_WIDE_INT lo;
HOST_WIDE_INT hi;
int prec;
4490 prec = TYPE_PRECISION (etype);
4491 if (prec <= HOST_BITS_PER_WIDE_INT)
{
hi = 0;
4494 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
}
else
{
4498 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
4499 lo = (unsigned HOST_WIDE_INT) -1;
}
4502 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
4504 if (TYPE_UNSIGNED (etype))
4506 tree signed_etype = signed_type_for (etype);
4507 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
etype
4509 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
else
4511 etype = signed_etype;
4512 exp = fold_convert (etype, exp);
4514 return fold_build2 (GT_EXPR, type, exp,
4515 build_int_cst (etype, 0));
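/* Illustration (added commentary, not in the original source): with
   ETYPE == unsigned char, the test (c >= 1 && c <= 127) has LOW == 1
   and HIGH == 127 == SCHAR_MAX, so the code above collapses it to the
   single comparison (signed char) c > 0.  */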
4519 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4520 This requires wrap-around arithmetic for the type of the expression.  */
4521 switch (TREE_CODE (etype))
4524 /* There is no requirement that LOW be within the range of ETYPE
4525 if the latter is a subtype. It must, however, be within the base
4526 type of ETYPE. So be sure we do the subtraction in that type. */
4527 if (TREE_TYPE (etype))
4528 etype = TREE_TYPE (etype);
4533 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4534 TYPE_UNSIGNED (etype));
4541 /* If we don't have wrap-around arithmetic upfront, try to force it.  */
4542 if (TREE_CODE (etype) == INTEGER_TYPE
4543 && !TYPE_OVERFLOW_WRAPS (etype))
4545 tree utype, minv, maxv;
4547 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4548 for the type in question, as we rely on this here. */
4549 utype = unsigned_type_for (etype);
4550 maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
4551 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4552 integer_one_node, 1);
4553 minv = fold_convert (utype, TYPE_MIN_VALUE (etype));
4555 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
minv, 1, maxv, 1)))
etype = utype;
else
return 0;
4562 high = fold_convert (etype, high);
4563 low = fold_convert (etype, low);
4564 exp = fold_convert (etype, exp);
4566 value = const_binop (MINUS_EXPR, high, low, 0);
4569 if (POINTER_TYPE_P (etype))
4571 if (value != 0 && !TREE_OVERFLOW (value))
4573 low = fold_convert (sizetype, low);
4574 low = fold_build1 (NEGATE_EXPR, sizetype, low);
4575 return build_range_check (type,
4576 fold_build2 (POINTER_PLUS_EXPR, etype, exp, low),
4577 1, build_int_cst (etype, 0), value);
4582 if (value != 0 && !TREE_OVERFLOW (value))
4583 return build_range_check (type,
4584 fold_build2 (MINUS_EXPR, etype, exp, low),
4585 1, build_int_cst (etype, 0), value);
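/* Worked example (added commentary, not in the original source): for an
   unsigned int C, the range test (C >= 10 && C <= 20) is rewritten by
   the code above as (C - 10) <= 10, one unsigned comparison; values
   below 10 wrap around to huge numbers and so fail the test.  */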
4590 /* Return the predecessor of VAL in its type, handling the infinite case. */
4593 range_predecessor (tree val)
4595 tree type = TREE_TYPE (val);
4597 if (INTEGRAL_TYPE_P (type)
4598 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
return 0;
4601 return range_binop (MINUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4604 /* Return the successor of VAL in its type, handling the infinite case. */
4607 range_successor (tree val)
4609 tree type = TREE_TYPE (val);
4611 if (INTEGRAL_TYPE_P (type)
4612 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
return 0;
4615 return range_binop (PLUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
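/* Note (added commentary): both helpers return 0 when asked to step
   past the extreme value of the type, e.g. range_successor applied to
   INT_MAX yields 0, which callers treat as "no finite bound".  */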
4618 /* Given two ranges, see if we can merge them into one. Return 1 if we
4619 can, 0 if we can't. Set the output range into the specified parameters. */
4622 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4623 tree high0, int in1_p, tree low1, tree high1)
4631 int lowequal = ((low0 == 0 && low1 == 0)
4632 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4633 low0, 0, low1, 0)));
4634 int highequal = ((high0 == 0 && high1 == 0)
4635 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4636 high0, 1, high1, 1)));
4638 /* Make range 0 be the range that starts first, or ends last if they
4639 start at the same value. Swap them if it isn't. */
4640 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4643 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4644 high1, 1, high0, 1))))
4646 temp = in0_p, in0_p = in1_p, in1_p = temp;
4647 tem = low0, low0 = low1, low1 = tem;
4648 tem = high0, high0 = high1, high1 = tem;
4651 /* Now flag two cases, whether the ranges are disjoint or whether the
4652 second range is totally subsumed in the first. Note that the tests
4653 below are simplified by the ones above. */
4654 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4655 high0, 1, low1, 0));
4656 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4657 high1, 1, high0, 1));
4659 /* We now have four cases, depending on whether we are including or
4660 excluding the two ranges. */
4663 /* If they don't overlap, the result is false. If the second range
4664 is a subset it is the result. Otherwise, the range is from the start
4665 of the second to the end of the first. */
if (no_overlap)
4667 in_p = 0, low = high = 0;
else if (subset)
4669 in_p = 1, low = low1, high = high1;
else
4671 in_p = 1, low = low1, high = high0;
4674 else if (in0_p && ! in1_p)
4676 /* If they don't overlap, the result is the first range. If they are
4677 equal, the result is false. If the second range is a subset of the
4678 first, and the ranges begin at the same place, we go from just after
4679 the end of the second range to the end of the first. If the second
4680 range is not a subset of the first, or if it is a subset and both
4681 ranges end at the same place, the range starts at the start of the
4682 first range and ends just before the second range.
4683 Otherwise, we can't describe this as a single range. */
if (no_overlap)
4685 in_p = 1, low = low0, high = high0;
4686 else if (lowequal && highequal)
4687 in_p = 0, low = high = 0;
4688 else if (subset && lowequal)
4690 low = range_successor (high1);
4695 /* We are in the weird situation where high0 > high1 but
4696 high1 has no successor. Punt. */
4700 else if (! subset || highequal)
4703 high = range_predecessor (low1);
4707 /* low0 < low1 but low1 has no predecessor. Punt. */
4715 else if (! in0_p && in1_p)
4717 /* If they don't overlap, the result is the second range. If the second
4718 is a subset of the first, the result is false. Otherwise,
4719 the range starts just after the first range and ends at the
4720 end of the second. */
if (no_overlap)
4722 in_p = 1, low = low1, high = high1;
4723 else if (subset || highequal)
4724 in_p = 0, low = high = 0;
4727 low = range_successor (high0);
4732 /* high1 > high0 but high0 has no successor. Punt. */
4740 /* The case where we are excluding both ranges. Here the complex case
4741 is if they don't overlap. In that case, the only time we have a
4742 range is if they are adjacent. If the second is a subset of the
4743 first, the result is the first. Otherwise, the range to exclude
4744 starts at the beginning of the first range and ends at the end of the second.  */
4748 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4749 range_successor (high0),
1, low1, 0)))
4751 in_p = 0, low = low0, high = high1;
4754 /* Canonicalize - [min, x] into - [-, x]. */
4755 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4756 switch (TREE_CODE (TREE_TYPE (low0)))
4759 if (TYPE_PRECISION (TREE_TYPE (low0))
4760 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4764 if (tree_int_cst_equal (low0,
4765 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4769 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4770 && integer_zerop (low0))
4777 /* Canonicalize - [x, max] into - [x, -]. */
4778 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4779 switch (TREE_CODE (TREE_TYPE (high1)))
4782 if (TYPE_PRECISION (TREE_TYPE (high1))
4783 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4787 if (tree_int_cst_equal (high1,
4788 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4792 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4793 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4795 integer_one_node, 1)))
4802 /* The ranges might also be adjacent between the maximum and
4803 minimum values of the given type. For
4804 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4805 return + [x + 1, y - 1]. */
4806 if (low0 == 0 && high1 == 0)
4808 low = range_successor (high0);
4809 high = range_predecessor (low1);
4810 if (low == 0 || high == 0)
else if (subset)
4820 in_p = 0, low = low0, high = high0;
else
4822 in_p = 0, low = low0, high = high1;
4825 *pin_p = in_p, *plow = low, *phigh = high;
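/* Worked example (added commentary, not in the original source): with
   in0_p == in1_p == 1 the merge computes an intersection, so +[1, 10]
   and +[5, 20] combine to +[5, 10], while the disjoint +[1, 3] and
   +[5, 9] yield the always-false empty range.  */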
4830 /* Subroutine of fold, looking inside expressions of the form
4831 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4832 of the COND_EXPR. This function is being used also to optimize
4833 A op B ? C : A, by reversing the comparison first.
4835 Return a folded expression whose code is not a COND_EXPR
4836 anymore, or NULL_TREE if no folding opportunity is found. */
4839 fold_cond_expr_with_comparison (tree type, tree arg0, tree arg1, tree arg2)
4841 enum tree_code comp_code = TREE_CODE (arg0);
4842 tree arg00 = TREE_OPERAND (arg0, 0);
4843 tree arg01 = TREE_OPERAND (arg0, 1);
4844 tree arg1_type = TREE_TYPE (arg1);
4850 /* If we have A op 0 ? A : -A, consider applying the following transformations:
4853 A == 0? A : -A same as -A
4854 A != 0? A : -A same as A
4855 A >= 0? A : -A same as abs (A)
4856 A > 0? A : -A same as abs (A)
4857 A <= 0? A : -A same as -abs (A)
4858 A < 0? A : -A same as -abs (A)
4860 None of these transformations work for modes with signed
4861 zeros. If A is +/-0, the first two transformations will
4862 change the sign of the result (from +0 to -0, or vice
4863 versa). The last four will fix the sign of the result,
4864 even though the original expressions could be positive or
4865 negative, depending on the sign of A.
4867 Note that all these transformations are correct if A is
4868 NaN, since the two alternatives (A and -A) are also NaNs. */
4869 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4870 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
4871 ? real_zerop (arg01)
4872 : integer_zerop (arg01))
4873 && ((TREE_CODE (arg2) == NEGATE_EXPR
4874 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4875 /* In the case that A is of the form X-Y, '-A' (arg2) may
4876 have already been folded to Y-X, check for that. */
4877 || (TREE_CODE (arg1) == MINUS_EXPR
4878 && TREE_CODE (arg2) == MINUS_EXPR
4879 && operand_equal_p (TREE_OPERAND (arg1, 0),
4880 TREE_OPERAND (arg2, 1), 0)
4881 && operand_equal_p (TREE_OPERAND (arg1, 1),
4882 TREE_OPERAND (arg2, 0), 0))))
4887 tem = fold_convert (arg1_type, arg1);
4888 return pedantic_non_lvalue (fold_convert (type, negate_expr (tem)));
4891 return pedantic_non_lvalue (fold_convert (type, arg1));
4894 if (flag_trapping_math)
4899 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4900 arg1 = fold_convert (signed_type_for
4901 (TREE_TYPE (arg1)), arg1);
4902 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
4903 return pedantic_non_lvalue (fold_convert (type, tem));
4906 if (flag_trapping_math)
4910 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4911 arg1 = fold_convert (signed_type_for
4912 (TREE_TYPE (arg1)), arg1);
4913 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
4914 return negate_expr (fold_convert (type, tem));
4916 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4920 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4921 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4922 both transformations are correct when A is NaN: A != 0
4923 is then true, and A == 0 is false. */
4925 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4926 && integer_zerop (arg01) && integer_zerop (arg2))
4928 if (comp_code == NE_EXPR)
4929 return pedantic_non_lvalue (fold_convert (type, arg1));
4930 else if (comp_code == EQ_EXPR)
4931 return build_int_cst (type, 0);
4934 /* Try some transformations of A op B ? A : B.
4936 A == B? A : B same as B
4937 A != B? A : B same as A
4938 A >= B? A : B same as max (A, B)
4939 A > B? A : B same as max (B, A)
4940 A <= B? A : B same as min (A, B)
4941 A < B? A : B same as min (B, A)
4943 As above, these transformations don't work in the presence
4944 of signed zeros. For example, if A and B are zeros of
4945 opposite sign, the first two transformations will change
4946 the sign of the result. In the last four, the original
4947 expressions give different results for (A=+0, B=-0) and
4948 (A=-0, B=+0), but the transformed expressions do not.
4950 The first two transformations are correct if either A or B
4951 is a NaN. In the first transformation, the condition will
4952 be false, and B will indeed be chosen. In the case of the
4953 second transformation, the condition A != B will be true,
4954 and A will be chosen.
4956 The conversions to max() and min() are not correct if B is
4957 a number and A is not. The conditions in the original
4958 expressions will be false, so all four give B. The min()
4959 and max() versions would give a NaN instead. */
4960 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4961 && operand_equal_for_comparison_p (arg01, arg2, arg00)
4962 /* Avoid these transformations if the COND_EXPR may be used
4963 as an lvalue in the C++ front-end. PR c++/19199. */
4965 || (strcmp (lang_hooks.name, "GNU C++") != 0
4966 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
4967 || ! maybe_lvalue_p (arg1)
4968 || ! maybe_lvalue_p (arg2)))
4970 tree comp_op0 = arg00;
4971 tree comp_op1 = arg01;
4972 tree comp_type = TREE_TYPE (comp_op0);
4974 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4975 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4985 return pedantic_non_lvalue (fold_convert (type, arg2));
4987 return pedantic_non_lvalue (fold_convert (type, arg1));
4992 /* In C++ a ?: expression can be an lvalue, so put the
4993 operand which will be used if they are equal first
4994 so that we can convert this back to the
4995 corresponding COND_EXPR. */
4996 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4998 comp_op0 = fold_convert (comp_type, comp_op0);
4999 comp_op1 = fold_convert (comp_type, comp_op1);
5000 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
5001 ? fold_build2 (MIN_EXPR, comp_type, comp_op0, comp_op1)
5002 : fold_build2 (MIN_EXPR, comp_type, comp_op1, comp_op0);
5003 return pedantic_non_lvalue (fold_convert (type, tem));
5010 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
5012 comp_op0 = fold_convert (comp_type, comp_op0);
5013 comp_op1 = fold_convert (comp_type, comp_op1);
5014 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
5015 ? fold_build2 (MAX_EXPR, comp_type, comp_op0, comp_op1)
5016 : fold_build2 (MAX_EXPR, comp_type, comp_op1, comp_op0);
5017 return pedantic_non_lvalue (fold_convert (type, tem));
5021 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
5022 return pedantic_non_lvalue (fold_convert (type, arg2));
5025 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
5026 return pedantic_non_lvalue (fold_convert (type, arg1));
5029 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5034 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
5035 we might still be able to simplify this. For example,
5036 if C1 is one less or one more than C2, this might have started
5037 out as a MIN or MAX and been transformed by this function.
5038 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
5040 if (INTEGRAL_TYPE_P (type)
5041 && TREE_CODE (arg01) == INTEGER_CST
5042 && TREE_CODE (arg2) == INTEGER_CST)
5046 /* We can replace A with C1 in this case. */
5047 arg1 = fold_convert (type, arg01);
5048 return fold_build3 (COND_EXPR, type, arg0, arg1, arg2);
5051 /* If C1 is C2 + 1, this is min(A, C2). */
5052 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
5054 && operand_equal_p (arg01,
5055 const_binop (PLUS_EXPR, arg2,
5056 build_int_cst (type, 1), 0),
5058 return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
5060 fold_convert (type, arg1),
5065 /* If C1 is C2 - 1, this is min(A, C2). */
5066 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
5068 && operand_equal_p (arg01,
5069 const_binop (MINUS_EXPR, arg2,
5070 build_int_cst (type, 1), 0),
5072 return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
5074 fold_convert (type, arg1),
5079 /* If C1 is C2 - 1, this is max(A, C2). */
5080 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
5082 && operand_equal_p (arg01,
5083 const_binop (MINUS_EXPR, arg2,
5084 build_int_cst (type, 1), 0),
5086 return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
5088 fold_convert (type, arg1),
5093 /* If C1 is C2 + 1, this is max(A, C2). */
5094 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
5096 && operand_equal_p (arg01,
5097 const_binop (PLUS_EXPR, arg2,
5098 build_int_cst (type, 1), 0),
5100 return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
5102 fold_convert (type, arg1),
5116 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
5117 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
5118 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
false) >= 2)
#endif
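/* Added note: on targets whose BRANCH_COST is at least 2, the folders
   below use this macro to replace short-circuit TRUTH_ANDIF/ORIF with
   plain TRUTH_AND/OR, evaluating both simple operands unconditionally
   to avoid a branch.  */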
5122 /* EXP is some logical combination of boolean tests. See if we can
5123 merge it into some range test. Return the new tree if so. */
5126 fold_range_test (enum tree_code code, tree type, tree op0, tree op1)
5128 int or_op = (code == TRUTH_ORIF_EXPR
5129 || code == TRUTH_OR_EXPR);
5130 int in0_p, in1_p, in_p;
5131 tree low0, low1, low, high0, high1, high;
5132 bool strict_overflow_p = false;
5133 tree lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
5134 tree rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
5136 const char * const warnmsg = G_("assuming signed overflow does not occur "
5137 "when simplifying range test");
5139 /* If this is an OR operation, invert both sides; we will invert
5140 again at the end. */
if (or_op)
5142 in0_p = ! in0_p, in1_p = ! in1_p;
5144 /* If both expressions are the same, if we can merge the ranges, and we
5145 can build the range test, return it or it inverted. If one of the
5146 ranges is always true or always false, consider it to be the same
5147 expression as the other. */
5148 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
5149 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
5151 && 0 != (tem = (build_range_check (type,
5153 : rhs != 0 ? rhs : integer_zero_node,
5156 if (strict_overflow_p)
5157 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
5158 return or_op ? invert_truthvalue (tem) : tem;
5161 /* On machines where branches are expensive, if this is a
5162 short-circuited branch and the underlying object on both sides
5163 is the same, make a non-short-circuit operation. */
5164 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
5165 && lhs != 0 && rhs != 0
5166 && (code == TRUTH_ANDIF_EXPR
5167 || code == TRUTH_ORIF_EXPR)
5168 && operand_equal_p (lhs, rhs, 0))
5170 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
5171 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
5172 which cases we can't do this. */
5173 if (simple_operand_p (lhs))
5174 return build2 (code == TRUTH_ANDIF_EXPR
5175 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5178 else if (lang_hooks.decls.global_bindings_p () == 0
5179 && ! CONTAINS_PLACEHOLDER_P (lhs))
5181 tree common = save_expr (lhs);
5183 if (0 != (lhs = build_range_check (type, common,
5184 or_op ? ! in0_p : in0_p,
5186 && (0 != (rhs = build_range_check (type, common,
5187 or_op ? ! in1_p : in1_p,
5190 if (strict_overflow_p)
5191 fold_overflow_warning (warnmsg,
5192 WARN_STRICT_OVERFLOW_COMPARISON);
5193 return build2 (code == TRUTH_ANDIF_EXPR
5194 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5203 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
5204 bit value. Arrange things so the extra bits will be set to zero if and
5205 only if C is sign-extended to its full width.  If MASK is nonzero,
5206 it is an INTEGER_CST that should be AND'ed with the extra bits. */
5209 unextend (tree c, int p, int unsignedp, tree mask)
5211 tree type = TREE_TYPE (c);
5212 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
5215 if (p == modesize || unsignedp)
return c;
5218 /* We work by getting just the sign bit into the low-order bit, then
5219 into the high-order bit, then sign-extend.  We then XOR that value with C.  */
5221 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
5222 temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
5224 /* We must use a signed type in order to get an arithmetic right shift.
5225 However, we must also avoid introducing accidental overflows, so that
5226 a subsequent call to integer_zerop will work. Hence we must
5227 do the type conversion here. At this point, the constant is either
5228 zero or one, and the conversion to a signed type can never overflow.
5229 We could get an overflow if this conversion is done anywhere else. */
5230 if (TYPE_UNSIGNED (type))
5231 temp = fold_convert (signed_type_for (type), temp);
5233 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
5234 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
if (mask != 0)
5236 temp = const_binop (BIT_AND_EXPR, temp,
5237 fold_convert (TREE_TYPE (c), mask), 0);
5238 /* If necessary, convert the type back to match the type of C. */
5239 if (TYPE_UNSIGNED (type))
5240 temp = fold_convert (type, temp);
5242 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
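/* Illustration (added commentary, not in the original source): for a
   signed 8-bit field in a 32-bit mode and MASK of zero, unextend maps
   the constant 0x80 to 0xffffff80, i.e. it sign-extends the P-bit
   value so the later masked comparison sees matching bit patterns.  */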
5245 /* Find ways of folding logical expressions of LHS and RHS:
5246 Try to merge two comparisons to the same innermost item.
5247 Look for range tests like "ch >= '0' && ch <= '9'".
5248 Look for combinations of simple terms on machines with expensive branches
5249 and evaluate the RHS unconditionally.
5251 For example, if we have p->a == 2 && p->b == 4 and we can make an
5252 object large enough to span both A and B, we can do this with a comparison
5253 against the object ANDed with the a mask.
5255 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5256 operations to do this with one comparison.
5258 We check for both normal comparisons and the BIT_AND_EXPRs made by this
5259 function and the one above.
5261 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5262 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5264 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its two operands.
5267 We return the simplified tree or 0 if no optimization is possible. */
5270 fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs)
5272 /* If this is the "or" of two comparisons, we can do something if
5273 the comparisons are NE_EXPR. If this is the "and", we can do something
5274 if the comparisons are EQ_EXPR. I.e.,
5275 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5277 WANTED_CODE is this operation code. For single bit fields, we can
5278 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5279 comparison for one-bit fields. */
5281 enum tree_code wanted_code;
5282 enum tree_code lcode, rcode;
5283 tree ll_arg, lr_arg, rl_arg, rr_arg;
5284 tree ll_inner, lr_inner, rl_inner, rr_inner;
5285 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5286 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5287 HOST_WIDE_INT xll_bitpos, xrl_bitpos;
5288 HOST_WIDE_INT lnbitsize, lnbitpos;
5289 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5290 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5291 enum machine_mode lnmode;
5292 tree ll_mask, lr_mask, rl_mask, rr_mask;
5293 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5294 tree l_const, r_const;
5295 tree lntype, result;
5296 int first_bit, end_bit;
int volatilep;
5298 tree orig_lhs = lhs, orig_rhs = rhs;
5299 enum tree_code orig_code = code;
5301 /* Start by getting the comparison codes. Fail if anything is volatile.
5302 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5303 it were surrounded with a NE_EXPR. */
5305 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5308 lcode = TREE_CODE (lhs);
5309 rcode = TREE_CODE (rhs);
5311 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5313 lhs = build2 (NE_EXPR, truth_type, lhs,
5314 build_int_cst (TREE_TYPE (lhs), 0));
5318 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5320 rhs = build2 (NE_EXPR, truth_type, rhs,
5321 build_int_cst (TREE_TYPE (rhs), 0));
5325 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5326 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5329 ll_arg = TREE_OPERAND (lhs, 0);
5330 lr_arg = TREE_OPERAND (lhs, 1);
5331 rl_arg = TREE_OPERAND (rhs, 0);
5332 rr_arg = TREE_OPERAND (rhs, 1);
5334 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5335 if (simple_operand_p (ll_arg)
5336 && simple_operand_p (lr_arg))
5339 if (operand_equal_p (ll_arg, rl_arg, 0)
5340 && operand_equal_p (lr_arg, rr_arg, 0))
5342 result = combine_comparisons (code, lcode, rcode,
5343 truth_type, ll_arg, lr_arg);
5347 else if (operand_equal_p (ll_arg, rr_arg, 0)
5348 && operand_equal_p (lr_arg, rl_arg, 0))
5350 result = combine_comparisons (code, lcode,
5351 swap_tree_comparison (rcode),
5352 truth_type, ll_arg, lr_arg);
5358 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5359 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5361 /* If the RHS can be evaluated unconditionally and its operands are
5362 simple, it wins to evaluate the RHS unconditionally on machines
5363 with expensive branches. In this case, this isn't a comparison
5364 that can be merged. Avoid doing this if the RHS is a floating-point
5365 comparison since those can trap. */
5367 if (BRANCH_COST (optimize_function_for_speed_p (cfun),
false) >= 2
5369 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5370 && simple_operand_p (rl_arg)
5371 && simple_operand_p (rr_arg))
5373 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5374 if (code == TRUTH_OR_EXPR
5375 && lcode == NE_EXPR && integer_zerop (lr_arg)
5376 && rcode == NE_EXPR && integer_zerop (rr_arg)
5377 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5378 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5379 return build2 (NE_EXPR, truth_type,
5380 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5382 build_int_cst (TREE_TYPE (ll_arg), 0));
5384 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5385 if (code == TRUTH_AND_EXPR
5386 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5387 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5388 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5389 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5390 return build2 (EQ_EXPR, truth_type,
5391 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5393 build_int_cst (TREE_TYPE (ll_arg), 0));
5395 if (LOGICAL_OP_NON_SHORT_CIRCUIT)
5397 if (code != orig_code || lhs != orig_lhs || rhs != orig_rhs)
5398 return build2 (code, truth_type, lhs, rhs);
5403 /* See if the comparisons can be merged.  Then get all the parameters for each side.  */
5406 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5407 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5411 ll_inner = decode_field_reference (ll_arg,
5412 &ll_bitsize, &ll_bitpos, &ll_mode,
5413 &ll_unsignedp, &volatilep, &ll_mask,
5415 lr_inner = decode_field_reference (lr_arg,
5416 &lr_bitsize, &lr_bitpos, &lr_mode,
5417 &lr_unsignedp, &volatilep, &lr_mask,
5419 rl_inner = decode_field_reference (rl_arg,
5420 &rl_bitsize, &rl_bitpos, &rl_mode,
5421 &rl_unsignedp, &volatilep, &rl_mask,
5423 rr_inner = decode_field_reference (rr_arg,
5424 &rr_bitsize, &rr_bitpos, &rr_mode,
5425 &rr_unsignedp, &volatilep, &rr_mask,
5428 /* The inner operation on the lhs of each comparison must be the same
5429 if we are to be able to do anything.  Then see if we have constants.
5430 If not, the same must be true for the rhs.  */
5432 if (volatilep || ll_inner == 0 || rl_inner == 0
5433 || ! operand_equal_p (ll_inner, rl_inner, 0))
5436 if (TREE_CODE (lr_arg) == INTEGER_CST
5437 && TREE_CODE (rr_arg) == INTEGER_CST)
5438 l_const = lr_arg, r_const = rr_arg;
5439 else if (lr_inner == 0 || rr_inner == 0
5440 || ! operand_equal_p (lr_inner, rr_inner, 0))
return 0;
else
5443 l_const = r_const = 0;
5445 /* If either comparison code is not correct for our logical operation,
5446 fail. However, we can convert a one-bit comparison against zero into
5447 the opposite comparison against that bit being set in the field. */
5449 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5450 if (lcode != wanted_code)
5452 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5454 /* Make the left operand unsigned, since we are only interested
5455 in the value of one bit.  Otherwise we are doing the wrong thing below.  */
5464 /* This is analogous to the code for l_const above. */
5465 if (rcode != wanted_code)
5467 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5476 /* See if we can find a mode that contains both fields being compared on
5477 the left. If we can't, fail. Otherwise, update all constants and masks
5478 to be relative to a field of that size. */
5479 first_bit = MIN (ll_bitpos, rl_bitpos);
5480 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5481 lnmode = get_best_mode (end_bit - first_bit, first_bit,
5482 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode, volatilep);
5484 if (lnmode == VOIDmode)
5487 lnbitsize = GET_MODE_BITSIZE (lnmode);
5488 lnbitpos = first_bit & ~ (lnbitsize - 1);
5489 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5490 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5492 if (BYTES_BIG_ENDIAN)
5494 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5495 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5498 ll_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, ll_mask),
5499 size_int (xll_bitpos), 0);
5500 rl_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, rl_mask),
5501 size_int (xrl_bitpos), 0);
5505 l_const = fold_convert (lntype, l_const);
5506 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5507 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
5508 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5509 fold_build1 (BIT_NOT_EXPR,
5513 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5515 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5520 r_const = fold_convert (lntype, r_const);
5521 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5522 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
5523 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5524 fold_build1 (BIT_NOT_EXPR,
5528 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5530 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5534 /* Handle the case of comparisons with constants. If there is something in
5535 common between the masks, those bits of the constants must be the same.
5536 If not, the condition is always false. Test for this to avoid generating
5537 incorrect code below. */
5538 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
5539 if (! integer_zerop (result)
5540 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
5541 const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
5543 if (wanted_code == NE_EXPR)
5545 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5546 return constant_boolean_node (true, truth_type);
5550 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5551 return constant_boolean_node (false, truth_type);
5558 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a constant.  */
5562 optimize_minmax_comparison (enum tree_code code, tree type, tree op0, tree op1)
5565 enum tree_code op_code;
5568 int consts_equal, consts_lt;
5571 STRIP_SIGN_NOPS (arg0);
5573 op_code = TREE_CODE (arg0);
5574 minmax_const = TREE_OPERAND (arg0, 1);
5575 comp_const = fold_convert (TREE_TYPE (arg0), op1);
5576 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5577 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5578 inner = TREE_OPERAND (arg0, 0);
5580 /* If something does not permit us to optimize, return the original tree. */
5581 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5582 || TREE_CODE (comp_const) != INTEGER_CST
5583 || TREE_OVERFLOW (comp_const)
5584 || TREE_CODE (minmax_const) != INTEGER_CST
5585 || TREE_OVERFLOW (minmax_const))
5588 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5589 and GT_EXPR, doing the rest with recursive calls using logical simplifications.  */
5593 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5595 tree tem = optimize_minmax_comparison (invert_tree_comparison (code, false),
5598 return invert_truthvalue (tem);
5604 fold_build2 (TRUTH_ORIF_EXPR, type,
5605 optimize_minmax_comparison
5606 (EQ_EXPR, type, arg0, comp_const),
5607 optimize_minmax_comparison
5608 (GT_EXPR, type, arg0, comp_const));
5611 if (op_code == MAX_EXPR && consts_equal)
5612 /* MAX (X, 0) == 0 -> X <= 0 */
5613 return fold_build2 (LE_EXPR, type, inner, comp_const);
5615 else if (op_code == MAX_EXPR && consts_lt)
5616 /* MAX (X, 0) == 5 -> X == 5 */
5617 return fold_build2 (EQ_EXPR, type, inner, comp_const);
5619 else if (op_code == MAX_EXPR)
5620 /* MAX (X, 0) == -1 -> false */
5621 return omit_one_operand (type, integer_zero_node, inner);
5623 else if (consts_equal)
5624 /* MIN (X, 0) == 0 -> X >= 0 */
5625 return fold_build2 (GE_EXPR, type, inner, comp_const);
5628 /* MIN (X, 0) == 5 -> false */
5629 return omit_one_operand (type, integer_zero_node, inner);
5632 /* MIN (X, 0) == -1 -> X == -1 */
5633 return fold_build2 (EQ_EXPR, type, inner, comp_const);
5636 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5637 /* MAX (X, 0) > 0 -> X > 0
5638 MAX (X, 0) > 5 -> X > 5 */
5639 return fold_build2 (GT_EXPR, type, inner, comp_const);
5641 else if (op_code == MAX_EXPR)
5642 /* MAX (X, 0) > -1 -> true */
5643 return omit_one_operand (type, integer_one_node, inner);
5645 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5646 /* MIN (X, 0) > 0 -> false
5647 MIN (X, 0) > 5 -> false */
5648 return omit_one_operand (type, integer_zero_node, inner);
5651 /* MIN (X, 0) > -1 -> X > -1 */
5652 return fold_build2 (GT_EXPR, type, inner, comp_const);
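/* Example (added commentary, not in the original source): a test such
   as MAX (X, 0) <= 5 is handled by the inversion at the top of the
   switch: it becomes !(MAX (X, 0) > 5), the GT case folds that to
   !(X > 5), and inverting again yields X <= 5.  */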
5659 /* T is an integer expression that is being multiplied, divided, or taken a
5660 modulus (CODE says which and what kind of divide or modulus) by a
5661 constant C. See if we can eliminate that operation by folding it with
5662 other operations already in T. WIDE_TYPE, if non-null, is a type that
5663 should be used for the computation if wider than our type.
5665 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5666 (X * 2) + (Y * 4). We must, however, be assured that either the original
5667 expression would not overflow or that overflow is undefined for the type
5668 in the language in question.
5670 We also canonicalize (X + 7) * 4 into X * 4 + 28 in the hope that either
5671 the machine has a multiply-accumulate insn or that this is part of an
5672 addressing calculation.
5674 If we return a non-null expression, it is an equivalent form of the
5675 original computation, but need not be in the original type.
5677 We set *STRICT_OVERFLOW_P to true if the return value depends on
5678 signed overflow being undefined. Otherwise we do not change
5679 *STRICT_OVERFLOW_P. */
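/* Examples (added commentary, not in the original source): with C == 2
   and CODE == TRUNC_DIV_EXPR, the tree X * 6 simplifies to X * 3; with
   C == 6, X * 2 becomes X / 3.  For ordinary signed types these rely on
   overflow being undefined, which is why *STRICT_OVERFLOW_P is set.  */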
5682 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
5683 bool *strict_overflow_p)
5685 /* To avoid exponential search depth, refuse to allow recursion past
5686 three levels. Beyond that (1) it's highly unlikely that we'll find
5687 something interesting and (2) we've probably processed it before
5688 when we built the inner expression. */
5697 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
5704 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
5705 bool *strict_overflow_p)
5707 tree type = TREE_TYPE (t);
5708 enum tree_code tcode = TREE_CODE (t);
5709 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5710 > GET_MODE_SIZE (TYPE_MODE (type)))
5711 ? wide_type : type);
5713 int same_p = tcode == code;
5714 tree op0 = NULL_TREE, op1 = NULL_TREE;
5715 bool sub_strict_overflow_p;
5717 /* Don't deal with constants of zero here; they confuse the code below. */
5718 if (integer_zerop (c))
5721 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5722 op0 = TREE_OPERAND (t, 0);
5724 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5725 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5727 /* Note that we need not handle conditional operations here since fold
5728 already handles those cases. So just do arithmetic here. */
5732 /* For a constant, we can always simplify if we are a multiply
5733 or (for divide and modulus) if it is a multiple of our constant. */
5734 if (code == MULT_EXPR
5735 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
5736 return const_binop (code, fold_convert (ctype, t),
5737 fold_convert (ctype, c), 0);
5740 CASE_CONVERT: case NON_LVALUE_EXPR:
5741 /* If op0 is an expression ... */
5742 if ((COMPARISON_CLASS_P (op0)
5743 || UNARY_CLASS_P (op0)
5744 || BINARY_CLASS_P (op0)
5745 || VL_EXP_CLASS_P (op0)
5746 || EXPRESSION_CLASS_P (op0))
5747 /* ... and has wrapping overflow, and its type is smaller
5748 than ctype, then we cannot pass through as widening. */
5749 && ((TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0))
5750 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
5751 && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
5752 && (TYPE_PRECISION (ctype)
5753 > TYPE_PRECISION (TREE_TYPE (op0))))
5754 /* ... or this is a truncation (t is narrower than op0),
5755 then we cannot pass through this narrowing. */
5756 || (TYPE_PRECISION (type)
5757 < TYPE_PRECISION (TREE_TYPE (op0)))
5758 /* ... or signedness changes for division or modulus,
5759 then we cannot pass through this conversion. */
5760 || (code != MULT_EXPR
5761 && (TYPE_UNSIGNED (ctype)
5762 != TYPE_UNSIGNED (TREE_TYPE (op0))))
5763 /* ... or has undefined overflow while the converted to
5764 type has not, we cannot do the operation in the inner type
5765 as that would introduce undefined overflow. */
5766 || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
5767 && !TYPE_OVERFLOW_UNDEFINED (type))))
5770 /* Pass the constant down and see if we can make a simplification. If
5771 we can, replace this expression with the inner simplification for
5772 possible later conversion to our or some other type. */
5773 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5774 && TREE_CODE (t2) == INTEGER_CST
5775 && !TREE_OVERFLOW (t2)
5776 && (0 != (t1 = extract_muldiv (op0, t2, code,
5778 ? ctype : NULL_TREE,
5779 strict_overflow_p))))
5784 /* If widening the type changes it from signed to unsigned, then we
5785 must avoid building ABS_EXPR itself as unsigned. */
5786 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
5788 tree cstype = (*signed_type_for) (ctype);
5789 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
5792 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
5793 return fold_convert (ctype, t1);
5797 /* If the constant is negative, we cannot simplify this. */
5798 if (tree_int_cst_sgn (c) == -1)
5802 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
5804 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
5807 case MIN_EXPR: case MAX_EXPR:
5808 /* If widening the type changes the signedness, then we can't perform
5809 this optimization as that changes the result. */
5810 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5813 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5814 sub_strict_overflow_p = false;
5815 if ((t1 = extract_muldiv (op0, c, code, wide_type,
5816 &sub_strict_overflow_p)) != 0
5817 && (t2 = extract_muldiv (op1, c, code, wide_type,
5818 &sub_strict_overflow_p)) != 0)
5820 if (tree_int_cst_sgn (c) < 0)
5821 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5822 if (sub_strict_overflow_p)
5823 *strict_overflow_p = true;
5824 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5825 fold_convert (ctype, t2));
5829 case LSHIFT_EXPR: case RSHIFT_EXPR:
5830 /* If the second operand is constant, this is a multiplication
5831 or floor division, by a power of two, so we can treat it that
5832 way unless the multiplier or divisor overflows. Signed
5833 left-shift overflow is implementation-defined rather than
5834 undefined in C90, so do not convert signed left shift into multiplication.  */
5836 if (TREE_CODE (op1) == INTEGER_CST
5837 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
5838 /* const_binop may not detect overflow correctly,
5839 so check for it explicitly here. */
5840 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
5841 && TREE_INT_CST_HIGH (op1) == 0
5842 && 0 != (t1 = fold_convert (ctype,
5843 const_binop (LSHIFT_EXPR,
5846 && !TREE_OVERFLOW (t1))
5847 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5848 ? MULT_EXPR : FLOOR_DIV_EXPR,
5849 ctype, fold_convert (ctype, op0), t1),
5850 c, code, wide_type, strict_overflow_p);
5853 case PLUS_EXPR: case MINUS_EXPR:
5854 /* See if we can eliminate the operation on both sides. If we can, we
5855 can return a new PLUS or MINUS. If we can't, the only remaining
5856 cases where we can do anything are if the second operand is a constant.  */
5858 sub_strict_overflow_p = false;
5859 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
5860 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
5861 if (t1 != 0 && t2 != 0
5862 && (code == MULT_EXPR
5863 /* If not multiplication, we can only do this if both operands
5864 are divisible by c. */
5865 || (multiple_of_p (ctype, op0, c)
5866 && multiple_of_p (ctype, op1, c))))
5868 if (sub_strict_overflow_p)
5869 *strict_overflow_p = true;
5870 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5871 fold_convert (ctype, t2));
5874 /* If this was a subtraction, negate OP1 and set it to be an addition.
5875 This simplifies the logic below. */
5876 if (tcode == MINUS_EXPR)
5877 tcode = PLUS_EXPR, op1 = negate_expr (op1);
5879 if (TREE_CODE (op1) != INTEGER_CST)
5882 /* If either OP1 or C is negative, this optimization is not safe for
5883 some of the division and remainder types while for others we need
5884 to change the code. */
5885 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5887 if (code == CEIL_DIV_EXPR)
5888 code = FLOOR_DIV_EXPR;
5889 else if (code == FLOOR_DIV_EXPR)
5890 code = CEIL_DIV_EXPR;
5891 else if (code != MULT_EXPR
5892 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5896 /* If it's a multiply or a division/modulus operation of a multiple
5897 of our constant, do the operation and verify it doesn't overflow. */
5898 if (code == MULT_EXPR
5899 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5901 op1 = const_binop (code, fold_convert (ctype, op1),
5902 fold_convert (ctype, c), 0);
5903 /* We allow the constant to overflow with wrapping semantics. */
if (op1 == 0
5905 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
break;
5911 /* If we have an unsigned type that is not a sizetype, we cannot widen
5912 the operation since it will change the result if the original
5913 computation overflowed. */
5914 if (TYPE_UNSIGNED (ctype)
5915 && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
5919 /* If we were able to eliminate our operation from the first side,
5920 apply our operation to the second side and reform the PLUS. */
5921 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5922 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
5924 /* The last case is if we are a multiply. In that case, we can
5925 apply the distributive law to commute the multiply and addition
5926 if the multiplication of the constants doesn't overflow. */
5927 if (code == MULT_EXPR)
5928 return fold_build2 (tcode, ctype,
5929 fold_build2 (code, ctype,
5930 fold_convert (ctype, op0),
5931 fold_convert (ctype, c)),
5937 /* We have a special case here if we are doing something like
5938 (C * 8) % 4 since we know that's zero. */
5939 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
5940 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
5941 /* If the multiplication can overflow we cannot optimize this.
5942 ??? Until we can properly mark individual operations as
5943 not overflowing we need to treat sizetype special here as
5944 stor-layout relies on this optimization to make
5945 DECL_FIELD_BIT_OFFSET always a constant. */
5946 && (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
5947 || (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
5948 && TYPE_IS_SIZETYPE (TREE_TYPE (t))))
5949 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
5950 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5952 *strict_overflow_p = true;
5953 return omit_one_operand (type, integer_zero_node, op0);
5956 /* ... fall through ... */
5958 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
5959 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
5960 /* If we can extract our operation from the LHS, do so and return a
5961 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
5962 do something only if the second operand is a constant. */
5964 && (t1 = extract_muldiv (op0, c, code, wide_type,
5965 strict_overflow_p)) != 0)
5966 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5967 fold_convert (ctype, op1));
5968 else if (tcode == MULT_EXPR && code == MULT_EXPR
5969 && (t1 = extract_muldiv (op1, c, code, wide_type,
5970 strict_overflow_p)) != 0)
5971 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5972 fold_convert (ctype, t1));
5973 else if (TREE_CODE (op1) != INTEGER_CST)
5976 /* If these are the same operation types, we can associate them
5977 assuming no overflow. */
if (tcode == code
5979 && 0 != (t1 = int_const_binop (MULT_EXPR, fold_convert (ctype, op1),
5980 fold_convert (ctype, c), 1))
5981 && 0 != (t1 = force_fit_type_double (ctype, TREE_INT_CST_LOW (t1),
5982 TREE_INT_CST_HIGH (t1),
5983 (TYPE_UNSIGNED (ctype)
5984 && tcode != MULT_EXPR) ? -1 : 1,
5985 TREE_OVERFLOW (t1)))
5986 && !TREE_OVERFLOW (t1))
5987 return fold_build2 (tcode, ctype, fold_convert (ctype, op0), t1);
5989 /* If these operations "cancel" each other, we have the main
5990 optimizations of this pass, which occur when either constant is a
5991 multiple of the other, in which case we replace this with either an
5992 operation of CODE or TCODE.
5994 If we have an unsigned type that is not a sizetype, we cannot do
5995 this since it will change the result if the original computation overflowed.  */
5997 if ((TYPE_OVERFLOW_UNDEFINED (ctype)
5998 || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
5999 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
6000 || (tcode == MULT_EXPR
6001 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
6002 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
6003 && code != MULT_EXPR)))
6005 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
6007 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6008 *strict_overflow_p = true;
6009 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6010 fold_convert (ctype,
6011 const_binop (TRUNC_DIV_EXPR,
6014 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
6016 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6017 *strict_overflow_p = true;
6018 return fold_build2 (code, ctype, fold_convert (ctype, op0),
6019 fold_convert (ctype,
6020 const_binop (TRUNC_DIV_EXPR,
6033 /* Return a node which has the indicated constant VALUE (either 0 or
6034 1), and is of the indicated TYPE. */
6037 constant_boolean_node (int value, tree type)
6039 if (type == integer_type_node)
6040 return value ? integer_one_node : integer_zero_node;
6041 else if (type == boolean_type_node)
6042 return value ? boolean_true_node : boolean_false_node;
6044 return build_int_cst (type, value);
6048 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
6049 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'.  Here
6050 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
6051 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
6052 COND is the first argument to CODE; otherwise (as in the example
6053 given here), it is the second argument. TYPE is the type of the
6054 original expression.  Return NULL_TREE if no simplification is possible.  */
6058 fold_binary_op_with_conditional_arg (enum tree_code code,
6059 tree type, tree op0, tree op1,
6060 tree cond, tree arg, int cond_first_p)
6062 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
6063 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
6064 tree test, true_value, false_value;
6065 tree lhs = NULL_TREE;
6066 tree rhs = NULL_TREE;
6068 /* This transformation is only worthwhile if we don't have to wrap
6069 arg in a SAVE_EXPR, and the operation can be simplified on at least
6070 one of the branches once it's pushed inside the COND_EXPR.  */
6071 if (!TREE_CONSTANT (arg))
6074 if (TREE_CODE (cond) == COND_EXPR)
6076 test = TREE_OPERAND (cond, 0);
6077 true_value = TREE_OPERAND (cond, 1);
6078 false_value = TREE_OPERAND (cond, 2);
6079 /* If this operand throws an exception, then it does not make
6080 sense to try to perform a logical or arithmetic operation involving it.  */
6082 if (VOID_TYPE_P (TREE_TYPE (true_value)))
6084 if (VOID_TYPE_P (TREE_TYPE (false_value)))
6089 tree testtype = TREE_TYPE (cond);
6091 true_value = constant_boolean_node (true, testtype);
6092 false_value = constant_boolean_node (false, testtype);
6095 arg = fold_convert (arg_type, arg);
6098 true_value = fold_convert (cond_type, true_value);
6100 lhs = fold_build2 (code, type, true_value, arg);
6102 lhs = fold_build2 (code, type, arg, true_value);
6106 false_value = fold_convert (cond_type, false_value);
6108 rhs = fold_build2 (code, type, false_value, arg);
6110 rhs = fold_build2 (code, type, arg, false_value);
6113 test = fold_build3 (COND_EXPR, type, test, lhs, rhs);
6114 return fold_convert (type, test);
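/* Example (added commentary, not in the original source): folding
   1 + (x < y) pushes the addition into both arms and produces
   (x < y) ? 2 : 1, leaving no arithmetic on the comparison result.  */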
6118 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6120 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6121 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6122 ADDEND is the same as X.
6124 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6125 and finite. The problematic cases are when X is zero, and its mode
6126 has signed zeros. In the case of rounding towards -infinity,
6127 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6128 modes, X + 0 is not the same as X because -0 + 0 is 0. */
6131 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
6133 if (!real_zerop (addend))
6136 /* Don't allow the fold with -fsignaling-nans. */
6137 if (HONOR_SNANS (TYPE_MODE (type)))
6140 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6141 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
6144 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6145 if (TREE_CODE (addend) == REAL_CST
6146 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6149 /* The mode has signed zeros, and we have to honor their sign.
6150 In this situation, there is only one case we can return true for.
6151 X - 0 is the same as X unless rounding towards -infinity is supported.  */
6153 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
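/* Examples (added commentary, not in the original source): when signed
   zeros are honored, x + 0.0 must be kept, since (-0.0) + 0.0 is +0.0;
   x - 0.0 still folds to x unless rounding towards -infinity may be in
   effect, where 0.0 - 0.0 is -0.0.  */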
6156 /* Subroutine of fold() that checks comparisons of built-in math
6157 functions against real constants.
6159 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
6160 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
6161 is the type of the result and ARG0 and ARG1 are the operands of the
6162 comparison. ARG1 must be a TREE_REAL_CST.
6164 The function returns the constant folded tree if a simplification
6165 can be made, and NULL_TREE otherwise. */
6168 fold_mathfn_compare (enum built_in_function fcode, enum tree_code code,
6169 tree type, tree arg0, tree arg1)
6173 if (BUILTIN_SQRT_P (fcode))
6175 tree arg = CALL_EXPR_ARG (arg0, 0);
6176 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
6178 c = TREE_REAL_CST (arg1);
6179 if (REAL_VALUE_NEGATIVE (c))
6181 /* sqrt(x) < y is always false, if y is negative. */
6182 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
6183 return omit_one_operand (type, integer_zero_node, arg);
6185 /* sqrt(x) > y is always true, if y is negative and we
6186 don't care about NaNs, i.e. negative values of x. */
6187 if (code == NE_EXPR || !HONOR_NANS (mode))
6188 return omit_one_operand (type, integer_one_node, arg);
6190 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
6191 return fold_build2 (GE_EXPR, type, arg,
6192 build_real (TREE_TYPE (arg), dconst0));
6194 else if (code == GT_EXPR || code == GE_EXPR)
6198 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6199 real_convert (&c2, mode, &c2);
6201 if (REAL_VALUE_ISINF (c2))
6203 /* sqrt(x) > y is x == +Inf, when y is very large. */
6204 if (HONOR_INFINITIES (mode))
6205 return fold_build2 (EQ_EXPR, type, arg,
6206 build_real (TREE_TYPE (arg), c2));
6208 /* sqrt(x) > y is always false, when y is very large
6209 and we don't care about infinities. */
6210 return omit_one_operand (type, integer_zero_node, arg);
6213 /* sqrt(x) > c is the same as x > c*c. */
6214 return fold_build2 (code, type, arg,
6215 build_real (TREE_TYPE (arg), c2));
6217 else if (code == LT_EXPR || code == LE_EXPR)
6221 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6222 real_convert (&c2, mode, &c2);
6224 if (REAL_VALUE_ISINF (c2))
6226 /* sqrt(x) < y is always true, when y is a very large
6227 value and we don't care about NaNs or Infinities. */
6228 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
6229 return omit_one_operand (type, integer_one_node, arg);
6231 /* sqrt(x) < y is x != +Inf when y is very large and we
6232 don't care about NaNs. */
6233 if (! HONOR_NANS (mode))
6234 return fold_build2 (NE_EXPR, type, arg,
6235 build_real (TREE_TYPE (arg), c2));
6237 /* sqrt(x) < y is x >= 0 when y is very large and we
6238 don't care about Infinities. */
6239 if (! HONOR_INFINITIES (mode))
6240 return fold_build2 (GE_EXPR, type, arg,
6241 build_real (TREE_TYPE (arg), dconst0));
6243 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
6244 if (lang_hooks.decls.global_bindings_p () != 0
6245 || CONTAINS_PLACEHOLDER_P (arg))
return NULL_TREE;
6248 arg = save_expr (arg);
6249 return fold_build2 (TRUTH_ANDIF_EXPR, type,
6250 fold_build2 (GE_EXPR, type, arg,
6251 build_real (TREE_TYPE (arg),
6253 fold_build2 (NE_EXPR, type, arg,
6254 build_real (TREE_TYPE (arg),
6258 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
6259 if (! HONOR_NANS (mode))
6260 return fold_build2 (code, type, arg,
6261 build_real (TREE_TYPE (arg), c2));
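/* Example (added commentary, not in the original source): with
   -ffinite-math-only, sqrt (x) < 2.0 folds here to x < 4.0; when NaNs
   are honored, the code below builds x >= 0.0 && x < 4.0 instead.  */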
6263 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
6264 if (lang_hooks.decls.global_bindings_p () == 0
6265 && ! CONTAINS_PLACEHOLDER_P (arg))
6267 arg = save_expr (arg);
6268 return fold_build2 (TRUTH_ANDIF_EXPR, type,
6269 fold_build2 (GE_EXPR, type, arg,
6270 build_real (TREE_TYPE (arg),
6272 fold_build2 (code, type, arg,
6273 build_real (TREE_TYPE (arg),
6282 /* Subroutine of fold() that optimizes comparisons against Infinities,
6283 either +Inf or -Inf.
6285 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6286 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6287 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6289 The function returns the constant folded tree if a simplification
6290 can be made, and NULL_TREE otherwise. */
6293 fold_inf_compare (enum tree_code code, tree type, tree arg0, tree arg1)
6295 enum machine_mode mode;
6296 REAL_VALUE_TYPE max;
6300 mode = TYPE_MODE (TREE_TYPE (arg0));
6302 /* For negative infinity swap the sense of the comparison. */
6303 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
if (neg)
6305 code = swap_tree_comparison (code);

switch (code)
{
case GT_EXPR:
6310 /* x > +Inf is always false, if we ignore sNaNs.  */
6311 if (HONOR_SNANS (mode))
return NULL_TREE;
6313 return omit_one_operand (type, integer_zero_node, arg0);
case LE_EXPR:
6316 /* x <= +Inf is always true, if we don't care about NaNs.  */
6317 if (! HONOR_NANS (mode))
6318 return omit_one_operand (type, integer_one_node, arg0);
6320 /* x <= +Inf is the same as x == x, i.e. isfinite(x). */
6321 if (lang_hooks.decls.global_bindings_p () == 0
6322 && ! CONTAINS_PLACEHOLDER_P (arg0))
6324 arg0 = save_expr (arg0);
6325 return fold_build2 (EQ_EXPR, type, arg0, arg0);
6331 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6332 real_maxval (&max, neg, mode);
6333 return fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
6334 arg0, build_real (TREE_TYPE (arg0), max));
6337 /* x < +Inf is always equal to x <= DBL_MAX. */
6338 real_maxval (&max, neg, mode);
6339 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
6340 arg0, build_real (TREE_TYPE (arg0), max));
6343 /* x != +Inf is always equal to !(x > DBL_MAX). */
6344 real_maxval (&max, neg, mode);
6345 if (! HONOR_NANS (mode))
6346 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
6347 arg0, build_real (TREE_TYPE (arg0), max));
6349 temp = fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
6350 arg0, build_real (TREE_TYPE (arg0), max));
6351 return fold_build1 (TRUTH_NOT_EXPR, type, temp);
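/* Example (added commentary, not in the original source): for a double
   x, the test x != __builtin_inf () folds to !(x > DBL_MAX) when NaNs
   are honored, and to x <= DBL_MAX under -ffinite-math-only.  */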
6360 /* Subroutine of fold() that optimizes comparisons of a division by
6361 a nonzero integer constant against an integer constant, i.e. X / C1 OP C2.
6364 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6365 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6366 are the operands of the comparison. ARG1 must be an INTEGER_CST.
6368 The function returns the constant folded tree if a simplification
6369 can be made, and NULL_TREE otherwise. */
6372 fold_div_compare (enum tree_code code, tree type, tree arg0, tree arg1)
6374 tree prod, tmp, hi, lo;
6375 tree arg00 = TREE_OPERAND (arg0, 0);
6376 tree arg01 = TREE_OPERAND (arg0, 1);
6377 unsigned HOST_WIDE_INT lpart;
6378 HOST_WIDE_INT hpart;
6379 bool unsigned_p = TYPE_UNSIGNED (TREE_TYPE (arg0));
6383 /* We have to do this the hard way to detect unsigned overflow.
6384 prod = int_const_binop (MULT_EXPR, arg01, arg1, 0); */
6385 overflow = mul_double_with_sign (TREE_INT_CST_LOW (arg01),
6386 TREE_INT_CST_HIGH (arg01),
6387 TREE_INT_CST_LOW (arg1),
6388 TREE_INT_CST_HIGH (arg1),
6389 &lpart, &hpart, unsigned_p);
6390 prod = force_fit_type_double (TREE_TYPE (arg00), lpart, hpart,
6392 neg_overflow = false;
6396 tmp = int_const_binop (MINUS_EXPR, arg01,
6397 build_int_cst (TREE_TYPE (arg01), 1), 0);
6400 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0). */
6401 overflow = add_double_with_sign (TREE_INT_CST_LOW (prod),
6402 TREE_INT_CST_HIGH (prod),
6403 TREE_INT_CST_LOW (tmp),
6404 TREE_INT_CST_HIGH (tmp),
6405 &lpart, &hpart, unsigned_p);
6406 hi = force_fit_type_double (TREE_TYPE (arg00), lpart, hpart,
6407 -1, overflow | TREE_OVERFLOW (prod));
6409 else if (tree_int_cst_sgn (arg01) >= 0)
6411 tmp = int_const_binop (MINUS_EXPR, arg01,
6412 build_int_cst (TREE_TYPE (arg01), 1), 0);
6413 switch (tree_int_cst_sgn (arg1))
6416 neg_overflow = true;
6417 lo = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6422 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6427 hi = int_const_binop (PLUS_EXPR, prod, tmp, 0);
6437 /* A negative divisor reverses the relational operators. */
6438 code = swap_tree_comparison (code);
6440 tmp = int_const_binop (PLUS_EXPR, arg01,
6441 build_int_cst (TREE_TYPE (arg01), 1), 0);
6442 switch (tree_int_cst_sgn (arg1))
6445 hi = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6450 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6455 neg_overflow = true;
6456 lo = int_const_binop (PLUS_EXPR, prod, tmp, 0);
6468 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6469 return omit_one_operand (type, integer_zero_node, arg00);
6470 if (TREE_OVERFLOW (hi))
6471 return fold_build2 (GE_EXPR, type, arg00, lo);
6472 if (TREE_OVERFLOW (lo))
6473 return fold_build2 (LE_EXPR, type, arg00, hi);
6474 return build_range_check (type, arg00, 1, lo, hi);
6477 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6478 return omit_one_operand (type, integer_one_node, arg00);
6479 if (TREE_OVERFLOW (hi))
6480 return fold_build2 (LT_EXPR, type, arg00, lo);
6481 if (TREE_OVERFLOW (lo))
6482 return fold_build2 (GT_EXPR, type, arg00, hi);
6483 return build_range_check (type, arg00, 0, lo, hi);
6486 if (TREE_OVERFLOW (lo))
6488 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6489 return omit_one_operand (type, tmp, arg00);
6491 return fold_build2 (LT_EXPR, type, arg00, lo);
6494 if (TREE_OVERFLOW (hi))
6496 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6497 return omit_one_operand (type, tmp, arg00);
6499 return fold_build2 (LE_EXPR, type, arg00, hi);
6502 if (TREE_OVERFLOW (hi))
6504 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6505 return omit_one_operand (type, tmp, arg00);
6507 return fold_build2 (GT_EXPR, type, arg00, hi);
6510 if (TREE_OVERFLOW (lo))
6512 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6513 return omit_one_operand (type, tmp, arg00);
6515 return fold_build2 (GE_EXPR, type, arg00, lo);
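/* Worked example (assuming signed int x): x / 4 == 3 becomes a check
   that x lies in [12, 15], built by build_range_check. When one
   bound overflows the type, only the remaining bound is tested, as
   in the TREE_OVERFLOW cases above. */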
6525 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6526 equality/inequality test, then return a simplified form of the test
6527 using a sign test. Otherwise return NULL. TYPE is the desired result type. */
6531 fold_single_bit_test_into_sign_test (enum tree_code code, tree arg0, tree arg1,
6534 /* If this is testing a single bit, we can optimize the test. */
6535 if ((code == NE_EXPR || code == EQ_EXPR)
6536 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6537 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6539 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6540 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6541 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6543 if (arg00 != NULL_TREE
6544 /* This is only a win if casting to a signed type is cheap,
6545 i.e. when arg00's type is not a partial mode. */
6546 && TYPE_PRECISION (TREE_TYPE (arg00))
6547 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
6549 tree stype = signed_type_for (TREE_TYPE (arg00));
6550 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6551 result_type, fold_convert (stype, arg00),
6552 build_int_cst (stype, 0));
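/* For instance (illustrative, unsigned char x): (x & 0x80) != 0
   becomes (signed char) x < 0 and (x & 0x80) == 0 becomes
   (signed char) x >= 0, since 0x80 is the sign bit of the 8-bit
   type. */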
6559 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6560 equality/inequality test, then return a simplified form of
6561 the test using shifts and logical operations. Otherwise return
6562 NULL. TYPE is the desired result type. */
6565 fold_single_bit_test (enum tree_code code, tree arg0, tree arg1,
6568 /* If this is testing a single bit, we can optimize the test. */
6569 if ((code == NE_EXPR || code == EQ_EXPR)
6570 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6571 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6573 tree inner = TREE_OPERAND (arg0, 0);
6574 tree type = TREE_TYPE (arg0);
6575 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6576 enum machine_mode operand_mode = TYPE_MODE (type);
6578 tree signed_type, unsigned_type, intermediate_type;
6581 /* First, see if we can fold the single bit test into a sign-bit test. */
6583 tem = fold_single_bit_test_into_sign_test (code, arg0, arg1,
6588 /* Otherwise we have (A & C) != 0 where C is a single bit,
6589 convert that into ((A >> C2) & 1), where C2 = log2(C).
6590 Similarly for (A & C) == 0. */
6592 /* If INNER is a right shift of a constant and it plus BITNUM does
6593 not overflow, adjust BITNUM and INNER. */
6594 if (TREE_CODE (inner) == RSHIFT_EXPR
6595 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6596 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
6597 && bitnum < TYPE_PRECISION (type)
6598 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
6599 bitnum - TYPE_PRECISION (type)))
6601 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
6602 inner = TREE_OPERAND (inner, 0);
6605 /* If we are going to be able to omit the AND below, we must do our
6606 operations as unsigned. If we must use the AND, we have a choice.
6607 Normally unsigned is faster, but for some machines signed is. */
6608 #ifdef LOAD_EXTEND_OP
6609 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6610 && !flag_syntax_only) ? 0 : 1;
6615 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6616 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6617 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6618 inner = fold_convert (intermediate_type, inner);
6621 inner = build2 (RSHIFT_EXPR, intermediate_type,
6622 inner, size_int (bitnum));
6624 one = build_int_cst (intermediate_type, 1);
6626 if (code == EQ_EXPR)
6627 inner = fold_build2 (BIT_XOR_EXPR, intermediate_type, inner, one);
6629 /* Put the AND last so it can combine with more things. */
6630 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6632 /* Make sure to return the proper type. */
6633 inner = fold_convert (result_type, inner);
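/* E.g. (illustrative, int x): (x & 8) != 0 becomes ((x >> 3) & 1)
   and (x & 8) == 0 becomes (((x >> 3) ^ 1) & 1), the BIT_XOR_EXPR
   being the inversion added for EQ_EXPR above. */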
6640 /* Check whether we are allowed to reorder operands arg0 and arg1,
6641 such that the evaluation of arg1 occurs before arg0. */
6644 reorder_operands_p (const_tree arg0, const_tree arg1)
6646 if (! flag_evaluation_order)
6648 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6650 return ! TREE_SIDE_EFFECTS (arg0)
6651 && ! TREE_SIDE_EFFECTS (arg1);
6654 /* Test whether it is preferable to swap two operands, ARG0 and
6655 ARG1, for example because ARG0 is an integer constant and ARG1
6656 isn't. If REORDER is true, only recommend swapping if we can
6657 evaluate the operands in reverse order. */
6660 tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
6662 STRIP_SIGN_NOPS (arg0);
6663 STRIP_SIGN_NOPS (arg1);
6665 if (TREE_CODE (arg1) == INTEGER_CST)
6667 if (TREE_CODE (arg0) == INTEGER_CST)
6670 if (TREE_CODE (arg1) == REAL_CST)
6672 if (TREE_CODE (arg0) == REAL_CST)
6675 if (TREE_CODE (arg1) == FIXED_CST)
6677 if (TREE_CODE (arg0) == FIXED_CST)
6680 if (TREE_CODE (arg1) == COMPLEX_CST)
6682 if (TREE_CODE (arg0) == COMPLEX_CST)
6685 if (TREE_CONSTANT (arg1))
6687 if (TREE_CONSTANT (arg0))
6690 if (optimize_function_for_size_p (cfun))
6693 if (reorder && flag_evaluation_order
6694 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6697 /* It is preferable to swap two SSA_NAMEs to ensure a canonical form
6698 for commutative and comparison operators. Ensuring a canonical
6699 form allows the optimizers to find additional redundancies without
6700 having to explicitly check for both orderings. */
6701 if (TREE_CODE (arg0) == SSA_NAME
6702 && TREE_CODE (arg1) == SSA_NAME
6703 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6706 /* Put SSA_NAMEs last. */
6707 if (TREE_CODE (arg1) == SSA_NAME)
6709 if (TREE_CODE (arg0) == SSA_NAME)
6712 /* Put variables last. */
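/* One illustrative consequence: fold_comparison below uses this
   predicate to canonicalize 5 < x into x > 5, so later passes only
   need to recognize the constant-last form. */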
6721 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6722 ARG0 is extended to a wider type. */
6725 fold_widened_comparison (enum tree_code code, tree type, tree arg0, tree arg1)
6727 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6729 tree shorter_type, outer_type;
6733 if (arg0_unw == arg0)
6735 shorter_type = TREE_TYPE (arg0_unw);
6737 #ifdef HAVE_canonicalize_funcptr_for_compare
6738 /* Disable this optimization if we're casting a function pointer
6739 type on targets that require function pointer canonicalization. */
6740 if (HAVE_canonicalize_funcptr_for_compare
6741 && TREE_CODE (shorter_type) == POINTER_TYPE
6742 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6746 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6749 arg1_unw = get_unwidened (arg1, NULL_TREE);
6751 /* If possible, express the comparison in the shorter mode. */
6752 if ((code == EQ_EXPR || code == NE_EXPR
6753 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6754 && (TREE_TYPE (arg1_unw) == shorter_type
6755 || ((TYPE_PRECISION (shorter_type)
6756 >= TYPE_PRECISION (TREE_TYPE (arg1_unw)))
6757 && (TYPE_UNSIGNED (shorter_type)
6758 == TYPE_UNSIGNED (TREE_TYPE (arg1_unw))))
6759 || (TREE_CODE (arg1_unw) == INTEGER_CST
6760 && (TREE_CODE (shorter_type) == INTEGER_TYPE
6761 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
6762 && int_fits_type_p (arg1_unw, shorter_type))))
6763 return fold_build2 (code, type, arg0_unw,
6764 fold_convert (shorter_type, arg1_unw));
6766 if (TREE_CODE (arg1_unw) != INTEGER_CST
6767 || TREE_CODE (shorter_type) != INTEGER_TYPE
6768 || !int_fits_type_p (arg1_unw, shorter_type))
6771 /* If we are comparing with an integer that does not fit into the range
6772 of the shorter type, the result is known. */
6773 outer_type = TREE_TYPE (arg1_unw);
6774 min = lower_bound_in_type (outer_type, shorter_type);
6775 max = upper_bound_in_type (outer_type, shorter_type);
6777 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6779 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6786 return omit_one_operand (type, integer_zero_node, arg0);
6791 return omit_one_operand (type, integer_one_node, arg0);
6797 return omit_one_operand (type, integer_one_node, arg0);
6799 return omit_one_operand (type, integer_zero_node, arg0);
6804 return omit_one_operand (type, integer_zero_node, arg0);
6806 return omit_one_operand (type, integer_one_node, arg0);
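/* Illustrative: for unsigned char c, (int) c == 1000 folds to
   constant 0 (keeping any side effects of the operand), because
   1000 lies above 255, the upper bound of the unwidened type. */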
6815 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
6816 ARG0 just the signedness is changed. */
6819 fold_sign_changed_comparison (enum tree_code code, tree type,
6820 tree arg0, tree arg1)
6823 tree inner_type, outer_type;
6825 if (!CONVERT_EXPR_P (arg0))
6828 outer_type = TREE_TYPE (arg0);
6829 arg0_inner = TREE_OPERAND (arg0, 0);
6830 inner_type = TREE_TYPE (arg0_inner);
6832 #ifdef HAVE_canonicalize_funcptr_for_compare
6833 /* Disable this optimization if we're casting a function pointer
6834 type on targets that require function pointer canonicalization. */
6835 if (HAVE_canonicalize_funcptr_for_compare
6836 && TREE_CODE (inner_type) == POINTER_TYPE
6837 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
6841 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
6844 /* If the conversion is from an integral subtype to its basetype, leave it alone. */
6846 if (TREE_TYPE (inner_type) == outer_type)
6849 if (TREE_CODE (arg1) != INTEGER_CST
6850 && !(CONVERT_EXPR_P (arg1)
6851 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
6854 if ((TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
6855 || POINTER_TYPE_P (inner_type) != POINTER_TYPE_P (outer_type))
6860 if (TREE_CODE (arg1) == INTEGER_CST)
6861 arg1 = force_fit_type_double (inner_type, TREE_INT_CST_LOW (arg1),
6862 TREE_INT_CST_HIGH (arg1), 0,
6863 TREE_OVERFLOW (arg1));
6865 arg1 = fold_convert (inner_type, arg1);
6867 return fold_build2 (code, type, arg0_inner, arg1);
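/* E.g. (illustrative, int x): the sign-changing cast in
   (unsigned int) x == 5U is stripped, giving x == 5, since equality
   is unaffected by the signedness of its operands. */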
6870 /* Tries to replace &a[idx] p+ s * delta with &a[idx + delta], if s is
6871 the step of the array. Reconstructs s and delta in the case of s * delta
6872 being an integer constant (and thus already folded).
6873 ADDR is the address. OP1 is the multiplicative expression.
6874 If the function succeeds, the new address expression is returned. Otherwise
6875 NULL_TREE is returned. */
6878 try_move_mult_to_index (tree addr, tree op1)
6880 tree s, delta, step;
6881 tree ref = TREE_OPERAND (addr, 0), pref;
6886 /* Strip the nops that might be added when converting op1 to sizetype. */
6889 /* Canonicalize op1 into a possibly non-constant delta
6890 and an INTEGER_CST s. */
6891 if (TREE_CODE (op1) == MULT_EXPR)
6893 tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);
6898 if (TREE_CODE (arg0) == INTEGER_CST)
6903 else if (TREE_CODE (arg1) == INTEGER_CST)
6911 else if (TREE_CODE (op1) == INTEGER_CST)
6918 /* Pretend the expression is delta * 1. */
6920 s = integer_one_node;
6923 for (;; ref = TREE_OPERAND (ref, 0))
6925 if (TREE_CODE (ref) == ARRAY_REF)
6927 /* Remember if this was a multi-dimensional array. */
6928 if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
6931 itype = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
6935 step = array_ref_element_size (ref);
6936 if (TREE_CODE (step) != INTEGER_CST)
6941 if (! tree_int_cst_equal (step, s))
6946 /* Check whether delta is a multiple of step. */
6947 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, op1, step);
6953 /* Only fold here if we can verify we do not overflow one
6954 dimension of a multi-dimensional array. */
6959 if (TREE_CODE (TREE_OPERAND (ref, 1)) != INTEGER_CST
6960 || !INTEGRAL_TYPE_P (itype)
6961 || !TYPE_MAX_VALUE (itype)
6962 || TREE_CODE (TYPE_MAX_VALUE (itype)) != INTEGER_CST)
6965 tmp = fold_binary (PLUS_EXPR, itype,
6966 fold_convert (itype,
6967 TREE_OPERAND (ref, 1)),
6968 fold_convert (itype, delta));
6970 || TREE_CODE (tmp) != INTEGER_CST
6971 || tree_int_cst_lt (TYPE_MAX_VALUE (itype), tmp))
6980 if (!handled_component_p (ref))
6984 /* We found a suitable array reference. Copy everything up to it,
6985 and replace the index. */
6987 pref = TREE_OPERAND (addr, 0);
6988 ret = copy_node (pref);
6993 pref = TREE_OPERAND (pref, 0);
6994 TREE_OPERAND (pos, 0) = copy_node (pref);
6995 pos = TREE_OPERAND (pos, 0);
6998 TREE_OPERAND (pos, 1) = fold_build2 (PLUS_EXPR, itype,
6999 fold_convert (itype,
7000 TREE_OPERAND (pos, 1)),
7001 fold_convert (itype, delta));
7003 return fold_build1 (ADDR_EXPR, TREE_TYPE (addr), ret);
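/* A sketch (assuming int a[10] with 4-byte int): &a[1] p+ 8 becomes
   &a[3], because 8 is a multiple of the 4-byte array step; for a
   multi-dimensional array the new index is additionally checked
   against the bound of its dimension. */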
7007 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
7008 means A >= Y && A != MAX, but in this case we know that
7009 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
7012 fold_to_nonsharp_ineq_using_bound (tree ineq, tree bound)
7014 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
7016 if (TREE_CODE (bound) == LT_EXPR)
7017 a = TREE_OPERAND (bound, 0);
7018 else if (TREE_CODE (bound) == GT_EXPR)
7019 a = TREE_OPERAND (bound, 1);
7023 typea = TREE_TYPE (a);
7024 if (!INTEGRAL_TYPE_P (typea)
7025 && !POINTER_TYPE_P (typea))
7028 if (TREE_CODE (ineq) == LT_EXPR)
7030 a1 = TREE_OPERAND (ineq, 1);
7031 y = TREE_OPERAND (ineq, 0);
7033 else if (TREE_CODE (ineq) == GT_EXPR)
7035 a1 = TREE_OPERAND (ineq, 0);
7036 y = TREE_OPERAND (ineq, 1);
7041 if (TREE_TYPE (a1) != typea)
7044 if (POINTER_TYPE_P (typea))
7046 /* Convert the pointers to integers before taking the difference. */
7047 tree ta = fold_convert (ssizetype, a);
7048 tree ta1 = fold_convert (ssizetype, a1);
7049 diff = fold_binary (MINUS_EXPR, ssizetype, ta1, ta);
7052 diff = fold_binary (MINUS_EXPR, typea, a1, a);
7054 if (!diff || !integer_onep (diff))
7057 return fold_build2 (GE_EXPR, type, a, y);
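/* Concrete instance (illustrative, int a): with BOUND a < x and
   INEQ a + 1 > y, INEQ is rewritten as a >= y, the non-sharp form
   described in the comment above. */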
7060 /* Fold a sum or difference of at least one multiplication.
7061 Returns the folded tree or NULL if no simplification could be made. */
7064 fold_plusminus_mult_expr (enum tree_code code, tree type, tree arg0, tree arg1)
7066 tree arg00, arg01, arg10, arg11;
7067 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
7069 /* (A * C) +- (B * C) -> (A+-B) * C.
7070 (A * C) +- A -> A * (C+-1).
7071 We are most concerned about the case where C is a constant,
7072 but other combinations show up during loop reduction. Since
7073 it is not difficult, try all four possibilities. */
7075 if (TREE_CODE (arg0) == MULT_EXPR)
7077 arg00 = TREE_OPERAND (arg0, 0);
7078 arg01 = TREE_OPERAND (arg0, 1);
7080 else if (TREE_CODE (arg0) == INTEGER_CST)
7082 arg00 = build_one_cst (type);
7087 /* We cannot generate constant 1 for fract. */
7088 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7091 arg01 = build_one_cst (type);
7093 if (TREE_CODE (arg1) == MULT_EXPR)
7095 arg10 = TREE_OPERAND (arg1, 0);
7096 arg11 = TREE_OPERAND (arg1, 1);
7098 else if (TREE_CODE (arg1) == INTEGER_CST)
7100 arg10 = build_one_cst (type);
7105 /* We cannot generate constant 1 for fract. */
7106 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7109 arg11 = build_one_cst (type);
7113 if (operand_equal_p (arg01, arg11, 0))
7114 same = arg01, alt0 = arg00, alt1 = arg10;
7115 else if (operand_equal_p (arg00, arg10, 0))
7116 same = arg00, alt0 = arg01, alt1 = arg11;
7117 else if (operand_equal_p (arg00, arg11, 0))
7118 same = arg00, alt0 = arg01, alt1 = arg10;
7119 else if (operand_equal_p (arg01, arg10, 0))
7120 same = arg01, alt0 = arg00, alt1 = arg11;
7122 /* No identical multiplicands; see if we can find a common
7123 power-of-two factor in non-power-of-two multiplies. This
7124 can help in multi-dimensional array access. */
7125 else if (host_integerp (arg01, 0)
7126 && host_integerp (arg11, 0))
7128 HOST_WIDE_INT int01, int11, tmp;
7131 int01 = TREE_INT_CST_LOW (arg01);
7132 int11 = TREE_INT_CST_LOW (arg11);
7134 /* Move min of absolute values to int11. */
7135 if ((int01 >= 0 ? int01 : -int01)
7136 < (int11 >= 0 ? int11 : -int11))
7138 tmp = int01, int01 = int11, int11 = tmp;
7139 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7146 if (exact_log2 (abs (int11)) > 0 && int01 % int11 == 0)
7148 alt0 = fold_build2 (MULT_EXPR, TREE_TYPE (arg00), arg00,
7149 build_int_cst (TREE_TYPE (arg00),
7154 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7159 return fold_build2 (MULT_EXPR, type,
7160 fold_build2 (code, type,
7161 fold_convert (type, alt0),
7162 fold_convert (type, alt1)),
7163 fold_convert (type, same));
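/* Two illustrative instances: x*3 + x*5 becomes x*8 through the
   identical-multiplicand cases, and i*12 + j*4 becomes (i*3 + j)*4
   through the common power-of-two factor case. */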
7168 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7169 specified by EXPR into the buffer PTR of length LEN bytes.
7170 Return the number of bytes placed in the buffer, or zero upon failure. */
7174 native_encode_int (const_tree expr, unsigned char *ptr, int len)
7176 tree type = TREE_TYPE (expr);
7177 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7178 int byte, offset, word, words;
7179 unsigned char value;
7181 if (total_bytes > len)
7183 words = total_bytes / UNITS_PER_WORD;
7185 for (byte = 0; byte < total_bytes; byte++)
7187 int bitpos = byte * BITS_PER_UNIT;
7188 if (bitpos < HOST_BITS_PER_WIDE_INT)
7189 value = (unsigned char) (TREE_INT_CST_LOW (expr) >> bitpos);
7191 value = (unsigned char) (TREE_INT_CST_HIGH (expr)
7192 >> (bitpos - HOST_BITS_PER_WIDE_INT));
7194 if (total_bytes > UNITS_PER_WORD)
7196 word = byte / UNITS_PER_WORD;
7197 if (WORDS_BIG_ENDIAN)
7198 word = (words - 1) - word;
7199 offset = word * UNITS_PER_WORD;
7200 if (BYTES_BIG_ENDIAN)
7201 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7203 offset += byte % UNITS_PER_WORD;
7206 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7207 ptr[offset] = value;
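/* Illustrative: encoding the 32-bit INTEGER_CST 0x11223344 stores
   the bytes 44 33 22 11 on a little-endian target and 11 22 33 44
   on a big-endian one (assuming 8-bit bytes and a word of at least
   four bytes). */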
7213 /* Subroutine of native_encode_expr. Encode the REAL_CST
7214 specified by EXPR into the buffer PTR of length LEN bytes.
7215 Return the number of bytes placed in the buffer, or zero upon failure. */
7219 native_encode_real (const_tree expr, unsigned char *ptr, int len)
7221 tree type = TREE_TYPE (expr);
7222 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7223 int byte, offset, word, words, bitpos;
7224 unsigned char value;
7226 /* There are always 32 bits in each long, no matter the size of
7227 the host's long. We handle floating point representations with up to 192 bits. */
7231 if (total_bytes > len)
7233 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7235 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7237 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7238 bitpos += BITS_PER_UNIT)
7240 byte = (bitpos / BITS_PER_UNIT) & 3;
7241 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7243 if (UNITS_PER_WORD < 4)
7245 word = byte / UNITS_PER_WORD;
7246 if (WORDS_BIG_ENDIAN)
7247 word = (words - 1) - word;
7248 offset = word * UNITS_PER_WORD;
7249 if (BYTES_BIG_ENDIAN)
7250 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7252 offset += byte % UNITS_PER_WORD;
7255 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7256 ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)] = value;
7261 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7262 specified by EXPR into the buffer PTR of length LEN bytes.
7263 Return the number of bytes placed in the buffer, or zero upon failure. */
7267 native_encode_complex (const_tree expr, unsigned char *ptr, int len)
7272 part = TREE_REALPART (expr);
7273 rsize = native_encode_expr (part, ptr, len);
7276 part = TREE_IMAGPART (expr);
7277 isize = native_encode_expr (part, ptr+rsize, len-rsize);
7280 return rsize + isize;
7284 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7285 specified by EXPR into the buffer PTR of length LEN bytes.
7286 Return the number of bytes placed in the buffer, or zero upon failure. */
7290 native_encode_vector (const_tree expr, unsigned char *ptr, int len)
7292 int i, size, offset, count;
7293 tree itype, elem, elements;
7296 elements = TREE_VECTOR_CST_ELTS (expr);
7297 count = TYPE_VECTOR_SUBPARTS (TREE_TYPE (expr));
7298 itype = TREE_TYPE (TREE_TYPE (expr));
7299 size = GET_MODE_SIZE (TYPE_MODE (itype));
7300 for (i = 0; i < count; i++)
7304 elem = TREE_VALUE (elements);
7305 elements = TREE_CHAIN (elements);
7312 if (native_encode_expr (elem, ptr+offset, len-offset) != size)
7317 if (offset + size > len)
7319 memset (ptr+offset, 0, size);
7327 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7328 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7329 buffer PTR of length LEN bytes. Return the number of bytes
7330 placed in the buffer, or zero upon failure. */
7333 native_encode_expr (const_tree expr, unsigned char *ptr, int len)
7335 switch (TREE_CODE (expr))
7338 return native_encode_int (expr, ptr, len);
7341 return native_encode_real (expr, ptr, len);
7344 return native_encode_complex (expr, ptr, len);
7347 return native_encode_vector (expr, ptr, len);
7355 /* Subroutine of native_interpret_expr. Interpret the contents of
7356 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7357 If the buffer cannot be interpreted, return NULL_TREE. */
7360 native_interpret_int (tree type, const unsigned char *ptr, int len)
7362 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7363 int byte, offset, word, words;
7364 unsigned char value;
7365 unsigned HOST_WIDE_INT lo = 0;
7366 HOST_WIDE_INT hi = 0;
7368 if (total_bytes > len)
7370 if (total_bytes * BITS_PER_UNIT > 2 * HOST_BITS_PER_WIDE_INT)
7372 words = total_bytes / UNITS_PER_WORD;
7374 for (byte = 0; byte < total_bytes; byte++)
7376 int bitpos = byte * BITS_PER_UNIT;
7377 if (total_bytes > UNITS_PER_WORD)
7379 word = byte / UNITS_PER_WORD;
7380 if (WORDS_BIG_ENDIAN)
7381 word = (words - 1) - word;
7382 offset = word * UNITS_PER_WORD;
7383 if (BYTES_BIG_ENDIAN)
7384 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7386 offset += byte % UNITS_PER_WORD;
7389 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7390 value = ptr[offset];
7392 if (bitpos < HOST_BITS_PER_WIDE_INT)
7393 lo |= (unsigned HOST_WIDE_INT) value << bitpos;
7395 hi |= (unsigned HOST_WIDE_INT) value
7396 << (bitpos - HOST_BITS_PER_WIDE_INT);
7399 return build_int_cst_wide_type (type, lo, hi);
7403 /* Subroutine of native_interpret_expr. Interpret the contents of
7404 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7405 If the buffer cannot be interpreted, return NULL_TREE. */
7408 native_interpret_real (tree type, const unsigned char *ptr, int len)
7410 enum machine_mode mode = TYPE_MODE (type);
7411 int total_bytes = GET_MODE_SIZE (mode);
7412 int byte, offset, word, words, bitpos;
7413 unsigned char value;
7414 /* There are always 32 bits in each long, no matter the size of
7415 the host's long. We handle floating point representations with up to 192 bits. */
7420 total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7421 if (total_bytes > len || total_bytes > 24)
7423 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7425 memset (tmp, 0, sizeof (tmp));
7426 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7427 bitpos += BITS_PER_UNIT)
7429 byte = (bitpos / BITS_PER_UNIT) & 3;
7430 if (UNITS_PER_WORD < 4)
7432 word = byte / UNITS_PER_WORD;
7433 if (WORDS_BIG_ENDIAN)
7434 word = (words - 1) - word;
7435 offset = word * UNITS_PER_WORD;
7436 if (BYTES_BIG_ENDIAN)
7437 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7439 offset += byte % UNITS_PER_WORD;
7442 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7443 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7445 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7448 real_from_target (&r, tmp, mode);
7449 return build_real (type, r);
7453 /* Subroutine of native_interpret_expr. Interpret the contents of
7454 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7455 If the buffer cannot be interpreted, return NULL_TREE. */
7458 native_interpret_complex (tree type, const unsigned char *ptr, int len)
7460 tree etype, rpart, ipart;
7463 etype = TREE_TYPE (type);
7464 size = GET_MODE_SIZE (TYPE_MODE (etype));
7467 rpart = native_interpret_expr (etype, ptr, size);
7470 ipart = native_interpret_expr (etype, ptr+size, size);
7473 return build_complex (type, rpart, ipart);
7477 /* Subroutine of native_interpret_expr. Interpret the contents of
7478 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7479 If the buffer cannot be interpreted, return NULL_TREE. */
7482 native_interpret_vector (tree type, const unsigned char *ptr, int len)
7484 tree etype, elem, elements;
7487 etype = TREE_TYPE (type);
7488 size = GET_MODE_SIZE (TYPE_MODE (etype));
7489 count = TYPE_VECTOR_SUBPARTS (type);
7490 if (size * count > len)
7493 elements = NULL_TREE;
7494 for (i = count - 1; i >= 0; i--)
7496 elem = native_interpret_expr (etype, ptr+(i*size), size);
7499 elements = tree_cons (NULL_TREE, elem, elements);
7501 return build_vector (type, elements);
7505 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7506 the buffer PTR of length LEN as a constant of type TYPE. For
7507 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7508 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7509 return NULL_TREE. */
7512 native_interpret_expr (tree type, const unsigned char *ptr, int len)
7514 switch (TREE_CODE (type))
7519 return native_interpret_int (type, ptr, len);
7522 return native_interpret_real (type, ptr, len);
7525 return native_interpret_complex (type, ptr, len);
7528 return native_interpret_vector (type, ptr, len);
7536 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7537 TYPE at compile-time. If we're unable to perform the conversion
7538 return NULL_TREE. */
7541 fold_view_convert_expr (tree type, tree expr)
7543 /* We support up to 512-bit values (for V8DFmode). */
7544 unsigned char buffer[64];
7547 /* Check that the host and target are sane. */
7548 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7551 len = native_encode_expr (expr, buffer, sizeof (buffer));
7555 return native_interpret_expr (type, buffer, len);
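/* E.g. (illustrative): VIEW_CONVERT_EXPR<int>(1.0f) is folded by
   encoding the REAL_CST into its target bytes and reading them back
   as an integer, giving 0x3f800000 on an IEEE single-precision
   target. */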
7558 /* Build an expression for the address of T. Folds away INDIRECT_REF
7559 to avoid confusing the gimplify process. When IN_FOLD is true
7560 avoid modifications of T. */
7563 build_fold_addr_expr_with_type_1 (tree t, tree ptrtype, bool in_fold)
7565 /* The size of the object is not relevant when talking about its address. */
7566 if (TREE_CODE (t) == WITH_SIZE_EXPR)
7567 t = TREE_OPERAND (t, 0);
7569 /* Note: doesn't apply to ALIGN_INDIRECT_REF */
7570 if (TREE_CODE (t) == INDIRECT_REF
7571 || TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
7573 t = TREE_OPERAND (t, 0);
7575 if (TREE_TYPE (t) != ptrtype)
7576 t = build1 (NOP_EXPR, ptrtype, t);
7582 while (handled_component_p (base))
7583 base = TREE_OPERAND (base, 0);
7586 TREE_ADDRESSABLE (base) = 1;
7588 t = build1 (ADDR_EXPR, ptrtype, t);
7591 t = build1 (ADDR_EXPR, ptrtype, t);
7596 /* Build an expression for the address of T with type PTRTYPE. This
7597 function modifies the input parameter 'T' by sometimes setting the
7598 TREE_ADDRESSABLE flag. */
7601 build_fold_addr_expr_with_type (tree t, tree ptrtype)
7603 return build_fold_addr_expr_with_type_1 (t, ptrtype, false);
7606 /* Build an expression for the address of T. This function modifies
7607 the input parameter 'T' by sometimes setting the TREE_ADDRESSABLE
7608 flag. When called from fold functions, use fold_addr_expr instead. */
7611 build_fold_addr_expr (tree t)
7613 return build_fold_addr_expr_with_type_1 (t,
7614 build_pointer_type (TREE_TYPE (t)),
7618 /* Same as build_fold_addr_expr, builds an expression for the address
7619 of T, but avoids touching the input node 't'. Fold functions
7620 should use this version. */
7623 fold_addr_expr (tree t)
7625 tree ptrtype = build_pointer_type (TREE_TYPE (t));
7627 return build_fold_addr_expr_with_type_1 (t, ptrtype, true);
7630 /* Fold a unary expression of code CODE and type TYPE with operand
7631 OP0. Return the folded expression if folding is successful.
7632 Otherwise, return NULL_TREE. */
7635 fold_unary (enum tree_code code, tree type, tree op0)
7639 enum tree_code_class kind = TREE_CODE_CLASS (code);
7641 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7642 && TREE_CODE_LENGTH (code) == 1);
7647 if (CONVERT_EXPR_CODE_P (code)
7648 || code == FLOAT_EXPR || code == ABS_EXPR)
7650 /* Don't use STRIP_NOPS, because signedness of argument type matters. */
7652 STRIP_SIGN_NOPS (arg0);
7656 /* Strip any conversions that don't change the mode. This
7657 is safe for every expression, except for a comparison
7658 expression because its signedness is derived from its operand. So strip conversions that don't change the signedness.
7661 Note that this is done as an internal manipulation within
7662 the constant folder, in order to find the simplest
7663 representation of the arguments so that their form can be
7664 studied. In any case, the appropriate type conversions
7665 should be put back in the tree that will get out of the constant folder. */
7671 if (TREE_CODE_CLASS (code) == tcc_unary)
7673 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7674 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7675 fold_build1 (code, type, TREE_OPERAND (arg0, 1)));
7676 else if (TREE_CODE (arg0) == COND_EXPR)
7678 tree arg01 = TREE_OPERAND (arg0, 1);
7679 tree arg02 = TREE_OPERAND (arg0, 2);
7680 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7681 arg01 = fold_build1 (code, type, arg01);
7682 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7683 arg02 = fold_build1 (code, type, arg02);
7684 tem = fold_build3 (COND_EXPR, type, TREE_OPERAND (arg0, 0),
7687 /* If this was a conversion, and all we did was to move into
7688 inside the COND_EXPR, bring it back out. But leave it if
7689 it is a conversion from integer to integer and the
7690 result precision is no wider than a word since such a
7691 conversion is cheap and may be optimized away by combine,
7692 while it couldn't if it were outside the COND_EXPR. Then return
7693 so we don't get into an infinite recursion loop taking the
7694 conversion out and then back in. */
7696 if ((CONVERT_EXPR_CODE_P (code)
7697 || code == NON_LVALUE_EXPR)
7698 && TREE_CODE (tem) == COND_EXPR
7699 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7700 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7701 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7702 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7703 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7704 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7705 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7707 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7708 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7709 || flag_syntax_only))
7710 tem = build1 (code, type,
7712 TREE_TYPE (TREE_OPERAND
7713 (TREE_OPERAND (tem, 1), 0)),
7714 TREE_OPERAND (tem, 0),
7715 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7716 TREE_OPERAND (TREE_OPERAND (tem, 2), 0)));
7719 else if (COMPARISON_CLASS_P (arg0))
7721 if (TREE_CODE (type) == BOOLEAN_TYPE)
7723 arg0 = copy_node (arg0);
7724 TREE_TYPE (arg0) = type;
7727 else if (TREE_CODE (type) != INTEGER_TYPE)
7728 return fold_build3 (COND_EXPR, type, arg0,
7729 fold_build1 (code, type,
7731 fold_build1 (code, type,
7732 integer_zero_node));
7739 /* Re-association barriers around constants and other re-association
7740 barriers can be removed. */
7741 if (CONSTANT_CLASS_P (op0)
7742 || TREE_CODE (op0) == PAREN_EXPR)
7743 return fold_convert (type, op0);
7748 case FIX_TRUNC_EXPR:
7749 if (TREE_TYPE (op0) == type)
7752 /* If we have (type) (a CMP b) and type is an integral type, return
7753 new expression involving the new type. */
7754 if (COMPARISON_CLASS_P (op0) && INTEGRAL_TYPE_P (type))
7755 return fold_build2 (TREE_CODE (op0), type, TREE_OPERAND (op0, 0),
7756 TREE_OPERAND (op0, 1));
7758 /* Handle cases of two conversions in a row. */
7759 if (CONVERT_EXPR_P (op0))
7761 tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
7762 tree inter_type = TREE_TYPE (op0);
7763 int inside_int = INTEGRAL_TYPE_P (inside_type);
7764 int inside_ptr = POINTER_TYPE_P (inside_type);
7765 int inside_float = FLOAT_TYPE_P (inside_type);
7766 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
7767 unsigned int inside_prec = TYPE_PRECISION (inside_type);
7768 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
7769 int inter_int = INTEGRAL_TYPE_P (inter_type);
7770 int inter_ptr = POINTER_TYPE_P (inter_type);
7771 int inter_float = FLOAT_TYPE_P (inter_type);
7772 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
7773 unsigned int inter_prec = TYPE_PRECISION (inter_type);
7774 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
7775 int final_int = INTEGRAL_TYPE_P (type);
7776 int final_ptr = POINTER_TYPE_P (type);
7777 int final_float = FLOAT_TYPE_P (type);
7778 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
7779 unsigned int final_prec = TYPE_PRECISION (type);
7780 int final_unsignedp = TYPE_UNSIGNED (type);
7782 /* In addition to the cases of two conversions in a row
7783 handled below, if we are converting something to its own
7784 type via an object of identical or wider precision, neither
7785 conversion is needed. */
7786 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
7787 && (((inter_int || inter_ptr) && final_int)
7788 || (inter_float && final_float))
7789 && inter_prec >= final_prec)
7790 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7792 /* Likewise, if the intermediate and final types are either both
7793 float or both integer, we don't need the middle conversion if
7794 it is wider than the final type and doesn't change the signedness
7795 (for integers). Avoid this if the final type is a pointer
7796 since then we sometimes need the inner conversion. Likewise if
7797 the outer has a precision not equal to the size of its mode. */
7798 if (((inter_int && inside_int)
7799 || (inter_float && inside_float)
7800 || (inter_vec && inside_vec))
7801 && inter_prec >= inside_prec
7802 && (inter_float || inter_vec
7803 || inter_unsignedp == inside_unsignedp)
7804 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
7805 && TYPE_MODE (type) == TYPE_MODE (inter_type))
7807 && (! final_vec || inter_prec == inside_prec))
7808 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7810 /* If we have a sign-extension of a zero-extended value, we can
7811 replace that by a single zero-extension. */
7812 if (inside_int && inter_int && final_int
7813 && inside_prec < inter_prec && inter_prec < final_prec
7814 && inside_unsignedp && !inter_unsignedp)
7815 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
7817 /* Two conversions in a row are not needed unless:
7818 - some conversion is floating-point (overstrict for now), or
7819 - some conversion is a vector (overstrict for now), or
7820 - the intermediate type is narrower than both initial and final types, or
7822 - the intermediate type and innermost type differ in signedness,
7823 and the outermost type is wider than the intermediate, or
7824 - the initial type is a pointer type and the precisions of the
7825 intermediate and final types differ, or
7826 - the final type is a pointer type and the precisions of the
7827 initial and intermediate types differ. */
7828 if (! inside_float && ! inter_float && ! final_float
7829 && ! inside_vec && ! inter_vec && ! final_vec
7830 && (inter_prec >= inside_prec || inter_prec >= final_prec)
7831 && ! (inside_int && inter_int
7832 && inter_unsignedp != inside_unsignedp
7833 && inter_prec < final_prec)
7834 && ((inter_unsignedp && inter_prec > inside_prec)
7835 == (final_unsignedp && final_prec > inter_prec))
7836 && ! (inside_ptr && inter_prec != final_prec)
7837 && ! (final_ptr && inside_prec != inter_prec)
7838 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
7839 && TYPE_MODE (type) == TYPE_MODE (inter_type)))
7840 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
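/* Illustrative (assuming LP64): (int)(long) i collapses to i for
   int i by the own-type rule above, and (long)(int)(unsigned char) c
   becomes the single zero-extension (long)(unsigned char) c by the
   sign-extension-of-zero-extension rule. */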
7843 /* Handle (T *)&A.B.C for A being of type T and B and C
7844 living at offset zero. This occurs frequently in
7845 C++ upcasting and then accessing the base. */
7846 if (TREE_CODE (op0) == ADDR_EXPR
7847 && POINTER_TYPE_P (type)
7848 && handled_component_p (TREE_OPERAND (op0, 0)))
7850 HOST_WIDE_INT bitsize, bitpos;
7852 enum machine_mode mode;
7853 int unsignedp, volatilep;
7854 tree base = TREE_OPERAND (op0, 0);
7855 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
7856 &mode, &unsignedp, &volatilep, false);
7857 /* If the reference was to a (constant) zero offset, we can use
7858 the address of the base if it has the same base type
7859 as the result type. */
7860 if (! offset && bitpos == 0
7861 && TYPE_MAIN_VARIANT (TREE_TYPE (type))
7862 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
7863 return fold_convert (type, fold_addr_expr (base));
7866 if (TREE_CODE (op0) == MODIFY_EXPR
7867 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
7868 /* Detect assigning a bitfield. */
7869 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
7871 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
7873 /* Don't leave an assignment inside a conversion
7874 unless assigning a bitfield. */
7875 tem = fold_build1 (code, type, TREE_OPERAND (op0, 1));
7876 /* First do the assignment, then return converted constant. */
7877 tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
7878 TREE_NO_WARNING (tem) = 1;
7879 TREE_USED (tem) = 1;
7883 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7884 constant (if x has signed type, the sign bit cannot be set
7885 in c). This folds extension into the BIT_AND_EXPR.
7886 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
7887 very likely don't have maximal range for their precision and this
7888 transformation effectively doesn't preserve non-maximal ranges. */
7889 if (TREE_CODE (type) == INTEGER_TYPE
7890 && TREE_CODE (op0) == BIT_AND_EXPR
7891 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST
7892 /* Not if the conversion is to the sub-type. */
7893 && TREE_TYPE (type) != TREE_TYPE (op0))
7896 tree and0 = TREE_OPERAND (and, 0), and1 = TREE_OPERAND (and, 1);
7899 if (TYPE_UNSIGNED (TREE_TYPE (and))
7900 || (TYPE_PRECISION (type)
7901 <= TYPE_PRECISION (TREE_TYPE (and))))
7903 else if (TYPE_PRECISION (TREE_TYPE (and1))
7904 <= HOST_BITS_PER_WIDE_INT
7905 && host_integerp (and1, 1))
7907 unsigned HOST_WIDE_INT cst;
7909 cst = tree_low_cst (and1, 1);
7910 cst &= (HOST_WIDE_INT) -1
7911 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
7912 change = (cst == 0);
7913 #ifdef LOAD_EXTEND_OP
7915 && !flag_syntax_only
7916 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
7919 tree uns = unsigned_type_for (TREE_TYPE (and0));
7920 and0 = fold_convert (uns, and0);
7921 and1 = fold_convert (uns, and1);
7927 tem = force_fit_type_double (type, TREE_INT_CST_LOW (and1),
7928 TREE_INT_CST_HIGH (and1), 0,
7929 TREE_OVERFLOW (and1));
7930 return fold_build2 (BIT_AND_EXPR, type,
7931 fold_convert (type, and0), tem);
7935 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
7936 when one of the new casts will fold away. Conservatively we assume
7937 that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST. */
7938 if (POINTER_TYPE_P (type)
7939 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
7940 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7941 || TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
7942 || TREE_CODE (TREE_OPERAND (arg0, 1)) == NOP_EXPR))
7944 tree arg00 = TREE_OPERAND (arg0, 0);
7945 tree arg01 = TREE_OPERAND (arg0, 1);
7947 return fold_build2 (TREE_CODE (arg0), type, fold_convert (type, arg00),
7948 fold_convert (sizetype, arg01));
7951 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
7952 of the same precision, and X is of an integer type not narrower than
7953 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
7954 if (INTEGRAL_TYPE_P (type)
7955 && TREE_CODE (op0) == BIT_NOT_EXPR
7956 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7957 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
7958 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
7960 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
7961 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7962 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
7963 return fold_build1 (BIT_NOT_EXPR, type, fold_convert (type, tem));
7966 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
7967 type of X and Y (integer types only). */
7968 if (INTEGRAL_TYPE_P (type)
7969 && TREE_CODE (op0) == MULT_EXPR
7970 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7971 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
7973 /* Be careful not to introduce new overflows. */
7975 if (TYPE_OVERFLOW_WRAPS (type))
7978 mult_type = unsigned_type_for (type);
7980 tem = fold_build2 (MULT_EXPR, mult_type,
7981 fold_convert (mult_type, TREE_OPERAND (op0, 0)),
7982 fold_convert (mult_type, TREE_OPERAND (op0, 1)));
7983 return fold_convert (type, tem);
7986 tem = fold_convert_const (code, type, op0);
7987 return tem ? tem : NULL_TREE;
7989 case FIXED_CONVERT_EXPR:
7990 tem = fold_convert_const (code, type, arg0);
7991 return tem ? tem : NULL_TREE;
7993 case VIEW_CONVERT_EXPR:
7994 if (TREE_TYPE (op0) == type)
7996 if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
7997 return fold_build1 (VIEW_CONVERT_EXPR, type, TREE_OPERAND (op0, 0));
7999 /* For integral conversions with the same precision or pointer
8000 conversions use a NOP_EXPR instead. */
8001 if ((INTEGRAL_TYPE_P (type)
8002 || POINTER_TYPE_P (type))
8003 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
8004 || POINTER_TYPE_P (TREE_TYPE (op0)))
8005 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0))
8006 /* Do not muck with VIEW_CONVERT_EXPRs that convert from
8007 a sub-type to its base type as generated by the Ada FE. */
8008 && !(INTEGRAL_TYPE_P (TREE_TYPE (op0))
8009 && TREE_TYPE (TREE_TYPE (op0))))
8010 return fold_convert (type, op0);
8012 /* Strip inner integral conversions that do not change the precision. */
8013 if (CONVERT_EXPR_P (op0)
8014 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
8015 || POINTER_TYPE_P (TREE_TYPE (op0)))
8016 && (INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0)))
8017 || POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0))))
8018 && (TYPE_PRECISION (TREE_TYPE (op0))
8019 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op0, 0)))))
8020 return fold_build1 (VIEW_CONVERT_EXPR, type, TREE_OPERAND (op0, 0));
8022 return fold_view_convert_expr (type, op0);
8025 tem = fold_negate_expr (arg0);
8027 return fold_convert (type, tem);
8031 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
8032 return fold_abs_const (arg0, type);
8033 else if (TREE_CODE (arg0) == NEGATE_EXPR)
8034 return fold_build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0));
8035 /* Convert fabs((double)float) into (double)fabsf(float). */
8036 else if (TREE_CODE (arg0) == NOP_EXPR
8037 && TREE_CODE (type) == REAL_TYPE)
8039 tree targ0 = strip_float_extensions (arg0);
8041 return fold_convert (type, fold_build1 (ABS_EXPR,
8045 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
8046 else if (TREE_CODE (arg0) == ABS_EXPR)
8048 else if (tree_expr_nonnegative_p (arg0))
8051 /* Strip sign ops from argument. */
8052 if (TREE_CODE (type) == REAL_TYPE)
8054 tem = fold_strip_sign_ops (arg0);
8056 return fold_build1 (ABS_EXPR, type, fold_convert (type, tem));
8061 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8062 return fold_convert (type, arg0);
8063 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8065 tree itype = TREE_TYPE (type);
8066 tree rpart = fold_convert (itype, TREE_OPERAND (arg0, 0));
8067 tree ipart = fold_convert (itype, TREE_OPERAND (arg0, 1));
8068 return fold_build2 (COMPLEX_EXPR, type, rpart, negate_expr (ipart));
8070 if (TREE_CODE (arg0) == COMPLEX_CST)
8072 tree itype = TREE_TYPE (type);
8073 tree rpart = fold_convert (itype, TREE_REALPART (arg0));
8074 tree ipart = fold_convert (itype, TREE_IMAGPART (arg0));
8075 return build_complex (type, rpart, negate_expr (ipart));
8077 if (TREE_CODE (arg0) == CONJ_EXPR)
8078 return fold_convert (type, TREE_OPERAND (arg0, 0));
8082 if (TREE_CODE (arg0) == INTEGER_CST)
8083 return fold_not_const (arg0, type);
8084 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
8085 return fold_convert (type, TREE_OPERAND (arg0, 0));
8086 /* Convert ~ (-A) to A - 1. */
8087 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
8088 return fold_build2 (MINUS_EXPR, type,
8089 fold_convert (type, TREE_OPERAND (arg0, 0)),
8090 build_int_cst (type, 1));
8091 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
8092 else if (INTEGRAL_TYPE_P (type)
8093 && ((TREE_CODE (arg0) == MINUS_EXPR
8094 && integer_onep (TREE_OPERAND (arg0, 1)))
8095 || (TREE_CODE (arg0) == PLUS_EXPR
8096 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
8097 return fold_build1 (NEGATE_EXPR, type,
8098 fold_convert (type, TREE_OPERAND (arg0, 0)));
8099 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
8100 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8101 && (tem = fold_unary (BIT_NOT_EXPR, type,
8103 TREE_OPERAND (arg0, 0)))))
8104 return fold_build2 (BIT_XOR_EXPR, type, tem,
8105 fold_convert (type, TREE_OPERAND (arg0, 1)));
8106 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8107 && (tem = fold_unary (BIT_NOT_EXPR, type,
8109 TREE_OPERAND (arg0, 1)))))
8110 return fold_build2 (BIT_XOR_EXPR, type,
8111 fold_convert (type, TREE_OPERAND (arg0, 0)), tem);
8112 /* Perform BIT_NOT_EXPR on each element individually. */
8113 else if (TREE_CODE (arg0) == VECTOR_CST)
8115 tree elements = TREE_VECTOR_CST_ELTS (arg0), elem, list = NULL_TREE;
8116 int count = TYPE_VECTOR_SUBPARTS (type), i;
8118 for (i = 0; i < count; i++)
8122 elem = TREE_VALUE (elements);
8123 elem = fold_unary (BIT_NOT_EXPR, TREE_TYPE (type), elem);
8124 if (elem == NULL_TREE)
8126 elements = TREE_CHAIN (elements);
8129 elem = build_int_cst (TREE_TYPE (type), -1);
8130 list = tree_cons (NULL_TREE, elem, list);
8133 return build_vector (type, nreverse (list));
8138 case TRUTH_NOT_EXPR:
8139 /* The argument to invert_truthvalue must have Boolean type. */
8140 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
8141 arg0 = fold_convert (boolean_type_node, arg0);
8143 /* Note that the operand of this must be an int
8144 and its values must be 0 or 1.
8145 ("true" is a fixed value perhaps depending on the language,
8146 but we don't handle values other than 1 correctly yet.) */
8147 tem = fold_truth_not_expr (arg0);
8150 return fold_convert (type, tem);
8153 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8154 return fold_convert (type, arg0);
8155 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8156 return omit_one_operand (type, TREE_OPERAND (arg0, 0),
8157 TREE_OPERAND (arg0, 1));
8158 if (TREE_CODE (arg0) == COMPLEX_CST)
8159 return fold_convert (type, TREE_REALPART (arg0));
8160 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8162 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8163 tem = fold_build2 (TREE_CODE (arg0), itype,
8164 fold_build1 (REALPART_EXPR, itype,
8165 TREE_OPERAND (arg0, 0)),
8166 fold_build1 (REALPART_EXPR, itype,
8167 TREE_OPERAND (arg0, 1)));
8168 return fold_convert (type, tem);
8170 if (TREE_CODE (arg0) == CONJ_EXPR)
8172 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8173 tem = fold_build1 (REALPART_EXPR, itype, TREE_OPERAND (arg0, 0));
8174 return fold_convert (type, tem);
8176 if (TREE_CODE (arg0) == CALL_EXPR)
8178 tree fn = get_callee_fndecl (arg0);
8179 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8180 switch (DECL_FUNCTION_CODE (fn))
8182 CASE_FLT_FN (BUILT_IN_CEXPI):
8183 fn = mathfn_built_in (type, BUILT_IN_COS);
8185 return build_call_expr (fn, 1, CALL_EXPR_ARG (arg0, 0));
8195 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8196 return fold_convert (type, integer_zero_node);
8197 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8198 return omit_one_operand (type, TREE_OPERAND (arg0, 1),
8199 TREE_OPERAND (arg0, 0));
8200 if (TREE_CODE (arg0) == COMPLEX_CST)
8201 return fold_convert (type, TREE_IMAGPART (arg0));
8202 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8204 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8205 tem = fold_build2 (TREE_CODE (arg0), itype,
8206 fold_build1 (IMAGPART_EXPR, itype,
8207 TREE_OPERAND (arg0, 0)),
8208 fold_build1 (IMAGPART_EXPR, itype,
8209 TREE_OPERAND (arg0, 1)));
8210 return fold_convert (type, tem);
8212 if (TREE_CODE (arg0) == CONJ_EXPR)
8214 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8215 tem = fold_build1 (IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
8216 return fold_convert (type, negate_expr (tem));
8218 if (TREE_CODE (arg0) == CALL_EXPR)
8220 tree fn = get_callee_fndecl (arg0);
8221 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8222 switch (DECL_FUNCTION_CODE (fn))
8224 CASE_FLT_FN (BUILT_IN_CEXPI):
8225 fn = mathfn_built_in (type, BUILT_IN_SIN);
8227 return build_call_expr (fn, 1, CALL_EXPR_ARG (arg0, 0));
8238 } /* switch (code) */
8241 /* Fold a binary expression of code CODE and type TYPE with operands
8242 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
8243 Return the folded expression if folding is successful. Otherwise,
8244 return NULL_TREE. */
8247 fold_minmax (enum tree_code code, tree type, tree op0, tree op1)
8249 enum tree_code compl_code;
8251 if (code == MIN_EXPR)
8252 compl_code = MAX_EXPR;
8253 else if (code == MAX_EXPR)
8254 compl_code = MIN_EXPR;
8258 /* MIN (MAX (a, b), b) == b. */
8259 if (TREE_CODE (op0) == compl_code
8260 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
8261 return omit_one_operand (type, op1, TREE_OPERAND (op0, 0));
8263 /* MIN (MAX (b, a), b) == b. */
8264 if (TREE_CODE (op0) == compl_code
8265 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
8266 && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
8267 return omit_one_operand (type, op1, TREE_OPERAND (op0, 1));
8269 /* MIN (a, MAX (a, b)) == a. */
8270 if (TREE_CODE (op1) == compl_code
8271 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
8272 && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
8273 return omit_one_operand (type, op0, TREE_OPERAND (op1, 1));
8275 /* MIN (a, MAX (b, a)) == a. */
8276 if (TREE_CODE (op1) == compl_code
8277 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
8278 && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
8279 return omit_one_operand (type, op0, TREE_OPERAND (op1, 0));
8284 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8285 by changing CODE to reduce the magnitude of constants involved in
8286 ARG0 of the comparison.
8287 Returns a canonicalized comparison tree if a simplification was
8288 possible, otherwise returns NULL_TREE.
8289 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8290 valid if signed overflow is undefined. */
8293 maybe_canonicalize_comparison_1 (enum tree_code code, tree type,
8294 tree arg0, tree arg1,
8295 bool *strict_overflow_p)
8297 enum tree_code code0 = TREE_CODE (arg0);
8298 tree t, cst0 = NULL_TREE;
8302 /* Match A +- CST code arg1 and CST code arg1. We can change the
8303 first form only if overflow is undefined. */
8304 if (!((TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8305 /* In principle pointers also have undefined overflow behavior,
8306 but that causes problems elsewhere. */
8307 && !POINTER_TYPE_P (TREE_TYPE (arg0))
8308 && (code0 == MINUS_EXPR
8309 || code0 == PLUS_EXPR)
8310 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8311 || code0 == INTEGER_CST))
8314 /* Identify the constant in arg0 and its sign. */
8315 if (code0 == INTEGER_CST)
8318 cst0 = TREE_OPERAND (arg0, 1);
8319 sgn0 = tree_int_cst_sgn (cst0);
8321 /* Overflowed constants and zero will cause problems. */
8322 if (integer_zerop (cst0)
8323 || TREE_OVERFLOW (cst0))
8326 /* See if we can reduce the magnitude of the constant in
8327 arg0 by changing the comparison code. */
8328 if (code0 == INTEGER_CST)
8330 /* CST <= arg1 -> CST-1 < arg1. */
8331 if (code == LE_EXPR && sgn0 == 1)
8333 /* -CST < arg1 -> -CST-1 <= arg1. */
8334 else if (code == LT_EXPR && sgn0 == -1)
8336 /* CST > arg1 -> CST-1 >= arg1. */
8337 else if (code == GT_EXPR && sgn0 == 1)
8339 /* -CST >= arg1 -> -CST-1 > arg1. */
8340 else if (code == GE_EXPR && sgn0 == -1)
8344 /* arg1 code' CST' might be more canonical. */
8349 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8351 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8353 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8354 else if (code == GT_EXPR
8355 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8357 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8358 else if (code == LE_EXPR
8359 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8361 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8362 else if (code == GE_EXPR
8363 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8367 *strict_overflow_p = true;
8370 /* Now build the constant reduced in magnitude. But not if that
8371 would produce one outside of its type's range.
8372 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
8374 && TYPE_MIN_VALUE (TREE_TYPE (cst0))
8375 && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
8377 && TYPE_MAX_VALUE (TREE_TYPE (cst0))
8378 && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
8379 /* We cannot swap the comparison here as that would cause us to
8380 endlessly recurse. */
8383 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8384 cst0, build_int_cst (TREE_TYPE (cst0), 1), 0);
8385 if (code0 != INTEGER_CST)
8386 t = fold_build2 (code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
8388 /* If swapping might yield a more canonical form, do so. */
8390 return fold_build2 (swap_tree_comparison (code), type, arg1, t);
8392 return fold_build2 (code, type, t, arg1);
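/* Sketch (signed int x, overflow undefined): x + 2 > y is
   canonicalized to x + 1 >= y, reducing the magnitude of the
   constant; *STRICT_OVERFLOW_P records that the step assumed no
   signed wrapping. */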
8395 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8396 overflow further. Try to decrease the magnitude of constants involved
8397 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8398 and put sole constants at the second argument position.
8399 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
8402 maybe_canonicalize_comparison (enum tree_code code, tree type,
8403 tree arg0, tree arg1)
8406 bool strict_overflow_p;
8407 const char * const warnmsg = G_("assuming signed overflow does not occur "
8408 "when reducing constant in comparison");
8410 /* Try canonicalization by simplifying arg0. */
8411 strict_overflow_p = false;
8412 t = maybe_canonicalize_comparison_1 (code, type, arg0, arg1,
8413 &strict_overflow_p);
8416 if (strict_overflow_p)
8417 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8421 /* Try canonicalization by simplifying arg1 using the swapped
8422 comparison. */
8423 code = swap_tree_comparison (code);
8424 strict_overflow_p = false;
8425 t = maybe_canonicalize_comparison_1 (code, type, arg1, arg0,
8426 &strict_overflow_p);
8427 if (t && strict_overflow_p)
8428 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8432 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
8433 space. This is used to avoid issuing overflow warnings for
8434 expressions like &p->x, which cannot wrap. */
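/* Worked example (added annotation): for the address "&p->x", BASE is
   the pointer p, OFFSET is NULL_TREE and BITPOS is the bit offset of
   the field x.  The sums computed below stay below the size of the
   pointed-to object, so the function returns false and no bogus
   overflow warning is issued for the comparison.  */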
8437 pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
8439 unsigned HOST_WIDE_INT offset_low, total_low;
8440 HOST_WIDE_INT size, offset_high, total_high;
8442 if (!POINTER_TYPE_P (TREE_TYPE (base)))
8448 if (offset == NULL_TREE)
8453 else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
8457 offset_low = TREE_INT_CST_LOW (offset);
8458 offset_high = TREE_INT_CST_HIGH (offset);
8461 if (add_double_with_sign (offset_low, offset_high,
8462 bitpos / BITS_PER_UNIT, 0,
8463 &total_low, &total_high,
8467 if (total_high != 0)
8470 size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
8474 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
8475 array. */
8476 if (TREE_CODE (base) == ADDR_EXPR)
8478 HOST_WIDE_INT base_size;
8480 base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
8481 if (base_size > 0 && size < base_size)
8485 return total_low > (unsigned HOST_WIDE_INT) size;
8488 /* Subroutine of fold_binary. This routine performs all of the
8489 transformations that are common to the equality/inequality
8490 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8491 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
8492 fold_binary should call fold_binary. Fold a comparison with
8493 tree code CODE and type TYPE with operands OP0 and OP1. Return
8494 the folded comparison or NULL_TREE. */
8497 fold_comparison (enum tree_code code, tree type, tree op0, tree op1)
8499 tree arg0, arg1, tem;
8504 STRIP_SIGN_NOPS (arg0);
8505 STRIP_SIGN_NOPS (arg1);
8507 tem = fold_relational_const (code, type, arg0, arg1);
8508 if (tem != NULL_TREE)
8511 /* If one arg is a real or integer constant, put it last. */
8512 if (tree_swap_operands_p (arg0, arg1, true))
8513 return fold_build2 (swap_tree_comparison (code), type, op1, op0);
8515 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1. */
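/* Illustrative cases (added annotation), assuming signed int X:

       X + 10 < 30    ->   X < 20

   If C2 -+ C1 overflows, e.g. "X - 10 > INT_MAX - 5", the required
   constant INT_MAX + 5 does not exist, so the comparison is decided
   outright by the INT_MAX/INT_MIN handling below.  */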
8516 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8517 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8518 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8519 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
8520 && (TREE_CODE (arg1) == INTEGER_CST
8521 && !TREE_OVERFLOW (arg1)))
8523 tree const1 = TREE_OPERAND (arg0, 1);
8525 tree variable = TREE_OPERAND (arg0, 0);
8528 lhs_add = TREE_CODE (arg0) != PLUS_EXPR;
8530 lhs = fold_build2 (lhs_add ? PLUS_EXPR : MINUS_EXPR,
8531 TREE_TYPE (arg1), const2, const1);
8533 /* If the constant operation overflowed this can be
8534 simplified as a comparison against INT_MAX/INT_MIN. */
8535 if (TREE_CODE (lhs) == INTEGER_CST
8536 && TREE_OVERFLOW (lhs))
8538 int const1_sgn = tree_int_cst_sgn (const1);
8539 enum tree_code code2 = code;
8541 /* Get the sign of the constant on the lhs if the
8542 operation were VARIABLE + CONST1. */
8543 if (TREE_CODE (arg0) == MINUS_EXPR)
8544 const1_sgn = -const1_sgn;
8546 /* The sign of the constant determines if we overflowed
8547 INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
8548 Canonicalize to the INT_MIN overflow by swapping the comparison
8550 if (const1_sgn == -1)
8551 code2 = swap_tree_comparison (code);
8553 /* We now can look at the canonicalized case
8554 VARIABLE + 1 CODE2 INT_MIN
8555 and decide on the result. */
8556 if (code2 == LT_EXPR
8558 || code2 == EQ_EXPR)
8559 return omit_one_operand (type, boolean_false_node, variable);
8560 else if (code2 == NE_EXPR
8562 || code2 == GT_EXPR)
8563 return omit_one_operand (type, boolean_true_node, variable);
8566 if (TREE_CODE (lhs) == TREE_CODE (arg1)
8567 && (TREE_CODE (lhs) != INTEGER_CST
8568 || !TREE_OVERFLOW (lhs)))
8570 fold_overflow_warning (("assuming signed overflow does not occur "
8571 "when changing X +- C1 cmp C2 to "
8573 WARN_STRICT_OVERFLOW_COMPARISON);
8574 return fold_build2 (code, type, variable, lhs);
8578 /* For comparisons of pointers we can decompose them into a compile-time
8579 comparison of the base objects and the offsets into the object.
8580 This requires at least one operand being an ADDR_EXPR or a
8581 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
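/* A concrete sketch (added annotation):

       struct S { int a; int b; } s;
       &s.a < &s.b

   decomposes both sides to the common base "s" with bit positions 0
   and 8 * sizeof (int), so the comparison folds to a constant.  */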
8582 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8583 && (TREE_CODE (arg0) == ADDR_EXPR
8584 || TREE_CODE (arg1) == ADDR_EXPR
8585 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
8586 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
8588 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
8589 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
8590 enum machine_mode mode;
8591 int volatilep, unsignedp;
8592 bool indirect_base0 = false, indirect_base1 = false;
8594 /* Get base and offset for the access. Strip ADDR_EXPR for
8595 get_inner_reference, but put it back by stripping INDIRECT_REF
8596 off the base object if possible. indirect_baseN will be true
8597 if baseN is not an address but refers to the object itself. */
8599 if (TREE_CODE (arg0) == ADDR_EXPR)
8601 base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
8602 &bitsize, &bitpos0, &offset0, &mode,
8603 &unsignedp, &volatilep, false);
8604 if (TREE_CODE (base0) == INDIRECT_REF)
8605 base0 = TREE_OPERAND (base0, 0);
8607 indirect_base0 = true;
8609 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
8611 base0 = TREE_OPERAND (arg0, 0);
8612 offset0 = TREE_OPERAND (arg0, 1);
8616 if (TREE_CODE (arg1) == ADDR_EXPR)
8618 base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
8619 &bitsize, &bitpos1, &offset1, &mode,
8620 &unsignedp, &volatilep, false);
8621 if (TREE_CODE (base1) == INDIRECT_REF)
8622 base1 = TREE_OPERAND (base1, 0);
8624 indirect_base1 = true;
8626 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
8628 base1 = TREE_OPERAND (arg1, 0);
8629 offset1 = TREE_OPERAND (arg1, 1);
8632 /* If we have equivalent bases we might be able to simplify. */
8633 if (indirect_base0 == indirect_base1
8634 && operand_equal_p (base0, base1, 0))
8636 /* We can fold this expression to a constant if the non-constant
8637 offset parts are equal. */
8638 if ((offset0 == offset1
8639 || (offset0 && offset1
8640 && operand_equal_p (offset0, offset1, 0)))
8643 || POINTER_TYPE_OVERFLOW_UNDEFINED))
8648 && bitpos0 != bitpos1
8649 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8650 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8651 fold_overflow_warning (("assuming pointer wraparound does not "
8652 "occur when comparing P +- C1 with "
8654 WARN_STRICT_OVERFLOW_CONDITIONAL);
8659 return build_int_cst (boolean_type_node, bitpos0 == bitpos1);
8661 return build_int_cst (boolean_type_node, bitpos0 != bitpos1);
8663 return build_int_cst (boolean_type_node, bitpos0 < bitpos1);
8665 return build_int_cst (boolean_type_node, bitpos0 <= bitpos1);
8667 return build_int_cst (boolean_type_node, bitpos0 >= bitpos1);
8669 return build_int_cst (boolean_type_node, bitpos0 > bitpos1);
8673 /* We can simplify the comparison to a comparison of the variable
8674 offset parts if the constant offset parts are equal.
8675 Be careful to use the signed size type here because otherwise we
8676 would mess with array offsets in the wrong way. This is possible
8677 because pointer arithmetic is required to remain within an
8678 object, and overflow on pointer differences is undefined as of
8679 C99 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
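/* Example of this case (added annotation): comparing two
   POINTER_PLUS_EXPRs "p p+ i" and "p p+ j" with equal bases and equal
   constant parts reduces to comparing the converted offsets, e.g.

       p p+ i < p p+ j    ->   (ssizetype) i < (ssizetype) j.  */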
8680 else if (bitpos0 == bitpos1
8681 && ((code == EQ_EXPR || code == NE_EXPR)
8682 || POINTER_TYPE_OVERFLOW_UNDEFINED))
8684 tree signed_size_type_node;
8685 signed_size_type_node = signed_type_for (size_type_node);
8687 /* By converting to the signed size type we cover middle-end pointer
8688 arithmetic, which operates on unsigned pointer types of size-type
8689 size, and ARRAY_REF offsets, which are properly sign- or
8690 zero-extended from their type in case it is narrower than
8691 the size type. */
8692 if (offset0 == NULL_TREE)
8693 offset0 = build_int_cst (signed_size_type_node, 0);
8695 offset0 = fold_convert (signed_size_type_node, offset0);
8696 if (offset1 == NULL_TREE)
8697 offset1 = build_int_cst (signed_size_type_node, 0);
8699 offset1 = fold_convert (signed_size_type_node, offset1);
8703 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8704 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8705 fold_overflow_warning (("assuming pointer wraparound does not "
8706 "occur when comparing P +- C1 with "
8708 WARN_STRICT_OVERFLOW_COMPARISON);
8710 return fold_build2 (code, type, offset0, offset1);
8713 /* For non-equal bases we can simplify if they are addresses
8714 of local binding decls or constants. */
8715 else if (indirect_base0 && indirect_base1
8716 /* We know that !operand_equal_p (base0, base1, 0)
8717 because the if condition was false. But make
8718 sure two decls are not the same. */
8720 && TREE_CODE (arg0) == ADDR_EXPR
8721 && TREE_CODE (arg1) == ADDR_EXPR
8722 && (((TREE_CODE (base0) == VAR_DECL
8723 || TREE_CODE (base0) == PARM_DECL)
8724 && (targetm.binds_local_p (base0)
8725 || CONSTANT_CLASS_P (base1)))
8726 || CONSTANT_CLASS_P (base0))
8727 && (((TREE_CODE (base1) == VAR_DECL
8728 || TREE_CODE (base1) == PARM_DECL)
8729 && (targetm.binds_local_p (base1)
8730 || CONSTANT_CLASS_P (base0)))
8731 || CONSTANT_CLASS_P (base1)))
8733 if (code == EQ_EXPR)
8734 return omit_two_operands (type, boolean_false_node, arg0, arg1);
8735 else if (code == NE_EXPR)
8736 return omit_two_operands (type, boolean_true_node, arg0, arg1);
8738 /* For equal offsets we can simplify to a comparison of the
8739 base addresses. */
8740 else if (bitpos0 == bitpos1
8742 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
8744 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
8745 && ((offset0 == offset1)
8746 || (offset0 && offset1
8747 && operand_equal_p (offset0, offset1, 0))))
8750 base0 = fold_addr_expr (base0);
8752 base1 = fold_addr_expr (base1);
8753 return fold_build2 (code, type, base0, base1);
8757 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
8758 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
8759 the resulting offset is smaller in absolute value than the
8760 original one. */
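/* Illustration (added annotation), for signed X and Y:

       X + 2 < Y + 5    ->   X < Y + 3

   The combined constant 3 is smaller in absolute value than the 5 it
   replaces, so the rewritten form cannot overflow where the original
   did not.  */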
8761 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8762 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8763 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8764 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
8765 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
8766 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
8767 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
8769 tree const1 = TREE_OPERAND (arg0, 1);
8770 tree const2 = TREE_OPERAND (arg1, 1);
8771 tree variable1 = TREE_OPERAND (arg0, 0);
8772 tree variable2 = TREE_OPERAND (arg1, 0);
8774 const char * const warnmsg = G_("assuming signed overflow does not "
8775 "occur when combining constants around "
8778 /* Put the constant on the side where it doesn't overflow and is
8779 of lower absolute value than before. */
8780 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8781 ? MINUS_EXPR : PLUS_EXPR,
8783 if (!TREE_OVERFLOW (cst)
8784 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2))
8786 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8787 return fold_build2 (code, type,
8789 fold_build2 (TREE_CODE (arg1), TREE_TYPE (arg1),
8793 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8794 ? MINUS_EXPR : PLUS_EXPR,
8796 if (!TREE_OVERFLOW (cst)
8797 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1))
8799 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8800 return fold_build2 (code, type,
8801 fold_build2 (TREE_CODE (arg0), TREE_TYPE (arg0),
8807 /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
8808 signed arithmetic case. That form is created by the compiler
8809 often enough for folding it to be of value. One example is in
8810 computing loop trip counts after Operator Strength Reduction. */
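/* Illustration (added annotation), for signed int X:

       X * 4 > 0     ->   X > 0
       X * -4 > 0    ->   X < 0   (the comparison sense is swapped)

   valid only because signed multiplication overflow is undefined.  */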
8811 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8812 && TREE_CODE (arg0) == MULT_EXPR
8813 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8814 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
8815 && integer_zerop (arg1))
8817 tree const1 = TREE_OPERAND (arg0, 1);
8818 tree const2 = arg1; /* zero */
8819 tree variable1 = TREE_OPERAND (arg0, 0);
8820 enum tree_code cmp_code = code;
8822 gcc_assert (!integer_zerop (const1));
8824 fold_overflow_warning (("assuming signed overflow does not occur when "
8825 "eliminating multiplication in comparison "
8827 WARN_STRICT_OVERFLOW_COMPARISON);
8829 /* If const1 is negative we swap the sense of the comparison. */
8830 if (tree_int_cst_sgn (const1) < 0)
8831 cmp_code = swap_tree_comparison (cmp_code);
8833 return fold_build2 (cmp_code, type, variable1, const2);
8836 tem = maybe_canonicalize_comparison (code, type, op0, op1);
8840 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
8842 tree targ0 = strip_float_extensions (arg0);
8843 tree targ1 = strip_float_extensions (arg1);
8844 tree newtype = TREE_TYPE (targ0);
8846 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
8847 newtype = TREE_TYPE (targ1);
8849 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
8850 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
8851 return fold_build2 (code, type, fold_convert (newtype, targ0),
8852 fold_convert (newtype, targ1));
8854 /* (-a) CMP (-b) -> b CMP a */
8855 if (TREE_CODE (arg0) == NEGATE_EXPR
8856 && TREE_CODE (arg1) == NEGATE_EXPR)
8857 return fold_build2 (code, type, TREE_OPERAND (arg1, 0),
8858 TREE_OPERAND (arg0, 0));
8860 if (TREE_CODE (arg1) == REAL_CST)
8862 REAL_VALUE_TYPE cst;
8863 cst = TREE_REAL_CST (arg1);
8865 /* (-a) CMP CST -> a swap(CMP) (-CST) */
8866 if (TREE_CODE (arg0) == NEGATE_EXPR)
8867 return fold_build2 (swap_tree_comparison (code), type,
8868 TREE_OPERAND (arg0, 0),
8869 build_real (TREE_TYPE (arg1),
8870 REAL_VALUE_NEGATE (cst)));
8872 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
8873 /* a CMP (-0) -> a CMP 0 */
8874 if (REAL_VALUE_MINUS_ZERO (cst))
8875 return fold_build2 (code, type, arg0,
8876 build_real (TREE_TYPE (arg1), dconst0));
8878 /* x != NaN is always true, other ops are always false. */
8879 if (REAL_VALUE_ISNAN (cst)
8880 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
8882 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
8883 return omit_one_operand (type, tem, arg0);
8886 /* Fold comparisons against infinity. */
8887 if (REAL_VALUE_ISINF (cst))
8889 tem = fold_inf_compare (code, type, arg0, arg1);
8890 if (tem != NULL_TREE)
8895 /* If this is a comparison of a real constant with a PLUS_EXPR
8896 or a MINUS_EXPR of a real constant, we can convert it into a
8897 comparison with a revised real constant as long as no overflow
8898 occurs when unsafe_math_optimizations are enabled. */
8899 if (flag_unsafe_math_optimizations
8900 && TREE_CODE (arg1) == REAL_CST
8901 && (TREE_CODE (arg0) == PLUS_EXPR
8902 || TREE_CODE (arg0) == MINUS_EXPR)
8903 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
8904 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
8905 ? MINUS_EXPR : PLUS_EXPR,
8906 arg1, TREE_OPERAND (arg0, 1), 0))
8907 && !TREE_OVERFLOW (tem))
8908 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
8910 /* Likewise, we can simplify a comparison of a real constant with
8911 a MINUS_EXPR whose first operand is also a real constant, i.e.
8912 (c1 - x) < c2 becomes x > c1-c2. Reordering is allowed on
8913 floating-point types only if -fassociative-math is set. */
8914 if (flag_associative_math
8915 && TREE_CODE (arg1) == REAL_CST
8916 && TREE_CODE (arg0) == MINUS_EXPR
8917 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
8918 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
8920 && !TREE_OVERFLOW (tem))
8921 return fold_build2 (swap_tree_comparison (code), type,
8922 TREE_OPERAND (arg0, 1), tem);
8924 /* Fold comparisons against built-in math functions. */
8925 if (TREE_CODE (arg1) == REAL_CST
8926 && flag_unsafe_math_optimizations
8927 && ! flag_errno_math)
8929 enum built_in_function fcode = builtin_mathfn_code (arg0);
8931 if (fcode != END_BUILTINS)
8933 tem = fold_mathfn_compare (fcode, code, type, arg0, arg1);
8934 if (tem != NULL_TREE)
8940 if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
8941 && CONVERT_EXPR_P (arg0))
8943 /* If we are widening one operand of an integer comparison,
8944 see if the other operand is similarly being widened. Perhaps we
8945 can do the comparison in the narrower type. */
8946 tem = fold_widened_comparison (code, type, arg0, arg1);
8950 /* Or if we are changing signedness. */
8951 tem = fold_sign_changed_comparison (code, type, arg0, arg1);
8956 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
8957 constant, we can simplify it. */
8958 if (TREE_CODE (arg1) == INTEGER_CST
8959 && (TREE_CODE (arg0) == MIN_EXPR
8960 || TREE_CODE (arg0) == MAX_EXPR)
8961 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8963 tem = optimize_minmax_comparison (code, type, op0, op1);
8968 /* Simplify comparison of something with itself. (For IEEE
8969 floating-point, we can only do some of these simplifications.) */
8970 if (operand_equal_p (arg0, arg1, 0))
8975 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
8976 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8977 return constant_boolean_node (1, type);
8982 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
8983 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8984 return constant_boolean_node (1, type);
8985 return fold_build2 (EQ_EXPR, type, arg0, arg1);
8988 /* For NE, we can only do this simplification if integer
8989 or we don't honor IEEE floating point NaNs. */
8990 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
8991 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
8993 /* ... fall through ... */
8996 return constant_boolean_node (0, type);
9002 /* If we are comparing an expression that just has comparisons
9003 of two integer values, arithmetic expressions of those comparisons,
9004 and constants, we can simplify it. There are only three cases
9005 to check: the two values can either be equal, the first can be
9006 greater, or the second can be greater. Fold the expression for
9007 those three values. Since each value must be 0 or 1, we have
9008 eight possibilities, each of which corresponds to the constant 0
9009 or 1 or one of the six possible comparisons.
9011 This handles common cases like (a > b) == 0 but also handles
9012 expressions like ((x > y) - (y > x)) > 0, which supposedly
9013 occur in macroized code. */
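/* Worked example (added annotation): for ((x > y) - (y > x)) > 0 the
   three substitutions below (x greater, equal, less) evaluate to 1, 0
   and 0, i.e. the 3-bit mask 4, which selects GT_EXPR, so the whole
   expression folds to "x > y".  */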
9015 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
9017 tree cval1 = 0, cval2 = 0;
9020 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
9021 /* Don't handle degenerate cases here; they should already
9022 have been handled anyway. */
9023 && cval1 != 0 && cval2 != 0
9024 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
9025 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
9026 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
9027 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
9028 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
9029 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
9030 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
9032 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
9033 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
9035 /* We can't just pass T to eval_subst in case cval1 or cval2
9036 was the same as ARG1. */
9039 = fold_build2 (code, type,
9040 eval_subst (arg0, cval1, maxval,
9044 = fold_build2 (code, type,
9045 eval_subst (arg0, cval1, maxval,
9049 = fold_build2 (code, type,
9050 eval_subst (arg0, cval1, minval,
9054 /* All three of these results should be 0 or 1. Confirm they are.
9055 Then use those values to select the proper code to use. */
9057 if (TREE_CODE (high_result) == INTEGER_CST
9058 && TREE_CODE (equal_result) == INTEGER_CST
9059 && TREE_CODE (low_result) == INTEGER_CST)
9061 /* Make a 3-bit mask with the high-order bit being the
9062 value for `>', the next for '=', and the low for '<'. */
9063 switch ((integer_onep (high_result) * 4)
9064 + (integer_onep (equal_result) * 2)
9065 + integer_onep (low_result))
9069 return omit_one_operand (type, integer_zero_node, arg0);
9090 return omit_one_operand (type, integer_one_node, arg0);
9094 return save_expr (build2 (code, type, cval1, cval2));
9095 return fold_build2 (code, type, cval1, cval2);
9100 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
9101 into a single range test. */
9102 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
9103 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
9104 && TREE_CODE (arg1) == INTEGER_CST
9105 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9106 && !integer_zerop (TREE_OPERAND (arg0, 1))
9107 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9108 && !TREE_OVERFLOW (arg1))
9110 tem = fold_div_compare (code, type, arg0, arg1);
9111 if (tem != NULL_TREE)
9115 /* Fold ~X op ~Y as Y op X. */
9116 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9117 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9119 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9120 return fold_build2 (code, type,
9121 fold_convert (cmp_type, TREE_OPERAND (arg1, 0)),
9122 TREE_OPERAND (arg0, 0));
9125 /* Fold ~X op C as X op' ~C, where op' is the swapped comparison. */
9126 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9127 && TREE_CODE (arg1) == INTEGER_CST)
9129 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9130 return fold_build2 (swap_tree_comparison (code), type,
9131 TREE_OPERAND (arg0, 0),
9132 fold_build1 (BIT_NOT_EXPR, cmp_type,
9133 fold_convert (cmp_type, arg1)));
9140 /* Subroutine of fold_binary. Optimize complex multiplications of the
9141 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
9142 argument EXPR represents the expression "z" of type TYPE. */
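/* The underlying algebra (added annotation): with z = a + b*i,

       z * conj(z) = (a + b*i) * (a - b*i) = a*a + b*b + 0*i

   so the imaginary part of the result is always zero, as built below.  */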
9145 fold_mult_zconjz (tree type, tree expr)
9147 tree itype = TREE_TYPE (type);
9148 tree rpart, ipart, tem;
9150 if (TREE_CODE (expr) == COMPLEX_EXPR)
9152 rpart = TREE_OPERAND (expr, 0);
9153 ipart = TREE_OPERAND (expr, 1);
9155 else if (TREE_CODE (expr) == COMPLEX_CST)
9157 rpart = TREE_REALPART (expr);
9158 ipart = TREE_IMAGPART (expr);
9162 expr = save_expr (expr);
9163 rpart = fold_build1 (REALPART_EXPR, itype, expr);
9164 ipart = fold_build1 (IMAGPART_EXPR, itype, expr);
9167 rpart = save_expr (rpart);
9168 ipart = save_expr (ipart);
9169 tem = fold_build2 (PLUS_EXPR, itype,
9170 fold_build2 (MULT_EXPR, itype, rpart, rpart),
9171 fold_build2 (MULT_EXPR, itype, ipart, ipart));
9172 return fold_build2 (COMPLEX_EXPR, type, tem,
9173 fold_convert (itype, integer_zero_node));
9177 /* Subroutine of fold_binary. If P is the value of EXPR, computes
9178 power-of-two M and (arbitrary) N such that M divides (P-N). This condition
9179 guarantees that P and N have the same least significant log2(M) bits.
9180 N is not otherwise constrained. In particular, N is not normalized to
9181 0 <= N < M as is common. In general, the precise value of P is unknown.
9182 M is chosen as large as possible such that constant N can be determined.
9184 Returns M and sets *RESIDUE to N. */
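/* Example (added annotation): for the address of a declaration with
   16-byte alignment plus a constant byte offset of 5, the function
   would return M = 16 and set *RESIDUE = 5, meaning the low 4 bits of
   the pointer value are known to equal those of 5.  */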
9186 static unsigned HOST_WIDE_INT
9187 get_pointer_modulus_and_residue (tree expr, unsigned HOST_WIDE_INT *residue)
9189 enum tree_code code;
9193 code = TREE_CODE (expr);
9194 if (code == ADDR_EXPR)
9196 expr = TREE_OPERAND (expr, 0);
9197 if (handled_component_p (expr))
9199 HOST_WIDE_INT bitsize, bitpos;
9201 enum machine_mode mode;
9202 int unsignedp, volatilep;
9204 expr = get_inner_reference (expr, &bitsize, &bitpos, &offset,
9205 &mode, &unsignedp, &volatilep, false);
9206 *residue = bitpos / BITS_PER_UNIT;
9209 if (TREE_CODE (offset) == INTEGER_CST)
9210 *residue += TREE_INT_CST_LOW (offset);
9212 /* We don't handle more complicated offset expressions. */
9217 if (DECL_P (expr) && TREE_CODE (expr) != FUNCTION_DECL)
9218 return DECL_ALIGN_UNIT (expr);
9220 else if (code == POINTER_PLUS_EXPR)
9223 unsigned HOST_WIDE_INT modulus;
9224 enum tree_code inner_code;
9226 op0 = TREE_OPERAND (expr, 0);
9228 modulus = get_pointer_modulus_and_residue (op0, residue);
9230 op1 = TREE_OPERAND (expr, 1);
9232 inner_code = TREE_CODE (op1);
9233 if (inner_code == INTEGER_CST)
9235 *residue += TREE_INT_CST_LOW (op1);
9238 else if (inner_code == MULT_EXPR)
9240 op1 = TREE_OPERAND (op1, 1);
9241 if (TREE_CODE (op1) == INTEGER_CST)
9243 unsigned HOST_WIDE_INT align;
9245 /* Compute the greatest power-of-2 divisor of op1. */
9246 align = TREE_INT_CST_LOW (op1);
9249 /* If align is non-zero and less than *modulus, replace
9250 *modulus with align. If align is 0, then either op1 is 0
9251 or the greatest power-of-2 divisor of op1 doesn't fit in an
9252 unsigned HOST_WIDE_INT. In either case, no additional
9253 constraint is imposed. */
9255 modulus = MIN (modulus, align);
9262 /* If we get here, we were unable to determine anything useful about the
9263 expression. */
9268 /* Fold a binary expression of code CODE and type TYPE with operands
9269 OP0 and OP1. Return the folded expression if folding is
9270 successful. Otherwise, return NULL_TREE. */
9273 fold_binary (enum tree_code code, tree type, tree op0, tree op1)
9275 enum tree_code_class kind = TREE_CODE_CLASS (code);
9276 tree arg0, arg1, tem;
9277 tree t1 = NULL_TREE;
9278 bool strict_overflow_p;
9280 gcc_assert (IS_EXPR_CODE_CLASS (kind)
9281 && TREE_CODE_LENGTH (code) == 2
9283 && op1 != NULL_TREE);
9288 /* Strip any conversions that don't change the mode. This is
9289 safe for every expression, except for a comparison expression
9290 because its signedness is derived from its operands. So, in
9291 the latter case, only strip conversions that don't change the
9292 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
9295 Note that this is done as an internal manipulation within the
9296 constant folder, in order to find the simplest representation
9297 of the arguments so that their form can be studied. In any
9298 cases, the appropriate type conversions should be put back in
9299 the tree that will get out of the constant folder. */
9301 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
9303 STRIP_SIGN_NOPS (arg0);
9304 STRIP_SIGN_NOPS (arg1);
9312 /* Note that TREE_CONSTANT isn't enough: static var addresses are
9313 constant but we can't do arithmetic on them. */
9314 if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9315 || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
9316 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == FIXED_CST)
9317 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == INTEGER_CST)
9318 || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
9319 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST))
9321 if (kind == tcc_binary)
9323 /* Make sure type and arg0 have the same saturating flag. */
9324 gcc_assert (TYPE_SATURATING (type)
9325 == TYPE_SATURATING (TREE_TYPE (arg0)));
9326 tem = const_binop (code, arg0, arg1, 0);
9328 else if (kind == tcc_comparison)
9329 tem = fold_relational_const (code, type, arg0, arg1);
9333 if (tem != NULL_TREE)
9335 if (TREE_TYPE (tem) != type)
9336 tem = fold_convert (type, tem);
9341 /* If this is a commutative operation, and ARG0 is a constant, move it
9342 to ARG1 to reduce the number of tests below. */
9343 if (commutative_tree_code (code)
9344 && tree_swap_operands_p (arg0, arg1, true))
9345 return fold_build2 (code, type, op1, op0);
9347 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
9349 First check for cases where an arithmetic operation is applied to a
9350 compound, conditional, or comparison operation. Push the arithmetic
9351 operation inside the compound or conditional to see if any folding
9352 can then be done. Convert comparison to conditional for this purpose.
9353 This also optimizes non-constant cases that used to be done in
9354 expand_expr.
9356 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
9357 one of the operands is a comparison and the other is a comparison, a
9358 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
9359 code below would make the expression more complex. Change it to a
9360 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
9361 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
9363 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
9364 || code == EQ_EXPR || code == NE_EXPR)
9365 && ((truth_value_p (TREE_CODE (arg0))
9366 && (truth_value_p (TREE_CODE (arg1))
9367 || (TREE_CODE (arg1) == BIT_AND_EXPR
9368 && integer_onep (TREE_OPERAND (arg1, 1)))))
9369 || (truth_value_p (TREE_CODE (arg1))
9370 && (truth_value_p (TREE_CODE (arg0))
9371 || (TREE_CODE (arg0) == BIT_AND_EXPR
9372 && integer_onep (TREE_OPERAND (arg0, 1)))))))
9374 tem = fold_build2 (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
9375 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
9378 fold_convert (boolean_type_node, arg0),
9379 fold_convert (boolean_type_node, arg1));
9381 if (code == EQ_EXPR)
9382 tem = invert_truthvalue (tem);
9384 return fold_convert (type, tem);
9387 if (TREE_CODE_CLASS (code) == tcc_binary
9388 || TREE_CODE_CLASS (code) == tcc_comparison)
9390 if (TREE_CODE (arg0) == COMPOUND_EXPR)
9391 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
9392 fold_build2 (code, type,
9393 fold_convert (TREE_TYPE (op0),
9394 TREE_OPERAND (arg0, 1)),
9396 if (TREE_CODE (arg1) == COMPOUND_EXPR
9397 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9398 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
9399 fold_build2 (code, type, op0,
9400 fold_convert (TREE_TYPE (op1),
9401 TREE_OPERAND (arg1, 1))));
9403 if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
9405 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
9407 /*cond_first_p=*/1);
9408 if (tem != NULL_TREE)
9412 if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1))
9414 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
9416 /*cond_first_p=*/0);
9417 if (tem != NULL_TREE)
9424 case POINTER_PLUS_EXPR:
9425 /* 0 +p index -> (type)index */
9426 if (integer_zerop (arg0))
9427 return non_lvalue (fold_convert (type, arg1));
9429 /* PTR +p 0 -> PTR */
9430 if (integer_zerop (arg1))
9431 return non_lvalue (fold_convert (type, arg0));
9433 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
9434 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9435 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9436 return fold_convert (type, fold_build2 (PLUS_EXPR, sizetype,
9437 fold_convert (sizetype, arg1),
9438 fold_convert (sizetype, arg0)));
9440 /* index +p PTR -> PTR +p index */
9441 if (POINTER_TYPE_P (TREE_TYPE (arg1))
9442 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9443 return fold_build2 (POINTER_PLUS_EXPR, type,
9444 fold_convert (type, arg1),
9445 fold_convert (sizetype, arg0));
9447 /* (PTR +p B) +p A -> PTR +p (B + A) */
9448 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
9451 tree arg01 = fold_convert (sizetype, TREE_OPERAND (arg0, 1));
9452 tree arg00 = TREE_OPERAND (arg0, 0);
9453 inner = fold_build2 (PLUS_EXPR, sizetype,
9454 arg01, fold_convert (sizetype, arg1));
9455 return fold_convert (type,
9456 fold_build2 (POINTER_PLUS_EXPR,
9457 TREE_TYPE (arg00), arg00, inner));
9460 /* PTR_CST +p CST -> CST1 */
9461 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9462 return fold_build2 (PLUS_EXPR, type, arg0, fold_convert (type, arg1));
9464 /* Try replacing &a[i1] +p c * i2 with &a[i1 + i2], if c is the step
9465 of the array. The loop optimizer sometimes produces this type of
9466 expression. */
9467 if (TREE_CODE (arg0) == ADDR_EXPR)
9469 tem = try_move_mult_to_index (arg0, fold_convert (sizetype, arg1));
9471 return fold_convert (type, tem);
9477 /* PTR + INT -> (INT)(PTR p+ INT) */
9478 if (POINTER_TYPE_P (TREE_TYPE (arg0))
9479 && INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
9480 return fold_convert (type, fold_build2 (POINTER_PLUS_EXPR,
9483 fold_convert (sizetype, arg1)));
9484 /* INT + PTR -> (INT)(PTR p+ INT) */
9485 if (POINTER_TYPE_P (TREE_TYPE (arg1))
9486 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9487 return fold_convert (type, fold_build2 (POINTER_PLUS_EXPR,
9490 fold_convert (sizetype, arg0)));
9491 /* A + (-B) -> A - B */
9492 if (TREE_CODE (arg1) == NEGATE_EXPR)
9493 return fold_build2 (MINUS_EXPR, type,
9494 fold_convert (type, arg0),
9495 fold_convert (type, TREE_OPERAND (arg1, 0)));
9496 /* (-A) + B -> B - A */
9497 if (TREE_CODE (arg0) == NEGATE_EXPR
9498 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
9499 return fold_build2 (MINUS_EXPR, type,
9500 fold_convert (type, arg1),
9501 fold_convert (type, TREE_OPERAND (arg0, 0)));
9503 if (INTEGRAL_TYPE_P (type))
9505 /* Convert ~A + 1 to -A. */
9506 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9507 && integer_onep (arg1))
9508 return fold_build1 (NEGATE_EXPR, type,
9509 fold_convert (type, TREE_OPERAND (arg0, 0)));
9512 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9513 && !TYPE_OVERFLOW_TRAPS (type))
9515 tree tem = TREE_OPERAND (arg0, 0);
9518 if (operand_equal_p (tem, arg1, 0))
9520 t1 = build_int_cst_type (type, -1);
9521 return omit_one_operand (type, t1, arg1);
9526 if (TREE_CODE (arg1) == BIT_NOT_EXPR
9527 && !TYPE_OVERFLOW_TRAPS (type))
9529 tree tem = TREE_OPERAND (arg1, 0);
9532 if (operand_equal_p (arg0, tem, 0))
9534 t1 = build_int_cst_type (type, -1);
9535 return omit_one_operand (type, t1, arg0);
9539 /* X + (X / CST) * -CST is X % CST. */
9540 if (TREE_CODE (arg1) == MULT_EXPR
9541 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
9542 && operand_equal_p (arg0,
9543 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
9545 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
9546 tree cst1 = TREE_OPERAND (arg1, 1);
9547 tree sum = fold_binary (PLUS_EXPR, TREE_TYPE (cst1), cst1, cst0);
9548 if (sum && integer_zerop (sum))
9549 return fold_convert (type,
9550 fold_build2 (TRUNC_MOD_EXPR,
9551 TREE_TYPE (arg0), arg0, cst0));
9555 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the
9556 same or one. Make sure type is not saturating.
9557 fold_plusminus_mult_expr will re-associate. */
9558 if ((TREE_CODE (arg0) == MULT_EXPR
9559 || TREE_CODE (arg1) == MULT_EXPR)
9560 && !TYPE_SATURATING (type)
9561 && (!FLOAT_TYPE_P (type) || flag_associative_math))
9563 tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1);
9568 if (! FLOAT_TYPE_P (type))
9570 if (integer_zerop (arg1))
9571 return non_lvalue (fold_convert (type, arg0));
9573 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
9574 with a constant, and the two constants have no bits in common,
9575 we should treat this as a BIT_IOR_EXPR since this may produce more
9576 simplifications. */
9577 if (TREE_CODE (arg0) == BIT_AND_EXPR
9578 && TREE_CODE (arg1) == BIT_AND_EXPR
9579 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9580 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9581 && integer_zerop (const_binop (BIT_AND_EXPR,
9582 TREE_OPERAND (arg0, 1),
9583 TREE_OPERAND (arg1, 1), 0)))
9585 code = BIT_IOR_EXPR;
9589 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
9590 (plus (plus (mult) (mult)) (foo)) so that we can
9591 take advantage of the factoring cases below. */
9592 if (((TREE_CODE (arg0) == PLUS_EXPR
9593 || TREE_CODE (arg0) == MINUS_EXPR)
9594 && TREE_CODE (arg1) == MULT_EXPR)
9595 || ((TREE_CODE (arg1) == PLUS_EXPR
9596 || TREE_CODE (arg1) == MINUS_EXPR)
9597 && TREE_CODE (arg0) == MULT_EXPR))
9599 tree parg0, parg1, parg, marg;
9600 enum tree_code pcode;
9602 if (TREE_CODE (arg1) == MULT_EXPR)
9603 parg = arg0, marg = arg1;
9605 parg = arg1, marg = arg0;
9606 pcode = TREE_CODE (parg);
9607 parg0 = TREE_OPERAND (parg, 0);
9608 parg1 = TREE_OPERAND (parg, 1);
9612 if (TREE_CODE (parg0) == MULT_EXPR
9613 && TREE_CODE (parg1) != MULT_EXPR)
9614 return fold_build2 (pcode, type,
9615 fold_build2 (PLUS_EXPR, type,
9616 fold_convert (type, parg0),
9617 fold_convert (type, marg)),
9618 fold_convert (type, parg1));
9619 if (TREE_CODE (parg0) != MULT_EXPR
9620 && TREE_CODE (parg1) == MULT_EXPR)
9621 return fold_build2 (PLUS_EXPR, type,
9622 fold_convert (type, parg0),
9623 fold_build2 (pcode, type,
9624 fold_convert (type, marg),
9631 /* See if ARG1 is zero and X + ARG1 reduces to X. */
9632 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
9633 return non_lvalue (fold_convert (type, arg0));
9635 /* Likewise if the operands are reversed. */
9636 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
9637 return non_lvalue (fold_convert (type, arg1));
9639 /* Convert X + -C into X - C. */
9640 if (TREE_CODE (arg1) == REAL_CST
9641 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
9643 tem = fold_negate_const (arg1, type);
9644 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
9645 return fold_build2 (MINUS_EXPR, type,
9646 fold_convert (type, arg0),
9647 fold_convert (type, tem));
9650 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
9651 to __complex__ ( x, y ). This is not the same for SNaNs or
9652 if signed zeros are involved. */
9653 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9654 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
9655 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
9657 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9658 tree arg0r = fold_unary (REALPART_EXPR, rtype, arg0);
9659 tree arg0i = fold_unary (IMAGPART_EXPR, rtype, arg0);
9660 bool arg0rz = false, arg0iz = false;
9661 if ((arg0r && (arg0rz = real_zerop (arg0r)))
9662 || (arg0i && (arg0iz = real_zerop (arg0i))))
9664 tree arg1r = fold_unary (REALPART_EXPR, rtype, arg1);
9665 tree arg1i = fold_unary (IMAGPART_EXPR, rtype, arg1);
9666 if (arg0rz && arg1i && real_zerop (arg1i))
9668 tree rp = arg1r ? arg1r
9669 : build1 (REALPART_EXPR, rtype, arg1);
9670 tree ip = arg0i ? arg0i
9671 : build1 (IMAGPART_EXPR, rtype, arg0);
9672 return fold_build2 (COMPLEX_EXPR, type, rp, ip);
9674 else if (arg0iz && arg1r && real_zerop (arg1r))
9676 tree rp = arg0r ? arg0r
9677 : build1 (REALPART_EXPR, rtype, arg0);
9678 tree ip = arg1i ? arg1i
9679 : build1 (IMAGPART_EXPR, rtype, arg1);
9680 return fold_build2 (COMPLEX_EXPR, type, rp, ip);
9685 if (flag_unsafe_math_optimizations
9686 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
9687 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
9688 && (tem = distribute_real_division (code, type, arg0, arg1)))
9691 /* Convert x+x into x*2.0. */
9692 if (operand_equal_p (arg0, arg1, 0)
9693 && SCALAR_FLOAT_TYPE_P (type))
9694 return fold_build2 (MULT_EXPR, type, arg0,
9695 build_real (type, dconst2));
9697 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
9698 We associate floats only if the user has specified
9699 -fassociative-math. */
9700 if (flag_associative_math
9701 && TREE_CODE (arg1) == PLUS_EXPR
9702 && TREE_CODE (arg0) != MULT_EXPR)
9704 tree tree10 = TREE_OPERAND (arg1, 0);
9705 tree tree11 = TREE_OPERAND (arg1, 1);
9706 if (TREE_CODE (tree11) == MULT_EXPR
9707 && TREE_CODE (tree10) == MULT_EXPR)
9710 tree0 = fold_build2 (PLUS_EXPR, type, arg0, tree10);
9711 return fold_build2 (PLUS_EXPR, type, tree0, tree11);
9714 /* Convert (b*c + d*e) + a into b*c + (d*e +a).
9715 We associate floats only if the user has specified
9716 -fassociative-math. */
9717 if (flag_associative_math
9718 && TREE_CODE (arg0) == PLUS_EXPR
9719 && TREE_CODE (arg1) != MULT_EXPR)
9721 tree tree00 = TREE_OPERAND (arg0, 0);
9722 tree tree01 = TREE_OPERAND (arg0, 1);
9723 if (TREE_CODE (tree01) == MULT_EXPR
9724 && TREE_CODE (tree00) == MULT_EXPR)
9727 tree0 = fold_build2 (PLUS_EXPR, type, tree01, arg1);
9728 return fold_build2 (PLUS_EXPR, type, tree00, tree0);
9734 /* (A << C1) + (A >> C2), where A is unsigned and C1+C2 is the size of A,
9735 is a rotate of A by C1 bits. */
9736 /* (A << B) + (A >> (Z - B)), where A is unsigned and Z is the size of A,
9737 is a rotate of A by B bits. */
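/* Examples (added annotation), for a 32-bit unsigned int A:

       (A << 3) + (A >> 29)          ->   A rotated left by 3
       (A << B) + (A >> (32 - B))    ->   A rotated left by B.  */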
9739 enum tree_code code0, code1;
9741 code0 = TREE_CODE (arg0);
9742 code1 = TREE_CODE (arg1);
9743 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
9744 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
9745 && operand_equal_p (TREE_OPERAND (arg0, 0),
9746 TREE_OPERAND (arg1, 0), 0)
9747 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
9748 TYPE_UNSIGNED (rtype))
9749 /* Only create rotates in complete modes. Other cases are not
9750 expanded properly. */
9751 && TYPE_PRECISION (rtype) == GET_MODE_PRECISION (TYPE_MODE (rtype)))
9753 tree tree01, tree11;
9754 enum tree_code code01, code11;
9756 tree01 = TREE_OPERAND (arg0, 1);
9757 tree11 = TREE_OPERAND (arg1, 1);
9758 STRIP_NOPS (tree01);
9759 STRIP_NOPS (tree11);
9760 code01 = TREE_CODE (tree01);
9761 code11 = TREE_CODE (tree11);
9762 if (code01 == INTEGER_CST
9763 && code11 == INTEGER_CST
9764 && TREE_INT_CST_HIGH (tree01) == 0
9765 && TREE_INT_CST_HIGH (tree11) == 0
9766 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
9767 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
9768 return build2 (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
9769 code0 == LSHIFT_EXPR ? tree01 : tree11);
9770 else if (code11 == MINUS_EXPR)
9772 tree tree110, tree111;
9773 tree110 = TREE_OPERAND (tree11, 0);
9774 tree111 = TREE_OPERAND (tree11, 1);
9775 STRIP_NOPS (tree110);
9776 STRIP_NOPS (tree111);
9777 if (TREE_CODE (tree110) == INTEGER_CST
9778 && 0 == compare_tree_int (tree110,
9780 (TREE_TYPE (TREE_OPERAND
9782 && operand_equal_p (tree01, tree111, 0))
9783 return build2 ((code0 == LSHIFT_EXPR
9786 type, TREE_OPERAND (arg0, 0), tree01);
9788 else if (code01 == MINUS_EXPR)
9790 tree tree010, tree011;
9791 tree010 = TREE_OPERAND (tree01, 0);
9792 tree011 = TREE_OPERAND (tree01, 1);
9793 STRIP_NOPS (tree010);
9794 STRIP_NOPS (tree011);
9795 if (TREE_CODE (tree010) == INTEGER_CST
9796 && 0 == compare_tree_int (tree010,
9798 (TREE_TYPE (TREE_OPERAND
9800 && operand_equal_p (tree11, tree011, 0))
9801 return build2 ((code0 != LSHIFT_EXPR
9804 type, TREE_OPERAND (arg0, 0), tree11);
9810 /* In most languages, we can't associate operations on floats through
9811 parentheses. Rather than remember where the parentheses were, we
9812 don't associate floats at all, unless the user has specified
9813 -fassociative-math.
9814 And, we need to make sure the type is not saturating. */
9816 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
9817 && !TYPE_SATURATING (type))
9819 tree var0, con0, lit0, minus_lit0;
9820 tree var1, con1, lit1, minus_lit1;
9823 /* Split both trees into variables, constants, and literals. Then
9824 associate each group together, the constants with literals,
9825 then the result with variables. This increases the chances of
9826 literals being recombined later and of generating relocatable
9827 expressions for the sum of a constant and literal. */
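/* Small example (added annotation): (x + 1) + (y + 2) splits into
   variables x, y and literals 1, 2, and is regrouped below as
   (x + y) + 3 so that the literals combine.  */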
9828 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
9829 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
9830 code == MINUS_EXPR);
9832 /* With undefined overflow we can only associate constants
9833 with one variable. */
9834 if (((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
9835 || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
9841 if (TREE_CODE (tmp0) == NEGATE_EXPR)
9842 tmp0 = TREE_OPERAND (tmp0, 0);
9843 if (TREE_CODE (tmp1) == NEGATE_EXPR)
9844 tmp1 = TREE_OPERAND (tmp1, 0);
9845 /* The only case we can still associate with two variables
9846 is if they are the same, modulo negation. */
9847 if (!operand_equal_p (tmp0, tmp1, 0))
9851 /* Only do something if we found more than two objects. Otherwise,
9852 nothing has changed and we risk infinite recursion. */
9854 && (2 < ((var0 != 0) + (var1 != 0)
9855 + (con0 != 0) + (con1 != 0)
9856 + (lit0 != 0) + (lit1 != 0)
9857 + (minus_lit0 != 0) + (minus_lit1 != 0))))
9859 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
9860 if (code == MINUS_EXPR)
9863 var0 = associate_trees (var0, var1, code, type);
9864 con0 = associate_trees (con0, con1, code, type);
9865 lit0 = associate_trees (lit0, lit1, code, type);
9866 minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type);
9868 /* Preserve the MINUS_EXPR if the negative part of the literal is
9869 greater than the positive part. Otherwise, the multiplicative
9870 folding code (i.e. extract_muldiv) may be fooled in case
9871 unsigned constants are subtracted, like in the following
9872 example: ((X*2 + 4) - 8U)/2. */
9873 if (minus_lit0 && lit0)
9875 if (TREE_CODE (lit0) == INTEGER_CST
9876 && TREE_CODE (minus_lit0) == INTEGER_CST
9877 && tree_int_cst_lt (lit0, minus_lit0))
9879 minus_lit0 = associate_trees (minus_lit0, lit0,
9885 lit0 = associate_trees (lit0, minus_lit0,
9893 return fold_convert (type,
9894 associate_trees (var0, minus_lit0,
9898 con0 = associate_trees (con0, minus_lit0,
9900 return fold_convert (type,
9901 associate_trees (var0, con0,
9906 con0 = associate_trees (con0, lit0, code, type);
9907 return fold_convert (type, associate_trees (var0, con0,
9915 /* Pointer simplifications for subtraction, simple reassociations. */
9916 if (POINTER_TYPE_P (TREE_TYPE (arg1)) && POINTER_TYPE_P (TREE_TYPE (arg0)))
9918 /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
9919 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
9920 && TREE_CODE (arg1) == POINTER_PLUS_EXPR)
9922 tree arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));
9923 tree arg01 = fold_convert (type, TREE_OPERAND (arg0, 1));
9924 tree arg10 = fold_convert (type, TREE_OPERAND (arg1, 0));
9925 tree arg11 = fold_convert (type, TREE_OPERAND (arg1, 1));
9926 return fold_build2 (PLUS_EXPR, type,
9927 fold_build2 (MINUS_EXPR, type, arg00, arg10),
9928 fold_build2 (MINUS_EXPR, type, arg01, arg11));
9930 /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1 simplifies. */
9931 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
9933 tree arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));
9934 tree arg01 = fold_convert (type, TREE_OPERAND (arg0, 1));
9935 tree tmp = fold_binary (MINUS_EXPR, type, arg00, fold_convert (type, arg1));
9937 return fold_build2 (PLUS_EXPR, type, tmp, arg01);
9940 /* A - (-B) -> A + B */
9941 if (TREE_CODE (arg1) == NEGATE_EXPR)
9942 return fold_build2 (PLUS_EXPR, type, op0,
9943 fold_convert (type, TREE_OPERAND (arg1, 0)));
9944 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
9945 if (TREE_CODE (arg0) == NEGATE_EXPR
9946 && (FLOAT_TYPE_P (type)
9947 || INTEGRAL_TYPE_P (type))
9948 && negate_expr_p (arg1)
9949 && reorder_operands_p (arg0, arg1))
9950 return fold_build2 (MINUS_EXPR, type,
9951 fold_convert (type, negate_expr (arg1)),
9952 fold_convert (type, TREE_OPERAND (arg0, 0)));
9953 /* Convert -A - 1 to ~A. */
9954 if (INTEGRAL_TYPE_P (type)
9955 && TREE_CODE (arg0) == NEGATE_EXPR
9956 && integer_onep (arg1)
9957 && !TYPE_OVERFLOW_TRAPS (type))
9958 return fold_build1 (BIT_NOT_EXPR, type,
9959 fold_convert (type, TREE_OPERAND (arg0, 0)));
9961 /* Convert -1 - A to ~A. */
9962 if (INTEGRAL_TYPE_P (type)
9963 && integer_all_onesp (arg0))
9964 return fold_build1 (BIT_NOT_EXPR, type, op1);
9967 /* X - (X / CST) * CST is X % CST. */
9968 if (INTEGRAL_TYPE_P (type)
9969 && TREE_CODE (arg1) == MULT_EXPR
9970 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
9971 && operand_equal_p (arg0,
9972 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0)
9973 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg1, 0), 1),
9974 TREE_OPERAND (arg1, 1), 0))
9975 return fold_convert (type,
9976 fold_build2 (TRUNC_MOD_EXPR, TREE_TYPE (arg0),
9977 arg0, TREE_OPERAND (arg1, 1)));
9979 if (! FLOAT_TYPE_P (type))
9981 if (integer_zerop (arg0))
9982 return negate_expr (fold_convert (type, arg1));
9983 if (integer_zerop (arg1))
9984 return non_lvalue (fold_convert (type, arg0));
9986 /* Fold A - (A & B) into ~B & A. */
9987 if (!TREE_SIDE_EFFECTS (arg0)
9988 && TREE_CODE (arg1) == BIT_AND_EXPR)
9990 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
9992 tree arg10 = fold_convert (type, TREE_OPERAND (arg1, 0));
9993 return fold_build2 (BIT_AND_EXPR, type,
9994 fold_build1 (BIT_NOT_EXPR, type, arg10),
9995 fold_convert (type, arg0));
9997 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9999 tree arg11 = fold_convert (type, TREE_OPERAND (arg1, 1));
10000 return fold_build2 (BIT_AND_EXPR, type,
10001 fold_build1 (BIT_NOT_EXPR, type, arg11),
10002 fold_convert (type, arg0));
10006 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
10007 any power of 2 minus 1. */
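/* Sanity-check example (added annotation), with B = 15 (a power of 2
   minus 1): for A = 5, (A & ~15) - (A & 15) = 0 - 5 = -5 and
   (A ^ 15) - 15 = 10 - 15 = -5, so the two forms agree.  */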
10008 if (TREE_CODE (arg0) == BIT_AND_EXPR
10009 && TREE_CODE (arg1) == BIT_AND_EXPR
10010 && operand_equal_p (TREE_OPERAND (arg0, 0),
10011 TREE_OPERAND (arg1, 0), 0))
10013 tree mask0 = TREE_OPERAND (arg0, 1);
10014 tree mask1 = TREE_OPERAND (arg1, 1);
10015 tree tem = fold_build1 (BIT_NOT_EXPR, type, mask0);
10017 if (operand_equal_p (tem, mask1, 0))
10019 tem = fold_build2 (BIT_XOR_EXPR, type,
10020 TREE_OPERAND (arg0, 0), mask1);
10021 return fold_build2 (MINUS_EXPR, type, tem, mask1);
10026 /* See if ARG1 is zero and X - ARG1 reduces to X. */
10027 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
10028 return non_lvalue (fold_convert (type, arg0));
10030 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
10031 ARG0 is zero and X + ARG0 reduces to X, since that would mean
10032 (-ARG1 + ARG0) reduces to -ARG1. */
10033 else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10034 return negate_expr (fold_convert (type, arg1));
10036 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
10037 __complex__ ( x, -y ). This is not the same for SNaNs or if
10038 signed zeros are involved. */
10039 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10040 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10041 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10043 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10044 tree arg0r = fold_unary (REALPART_EXPR, rtype, arg0);
10045 tree arg0i = fold_unary (IMAGPART_EXPR, rtype, arg0);
10046 bool arg0rz = false, arg0iz = false;
10047 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10048 || (arg0i && (arg0iz = real_zerop (arg0i))))
10050 tree arg1r = fold_unary (REALPART_EXPR, rtype, arg1);
10051 tree arg1i = fold_unary (IMAGPART_EXPR, rtype, arg1);
10052 if (arg0rz && arg1i && real_zerop (arg1i))
10054 tree rp = fold_build1 (NEGATE_EXPR, rtype,
10056 : build1 (REALPART_EXPR, rtype, arg1));
10057 tree ip = arg0i ? arg0i
10058 : build1 (IMAGPART_EXPR, rtype, arg0);
10059 return fold_build2 (COMPLEX_EXPR, type, rp, ip);
10061 else if (arg0iz && arg1r && real_zerop (arg1r))
10063 tree rp = arg0r ? arg0r
10064 : build1 (REALPART_EXPR, rtype, arg0);
10065 tree ip = fold_build1 (NEGATE_EXPR, rtype,
10067 : build1 (IMAGPART_EXPR, rtype, arg1));
10068 return fold_build2 (COMPLEX_EXPR, type, rp, ip);
10073 /* Fold &x - &x. This can happen from &x.foo - &x.
10074 This is unsafe for certain floats even in non-IEEE formats.
10075 In IEEE, it is unsafe because it gives the wrong result for NaNs.
10076 Also note that operand_equal_p is always false if an operand
10077 is volatile. */
10079 if ((!FLOAT_TYPE_P (type) || !HONOR_NANS (TYPE_MODE (type)))
10080 && operand_equal_p (arg0, arg1, 0))
10081 return fold_convert (type, integer_zero_node);
10083 /* A - B -> A + (-B) if B is easily negatable. */
10084 if (negate_expr_p (arg1)
10085 && ((FLOAT_TYPE_P (type)
10086 /* Avoid this transformation if B is a positive REAL_CST. */
10087 && (TREE_CODE (arg1) != REAL_CST
10088 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
10089 || INTEGRAL_TYPE_P (type)))
10090 return fold_build2 (PLUS_EXPR, type,
10091 fold_convert (type, arg0),
10092 fold_convert (type, negate_expr (arg1)));
10094 /* Try folding difference of addresses. */
10096 HOST_WIDE_INT diff;
10098 if ((TREE_CODE (arg0) == ADDR_EXPR
10099 || TREE_CODE (arg1) == ADDR_EXPR)
10100 && ptr_difference_const (arg0, arg1, &diff))
10101 return build_int_cst_type (type, diff);
10104 /* Fold &a[i] - &a[j] to i-j. */
10105 if (TREE_CODE (arg0) == ADDR_EXPR
10106 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
10107 && TREE_CODE (arg1) == ADDR_EXPR
10108 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
10110 tree aref0 = TREE_OPERAND (arg0, 0);
10111 tree aref1 = TREE_OPERAND (arg1, 0);
10112 if (operand_equal_p (TREE_OPERAND (aref0, 0),
10113 TREE_OPERAND (aref1, 0), 0))
10115 tree op0 = fold_convert (type, TREE_OPERAND (aref0, 1));
10116 tree op1 = fold_convert (type, TREE_OPERAND (aref1, 1));
10117 tree esz = array_ref_element_size (aref0);
10118 tree diff = build2 (MINUS_EXPR, type, op0, op1);
10119 return fold_build2 (MULT_EXPR, type, diff,
10120 fold_convert (type, esz));
10125 if (flag_unsafe_math_optimizations
10126 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10127 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10128 && (tem = distribute_real_division (code, type, arg0, arg1)))
10131 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the
10132 same or one. Make sure type is not saturating.
10133 fold_plusminus_mult_expr will re-associate. */
10134 if ((TREE_CODE (arg0) == MULT_EXPR
10135 || TREE_CODE (arg1) == MULT_EXPR)
10136 && !TYPE_SATURATING (type)
10137 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10139 tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1);
10147 /* (-A) * (-B) -> A * B */
10148 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10149 return fold_build2 (MULT_EXPR, type,
10150 fold_convert (type, TREE_OPERAND (arg0, 0)),
10151 fold_convert (type, negate_expr (arg1)));
10152 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10153 return fold_build2 (MULT_EXPR, type,
10154 fold_convert (type, negate_expr (arg0)),
10155 fold_convert (type, TREE_OPERAND (arg1, 0)));
10157 if (! FLOAT_TYPE_P (type))
10159 if (integer_zerop (arg1))
10160 return omit_one_operand (type, arg1, arg0);
10161 if (integer_onep (arg1))
10162 return non_lvalue (fold_convert (type, arg0));
10163 /* Transform x * -1 into -x. Make sure to do the negation
10164 on the original operand with conversions not stripped
10165 because we can only strip non-sign-changing conversions. */
10166 if (integer_all_onesp (arg1))
10167 return fold_convert (type, negate_expr (op0));
10168 /* Transform x * -C into -x * C if x is easily negatable. */
10169 if (TREE_CODE (arg1) == INTEGER_CST
10170 && tree_int_cst_sgn (arg1) == -1
10171 && negate_expr_p (arg0)
10172 && (tem = negate_expr (arg1)) != arg1
10173 && !TREE_OVERFLOW (tem))
10174 return fold_build2 (MULT_EXPR, type,
10175 fold_convert (type, negate_expr (arg0)), tem);
10177 /* (a * (1 << b)) is (a << b) */
10178 if (TREE_CODE (arg1) == LSHIFT_EXPR
10179 && integer_onep (TREE_OPERAND (arg1, 0)))
10180 return fold_build2 (LSHIFT_EXPR, type, op0,
10181 TREE_OPERAND (arg1, 1));
10182 if (TREE_CODE (arg0) == LSHIFT_EXPR
10183 && integer_onep (TREE_OPERAND (arg0, 0)))
10184 return fold_build2 (LSHIFT_EXPR, type, op1,
10185 TREE_OPERAND (arg0, 1));
10187 /* (A + A) * C -> A * 2 * C */
10188 if (TREE_CODE (arg0) == PLUS_EXPR
10189 && TREE_CODE (arg1) == INTEGER_CST
10190 && operand_equal_p (TREE_OPERAND (arg0, 0),
10191 TREE_OPERAND (arg0, 1), 0))
10192 return fold_build2 (MULT_EXPR, type,
10193 omit_one_operand (type, TREE_OPERAND (arg0, 0),
10194 TREE_OPERAND (arg0, 1)),
10195 fold_build2 (MULT_EXPR, type,
10196 build_int_cst (type, 2) , arg1));
10198 strict_overflow_p = false;
10199 if (TREE_CODE (arg1) == INTEGER_CST
10200 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10201 &strict_overflow_p)))
10203 if (strict_overflow_p)
10204 fold_overflow_warning (("assuming signed overflow does not "
10205 "occur when simplifying "
10207 WARN_STRICT_OVERFLOW_MISC);
10208 return fold_convert (type, tem);
10211 /* Optimize z * conj(z) for integer complex numbers. */
10212 if (TREE_CODE (arg0) == CONJ_EXPR
10213 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10214 return fold_mult_zconjz (type, arg1);
10215 if (TREE_CODE (arg1) == CONJ_EXPR
10216 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10217 return fold_mult_zconjz (type, arg0);
10221 /* Maybe fold x * 0 to 0. The expressions aren't the same
10222 when x is NaN, since x * 0 is also NaN. Nor are they the
10223 same in modes with signed zeros, since multiplying a
10224 negative value by 0 gives -0, not +0. */
10225 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10226 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10227 && real_zerop (arg1))
10228 return omit_one_operand (type, arg1, arg0);
10229 /* In IEEE floating point, x*1 is not equivalent to x for snans. */
10230 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10231 && real_onep (arg1))
10232 return non_lvalue (fold_convert (type, arg0));
10234 /* Transform x * -1.0 into -x. */
10235 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10236 && real_minus_onep (arg1))
10237 return fold_convert (type, negate_expr (arg0));
10239 /* Convert (C1/X)*C2 into (C1*C2)/X. This transformation may change
10240 the result for floating point types due to rounding so it is applied
10241 only if -fassociative-math was specified. */
10242 if (flag_associative_math
10243 && TREE_CODE (arg0) == RDIV_EXPR
10244 && TREE_CODE (arg1) == REAL_CST
10245 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
10247 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
10248 arg1, 0);
10249 if (tem)
10250 return fold_build2 (RDIV_EXPR, type, tem,
10251 TREE_OPERAND (arg0, 1));
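/* E.g. under -fassociative-math, "(2.0 / x) * 3.0" becomes
   "6.0 / x".  const_binop can fail and return NULL_TREE, which is
   why the result is tested before building the new RDIV_EXPR.  */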
10254 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
10255 if (operand_equal_p (arg0, arg1, 0))
10257 tree tem = fold_strip_sign_ops (arg0);
10258 if (tem != NULL_TREE)
10260 tem = fold_convert (type, tem);
10261 return fold_build2 (MULT_EXPR, type, tem, tem);
10265 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
10266 This is not the same for NaNs or if signed zeros are
10267 involved. */
10268 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10269 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10270 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
10271 && TREE_CODE (arg1) == COMPLEX_CST
10272 && real_zerop (TREE_REALPART (arg1)))
10274 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10275 if (real_onep (TREE_IMAGPART (arg1)))
10276 return fold_build2 (COMPLEX_EXPR, type,
10277 negate_expr (fold_build1 (IMAGPART_EXPR,
10278 rtype, arg0)),
10279 fold_build1 (REALPART_EXPR, rtype, arg0));
10280 else if (real_minus_onep (TREE_IMAGPART (arg1)))
10281 return fold_build2 (COMPLEX_EXPR, type,
10282 fold_build1 (IMAGPART_EXPR, rtype, arg0),
10283 negate_expr (fold_build1 (REALPART_EXPR,
10284 rtype, arg0)));
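/* The complex identities used above, for z = a + b*i:

     z * i   ==  -b + a*i
     z * -i  ==   b - a*i

   hence the swap of the real and imaginary parts with one
   negation.  */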
10287 /* Optimize z * conj(z) for floating point complex numbers.
10288 Guarded by flag_unsafe_math_optimizations as non-finite
10289 imaginary components don't produce scalar results. */
10290 if (flag_unsafe_math_optimizations
10291 && TREE_CODE (arg0) == CONJ_EXPR
10292 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10293 return fold_mult_zconjz (type, arg1);
10294 if (flag_unsafe_math_optimizations
10295 && TREE_CODE (arg1) == CONJ_EXPR
10296 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10297 return fold_mult_zconjz (type, arg0);
10299 if (flag_unsafe_math_optimizations)
10301 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
10302 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
10304 /* Optimizations of root(...)*root(...). */
10305 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
10307 tree rootfn, arg;
10308 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10309 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10311 /* Optimize sqrt(x)*sqrt(x) as x. */
10312 if (BUILTIN_SQRT_P (fcode0)
10313 && operand_equal_p (arg00, arg10, 0)
10314 && ! HONOR_SNANS (TYPE_MODE (type)))
10315 return arg00;
10317 /* Optimize root(x)*root(y) as root(x*y). */
10318 rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10319 arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
10320 return build_call_expr (rootfn, 1, arg);
10323 /* Optimize expN(x)*expN(y) as expN(x+y). */
10324 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
10326 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10327 tree arg = fold_build2 (PLUS_EXPR, type,
10328 CALL_EXPR_ARG (arg0, 0),
10329 CALL_EXPR_ARG (arg1, 0));
10330 return build_call_expr (expfn, 1, arg);
10333 /* Optimizations of pow(...)*pow(...). */
10334 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
10335 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
10336 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
10338 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10339 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10340 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10341 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10343 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
10344 if (operand_equal_p (arg01, arg11, 0))
10346 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10347 tree arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
10348 return build_call_expr (powfn, 2, arg, arg01);
10351 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
10352 if (operand_equal_p (arg00, arg10, 0))
10354 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10355 tree arg = fold_build2 (PLUS_EXPR, type, arg01, arg11);
10356 return build_call_expr (powfn, 2, arg00, arg);
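/* Worked instances of the two pow folds above:

     pow (x, 2.0) * pow (y, 2.0)  ->  pow (x * y, 2.0)
     pow (x, 2.0) * pow (x, 3.0)  ->  pow (x, 2.0 + 3.0)

   with the exponent sum itself folding to 5.0.  */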
10360 /* Optimize tan(x)*cos(x) as sin(x). */
10361 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
10362 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
10363 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
10364 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
10365 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
10366 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
10367 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
10368 CALL_EXPR_ARG (arg1, 0), 0))
10370 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
10372 if (sinfn != NULL_TREE)
10373 return build_call_expr (sinfn, 1, CALL_EXPR_ARG (arg0, 0));
10376 /* Optimize x*pow(x,c) as pow(x,c+1). */
10377 if (fcode1 == BUILT_IN_POW
10378 || fcode1 == BUILT_IN_POWF
10379 || fcode1 == BUILT_IN_POWL)
10381 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10382 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10383 if (TREE_CODE (arg11) == REAL_CST
10384 && !TREE_OVERFLOW (arg11)
10385 && operand_equal_p (arg0, arg10, 0))
10387 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
10388 REAL_VALUE_TYPE c;
10389 tree arg;
10391 c = TREE_REAL_CST (arg11);
10392 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
10393 arg = build_real (type, c);
10394 return build_call_expr (powfn, 2, arg0, arg);
10398 /* Optimize pow(x,c)*x as pow(x,c+1). */
10399 if (fcode0 == BUILT_IN_POW
10400 || fcode0 == BUILT_IN_POWF
10401 || fcode0 == BUILT_IN_POWL)
10403 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10404 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10405 if (TREE_CODE (arg01) == REAL_CST
10406 && !TREE_OVERFLOW (arg01)
10407 && operand_equal_p (arg1, arg00, 0))
10409 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10410 REAL_VALUE_TYPE c;
10411 tree arg;
10413 c = TREE_REAL_CST (arg01);
10414 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
10415 arg = build_real (type, c);
10416 return build_call_expr (powfn, 2, arg1, arg);
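/* E.g. "x * pow (x, 2.0)" and "pow (x, 2.0) * x" both become
   "pow (x, 3.0)".  The exponent is bumped directly on the
   REAL_VALUE_TYPE with real_arithmetic, so no PLUS_EXPR tree is
   built for the constant.  */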
10420 /* Optimize x*x as pow(x,2.0), which is expanded as x*x. */
10421 if (optimize_function_for_speed_p (cfun)
10422 && operand_equal_p (arg0, arg1, 0))
10424 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
10426 if (powfn)
10428 tree arg = build_real (type, dconst2);
10429 return build_call_expr (powfn, 2, arg0, arg);
10434 goto associate;
10436 case BIT_IOR_EXPR:
10437 bit_ior:
10438 if (integer_all_onesp (arg1))
10439 return omit_one_operand (type, arg1, arg0);
10440 if (integer_zerop (arg1))
10441 return non_lvalue (fold_convert (type, arg0));
10442 if (operand_equal_p (arg0, arg1, 0))
10443 return non_lvalue (fold_convert (type, arg0));
10445 /* ~X | X is -1. */
10446 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10447 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10449 t1 = fold_convert (type, integer_zero_node);
10450 t1 = fold_unary (BIT_NOT_EXPR, type, t1);
10451 return omit_one_operand (type, t1, arg1);
10454 /* X | ~X is -1. */
10455 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10456 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10458 t1 = fold_convert (type, integer_zero_node);
10459 t1 = fold_unary (BIT_NOT_EXPR, type, t1);
10460 return omit_one_operand (type, t1, arg0);
10463 /* Canonicalize (X & C1) | C2. */
10464 if (TREE_CODE (arg0) == BIT_AND_EXPR
10465 && TREE_CODE (arg1) == INTEGER_CST
10466 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10468 unsigned HOST_WIDE_INT hi1, lo1, hi2, lo2, hi3, lo3, mlo, mhi;
10469 int width = TYPE_PRECISION (type), w;
10470 hi1 = TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1));
10471 lo1 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
10472 hi2 = TREE_INT_CST_HIGH (arg1);
10473 lo2 = TREE_INT_CST_LOW (arg1);
10475 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
10476 if ((hi1 & hi2) == hi1 && (lo1 & lo2) == lo1)
10477 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
10479 if (width > HOST_BITS_PER_WIDE_INT)
10481 mhi = (unsigned HOST_WIDE_INT) -1
10482 >> (2 * HOST_BITS_PER_WIDE_INT - width);
10483 mlo = -1;
10485 else
10487 mhi = 0;
10488 mlo = (unsigned HOST_WIDE_INT) -1
10489 >> (HOST_BITS_PER_WIDE_INT - width);
10492 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
10493 if ((~(hi1 | hi2) & mhi) == 0 && (~(lo1 | lo2) & mlo) == 0)
10494 return fold_build2 (BIT_IOR_EXPR, type,
10495 TREE_OPERAND (arg0, 0), arg1);
10497 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
10498 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
10499 mode which allows further optimizations. */
10500 hi1 &= mhi;
10501 lo1 &= mlo;
10502 hi2 &= mhi;
10503 lo2 &= mlo;
10504 hi3 = hi1 & ~hi2;
10505 lo3 = lo1 & ~lo2;
10506 for (w = BITS_PER_UNIT;
10507 w <= width && w <= HOST_BITS_PER_WIDE_INT;
10508 w <<= 1)
10510 unsigned HOST_WIDE_INT mask
10511 = (unsigned HOST_WIDE_INT) -1 >> (HOST_BITS_PER_WIDE_INT - w);
10512 if (((lo1 | lo2) & mask) == mask
10513 && (lo1 & ~mask) == 0 && hi1 == 0)
10515 hi3 = mhi;
10516 lo3 = mask;
10517 break;
10520 if (hi3 != hi1 || lo3 != lo1)
10521 return fold_build2 (BIT_IOR_EXPR, type,
10522 fold_build2 (BIT_AND_EXPR, type,
10523 TREE_OPERAND (arg0, 0),
10524 build_int_cst_wide (type,
10525 lo3, hi3)),
10526 arg1);
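/* A worked instance of the canonicalization above, on an 8-bit
   view of the constants:

     (x & 0x96) | 0xf0  ->  (x & 0x06) | 0xf0

   since C1 & ~C2 == 0x06 and 0x96 | 0xf0 == 0xf6 is not a mode
   mask.  By contrast, (x & 0xff) | 0xf0 is left alone: 0xff | 0xf0
   is the 8-bit mask, so the loop keeps C1 == 0xff.  */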
10529 /* (X & Y) | Y is (X, Y). */
10530 if (TREE_CODE (arg0) == BIT_AND_EXPR
10531 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10532 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
10533 /* (X & Y) | X is (Y, X). */
10534 if (TREE_CODE (arg0) == BIT_AND_EXPR
10535 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10536 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10537 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 1));
10538 /* X | (X & Y) is (Y, X). */
10539 if (TREE_CODE (arg1) == BIT_AND_EXPR
10540 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
10541 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
10542 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 1));
10543 /* X | (Y & X) is (Y, X). */
10544 if (TREE_CODE (arg1) == BIT_AND_EXPR
10545 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10546 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10547 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 0));
10549 t1 = distribute_bit_expr (code, type, arg0, arg1);
10550 if (t1 != NULL_TREE)
10551 return t1;
10553 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
10555 This results in more efficient code for machines without a NAND
10556 instruction. Combine will canonicalize to the first form
10557 which will allow use of NAND instructions provided by the
10558 backend if they exist. */
10559 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10560 && TREE_CODE (arg1) == BIT_NOT_EXPR)
10562 return fold_build1 (BIT_NOT_EXPR, type,
10563 build2 (BIT_AND_EXPR, type,
10564 fold_convert (type,
10565 TREE_OPERAND (arg0, 0)),
10566 fold_convert (type,
10567 TREE_OPERAND (arg1, 0))));
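/* This is De Morgan's law: e.g. "~a | ~b" becomes "~(a & b)",
   replacing two BIT_NOT_EXPRs with one and exposing a NAND where
   the target provides it.  */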
10570 /* See if this can be simplified into a rotate first. If that
10571 is unsuccessful continue in the association code. */
10572 goto bit_rotate;
10574 case BIT_XOR_EXPR:
10575 if (integer_zerop (arg1))
10576 return non_lvalue (fold_convert (type, arg0));
10577 if (integer_all_onesp (arg1))
10578 return fold_build1 (BIT_NOT_EXPR, type, op0);
10579 if (operand_equal_p (arg0, arg1, 0))
10580 return omit_one_operand (type, integer_zero_node, arg0);
10582 /* ~X ^ X is -1. */
10583 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10584 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10586 t1 = fold_convert (type, integer_zero_node);
10587 t1 = fold_unary (BIT_NOT_EXPR, type, t1);
10588 return omit_one_operand (type, t1, arg1);
10591 /* X ^ ~X is -1. */
10592 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10593 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10595 t1 = fold_convert (type, integer_zero_node);
10596 t1 = fold_unary (BIT_NOT_EXPR, type, t1);
10597 return omit_one_operand (type, t1, arg0);
10600 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
10601 with a constant, and the two constants have no bits in common,
10602 we should treat this as a BIT_IOR_EXPR since this may produce more
10603 simplifications. */
10604 if (TREE_CODE (arg0) == BIT_AND_EXPR
10605 && TREE_CODE (arg1) == BIT_AND_EXPR
10606 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10607 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
10608 && integer_zerop (const_binop (BIT_AND_EXPR,
10609 TREE_OPERAND (arg0, 1),
10610 TREE_OPERAND (arg1, 1), 0)))
10612 code = BIT_IOR_EXPR;
10613 goto bit_ior;
10616 /* (X | Y) ^ X -> Y & ~X */
10617 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10618 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10620 tree t2 = TREE_OPERAND (arg0, 1);
10621 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
10622 fold_convert (TREE_TYPE (arg1), arg1));
10623 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
10624 fold_convert (type, t1));
10625 return t1;
10628 /* (Y | X) ^ X -> Y & ~X */
10629 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10630 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10632 tree t2 = TREE_OPERAND (arg0, 0);
10633 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
10634 fold_convert (TREE_TYPE (arg1), arg1));
10635 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
10636 fold_convert (type, t1));
10637 return t1;
10640 /* X ^ (X | Y) -> Y & ~X */
10641 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10642 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
10644 tree t2 = TREE_OPERAND (arg1, 1);
10645 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
10646 fold_convert (TREE_TYPE (arg0), arg0));
10647 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
10648 fold_convert (type, t1));
10649 return t1;
10652 /* X ^ (Y | X) -> Y & ~X */
10653 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10654 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
10656 tree t2 = TREE_OPERAND (arg1, 0);
10657 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
10658 fold_convert (TREE_TYPE (arg0), arg0));
10659 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
10660 fold_convert (type, t1));
10661 return t1;
10664 /* Convert ~X ^ ~Y to X ^ Y. */
10665 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10666 && TREE_CODE (arg1) == BIT_NOT_EXPR)
10667 return fold_build2 (code, type,
10668 fold_convert (type, TREE_OPERAND (arg0, 0)),
10669 fold_convert (type, TREE_OPERAND (arg1, 0)));
10671 /* Convert ~X ^ C to X ^ ~C. */
10672 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10673 && TREE_CODE (arg1) == INTEGER_CST)
10674 return fold_build2 (code, type,
10675 fold_convert (type, TREE_OPERAND (arg0, 0)),
10676 fold_build1 (BIT_NOT_EXPR, type, arg1));
10678 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
10679 if (TREE_CODE (arg0) == BIT_AND_EXPR
10680 && integer_onep (TREE_OPERAND (arg0, 1))
10681 && integer_onep (arg1))
10682 return fold_build2 (EQ_EXPR, type, arg0,
10683 build_int_cst (TREE_TYPE (arg0), 0));
10685 /* Fold (X & Y) ^ Y as ~X & Y. */
10686 if (TREE_CODE (arg0) == BIT_AND_EXPR
10687 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10689 tem = fold_convert (type, TREE_OPERAND (arg0, 0));
10690 return fold_build2 (BIT_AND_EXPR, type,
10691 fold_build1 (BIT_NOT_EXPR, type, tem),
10692 fold_convert (type, arg1));
10694 /* Fold (X & Y) ^ X as ~Y & X. */
10695 if (TREE_CODE (arg0) == BIT_AND_EXPR
10696 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10697 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10699 tem = fold_convert (type, TREE_OPERAND (arg0, 1));
10700 return fold_build2 (BIT_AND_EXPR, type,
10701 fold_build1 (BIT_NOT_EXPR, type, tem),
10702 fold_convert (type, arg1));
10704 /* Fold X ^ (X & Y) as X & ~Y. */
10705 if (TREE_CODE (arg1) == BIT_AND_EXPR
10706 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10708 tem = fold_convert (type, TREE_OPERAND (arg1, 1));
10709 return fold_build2 (BIT_AND_EXPR, type,
10710 fold_convert (type, arg0),
10711 fold_build1 (BIT_NOT_EXPR, type, tem));
10713 /* Fold X ^ (Y & X) as ~Y & X. */
10714 if (TREE_CODE (arg1) == BIT_AND_EXPR
10715 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10716 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10718 tem = fold_convert (type, TREE_OPERAND (arg1, 0));
10719 return fold_build2 (BIT_AND_EXPR, type,
10720 fold_build1 (BIT_NOT_EXPR, type, tem),
10721 fold_convert (type, arg0));
10724 /* See if this can be simplified into a rotate first. If that
10725 is unsuccessful continue in the association code. */
10726 goto bit_rotate;
10728 case BIT_AND_EXPR:
10729 if (integer_all_onesp (arg1))
10730 return non_lvalue (fold_convert (type, arg0));
10731 if (integer_zerop (arg1))
10732 return omit_one_operand (type, arg1, arg0);
10733 if (operand_equal_p (arg0, arg1, 0))
10734 return non_lvalue (fold_convert (type, arg0));
10736 /* ~X & X is always zero. */
10737 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10738 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10739 return omit_one_operand (type, integer_zero_node, arg1);
10741 /* X & ~X is always zero. */
10742 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10743 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10744 return omit_one_operand (type, integer_zero_node, arg0);
10746 /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2). */
10747 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10748 && TREE_CODE (arg1) == INTEGER_CST
10749 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10751 tree tmp1 = fold_convert (type, arg1);
10752 tree tmp2 = fold_convert (type, TREE_OPERAND (arg0, 0));
10753 tree tmp3 = fold_convert (type, TREE_OPERAND (arg0, 1));
10754 tmp2 = fold_build2 (BIT_AND_EXPR, type, tmp2, tmp1);
10755 tmp3 = fold_build2 (BIT_AND_EXPR, type, tmp3, tmp1);
10756 return fold_convert (type,
10757 fold_build2 (BIT_IOR_EXPR, type, tmp2, tmp3));
10760 /* (X | Y) & Y is (X, Y). */
10761 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10762 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10763 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
10764 /* (X | Y) & X is (Y, X). */
10765 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10766 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10767 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10768 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 1));
10769 /* X & (X | Y) is (Y, X). */
10770 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10771 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
10772 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
10773 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 1));
10774 /* X & (Y | X) is (Y, X). */
10775 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10776 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10777 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10778 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 0));
10780 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
10781 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10782 && integer_onep (TREE_OPERAND (arg0, 1))
10783 && integer_onep (arg1))
10785 tem = TREE_OPERAND (arg0, 0);
10786 return fold_build2 (EQ_EXPR, type,
10787 fold_build2 (BIT_AND_EXPR, TREE_TYPE (tem), tem,
10788 build_int_cst (TREE_TYPE (tem), 1)),
10789 build_int_cst (TREE_TYPE (tem), 0));
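/* E.g. "(x ^ 1) & 1" becomes "(x & 1) == 0"; both yield 1 exactly
   when the low bit of x is clear, and the comparison form combines
   better with surrounding conditionals.  */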
10791 /* Fold ~X & 1 as (X & 1) == 0. */
10792 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10793 && integer_onep (arg1))
10795 tem = TREE_OPERAND (arg0, 0);
10796 return fold_build2 (EQ_EXPR, type,
10797 fold_build2 (BIT_AND_EXPR, TREE_TYPE (tem), tem,
10798 build_int_cst (TREE_TYPE (tem), 1)),
10799 build_int_cst (TREE_TYPE (tem), 0));
10802 /* Fold (X ^ Y) & Y as ~X & Y. */
10803 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10804 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10806 tem = fold_convert (type, TREE_OPERAND (arg0, 0));
10807 return fold_build2 (BIT_AND_EXPR, type,
10808 fold_build1 (BIT_NOT_EXPR, type, tem),
10809 fold_convert (type, arg1));
10811 /* Fold (X ^ Y) & X as ~Y & X. */
10812 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10813 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10814 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10816 tem = fold_convert (type, TREE_OPERAND (arg0, 1));
10817 return fold_build2 (BIT_AND_EXPR, type,
10818 fold_build1 (BIT_NOT_EXPR, type, tem),
10819 fold_convert (type, arg1));
10821 /* Fold X & (X ^ Y) as X & ~Y. */
10822 if (TREE_CODE (arg1) == BIT_XOR_EXPR
10823 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10825 tem = fold_convert (type, TREE_OPERAND (arg1, 1));
10826 return fold_build2 (BIT_AND_EXPR, type,
10827 fold_convert (type, arg0),
10828 fold_build1 (BIT_NOT_EXPR, type, tem));
10830 /* Fold X & (Y ^ X) as ~Y & X. */
10831 if (TREE_CODE (arg1) == BIT_XOR_EXPR
10832 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10833 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10835 tem = fold_convert (type, TREE_OPERAND (arg1, 0));
10836 return fold_build2 (BIT_AND_EXPR, type,
10837 fold_build1 (BIT_NOT_EXPR, type, tem),
10838 fold_convert (type, arg0));
10841 t1 = distribute_bit_expr (code, type, arg0, arg1);
10842 if (t1 != NULL_TREE)
10843 return t1;
10844 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
10845 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
10846 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
10848 unsigned int prec
10849 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
10851 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
10852 && (~TREE_INT_CST_LOW (arg1)
10853 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
10854 return fold_convert (type, TREE_OPERAND (arg0, 0));
10857 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
10859 This results in more efficient code for machines without a NOR
10860 instruction. Combine will canonicalize to the first form
10861 which will allow use of NOR instructions provided by the
10862 backend if they exist. */
10863 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10864 && TREE_CODE (arg1) == BIT_NOT_EXPR)
10866 return fold_build1 (BIT_NOT_EXPR, type,
10867 build2 (BIT_IOR_EXPR, type,
10868 fold_convert (type,
10869 TREE_OPERAND (arg0, 0)),
10870 fold_convert (type,
10871 TREE_OPERAND (arg1, 0))));
10874 /* If arg0 is derived from the address of an object or function, we may
10875 be able to fold this expression using the object or function's
10876 alignment. */
10877 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && host_integerp (arg1, 1))
10879 unsigned HOST_WIDE_INT modulus, residue;
10880 unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (arg1);
10882 modulus = get_pointer_modulus_and_residue (arg0, &residue);
10884 /* This works because modulus is a power of 2. If this weren't the
10885 case, we'd have to replace it by its greatest power-of-2
10886 divisor: modulus & -modulus. */
10887 if (low < modulus)
10888 return build_int_cst (type, residue & low);
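/* An illustrative sketch: given

     static int v __attribute__ ((aligned (8)));

   an expression like "(intptr_t) &v & 7" reaches this point with
   arg0 the ADDR_EXPR of v (the conversion is stripped), modulus 8
   and residue 0, and so folds to the constant 0.  */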
10891 /* Fold (X << C1) & C2 into (X << C1) & (C2 | ((1 << C1) - 1))
10892 (X >> C1) & C2 into (X >> C1) & (C2 | ~((type) -1 >> C1))
10893 if the new mask might be further optimized. */
10894 if ((TREE_CODE (arg0) == LSHIFT_EXPR
10895 || TREE_CODE (arg0) == RSHIFT_EXPR)
10896 && host_integerp (TREE_OPERAND (arg0, 1), 1)
10897 && host_integerp (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)))
10898 && tree_low_cst (TREE_OPERAND (arg0, 1), 1)
10899 < TYPE_PRECISION (TREE_TYPE (arg0))
10900 && TYPE_PRECISION (TREE_TYPE (arg0)) <= HOST_BITS_PER_WIDE_INT
10901 && tree_low_cst (TREE_OPERAND (arg0, 1), 1) > 0)
10903 unsigned int shiftc = tree_low_cst (TREE_OPERAND (arg0, 1), 1);
10904 unsigned HOST_WIDE_INT mask
10905 = tree_low_cst (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)));
10906 unsigned HOST_WIDE_INT newmask, zerobits = 0;
10907 tree shift_type = TREE_TYPE (arg0);
10909 if (TREE_CODE (arg0) == LSHIFT_EXPR)
10910 zerobits = ((((unsigned HOST_WIDE_INT) 1) << shiftc) - 1);
10911 else if (TREE_CODE (arg0) == RSHIFT_EXPR
10912 && TYPE_PRECISION (TREE_TYPE (arg0))
10913 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg0))))
10915 unsigned int prec = TYPE_PRECISION (TREE_TYPE (arg0));
10916 tree arg00 = TREE_OPERAND (arg0, 0);
10917 /* See if more bits can be proven as zero because of
10918 zero extension. */
10919 if (TREE_CODE (arg00) == NOP_EXPR
10920 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg00, 0))))
10922 tree inner_type = TREE_TYPE (TREE_OPERAND (arg00, 0));
10923 if (TYPE_PRECISION (inner_type)
10924 == GET_MODE_BITSIZE (TYPE_MODE (inner_type))
10925 && TYPE_PRECISION (inner_type) < prec)
10927 prec = TYPE_PRECISION (inner_type);
10928 /* See if we can shorten the right shift. */
10929 if (shiftc < prec)
10930 shift_type = inner_type;
10933 zerobits = ~(unsigned HOST_WIDE_INT) 0;
10934 zerobits >>= HOST_BITS_PER_WIDE_INT - shiftc;
10935 zerobits <<= prec - shiftc;
10936 /* For an arithmetic shift, if the sign bit could be set, zerobits
10937 can actually contain sign bits, so no transformation is
10938 possible, unless MASK masks them all away. In that
10939 case the shift needs to be converted into a logical shift. */
10940 if (!TYPE_UNSIGNED (TREE_TYPE (arg0))
10941 && prec == TYPE_PRECISION (TREE_TYPE (arg0)))
10943 if ((mask & zerobits) == 0)
10944 shift_type = unsigned_type_for (TREE_TYPE (arg0));
10945 else
10946 zerobits = 0;
10950 /* ((X << 16) & 0xff00) is (X, 0). */
10951 if ((mask & zerobits) == mask)
10952 return omit_one_operand (type, build_int_cst (type, 0), arg0);
10954 newmask = mask | zerobits;
10955 if (newmask != mask && (newmask & (newmask + 1)) == 0)
10957 unsigned int prec;
10959 /* Only do the transformation if NEWMASK is some integer
10960 mode's mask. */
10961 for (prec = BITS_PER_UNIT;
10962 prec < HOST_BITS_PER_WIDE_INT; prec <<= 1)
10963 if (newmask == (((unsigned HOST_WIDE_INT) 1) << prec) - 1)
10964 break;
10965 if (prec < HOST_BITS_PER_WIDE_INT
10966 || newmask == ~(unsigned HOST_WIDE_INT) 0)
10968 if (shift_type != TREE_TYPE (arg0))
10970 tem = fold_build2 (TREE_CODE (arg0), shift_type,
10971 fold_convert (shift_type,
10972 TREE_OPERAND (arg0, 0)),
10973 TREE_OPERAND (arg0, 1));
10974 tem = fold_convert (type, tem);
10976 else
10977 tem = op0;
10978 return fold_build2 (BIT_AND_EXPR, type, tem,
10979 build_int_cst_type (TREE_TYPE (op1),
10980 newmask));
10985 goto associate;
10987 case RDIV_EXPR:
10988 /* Don't touch a floating-point divide by zero unless the mode
10989 of the constant can represent infinity. */
10990 if (TREE_CODE (arg1) == REAL_CST
10991 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
10992 && real_zerop (arg1))
10993 return NULL_TREE;
10995 /* Optimize A / A to 1.0 if we don't care about
10996 NaNs or Infinities. Skip the transformation
10997 for non-real operands. */
10998 if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
10999 && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
11000 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
11001 && operand_equal_p (arg0, arg1, 0))
11003 tree r = build_real (TREE_TYPE (arg0), dconst1);
11005 return omit_two_operands (type, r, arg0, arg1);
11008 /* The complex version of the above A / A optimization. */
11009 if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
11010 && operand_equal_p (arg0, arg1, 0))
11012 tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
11013 if (! HONOR_NANS (TYPE_MODE (elem_type))
11014 && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
11016 tree r = build_real (elem_type, dconst1);
11017 /* omit_two_operands will call fold_convert for us. */
11018 return omit_two_operands (type, r, arg0, arg1);
11022 /* (-A) / (-B) -> A / B */
11023 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
11024 return fold_build2 (RDIV_EXPR, type,
11025 TREE_OPERAND (arg0, 0),
11026 negate_expr (arg1));
11027 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
11028 return fold_build2 (RDIV_EXPR, type,
11029 negate_expr (arg0),
11030 TREE_OPERAND (arg1, 0));
11032 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
11033 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11034 && real_onep (arg1))
11035 return non_lvalue (fold_convert (type, arg0));
11037 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
11038 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11039 && real_minus_onep (arg1))
11040 return non_lvalue (fold_convert (type, negate_expr (arg0)));
11042 /* If ARG1 is a constant, we can convert this to a multiply by the
11043 reciprocal. This does not have the same rounding properties,
11044 so only do this if -freciprocal-math. We can actually
11045 always safely do it if ARG1 is a power of two, but it's hard to
11046 tell if it is or not in a portable manner. */
11047 if (TREE_CODE (arg1) == REAL_CST)
11049 if (flag_reciprocal_math
11050 && 0 != (tem = const_binop (code, build_real (type, dconst1),
11051 arg1, 0)))
11052 return fold_build2 (MULT_EXPR, type, arg0, tem);
11053 /* Find the reciprocal if optimizing and the result is exact. */
11054 if (optimize)
11056 REAL_VALUE_TYPE r;
11057 r = TREE_REAL_CST (arg1);
11058 if (exact_real_inverse (TYPE_MODE (TREE_TYPE (arg0)), &r))
11060 tem = build_real (type, r);
11061 return fold_build2 (MULT_EXPR, type,
11062 fold_convert (type, arg0), tem);
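/* E.g. "x / 2.0" becomes "x * 0.5" whenever optimizing, because
   0.5 is the exact reciprocal of 2.0; "x / 3.0" becomes
   "x * (1.0 / 3.0)" only under -freciprocal-math, since that
   reciprocal rounds.  */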
11066 /* Convert A/B/C to A/(B*C). */
11067 if (flag_reciprocal_math
11068 && TREE_CODE (arg0) == RDIV_EXPR)
11069 return fold_build2 (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
11070 fold_build2 (MULT_EXPR, type,
11071 TREE_OPERAND (arg0, 1), arg1));
11073 /* Convert A/(B/C) to (A/B)*C. */
11074 if (flag_reciprocal_math
11075 && TREE_CODE (arg1) == RDIV_EXPR)
11076 return fold_build2 (MULT_EXPR, type,
11077 fold_build2 (RDIV_EXPR, type, arg0,
11078 TREE_OPERAND (arg1, 0)),
11079 TREE_OPERAND (arg1, 1));
11081 /* Convert C1/(X*C2) into (C1/C2)/X. */
11082 if (flag_reciprocal_math
11083 && TREE_CODE (arg1) == MULT_EXPR
11084 && TREE_CODE (arg0) == REAL_CST
11085 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
11087 tree tem = const_binop (RDIV_EXPR, arg0,
11088 TREE_OPERAND (arg1, 1), 0);
11089 if (tem)
11090 return fold_build2 (RDIV_EXPR, type, tem,
11091 TREE_OPERAND (arg1, 0));
11094 if (flag_unsafe_math_optimizations)
11096 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
11097 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
11099 /* Optimize sin(x)/cos(x) as tan(x). */
11100 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
11101 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
11102 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
11103 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11104 CALL_EXPR_ARG (arg1, 0), 0))
11106 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11108 if (tanfn != NULL_TREE)
11109 return build_call_expr (tanfn, 1, CALL_EXPR_ARG (arg0, 0));
11112 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
11113 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
11114 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
11115 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
11116 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11117 CALL_EXPR_ARG (arg1, 0), 0))
11119 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11121 if (tanfn != NULL_TREE)
11123 tree tmp = build_call_expr (tanfn, 1, CALL_EXPR_ARG (arg0, 0));
11124 return fold_build2 (RDIV_EXPR, type,
11125 build_real (type, dconst1), tmp);
11129 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
11130 NaNs or Infinities. */
11131 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
11132 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
11133 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
11135 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11136 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11138 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
11139 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
11140 && operand_equal_p (arg00, arg01, 0))
11142 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11144 if (cosfn != NULL_TREE)
11145 return build_call_expr (cosfn, 1, arg00);
11149 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
11150 NaNs or Infinities. */
11151 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
11152 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
11153 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
11155 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11156 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11158 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
11159 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
11160 && operand_equal_p (arg00, arg01, 0))
11162 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11164 if (cosfn != NULL_TREE)
11166 tree tmp = build_call_expr (cosfn, 1, arg00);
11167 return fold_build2 (RDIV_EXPR, type,
11168 build_real (type, dconst1),
11169 tmp);
11174 /* Optimize pow(x,c)/x as pow(x,c-1). */
11175 if (fcode0 == BUILT_IN_POW
11176 || fcode0 == BUILT_IN_POWF
11177 || fcode0 == BUILT_IN_POWL)
11179 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11180 tree arg01 = CALL_EXPR_ARG (arg0, 1);
11181 if (TREE_CODE (arg01) == REAL_CST
11182 && !TREE_OVERFLOW (arg01)
11183 && operand_equal_p (arg1, arg00, 0))
11185 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11186 REAL_VALUE_TYPE c;
11187 tree arg;
11189 c = TREE_REAL_CST (arg01);
11190 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
11191 arg = build_real (type, c);
11192 return build_call_expr (powfn, 2, arg1, arg);
11196 /* Optimize a/root(b/c) into a*root(c/b). */
11197 if (BUILTIN_ROOT_P (fcode1))
11199 tree rootarg = CALL_EXPR_ARG (arg1, 0);
11201 if (TREE_CODE (rootarg) == RDIV_EXPR)
11203 tree rootfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11204 tree b = TREE_OPERAND (rootarg, 0);
11205 tree c = TREE_OPERAND (rootarg, 1);
11207 tree tmp = fold_build2 (RDIV_EXPR, type, c, b);
11209 tmp = build_call_expr (rootfn, 1, tmp);
11210 return fold_build2 (MULT_EXPR, type, arg0, tmp);
11214 /* Optimize x/expN(y) into x*expN(-y). */
11215 if (BUILTIN_EXPONENT_P (fcode1))
11217 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11218 tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
11219 arg1 = build_call_expr (expfn, 1, fold_convert (type, arg));
11220 return fold_build2 (MULT_EXPR, type, arg0, arg1);
11223 /* Optimize x/pow(y,z) into x*pow(y,-z). */
11224 if (fcode1 == BUILT_IN_POW
11225 || fcode1 == BUILT_IN_POWF
11226 || fcode1 == BUILT_IN_POWL)
11228 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11229 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11230 tree arg11 = CALL_EXPR_ARG (arg1, 1);
11231 tree neg11 = fold_convert (type, negate_expr (arg11));
11232 arg1 = build_call_expr (powfn, 2, arg10, neg11);
11233 return fold_build2 (MULT_EXPR, type, arg0, arg1);
11236 return NULL_TREE;
11238 case TRUNC_DIV_EXPR:
11239 case FLOOR_DIV_EXPR:
11240 /* Simplify A / (B << N) where A and B are positive and B is
11241 a power of 2, to A >> (N + log2(B)). */
11242 strict_overflow_p = false;
11243 if (TREE_CODE (arg1) == LSHIFT_EXPR
11244 && (TYPE_UNSIGNED (type)
11245 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
11247 tree sval = TREE_OPERAND (arg1, 0);
11248 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
11250 tree sh_cnt = TREE_OPERAND (arg1, 1);
11251 unsigned long pow2 = exact_log2 (TREE_INT_CST_LOW (sval));
11253 if (strict_overflow_p)
11254 fold_overflow_warning (("assuming signed overflow does not "
11255 "occur when simplifying A / (B << N)"),
11256 WARN_STRICT_OVERFLOW_MISC);
11258 sh_cnt = fold_build2 (PLUS_EXPR, TREE_TYPE (sh_cnt),
11259 sh_cnt, build_int_cst (NULL_TREE, pow2));
11260 return fold_build2 (RSHIFT_EXPR, type,
11261 fold_convert (type, arg0), sh_cnt);
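/* A worked instance of the fold above, for unsigned a:

     a / (4 << n)  ->  a >> (n + 2)

   since log2 (4) == 2.  The nonnegativity test extends this to
   signed operands that are provably >= 0.  */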
11265 /* For unsigned integral types, FLOOR_DIV_EXPR is the same as
11266 TRUNC_DIV_EXPR. Rewrite into the latter in this case. */
11267 if (INTEGRAL_TYPE_P (type)
11268 && TYPE_UNSIGNED (type)
11269 && code == FLOOR_DIV_EXPR)
11270 return fold_build2 (TRUNC_DIV_EXPR, type, op0, op1);
11272 /* Fall thru */
11274 case ROUND_DIV_EXPR:
11275 case CEIL_DIV_EXPR:
11276 case EXACT_DIV_EXPR:
11277 if (integer_onep (arg1))
11278 return non_lvalue (fold_convert (type, arg0));
11279 if (integer_zerop (arg1))
11280 return NULL_TREE;
11281 /* X / -1 is -X. */
11282 if (!TYPE_UNSIGNED (type)
11283 && TREE_CODE (arg1) == INTEGER_CST
11284 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
11285 && TREE_INT_CST_HIGH (arg1) == -1)
11286 return fold_convert (type, negate_expr (arg0));
11288 /* Convert -A / -B to A / B when the type is signed and overflow is
11289 undefined. */
11290 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11291 && TREE_CODE (arg0) == NEGATE_EXPR
11292 && negate_expr_p (arg1))
11294 if (INTEGRAL_TYPE_P (type))
11295 fold_overflow_warning (("assuming signed overflow does not occur "
11296 "when distributing negation across "
11298 WARN_STRICT_OVERFLOW_MISC);
11299 return fold_build2 (code, type,
11300 fold_convert (type, TREE_OPERAND (arg0, 0)),
11301 negate_expr (arg1));
11303 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11304 && TREE_CODE (arg1) == NEGATE_EXPR
11305 && negate_expr_p (arg0))
11307 if (INTEGRAL_TYPE_P (type))
11308 fold_overflow_warning (("assuming signed overflow does not occur "
11309 "when distributing negation across "
11311 WARN_STRICT_OVERFLOW_MISC);
11312 return fold_build2 (code, type, negate_expr (arg0),
11313 TREE_OPERAND (arg1, 0));
11316 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
11317 operation, EXACT_DIV_EXPR.
11319 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
11320 At one time others generated faster code; it's not clear if they do
11321 after the last round of changes to the DIV code in expmed.c. */
11322 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
11323 && multiple_of_p (type, arg0, arg1))
11324 return fold_build2 (EXACT_DIV_EXPR, type, arg0, arg1);
11326 strict_overflow_p = false;
11327 if (TREE_CODE (arg1) == INTEGER_CST
11328 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11329 &strict_overflow_p)))
11331 if (strict_overflow_p)
11332 fold_overflow_warning (("assuming signed overflow does not occur "
11333 "when simplifying division"),
11334 WARN_STRICT_OVERFLOW_MISC);
11335 return fold_convert (type, tem);
11338 return NULL_TREE;
11340 case CEIL_MOD_EXPR:
11341 case FLOOR_MOD_EXPR:
11342 case ROUND_MOD_EXPR:
11343 case TRUNC_MOD_EXPR:
11344 /* X % 1 is always zero, but be sure to preserve any side
11345 effects in X. */
11346 if (integer_onep (arg1))
11347 return omit_one_operand (type, integer_zero_node, arg0);
11349 /* For X % 0, return X % 0 unchanged so that we get the
11350 proper warnings and errors. */
11351 if (integer_zerop (arg1))
11352 return NULL_TREE;
11354 /* 0 % X is always zero, but be sure to preserve any side
11355 effects in X. Place this after checking for X == 0. */
11356 if (integer_zerop (arg0))
11357 return omit_one_operand (type, integer_zero_node, arg1);
11359 /* X % -1 is zero. */
11360 if (!TYPE_UNSIGNED (type)
11361 && TREE_CODE (arg1) == INTEGER_CST
11362 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
11363 && TREE_INT_CST_HIGH (arg1) == -1)
11364 return omit_one_operand (type, integer_zero_node, arg0);
11366 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
11367 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
11368 strict_overflow_p = false;
11369 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
11370 && (TYPE_UNSIGNED (type)
11371 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
11373 tree c = arg1;
11374 /* Also optimize A % (C << N) where C is a power of 2,
11375 to A & ((C << N) - 1). */
11376 if (TREE_CODE (arg1) == LSHIFT_EXPR)
11377 c = TREE_OPERAND (arg1, 0);
11379 if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
11381 tree mask = fold_build2 (MINUS_EXPR, TREE_TYPE (arg1), arg1,
11382 build_int_cst (TREE_TYPE (arg1), 1));
11383 if (strict_overflow_p)
11384 fold_overflow_warning (("assuming signed overflow does not "
11385 "occur when simplifying "
11386 "X % (power of two)"),
11387 WARN_STRICT_OVERFLOW_MISC);
11388 return fold_build2 (BIT_AND_EXPR, type,
11389 fold_convert (type, arg0),
11390 fold_convert (type, mask));
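/* E.g. for unsigned x, "x % 8" becomes "x & 7", and
   "x % (2 << n)" becomes "x & ((2 << n) - 1)"; the mask is built
   by subtracting 1 from arg1 itself, so the shifted form costs no
   extra code.  */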
11394 /* X % -C is the same as X % C. */
11395 if (code == TRUNC_MOD_EXPR
11396 && !TYPE_UNSIGNED (type)
11397 && TREE_CODE (arg1) == INTEGER_CST
11398 && !TREE_OVERFLOW (arg1)
11399 && TREE_INT_CST_HIGH (arg1) < 0
11400 && !TYPE_OVERFLOW_TRAPS (type)
11401 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
11402 && !sign_bit_p (arg1, arg1))
11403 return fold_build2 (code, type, fold_convert (type, arg0),
11404 fold_convert (type, negate_expr (arg1)));
11406 /* X % -Y is the same as X % Y. */
11407 if (code == TRUNC_MOD_EXPR
11408 && !TYPE_UNSIGNED (type)
11409 && TREE_CODE (arg1) == NEGATE_EXPR
11410 && !TYPE_OVERFLOW_TRAPS (type))
11411 return fold_build2 (code, type, fold_convert (type, arg0),
11412 fold_convert (type, TREE_OPERAND (arg1, 0)));
11414 if (TREE_CODE (arg1) == INTEGER_CST
11415 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11416 &strict_overflow_p)))
11418 if (strict_overflow_p)
11419 fold_overflow_warning (("assuming signed overflow does not occur "
11420 "when simplifying modulus"),
11421 WARN_STRICT_OVERFLOW_MISC);
11422 return fold_convert (type, tem);
11425 return NULL_TREE;
11427 case LROTATE_EXPR:
11428 case RROTATE_EXPR:
11429 if (integer_all_onesp (arg0))
11430 return omit_one_operand (type, arg0, arg1);
11431 goto shift;
11433 case RSHIFT_EXPR:
11434 /* Optimize -1 >> x for arithmetic right shifts. */
11435 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type))
11436 return omit_one_operand (type, arg0, arg1);
11437 /* ... fall through ... */
11439 case LSHIFT_EXPR:
11440 shift:
11441 if (integer_zerop (arg1))
11442 return non_lvalue (fold_convert (type, arg0));
11443 if (integer_zerop (arg0))
11444 return omit_one_operand (type, arg0, arg1);
11446 /* Since negative shift count is not well-defined,
11447 don't try to compute it in the compiler. */
11448 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
11449 return NULL_TREE;
11451 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
11452 if (TREE_CODE (op0) == code && host_integerp (arg1, false)
11453 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
11454 && host_integerp (TREE_OPERAND (arg0, 1), false)
11455 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
11457 HOST_WIDE_INT low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
11458 + TREE_INT_CST_LOW (arg1));
11460 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
11461 being well defined. */
11462 if (low >= TYPE_PRECISION (type))
11464 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
11465 low = low % TYPE_PRECISION (type);
11466 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
11467 return build_int_cst (type, 0);
11468 else
11469 low = TYPE_PRECISION (type) - 1;
11472 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
11473 build_int_cst (type, low));
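/* Worked instances of the combination above, assuming a 32-bit
   type:

     (x << 3) << 5    ->  x << 8
     (x << 20) << 20  ->  0            (total count 40 >= precision)

   while rotate counts simply add modulo the precision, so rotating
   by 30 and then by 10 is a rotate by 8.  */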
11476 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
11477 into x & ((unsigned)-1 >> c) for unsigned types. */
11478 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
11479 || (TYPE_UNSIGNED (type)
11480 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
11481 && host_integerp (arg1, false)
11482 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
11483 && host_integerp (TREE_OPERAND (arg0, 1), false)
11484 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
11486 HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
11487 HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
11488 tree lshift;
11489 tree arg00;
11491 if (low0 == low1)
11493 arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));
11495 lshift = build_int_cst (type, -1);
11496 lshift = int_const_binop (code, lshift, arg1, 0);
11498 return fold_build2 (BIT_AND_EXPR, type, arg00, lshift);
11502 /* Rewrite an LROTATE_EXPR by a constant into an
11503 RROTATE_EXPR by a new constant. */
11504 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
11506 tree tem = build_int_cst (TREE_TYPE (arg1),
11507 TYPE_PRECISION (type));
11508 tem = const_binop (MINUS_EXPR, tem, arg1, 0);
11509 return fold_build2 (RROTATE_EXPR, type, op0, tem);
11512 /* If we have a rotate of a bit operation with the rotate count and
11513 the second operand of the bit operation both constant,
11514 permute the two operations. */
11515 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
11516 && (TREE_CODE (arg0) == BIT_AND_EXPR
11517 || TREE_CODE (arg0) == BIT_IOR_EXPR
11518 || TREE_CODE (arg0) == BIT_XOR_EXPR)
11519 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11520 return fold_build2 (TREE_CODE (arg0), type,
11521 fold_build2 (code, type,
11522 TREE_OPERAND (arg0, 0), arg1),
11523 fold_build2 (code, type,
11524 TREE_OPERAND (arg0, 1), arg1));
11526 /* Two consecutive rotates adding up to the precision of the
11527 type can be ignored. */
11528 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
11529 && TREE_CODE (arg0) == RROTATE_EXPR
11530 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11531 && TREE_INT_CST_HIGH (arg1) == 0
11532 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
11533 && ((TREE_INT_CST_LOW (arg1)
11534 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
11535 == (unsigned int) TYPE_PRECISION (type)))
11536 return TREE_OPERAND (arg0, 0);
11538 /* Fold (X & C2) << C1 into (X << C1) & (C2 << C1)
11539 (X & C2) >> C1 into (X >> C1) & (C2 >> C1)
11540 if the latter can be further optimized. */
11541 if ((code == LSHIFT_EXPR || code == RSHIFT_EXPR)
11542 && TREE_CODE (arg0) == BIT_AND_EXPR
11543 && TREE_CODE (arg1) == INTEGER_CST
11544 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11546 tree mask = fold_build2 (code, type,
11547 fold_convert (type, TREE_OPERAND (arg0, 1)),
11548 arg1);
11549 tree shift = fold_build2 (code, type,
11550 fold_convert (type, TREE_OPERAND (arg0, 0)),
11551 arg1);
11552 tem = fold_binary (BIT_AND_EXPR, type, shift, mask);
11553 if (tem)
11554 return tem;
11557 return NULL_TREE;
11559 case MIN_EXPR:
11560 if (operand_equal_p (arg0, arg1, 0))
11561 return omit_one_operand (type, arg0, arg1);
11562 if (INTEGRAL_TYPE_P (type)
11563 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
11564 return omit_one_operand (type, arg1, arg0);
11565 tem = fold_minmax (MIN_EXPR, type, arg0, arg1);
11566 if (tem)
11567 return tem;
11568 goto associate;
11570 case MAX_EXPR:
11571 if (operand_equal_p (arg0, arg1, 0))
11572 return omit_one_operand (type, arg0, arg1);
11573 if (INTEGRAL_TYPE_P (type)
11574 && TYPE_MAX_VALUE (type)
11575 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
11576 return omit_one_operand (type, arg1, arg0);
11577 tem = fold_minmax (MAX_EXPR, type, arg0, arg1);
11578 if (tem)
11579 return tem;
11580 goto associate;
11582 case TRUTH_ANDIF_EXPR:
11583 /* Note that the operands of this must be ints
11584 and their values must be 0 or 1.
11585 ("true" is a fixed value perhaps depending on the language.) */
11586 /* If first arg is constant zero, return it. */
11587 if (integer_zerop (arg0))
11588 return fold_convert (type, arg0);
11589 case TRUTH_AND_EXPR:
11590 /* If either arg is constant true, drop it. */
11591 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
11592 return non_lvalue (fold_convert (type, arg1));
11593 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
11594 /* Preserve sequence points. */
11595 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
11596 return non_lvalue (fold_convert (type, arg0));
11597 /* If second arg is constant zero, result is zero, but first arg
11598 must be evaluated. */
11599 if (integer_zerop (arg1))
11600 return omit_one_operand (type, arg1, arg0);
11601 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
11602 case will be handled here. */
11603 if (integer_zerop (arg0))
11604 return omit_one_operand (type, arg0, arg1);
11606 /* !X && X is always false. */
11607 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11608 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11609 return omit_one_operand (type, integer_zero_node, arg1);
11610 /* X && !X is always false. */
11611 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
11612 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11613 return omit_one_operand (type, integer_zero_node, arg0);
11615 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
11616 means A >= Y && A != MAX, but in this case we know that
11617 A < X <= MAX. */
11619 if (!TREE_SIDE_EFFECTS (arg0)
11620 && !TREE_SIDE_EFFECTS (arg1))
11622 tem = fold_to_nonsharp_ineq_using_bound (arg0, arg1);
11623 if (tem && !operand_equal_p (tem, arg0, 0))
11624 return fold_build2 (code, type, tem, arg1);
11626 tem = fold_to_nonsharp_ineq_using_bound (arg1, arg0);
11627 if (tem && !operand_equal_p (tem, arg1, 0))
11628 return fold_build2 (code, type, arg0, tem);
11632 /* We only do these simplifications if we are optimizing. */
11633 if (!optimize)
11634 break;
11636 /* Check for things like (A || B) && (A || C). We can convert this
11637 to A || (B && C). Note that either operator can be any of the four
11638 truth and/or operations and the transformation will still be
11639 valid. Also note that we only care about order for the
11640 ANDIF and ORIF operators. If B contains side effects, this
11641 might change the truth-value of A. */
11642 if (TREE_CODE (arg0) == TREE_CODE (arg1)
11643 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
11644 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
11645 || TREE_CODE (arg0) == TRUTH_AND_EXPR
11646 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
11647 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
11649 tree a00 = TREE_OPERAND (arg0, 0);
11650 tree a01 = TREE_OPERAND (arg0, 1);
11651 tree a10 = TREE_OPERAND (arg1, 0);
11652 tree a11 = TREE_OPERAND (arg1, 1);
11653 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
11654 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
11655 && (code == TRUTH_AND_EXPR
11656 || code == TRUTH_OR_EXPR));
11658 if (operand_equal_p (a00, a10, 0))
11659 return fold_build2 (TREE_CODE (arg0), type, a00,
11660 fold_build2 (code, type, a01, a11));
11661 else if (commutative && operand_equal_p (a00, a11, 0))
11662 return fold_build2 (TREE_CODE (arg0), type, a00,
11663 fold_build2 (code, type, a01, a10));
11664 else if (commutative && operand_equal_p (a01, a10, 0))
11665 return fold_build2 (TREE_CODE (arg0), type, a01,
11666 fold_build2 (code, type, a00, a11));
11668 /* This case is tricky because we must either have commutative
11669 operators or else A10 must not have side-effects. */
11671 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
11672 && operand_equal_p (a01, a11, 0))
11673 return fold_build2 (TREE_CODE (arg0), type,
11674 fold_build2 (code, type, a00, a10),
11675 a01);
11678 /* See if we can build a range comparison. */
11679 if (0 != (tem = fold_range_test (code, type, op0, op1)))
11680 return tem;
11682 /* Check for the possibility of merging component references. If our
11683 lhs is another similar operation, try to merge its rhs with our
11684 rhs. Then try to merge our lhs and rhs. */
11685 if (TREE_CODE (arg0) == code
11686 && 0 != (tem = fold_truthop (code, type,
11687 TREE_OPERAND (arg0, 1), arg1)))
11688 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
11690 if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
11691 return tem;
11693 return NULL_TREE;
11695 case TRUTH_ORIF_EXPR:
11696 /* Note that the operands of this must be ints
11697 and their values must be 0 or true.
11698 ("true" is a fixed value perhaps depending on the language.) */
11699 /* If first arg is constant true, return it. */
11700 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
11701 return fold_convert (type, arg0);
11702 case TRUTH_OR_EXPR:
11703 /* If either arg is constant zero, drop it. */
11704 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
11705 return non_lvalue (fold_convert (type, arg1));
11706 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
11707 /* Preserve sequence points. */
11708 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
11709 return non_lvalue (fold_convert (type, arg0));
11710 /* If second arg is constant true, result is true, but we must
11711 evaluate first arg. */
11712 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
11713 return omit_one_operand (type, arg1, arg0);
11714 /* Likewise for first arg, but note this only occurs here for
11715 TRUTH_OR_EXPR. */
11716 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
11717 return omit_one_operand (type, arg0, arg1);
11719 /* !X || X is always true. */
11720 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11721 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11722 return omit_one_operand (type, integer_one_node, arg1);
11723 /* X || !X is always true. */
11724 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
11725 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11726 return omit_one_operand (type, integer_one_node, arg0);
11730 case TRUTH_XOR_EXPR:
11731 /* If the second arg is constant zero, drop it. */
11732 if (integer_zerop (arg1))
11733 return non_lvalue (fold_convert (type, arg0));
11734 /* If the second arg is constant true, this is a logical inversion. */
11735 if (integer_onep (arg1))
11737 /* Only call invert_truthvalue if operand is a truth value. */
11738 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
11739 tem = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg0), arg0);
11740 else
11741 tem = invert_truthvalue (arg0);
11742 return non_lvalue (fold_convert (type, tem));
11744 /* Identical arguments cancel to zero. */
11745 if (operand_equal_p (arg0, arg1, 0))
11746 return omit_one_operand (type, integer_zero_node, arg0);
11748 /* !X ^ X is always true. */
11749 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11750 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11751 return omit_one_operand (type, integer_one_node, arg1);
11753 /* X ^ !X is always true. */
11754 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
11755 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11756 return omit_one_operand (type, integer_one_node, arg0);
11758 return NULL_TREE;
11760 case EQ_EXPR:
11761 case NE_EXPR:
11762 tem = fold_comparison (code, type, op0, op1);
11763 if (tem != NULL_TREE)
11764 return tem;
11766 /* bool_var != 0 becomes bool_var. */
11767 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
11768 && code == NE_EXPR)
11769 return non_lvalue (fold_convert (type, arg0));
11771 /* bool_var == 1 becomes bool_var. */
11772 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
11773 && code == EQ_EXPR)
11774 return non_lvalue (fold_convert (type, arg0));
11776 /* bool_var != 1 becomes !bool_var. */
11777 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
11778 && code == NE_EXPR)
11779 return fold_build1 (TRUTH_NOT_EXPR, type, fold_convert (type, arg0));
11781 /* bool_var == 0 becomes !bool_var. */
11782 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
11783 && code == EQ_EXPR)
11784 return fold_build1 (TRUTH_NOT_EXPR, type, fold_convert (type, arg0));
11786 /* If this is an equality comparison of the address of two non-weak,
11787 unaliased symbols neither of which are extern (since we do not
11788 have access to attributes for externs), then we know the result. */
11789 if (TREE_CODE (arg0) == ADDR_EXPR
11790 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
11791 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
11792 && ! lookup_attribute ("alias",
11793 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
11794 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
11795 && TREE_CODE (arg1) == ADDR_EXPR
11796 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
11797 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
11798 && ! lookup_attribute ("alias",
11799 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
11800 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
11802 /* We know that we're looking at the address of two
11803 non-weak, unaliased, static _DECL nodes.
11805 It is both wasteful and incorrect to call operand_equal_p
11806 to compare the two ADDR_EXPR nodes. It is wasteful in that
11807 all we need to do is test pointer equality for the arguments
11808 to the two ADDR_EXPR nodes. It is incorrect to use
11809 operand_equal_p as that function is NOT equivalent to a
11810 C equality test. It can in fact return false for two
11811 objects which would test as equal using the C equality
11812 operator. */
11813 bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
11814 return constant_boolean_node (equal
11815 ? code == EQ_EXPR : code != EQ_EXPR,
11816 type);
11819 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
11820 a MINUS_EXPR of a constant, we can convert it into a comparison with
11821 a revised constant as long as no overflow occurs. */
11822 if (TREE_CODE (arg1) == INTEGER_CST
11823 && (TREE_CODE (arg0) == PLUS_EXPR
11824 || TREE_CODE (arg0) == MINUS_EXPR)
11825 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11826 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
11827 ? MINUS_EXPR : PLUS_EXPR,
11828 fold_convert (TREE_TYPE (arg0), arg1),
11829 TREE_OPERAND (arg0, 1), 0))
11830 && !TREE_OVERFLOW (tem))
11831 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
11833 /* Similarly for a NEGATE_EXPR. */
11834 if (TREE_CODE (arg0) == NEGATE_EXPR
11835 && TREE_CODE (arg1) == INTEGER_CST
11836 && 0 != (tem = negate_expr (arg1))
11837 && TREE_CODE (tem) == INTEGER_CST
11838 && !TREE_OVERFLOW (tem))
11839 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
11841 /* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2). */
11842 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11843 && TREE_CODE (arg1) == INTEGER_CST
11844 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11845 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
11846 fold_build2 (BIT_XOR_EXPR, TREE_TYPE (arg0),
11847 fold_convert (TREE_TYPE (arg0), arg1),
11848 TREE_OPERAND (arg0, 1)));
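/* E.g. "(x ^ 5) == 3" becomes "x == 6", because XOR with a
   constant is invertible: x ^ 5 == 3 exactly when x == (5 ^ 3).  */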
11850 /* Transform comparisons of the form X +- C CMP X. */
11851 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
11852 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11853 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11854 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11855 || POINTER_TYPE_P (TREE_TYPE (arg0))))
11857 tree cst = TREE_OPERAND (arg0, 1);
11859 if (code == EQ_EXPR
11860 && !integer_zerop (cst))
11861 return omit_two_operands (type, boolean_false_node,
11862 TREE_OPERAND (arg0, 0), arg1);
11863 else
11864 return omit_two_operands (type, boolean_true_node,
11865 TREE_OPERAND (arg0, 0), arg1);
11868 /* If we have X - Y == 0, we can convert that to X == Y and similarly
11869 for !=. Don't do this for ordered comparisons due to overflow. */
11870 if (TREE_CODE (arg0) == MINUS_EXPR
11871 && integer_zerop (arg1))
11872 return fold_build2 (code, type,
11873 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
11875 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
11876 if (TREE_CODE (arg0) == ABS_EXPR
11877 && (integer_zerop (arg1) || real_zerop (arg1)))
11878 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), arg1);
11880 /* If this is an EQ or NE comparison with zero and ARG0 is
11881 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
11882 two operations, but the latter can be done in one less insn
11883 on machines that have only two-operand insns or on which a
11884 constant cannot be the first operand. */
11885 if (TREE_CODE (arg0) == BIT_AND_EXPR
11886 && integer_zerop (arg1))
11888 tree arg00 = TREE_OPERAND (arg0, 0);
11889 tree arg01 = TREE_OPERAND (arg0, 1);
11890 if (TREE_CODE (arg00) == LSHIFT_EXPR
11891 && integer_onep (TREE_OPERAND (arg00, 0)))
11893 tree tem = fold_build2 (RSHIFT_EXPR, TREE_TYPE (arg00),
11894 arg01, TREE_OPERAND (arg00, 1));
11895 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0), tem,
11896 build_int_cst (TREE_TYPE (arg0), 1));
11897 return fold_build2 (code, type,
11898 fold_convert (TREE_TYPE (arg1), tem), arg1);
11900 else if (TREE_CODE (arg01) == LSHIFT_EXPR
11901 && integer_onep (TREE_OPERAND (arg01, 0)))
11903 tree tem = fold_build2 (RSHIFT_EXPR, TREE_TYPE (arg01),
11904 arg00, TREE_OPERAND (arg01, 1));
11905 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0), tem,
11906 build_int_cst (TREE_TYPE (arg0), 1));
11907 return fold_build2 (code, type,
11908 fold_convert (TREE_TYPE (arg1), tem), arg1);
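/* Editorial illustration (not part of the original source; helper name
   invented): both forms test bit FOO of BAR, so the rewrite above
   preserves the result for every valid shift count.  Compiled-out
   sketch:  */
#if 0
#include <assert.h>
#include <limits.h>
static void
check_single_bit_test (unsigned bar, unsigned foo)
{
  if (foo < sizeof (unsigned) * CHAR_BIT)
    assert ((((1u << foo) & bar) != 0) == (((bar >> foo) & 1u) != 0));
}
#endif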
11912 /* If this is an NE or EQ comparison of zero against the result of a
11913 signed MOD operation whose second operand is a power of 2, make
11914 the MOD operation unsigned since it is simpler and equivalent. */
11915 if (integer_zerop (arg1)
11916 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
11917 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
11918 || TREE_CODE (arg0) == CEIL_MOD_EXPR
11919 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
11920 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
11921 && integer_pow2p (TREE_OPERAND (arg0, 1)))
11923 tree newtype = unsigned_type_for (TREE_TYPE (arg0));
11924 tree newmod = fold_build2 (TREE_CODE (arg0), newtype,
11925 fold_convert (newtype,
11926 TREE_OPERAND (arg0, 0)),
11927 fold_convert (newtype,
11928 TREE_OPERAND (arg0, 1)));
11930 return fold_build2 (code, type, newmod,
11931 fold_convert (newtype, arg1));
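/* Editorial illustration (not part of the original source; helper name
   invented): for a power-of-two divisor, a truncating signed modulo is
   zero exactly when the low bits are zero, so on two's-complement
   targets the unsigned modulo gives the same zero/nonzero answer.
   Compiled-out sketch:  */
#if 0
#include <assert.h>
static void
check_mod_pow2_fold (int x)
{
  assert ((x % 8 == 0) == ((unsigned) x % 8u == 0));
}
#endif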
11934 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
11935 C1 is a valid shift constant, and C2 is a power of two, i.e. a single bit.  */
11937 if (TREE_CODE (arg0) == BIT_AND_EXPR
11938 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
11939 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
11941 && integer_pow2p (TREE_OPERAND (arg0, 1))
11942 && integer_zerop (arg1))
11944 tree itype = TREE_TYPE (arg0);
11945 unsigned HOST_WIDE_INT prec = TYPE_PRECISION (itype);
11946 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
11948 /* Check for a valid shift count. */
11949 if (TREE_INT_CST_HIGH (arg001) == 0
11950 && TREE_INT_CST_LOW (arg001) < prec)
11952 tree arg01 = TREE_OPERAND (arg0, 1);
11953 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
11954 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
11955 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
11956 can be rewritten as (X & (C2 << C1)) != 0. */
11957 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
11959 tem = fold_build2 (LSHIFT_EXPR, itype, arg01, arg001);
11960 tem = fold_build2 (BIT_AND_EXPR, itype, arg000, tem);
11961 return fold_build2 (code, type, tem, arg1);
11963 /* Otherwise, for signed (arithmetic) shifts,
11964 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
11965 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
11966 else if (!TYPE_UNSIGNED (itype))
11967 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
11968 arg000, build_int_cst (itype, 0));
11969 /* Otherwise, for unsigned (logical) shifts,
11970 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
11971 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
11973 return omit_one_operand (type,
11974 code == EQ_EXPR ? integer_one_node
11975 : integer_zero_node,
11980 /* If this is an NE comparison of zero with an AND of one, remove the
11981 comparison since the AND will give the correct value. */
11982 if (code == NE_EXPR
11983 && integer_zerop (arg1)
11984 && TREE_CODE (arg0) == BIT_AND_EXPR
11985 && integer_onep (TREE_OPERAND (arg0, 1)))
11986 return fold_convert (type, arg0);
11988 /* If we have (A & C) == C where C is a power of 2, convert this into
11989 (A & C) != 0. Similarly for NE_EXPR. */
11990 if (TREE_CODE (arg0) == BIT_AND_EXPR
11991 && integer_pow2p (TREE_OPERAND (arg0, 1))
11992 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11993 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
11994 arg0, fold_convert (TREE_TYPE (arg0),
11995 integer_zero_node));
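/* Editorial illustration (not part of the original source; helper name
   invented): when C has a single bit set, "a & C" is either 0 or C, so
   testing it against C and testing it against 0 are complementary, as
   the fold above assumes.  Compiled-out sketch:  */
#if 0
#include <assert.h>
static void
check_and_pow2_fold (int a)
{
  assert (((a & 4) == 4) == ((a & 4) != 0));
}
#endif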
11997 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
11998 bit, then fold the expression into A < 0 or A >= 0. */
11999 tem = fold_single_bit_test_into_sign_test (code, arg0, arg1, type);
12003 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
12004 Similarly for NE_EXPR. */
12005 if (TREE_CODE (arg0) == BIT_AND_EXPR
12006 && TREE_CODE (arg1) == INTEGER_CST
12007 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12009 tree notc = fold_build1 (BIT_NOT_EXPR,
12010 TREE_TYPE (TREE_OPERAND (arg0, 1)),
12011 TREE_OPERAND (arg0, 1));
12012 tree dandnotc = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
12014 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12015 if (integer_nonzerop (dandnotc))
12016 return omit_one_operand (type, rslt, arg0);
12019 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
12020 Similarly for NE_EXPR. */
12021 if (TREE_CODE (arg0) == BIT_IOR_EXPR
12022 && TREE_CODE (arg1) == INTEGER_CST
12023 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12025 tree notd = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
12026 tree candnotd = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
12027 TREE_OPERAND (arg0, 1), notd);
12028 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12029 if (integer_nonzerop (candnotd))
12030 return omit_one_operand (type, rslt, arg0);
12033 /* Optimize comparisons of strlen vs zero to a compare of the
12034 first character of the string vs zero. To wit,
12035 strlen(ptr) == 0 => *ptr == 0
12036 strlen(ptr) != 0 => *ptr != 0
12037 Other cases should reduce to one of these two (or a constant)
12038 due to the return value of strlen being unsigned. */
12039 if (TREE_CODE (arg0) == CALL_EXPR
12040 && integer_zerop (arg1))
12042 tree fndecl = get_callee_fndecl (arg0);
12045 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
12046 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
12047 && call_expr_nargs (arg0) == 1
12048 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
12050 tree iref = build_fold_indirect_ref (CALL_EXPR_ARG (arg0, 0));
12051 return fold_build2 (code, type, iref,
12052 build_int_cst (TREE_TYPE (iref), 0));
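/* Editorial illustration (not part of the original source; helper name
   invented): a string has length zero exactly when its first character
   is the terminating NUL, which justifies the strlen fold above.
   Compiled-out sketch:  */
#if 0
#include <assert.h>
#include <string.h>
static void
check_strlen_fold (const char *p)
{
  assert ((strlen (p) == 0) == (*p == '\0'));
}
#endif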
12056 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
12057 of X. Similarly fold (X >> C) == 0 into X >= 0. */
12058 if (TREE_CODE (arg0) == RSHIFT_EXPR
12059 && integer_zerop (arg1)
12060 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12062 tree arg00 = TREE_OPERAND (arg0, 0);
12063 tree arg01 = TREE_OPERAND (arg0, 1);
12064 tree itype = TREE_TYPE (arg00);
12065 if (TREE_INT_CST_HIGH (arg01) == 0
12066 && TREE_INT_CST_LOW (arg01)
12067 == (unsigned HOST_WIDE_INT) (TYPE_PRECISION (itype) - 1))
12069 if (TYPE_UNSIGNED (itype))
12071 itype = signed_type_for (itype);
12072 arg00 = fold_convert (itype, arg00);
12074 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
12075 type, arg00, build_int_cst (itype, 0));
12079 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
12080 if (integer_zerop (arg1)
12081 && TREE_CODE (arg0) == BIT_XOR_EXPR)
12082 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
12083 TREE_OPERAND (arg0, 1));
12085 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
12086 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12087 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12088 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
12089 build_int_cst (TREE_TYPE (arg1), 0));
12090 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
12091 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12092 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12093 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
12094 return fold_build2 (code, type, TREE_OPERAND (arg0, 1),
12095 build_int_cst (TREE_TYPE (arg1), 0));
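/* Editorial illustration (not part of the original source; helper name
   invented): XOR cancellation justifies the two folds above; "x ^ y"
   equals y exactly when x is zero, and equals x exactly when y is
   zero.  Compiled-out sketch:  */
#if 0
#include <assert.h>
static void
check_xor_cancel (int x, int y)
{
  assert ((((x ^ y) == y) == (x == 0))
          && (((x ^ y) == x) == (y == 0)));
}
#endif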
12097 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
12098 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12099 && TREE_CODE (arg1) == INTEGER_CST
12100 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12101 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
12102 fold_build2 (BIT_XOR_EXPR, TREE_TYPE (arg1),
12103 TREE_OPERAND (arg0, 1), arg1));
12105 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
12106 (X & C) == 0 when C is a single bit. */
12107 if (TREE_CODE (arg0) == BIT_AND_EXPR
12108 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
12109 && integer_zerop (arg1)
12110 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12112 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
12113 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
12114 TREE_OPERAND (arg0, 1));
12115 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
12119 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
12120 constant C is a power of two, i.e. a single bit. */
12121 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12122 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
12123 && integer_zerop (arg1)
12124 && integer_pow2p (TREE_OPERAND (arg0, 1))
12125 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12126 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12128 tree arg00 = TREE_OPERAND (arg0, 0);
12129 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12130 arg00, build_int_cst (TREE_TYPE (arg00), 0));
12133 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
12134 when C is a power of two, i.e. a single bit.  */
12135 if (TREE_CODE (arg0) == BIT_AND_EXPR
12136 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
12137 && integer_zerop (arg1)
12138 && integer_pow2p (TREE_OPERAND (arg0, 1))
12139 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12140 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12142 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12143 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg000),
12144 arg000, TREE_OPERAND (arg0, 1));
12145 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12146 tem, build_int_cst (TREE_TYPE (tem), 0));
12149 if (integer_zerop (arg1)
12150 && tree_expr_nonzero_p (arg0))
12152 tree res = constant_boolean_node (code==NE_EXPR, type);
12153 return omit_one_operand (type, res, arg0);
12156 /* Fold -X op -Y as X op Y, where op is eq/ne. */
12157 if (TREE_CODE (arg0) == NEGATE_EXPR
12158 && TREE_CODE (arg1) == NEGATE_EXPR)
12159 return fold_build2 (code, type,
12160 TREE_OPERAND (arg0, 0),
12161 TREE_OPERAND (arg1, 0));
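/* Editorial illustration (not part of the original source; helper name
   invented): negation is injective, so comparing two negated values
   for equality is the same as comparing the originals.  Compiled-out
   sketch:  */
#if 0
#include <assert.h>
static void
check_negate_eq (int x, int y)
{
  /* Assumes neither operand is INT_MIN, whose negation overflows.  */
  assert ((-x == -y) == (x == y));
}
#endif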
12163 /* Fold (X & C) op (Y & C) as "(X ^ Y) & C op 0", and symmetries.  */
12164 if (TREE_CODE (arg0) == BIT_AND_EXPR
12165 && TREE_CODE (arg1) == BIT_AND_EXPR)
12167 tree arg00 = TREE_OPERAND (arg0, 0);
12168 tree arg01 = TREE_OPERAND (arg0, 1);
12169 tree arg10 = TREE_OPERAND (arg1, 0);
12170 tree arg11 = TREE_OPERAND (arg1, 1);
12171 tree itype = TREE_TYPE (arg0);
12173 if (operand_equal_p (arg01, arg11, 0))
12174 return fold_build2 (code, type,
12175 fold_build2 (BIT_AND_EXPR, itype,
12176 fold_build2 (BIT_XOR_EXPR, itype,
12179 build_int_cst (itype, 0));
12181 if (operand_equal_p (arg01, arg10, 0))
12182 return fold_build2 (code, type,
12183 fold_build2 (BIT_AND_EXPR, itype,
12184 fold_build2 (BIT_XOR_EXPR, itype,
12187 build_int_cst (itype, 0));
12189 if (operand_equal_p (arg00, arg11, 0))
12190 return fold_build2 (code, type,
12191 fold_build2 (BIT_AND_EXPR, itype,
12192 fold_build2 (BIT_XOR_EXPR, itype,
12195 build_int_cst (itype, 0));
12197 if (operand_equal_p (arg00, arg10, 0))
12198 return fold_build2 (code, type,
12199 fold_build2 (BIT_AND_EXPR, itype,
12200 fold_build2 (BIT_XOR_EXPR, itype,
12203 build_int_cst (itype, 0));
12206 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12207 && TREE_CODE (arg1) == BIT_XOR_EXPR)
12209 tree arg00 = TREE_OPERAND (arg0, 0);
12210 tree arg01 = TREE_OPERAND (arg0, 1);
12211 tree arg10 = TREE_OPERAND (arg1, 0);
12212 tree arg11 = TREE_OPERAND (arg1, 1);
12213 tree itype = TREE_TYPE (arg0);
12215 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
12216 operand_equal_p guarantees no side-effects so we don't need
12217 to use omit_one_operand on Z. */
12218 if (operand_equal_p (arg01, arg11, 0))
12219 return fold_build2 (code, type, arg00, arg10);
12220 if (operand_equal_p (arg01, arg10, 0))
12221 return fold_build2 (code, type, arg00, arg11);
12222 if (operand_equal_p (arg00, arg11, 0))
12223 return fold_build2 (code, type, arg01, arg10);
12224 if (operand_equal_p (arg00, arg10, 0))
12225 return fold_build2 (code, type, arg01, arg11);
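/* Editorial illustration (not part of the original source; helper name
   invented): XOR with a common operand Z is a bijection, so it can be
   cancelled from an equality as done above.  Compiled-out sketch:  */
#if 0
#include <assert.h>
static void
check_xor_common_operand (int x, int y, int z)
{
  assert (((x ^ z) == (y ^ z)) == (x == y));
}
#endif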
12227 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
12228 if (TREE_CODE (arg01) == INTEGER_CST
12229 && TREE_CODE (arg11) == INTEGER_CST)
12230 return fold_build2 (code, type,
12231 fold_build2 (BIT_XOR_EXPR, itype, arg00,
12232 fold_build2 (BIT_XOR_EXPR, itype,
12237 /* Attempt to simplify equality/inequality comparisons of complex
12238 values. Only lower the comparison if the result is known or
12239 can be simplified to a single scalar comparison. */
12240 if ((TREE_CODE (arg0) == COMPLEX_EXPR
12241 || TREE_CODE (arg0) == COMPLEX_CST)
12242 && (TREE_CODE (arg1) == COMPLEX_EXPR
12243 || TREE_CODE (arg1) == COMPLEX_CST))
12245 tree real0, imag0, real1, imag1;
12248 if (TREE_CODE (arg0) == COMPLEX_EXPR)
12250 real0 = TREE_OPERAND (arg0, 0);
12251 imag0 = TREE_OPERAND (arg0, 1);
12255 real0 = TREE_REALPART (arg0);
12256 imag0 = TREE_IMAGPART (arg0);
12259 if (TREE_CODE (arg1) == COMPLEX_EXPR)
12261 real1 = TREE_OPERAND (arg1, 0);
12262 imag1 = TREE_OPERAND (arg1, 1);
12266 real1 = TREE_REALPART (arg1);
12267 imag1 = TREE_IMAGPART (arg1);
12270 rcond = fold_binary (code, type, real0, real1);
12271 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
12273 if (integer_zerop (rcond))
12275 if (code == EQ_EXPR)
12276 return omit_two_operands (type, boolean_false_node,
12278 return fold_build2 (NE_EXPR, type, imag0, imag1);
12282 if (code == NE_EXPR)
12283 return omit_two_operands (type, boolean_true_node,
12285 return fold_build2 (EQ_EXPR, type, imag0, imag1);
12289 icond = fold_binary (code, type, imag0, imag1);
12290 if (icond && TREE_CODE (icond) == INTEGER_CST)
12292 if (integer_zerop (icond))
12294 if (code == EQ_EXPR)
12295 return omit_two_operands (type, boolean_false_node,
12297 return fold_build2 (NE_EXPR, type, real0, real1);
12301 if (code == NE_EXPR)
12302 return omit_two_operands (type, boolean_true_node,
12304 return fold_build2 (EQ_EXPR, type, real0, real1);
12315 tem = fold_comparison (code, type, op0, op1);
12316 if (tem != NULL_TREE)
12319 /* Transform comparisons of the form X +- C CMP X. */
12320 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
12321 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12322 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
12323 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
12324 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12325 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
12327 tree arg01 = TREE_OPERAND (arg0, 1);
12328 enum tree_code code0 = TREE_CODE (arg0);
12331 if (TREE_CODE (arg01) == REAL_CST)
12332 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
12334 is_positive = tree_int_cst_sgn (arg01);
12336 /* (X - c) > X becomes false. */
12337 if (code == GT_EXPR
12338 && ((code0 == MINUS_EXPR && is_positive >= 0)
12339 || (code0 == PLUS_EXPR && is_positive <= 0)))
12341 if (TREE_CODE (arg01) == INTEGER_CST
12342 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12343 fold_overflow_warning (("assuming signed overflow does not "
12344 "occur when assuming that (X - c) > X "
12345 "is always false"),
12346 WARN_STRICT_OVERFLOW_ALL);
12347 return constant_boolean_node (0, type);
12350 /* Likewise (X + c) < X becomes false. */
12351 if (code == LT_EXPR
12352 && ((code0 == PLUS_EXPR && is_positive >= 0)
12353 || (code0 == MINUS_EXPR && is_positive <= 0)))
12355 if (TREE_CODE (arg01) == INTEGER_CST
12356 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12357 fold_overflow_warning (("assuming signed overflow does not "
12358 "occur when assuming that "
12359 "(X + c) < X is always false"),
12360 WARN_STRICT_OVERFLOW_ALL);
12361 return constant_boolean_node (0, type);
12364 /* Convert (X - c) <= X to true. */
12365 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
12367 && ((code0 == MINUS_EXPR && is_positive >= 0)
12368 || (code0 == PLUS_EXPR && is_positive <= 0)))
12370 if (TREE_CODE (arg01) == INTEGER_CST
12371 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12372 fold_overflow_warning (("assuming signed overflow does not "
12373 "occur when assuming that "
12374 "(X - c) <= X is always true"),
12375 WARN_STRICT_OVERFLOW_ALL);
12376 return constant_boolean_node (1, type);
12379 /* Convert (X + c) >= X to true. */
12380 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
12382 && ((code0 == PLUS_EXPR && is_positive >= 0)
12383 || (code0 == MINUS_EXPR && is_positive <= 0)))
12385 if (TREE_CODE (arg01) == INTEGER_CST
12386 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12387 fold_overflow_warning (("assuming signed overflow does not "
12388 "occur when assuming that "
12389 "(X + c) >= X is always true"),
12390 WARN_STRICT_OVERFLOW_ALL);
12391 return constant_boolean_node (1, type);
12394 if (TREE_CODE (arg01) == INTEGER_CST)
12396 /* Convert X + c > X and X - c < X to true for integers. */
12397 if (code == GT_EXPR
12398 && ((code0 == PLUS_EXPR && is_positive > 0)
12399 || (code0 == MINUS_EXPR && is_positive < 0)))
12401 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12402 fold_overflow_warning (("assuming signed overflow does "
12403 "not occur when assuming that "
12404 "(X + c) > X is always true"),
12405 WARN_STRICT_OVERFLOW_ALL);
12406 return constant_boolean_node (1, type);
12409 if (code == LT_EXPR
12410 && ((code0 == MINUS_EXPR && is_positive > 0)
12411 || (code0 == PLUS_EXPR && is_positive < 0)))
12413 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12414 fold_overflow_warning (("assuming signed overflow does "
12415 "not occur when assuming that "
12416 "(X - c) < X is always true"),
12417 WARN_STRICT_OVERFLOW_ALL);
12418 return constant_boolean_node (1, type);
12421 /* Convert X + c <= X and X - c >= X to false for integers. */
12422 if (code == LE_EXPR
12423 && ((code0 == PLUS_EXPR && is_positive > 0)
12424 || (code0 == MINUS_EXPR && is_positive < 0)))
12426 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12427 fold_overflow_warning (("assuming signed overflow does "
12428 "not occur when assuming that "
12429 "(X + c) <= X is always false"),
12430 WARN_STRICT_OVERFLOW_ALL);
12431 return constant_boolean_node (0, type);
12434 if (code == GE_EXPR
12435 && ((code0 == MINUS_EXPR && is_positive > 0)
12436 || (code0 == PLUS_EXPR && is_positive < 0)))
12438 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12439 fold_overflow_warning (("assuming signed overflow does "
12440 "not occur when assuming that "
12441 "(X - c) >= X is always false"),
12442 WARN_STRICT_OVERFLOW_ALL);
12443 return constant_boolean_node (0, type);
12448 /* Comparisons with the highest or lowest possible integer of
12449 the specified precision will have known values. */
12451 tree arg1_type = TREE_TYPE (arg1);
12452 unsigned int width = TYPE_PRECISION (arg1_type);
12454 if (TREE_CODE (arg1) == INTEGER_CST
12455 && width <= 2 * HOST_BITS_PER_WIDE_INT
12456 && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
12458 HOST_WIDE_INT signed_max_hi;
12459 unsigned HOST_WIDE_INT signed_max_lo;
12460 unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;
12462 if (width <= HOST_BITS_PER_WIDE_INT)
12464 signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
12469 if (TYPE_UNSIGNED (arg1_type))
12471 max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
12477 max_lo = signed_max_lo;
12478 min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
12484 width -= HOST_BITS_PER_WIDE_INT;
12485 signed_max_lo = -1;
12486 signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
12491 if (TYPE_UNSIGNED (arg1_type))
12493 max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
12498 max_hi = signed_max_hi;
12499 min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
12503 if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
12504 && TREE_INT_CST_LOW (arg1) == max_lo)
12508 return omit_one_operand (type, integer_zero_node, arg0);
12511 return fold_build2 (EQ_EXPR, type, op0, op1);
12514 return omit_one_operand (type, integer_one_node, arg0);
12517 return fold_build2 (NE_EXPR, type, op0, op1);
12519 /* The GE_EXPR and LT_EXPR cases above are not normally
12520 reached because of previous transformations. */
12525 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
12527 && TREE_INT_CST_LOW (arg1) == max_lo - 1)
12531 arg1 = const_binop (PLUS_EXPR, arg1,
12532 build_int_cst (TREE_TYPE (arg1), 1), 0);
12533 return fold_build2 (EQ_EXPR, type,
12534 fold_convert (TREE_TYPE (arg1), arg0),
12537 arg1 = const_binop (PLUS_EXPR, arg1,
12538 build_int_cst (TREE_TYPE (arg1), 1), 0);
12539 return fold_build2 (NE_EXPR, type,
12540 fold_convert (TREE_TYPE (arg1), arg0),
12545 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
12547 && TREE_INT_CST_LOW (arg1) == min_lo)
12551 return omit_one_operand (type, integer_zero_node, arg0);
12554 return fold_build2 (EQ_EXPR, type, op0, op1);
12557 return omit_one_operand (type, integer_one_node, arg0);
12560 return fold_build2 (NE_EXPR, type, op0, op1);
12565 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
12567 && TREE_INT_CST_LOW (arg1) == min_lo + 1)
12571 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
12572 return fold_build2 (NE_EXPR, type,
12573 fold_convert (TREE_TYPE (arg1), arg0),
12576 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
12577 return fold_build2 (EQ_EXPR, type,
12578 fold_convert (TREE_TYPE (arg1), arg0),
12584 else if (TREE_INT_CST_HIGH (arg1) == signed_max_hi
12585 && TREE_INT_CST_LOW (arg1) == signed_max_lo
12586 && TYPE_UNSIGNED (arg1_type)
12587 /* We will flip the signedness of the comparison operator
12588 associated with the mode of arg1, so the sign bit is
12589 specified by this mode. Check that arg1 is the signed
12590 max associated with this sign bit. */
12591 && width == GET_MODE_BITSIZE (TYPE_MODE (arg1_type))
12592 /* signed_type does not work on pointer types. */
12593 && INTEGRAL_TYPE_P (arg1_type))
12595 /* The following case also applies to X < signed_max+1
12596 and X >= signed_max+1 because of previous transformations.  */
12597 if (code == LE_EXPR || code == GT_EXPR)
12600 st = signed_type_for (TREE_TYPE (arg1));
12601 return fold_build2 (code == LE_EXPR ? GE_EXPR : LT_EXPR,
12602 type, fold_convert (st, arg0),
12603 build_int_cst (st, 0));
12609 /* If we are comparing an ABS_EXPR with a constant, we can
12610 convert all the cases into explicit comparisons, but they may
12611 well not be faster than doing the ABS and one comparison.
12612 But ABS (X) <= C is a range comparison, which becomes a subtraction
12613 and a comparison, and is probably faster. */
12614 if (code == LE_EXPR
12615 && TREE_CODE (arg1) == INTEGER_CST
12616 && TREE_CODE (arg0) == ABS_EXPR
12617 && ! TREE_SIDE_EFFECTS (arg0)
12618 && (0 != (tem = negate_expr (arg1)))
12619 && TREE_CODE (tem) == INTEGER_CST
12620 && !TREE_OVERFLOW (tem))
12621 return fold_build2 (TRUTH_ANDIF_EXPR, type,
12622 build2 (GE_EXPR, type,
12623 TREE_OPERAND (arg0, 0), tem),
12624 build2 (LE_EXPR, type,
12625 TREE_OPERAND (arg0, 0), arg1));
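/* Editorial illustration (not part of the original source; helper name
   invented): "ABS (x) <= c" describes the closed range [-c, c], which
   is exactly what the TRUTH_ANDIF_EXPR built above encodes.
   Compiled-out sketch:  */
#if 0
#include <assert.h>
#include <stdlib.h>
static void
check_abs_range_fold (int x)
{
  /* Assumes x != INT_MIN so that abs () is well defined.  */
  assert ((abs (x) <= 5) == (x >= -5 && x <= 5));
}
#endif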
12627 /* Convert ABS_EXPR<x> >= 0 to true. */
12628 strict_overflow_p = false;
12629 if (code == GE_EXPR
12630 && (integer_zerop (arg1)
12631 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
12632 && real_zerop (arg1)))
12633 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
12635 if (strict_overflow_p)
12636 fold_overflow_warning (("assuming signed overflow does not occur "
12637 "when simplifying comparison of "
12638 "absolute value and zero"),
12639 WARN_STRICT_OVERFLOW_CONDITIONAL);
12640 return omit_one_operand (type, integer_one_node, arg0);
12643 /* Convert ABS_EXPR<x> < 0 to false. */
12644 strict_overflow_p = false;
12645 if (code == LT_EXPR
12646 && (integer_zerop (arg1) || real_zerop (arg1))
12647 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
12649 if (strict_overflow_p)
12650 fold_overflow_warning (("assuming signed overflow does not occur "
12651 "when simplifying comparison of "
12652 "absolute value and zero"),
12653 WARN_STRICT_OVERFLOW_CONDITIONAL);
12654 return omit_one_operand (type, integer_zero_node, arg0);
12657 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
12658 and similarly for >= into !=. */
12659 if ((code == LT_EXPR || code == GE_EXPR)
12660 && TYPE_UNSIGNED (TREE_TYPE (arg0))
12661 && TREE_CODE (arg1) == LSHIFT_EXPR
12662 && integer_onep (TREE_OPERAND (arg1, 0)))
12663 return build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
12664 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
12665 TREE_OPERAND (arg1, 1)),
12666 build_int_cst (TREE_TYPE (arg0), 0));
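/* Editorial illustration (not part of the original source; helper name
   invented): for unsigned x, "x < (1 << y)" asks whether all bits of x
   at position y and above are clear, which "(x >> y) == 0" tests
   directly.  Compiled-out sketch:  */
#if 0
#include <assert.h>
#include <limits.h>
static void
check_shift_compare_fold (unsigned x, unsigned y)
{
  if (y < sizeof (unsigned) * CHAR_BIT)
    assert ((x < (1u << y)) == ((x >> y) == 0));
}
#endif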
12668 if ((code == LT_EXPR || code == GE_EXPR)
12669 && TYPE_UNSIGNED (TREE_TYPE (arg0))
12670 && CONVERT_EXPR_P (arg1)
12671 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
12672 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
12674 build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
12675 fold_convert (TREE_TYPE (arg0),
12676 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
12677 TREE_OPERAND (TREE_OPERAND (arg1, 0),
12679 build_int_cst (TREE_TYPE (arg0), 0));
12683 case UNORDERED_EXPR:
12691 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
12693 t1 = fold_relational_const (code, type, arg0, arg1);
12694 if (t1 != NULL_TREE)
12698 /* If the first operand is NaN, the result is constant. */
12699 if (TREE_CODE (arg0) == REAL_CST
12700 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
12701 && (code != LTGT_EXPR || ! flag_trapping_math))
12703 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
12704 ? integer_zero_node
12705 : integer_one_node;
12706 return omit_one_operand (type, t1, arg1);
12709 /* If the second operand is NaN, the result is constant. */
12710 if (TREE_CODE (arg1) == REAL_CST
12711 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
12712 && (code != LTGT_EXPR || ! flag_trapping_math))
12714 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
12715 ? integer_zero_node
12716 : integer_one_node;
12717 return omit_one_operand (type, t1, arg0);
12720 /* Simplify unordered comparison of something with itself. */
12721 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
12722 && operand_equal_p (arg0, arg1, 0))
12723 return constant_boolean_node (1, type);
12725 if (code == LTGT_EXPR
12726 && !flag_trapping_math
12727 && operand_equal_p (arg0, arg1, 0))
12728 return constant_boolean_node (0, type);
12730 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
12732 tree targ0 = strip_float_extensions (arg0);
12733 tree targ1 = strip_float_extensions (arg1);
12734 tree newtype = TREE_TYPE (targ0);
12736 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
12737 newtype = TREE_TYPE (targ1);
12739 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
12740 return fold_build2 (code, type, fold_convert (newtype, targ0),
12741 fold_convert (newtype, targ1));
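/* Editorial illustration (not part of the original source; helper name
   invented): every float value is exactly representable as a double,
   so widening both operands cannot change the comparison result, which
   is why the extensions can be stripped.  Compiled-out sketch:  */
#if 0
#include <assert.h>
static void
check_float_extension_compare (float a, float b)
{
  assert (((double) a < (double) b) == (a < b));
}
#endif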
12746 case COMPOUND_EXPR:
12747 /* When pedantic, a compound expression can be neither an lvalue
12748 nor an integer constant expression. */
12749 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
12751 /* Don't let (0, 0) be a null pointer constant.  */
12752 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
12753 : fold_convert (type, arg1);
12754 return pedantic_non_lvalue (tem);
12757 if ((TREE_CODE (arg0) == REAL_CST
12758 && TREE_CODE (arg1) == REAL_CST)
12759 || (TREE_CODE (arg0) == INTEGER_CST
12760 && TREE_CODE (arg1) == INTEGER_CST))
12761 return build_complex (type, arg0, arg1);
12765 /* An ASSERT_EXPR should never be passed to fold_binary. */
12766 gcc_unreachable ();
12770 } /* switch (code) */
12773 /* Callback for walk_tree, looking for LABEL_EXPR.
12774 Returns *TP if it is a LABEL_EXPR; otherwise it returns NULL_TREE.
12775 Do not check the sub-tree of GOTO_EXPR. */
12778 contains_label_1 (tree *tp,
12779 int *walk_subtrees,
12780 void *data ATTRIBUTE_UNUSED)
12782 switch (TREE_CODE (*tp))
12787 *walk_subtrees = 0;
12794 /* Checks whether the sub-tree ST contains a label (a LABEL_EXPR) which is
12795 accessible from outside the sub-tree.  Returns false if no
12796 addressable label is found.  */
12799 contains_label_p (tree st)
12801 return (walk_tree (&st, contains_label_1 , NULL, NULL) != NULL_TREE);
12804 /* Fold a ternary expression of code CODE and type TYPE with operands
12805 OP0, OP1, and OP2. Return the folded expression if folding is
12806 successful. Otherwise, return NULL_TREE. */
12809 fold_ternary (enum tree_code code, tree type, tree op0, tree op1, tree op2)
12812 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
12813 enum tree_code_class kind = TREE_CODE_CLASS (code);
12815 gcc_assert (IS_EXPR_CODE_CLASS (kind)
12816 && TREE_CODE_LENGTH (code) == 3);
12818 /* Strip any conversions that don't change the mode. This is safe
12819 for every expression, except for a comparison expression because
12820 its signedness is derived from its operands. So, in the latter
12821 case, only strip conversions that don't change the signedness.
12823 Note that this is done as an internal manipulation within the
12824 constant folder, in order to find the simplest representation of
12825 the arguments so that their form can be studied.  In any case,
12826 the appropriate type conversions should be put back in the tree
12827 that will get out of the constant folder. */
12842 case COMPONENT_REF:
12843 if (TREE_CODE (arg0) == CONSTRUCTOR
12844 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
12846 unsigned HOST_WIDE_INT idx;
12848 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
12855 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
12856 so all simple results must be passed through pedantic_non_lvalue. */
12857 if (TREE_CODE (arg0) == INTEGER_CST)
12859 tree unused_op = integer_zerop (arg0) ? op1 : op2;
12860 tem = integer_zerop (arg0) ? op2 : op1;
12861 /* Only optimize constant conditions when the selected branch
12862 has the same type as the COND_EXPR. This avoids optimizing
12863 away "c ? x : throw", where the throw has a void type.
12864 Avoid throwing away an operand that contains a label.  */
12865 if ((!TREE_SIDE_EFFECTS (unused_op)
12866 || !contains_label_p (unused_op))
12867 && (! VOID_TYPE_P (TREE_TYPE (tem))
12868 || VOID_TYPE_P (type)))
12869 return pedantic_non_lvalue (tem);
12872 if (operand_equal_p (arg1, op2, 0))
12873 return pedantic_omit_one_operand (type, arg1, arg0);
12875 /* If we have A op B ? A : C, we may be able to convert this to a
12876 simpler expression, depending on the operation and the values
12877 of B and C. Signed zeros prevent all of these transformations,
12878 for reasons given above each one.
12880 Also try swapping the arguments and inverting the conditional. */
12881 if (COMPARISON_CLASS_P (arg0)
12882 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
12883 arg1, TREE_OPERAND (arg0, 1))
12884 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
12886 tem = fold_cond_expr_with_comparison (type, arg0, op1, op2);
12891 if (COMPARISON_CLASS_P (arg0)
12892 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
12894 TREE_OPERAND (arg0, 1))
12895 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
12897 tem = fold_truth_not_expr (arg0);
12898 if (tem && COMPARISON_CLASS_P (tem))
12900 tem = fold_cond_expr_with_comparison (type, tem, op2, op1);
12906 /* If the second operand is simpler than the third, swap them
12907 since that produces better jump optimization results. */
12908 if (truth_value_p (TREE_CODE (arg0))
12909 && tree_swap_operands_p (op1, op2, false))
12911 /* See if this can be inverted. If it can't, possibly because
12912 it was a floating-point inequality comparison, don't do anything.  */
12914 tem = fold_truth_not_expr (arg0);
12916 return fold_build3 (code, type, tem, op2, op1);
12919 /* Convert A ? 1 : 0 to simply A. */
12920 if (integer_onep (op1)
12921 && integer_zerop (op2)
12922 /* If we try to convert OP0 to our type, the
12923 call to fold will try to move the conversion inside
12924 a COND, which will recurse. In that case, the COND_EXPR
12925 is probably the best choice, so leave it alone. */
12926 && type == TREE_TYPE (arg0))
12927 return pedantic_non_lvalue (arg0);
12929 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
12930 over COND_EXPR in cases such as floating point comparisons. */
12931 if (integer_zerop (op1)
12932 && integer_onep (op2)
12933 && truth_value_p (TREE_CODE (arg0)))
12934 return pedantic_non_lvalue (fold_convert (type,
12935 invert_truthvalue (arg0)));
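/* Editorial illustration (not part of the original source; helper name
   invented): for a truth value a, the two folds above amount to the
   identities "a ? 1 : 0 == a" and "a ? 0 : 1 == !a".  Compiled-out
   sketch:  */
#if 0
#include <assert.h>
static void
check_cond_truth_folds (int a)
{
  a = (a != 0);  /* Normalize to a truth value first.  */
  assert ((a ? 1 : 0) == a && (a ? 0 : 1) == !a);
}
#endif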
12937 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
12938 if (TREE_CODE (arg0) == LT_EXPR
12939 && integer_zerop (TREE_OPERAND (arg0, 1))
12940 && integer_zerop (op2)
12941 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
12943 /* sign_bit_p only checks ARG1 bits within A's precision.
12944 If <sign bit of A> has wider type than A, bits outside
12945 of A's precision in <sign bit of A> need to be checked.
12946 If they are all 0, this optimization needs to be done
12947 in unsigned A's type; if they are all 1, in signed A's type;
12948 otherwise this can't be done.  */
12949 if (TYPE_PRECISION (TREE_TYPE (tem))
12950 < TYPE_PRECISION (TREE_TYPE (arg1))
12951 && TYPE_PRECISION (TREE_TYPE (tem))
12952 < TYPE_PRECISION (type))
12954 unsigned HOST_WIDE_INT mask_lo;
12955 HOST_WIDE_INT mask_hi;
12956 int inner_width, outer_width;
12959 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
12960 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
12961 if (outer_width > TYPE_PRECISION (type))
12962 outer_width = TYPE_PRECISION (type);
12964 if (outer_width > HOST_BITS_PER_WIDE_INT)
12966 mask_hi = ((unsigned HOST_WIDE_INT) -1
12967 >> (2 * HOST_BITS_PER_WIDE_INT - outer_width));
12973 mask_lo = ((unsigned HOST_WIDE_INT) -1
12974 >> (HOST_BITS_PER_WIDE_INT - outer_width));
12976 if (inner_width > HOST_BITS_PER_WIDE_INT)
12978 mask_hi &= ~((unsigned HOST_WIDE_INT) -1
12979 >> (HOST_BITS_PER_WIDE_INT - inner_width));
12983 mask_lo &= ~((unsigned HOST_WIDE_INT) -1
12984 >> (HOST_BITS_PER_WIDE_INT - inner_width));
12986 if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == mask_hi
12987 && (TREE_INT_CST_LOW (arg1) & mask_lo) == mask_lo)
12989 tem_type = signed_type_for (TREE_TYPE (tem));
12990 tem = fold_convert (tem_type, tem);
12992 else if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == 0
12993 && (TREE_INT_CST_LOW (arg1) & mask_lo) == 0)
12995 tem_type = unsigned_type_for (TREE_TYPE (tem));
12996 tem = fold_convert (tem_type, tem);
13003 return fold_convert (type,
13004 fold_build2 (BIT_AND_EXPR,
13005 TREE_TYPE (tem), tem,
13006 fold_convert (TREE_TYPE (tem),
13010 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
13011 already handled above. */
13012 if (TREE_CODE (arg0) == BIT_AND_EXPR
13013 && integer_onep (TREE_OPERAND (arg0, 1))
13014 && integer_zerop (op2)
13015 && integer_pow2p (arg1))
13017 tree tem = TREE_OPERAND (arg0, 0);
13019 if (TREE_CODE (tem) == RSHIFT_EXPR
13020 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
13021 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
13022 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
13023 return fold_build2 (BIT_AND_EXPR, type,
13024 TREE_OPERAND (tem, 0), arg1);
13027 /* A & N ? N : 0 is simply A & N if N is a power of two. This
13028 is probably obsolete because the first operand should be a
13029 truth value (that's why we have the two cases above), but let's
13030 leave it in until we can confirm this for all front-ends. */
13031 if (integer_zerop (op2)
13032 && TREE_CODE (arg0) == NE_EXPR
13033 && integer_zerop (TREE_OPERAND (arg0, 1))
13034 && integer_pow2p (arg1)
13035 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
13036 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13037 arg1, OEP_ONLY_CONST))
13038 return pedantic_non_lvalue (fold_convert (type,
13039 TREE_OPERAND (arg0, 0)));
13041 /* Convert A ? B : 0 into A && B if A and B are truth values. */
13042 if (integer_zerop (op2)
13043 && truth_value_p (TREE_CODE (arg0))
13044 && truth_value_p (TREE_CODE (arg1)))
13045 return fold_build2 (TRUTH_ANDIF_EXPR, type,
13046 fold_convert (type, arg0),
13049 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
13050 if (integer_onep (op2)
13051 && truth_value_p (TREE_CODE (arg0))
13052 && truth_value_p (TREE_CODE (arg1)))
13054 /* Only perform the transformation if ARG0 is easily inverted.  */
13055 tem = fold_truth_not_expr (arg0);
13057 return fold_build2 (TRUTH_ORIF_EXPR, type,
13058 fold_convert (type, tem),
13062 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
13063 if (integer_zerop (arg1)
13064 && truth_value_p (TREE_CODE (arg0))
13065 && truth_value_p (TREE_CODE (op2)))
13067 /* Only perform the transformation if ARG0 is easily inverted.  */
13068 tem = fold_truth_not_expr (arg0);
13070 return fold_build2 (TRUTH_ANDIF_EXPR, type,
13071 fold_convert (type, tem),
13075 /* Convert A ? 1 : B into A || B if A and B are truth values. */
13076 if (integer_onep (arg1)
13077 && truth_value_p (TREE_CODE (arg0))
13078 && truth_value_p (TREE_CODE (op2)))
13079 return fold_build2 (TRUTH_ORIF_EXPR, type,
13080 fold_convert (type, arg0),
13086 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
13087 of fold_ternary on them. */
13088 gcc_unreachable ();
13090 case BIT_FIELD_REF:
13091 if ((TREE_CODE (arg0) == VECTOR_CST
13092 || (TREE_CODE (arg0) == CONSTRUCTOR && TREE_CONSTANT (arg0)))
13093 && type == TREE_TYPE (TREE_TYPE (arg0)))
13095 unsigned HOST_WIDE_INT width = tree_low_cst (arg1, 1);
13096 unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);
13099 && simple_cst_equal (arg1, TYPE_SIZE (type)) == 1
13100 && (idx % width) == 0
13101 && (idx = idx / width)
13102 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
13104 tree elements = NULL_TREE;
13106 if (TREE_CODE (arg0) == VECTOR_CST)
13107 elements = TREE_VECTOR_CST_ELTS (arg0);
13110 unsigned HOST_WIDE_INT idx;
13113 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (arg0), idx, value)
13114 elements = tree_cons (NULL_TREE, value, elements);
13116 while (idx-- > 0 && elements)
13117 elements = TREE_CHAIN (elements);
13119 return TREE_VALUE (elements);
13121 return fold_convert (type, integer_zero_node);
13125 /* A bit-field-ref that referenced the full argument can be stripped. */
13126 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
13127 && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_low_cst (arg1, 1)
13128 && integer_zerop (op2))
13129 return fold_convert (type, arg0);
13135 } /* switch (code) */
13138 /* Perform constant folding and related simplification of EXPR.
13139 The related simplifications include x*1 => x, x*0 => 0, etc.,
13140 and application of the associative law.
13141 NOP_EXPR conversions may be removed freely (as long as we
13142 are careful not to change the type of the overall expression).
13143 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
13144 but we can constant-fold them if they have constant operands. */
13146 #ifdef ENABLE_FOLD_CHECKING
13147 # define fold(x) fold_1 (x)
13148 static tree fold_1 (tree);
13154 const tree t = expr;
13155 enum tree_code code = TREE_CODE (t);
13156 enum tree_code_class kind = TREE_CODE_CLASS (code);
13159 /* Return right away if a constant. */
13160 if (kind == tcc_constant)
13163 /* CALL_EXPR-like objects with variable numbers of operands are
13164 treated specially. */
13165 if (kind == tcc_vl_exp)
13167 if (code == CALL_EXPR)
13169 tem = fold_call_expr (expr, false);
13170 return tem ? tem : expr;
13175 if (IS_EXPR_CODE_CLASS (kind))
13177 tree type = TREE_TYPE (t);
13178 tree op0, op1, op2;
13180 switch (TREE_CODE_LENGTH (code))
13183 op0 = TREE_OPERAND (t, 0);
13184 tem = fold_unary (code, type, op0);
13185 return tem ? tem : expr;
13187 op0 = TREE_OPERAND (t, 0);
13188 op1 = TREE_OPERAND (t, 1);
13189 tem = fold_binary (code, type, op0, op1);
13190 return tem ? tem : expr;
13192 op0 = TREE_OPERAND (t, 0);
13193 op1 = TREE_OPERAND (t, 1);
13194 op2 = TREE_OPERAND (t, 2);
13195 tem = fold_ternary (code, type, op0, op1, op2);
13196 return tem ? tem : expr;
13206 tree op0 = TREE_OPERAND (t, 0);
13207 tree op1 = TREE_OPERAND (t, 1);
13209 if (TREE_CODE (op1) == INTEGER_CST
13210 && TREE_CODE (op0) == CONSTRUCTOR
13211 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
13213 VEC(constructor_elt,gc) *elts = CONSTRUCTOR_ELTS (op0);
13214 unsigned HOST_WIDE_INT end = VEC_length (constructor_elt, elts);
13215 unsigned HOST_WIDE_INT begin = 0;
13217 /* Find a matching index by means of a binary search. */
13218 while (begin != end)
13220 unsigned HOST_WIDE_INT middle = (begin + end) / 2;
13221 tree index = VEC_index (constructor_elt, elts, middle)->index;
13223 if (TREE_CODE (index) == INTEGER_CST
13224 && tree_int_cst_lt (index, op1))
13225 begin = middle + 1;
13226 else if (TREE_CODE (index) == INTEGER_CST
13227 && tree_int_cst_lt (op1, index))
13229 else if (TREE_CODE (index) == RANGE_EXPR
13230 && tree_int_cst_lt (TREE_OPERAND (index, 1), op1))
13231 begin = middle + 1;
13232 else if (TREE_CODE (index) == RANGE_EXPR
13233 && tree_int_cst_lt (op1, TREE_OPERAND (index, 0)))
13236 return VEC_index (constructor_elt, elts, middle)->value;
13244 return fold (DECL_INITIAL (t));
13248 } /* switch (code) */
13251 #ifdef ENABLE_FOLD_CHECKING
13254 static void fold_checksum_tree (const_tree, struct md5_ctx *, htab_t);
13255 static void fold_check_failed (const_tree, const_tree);
13256 void print_fold_checksum (const_tree);
13258 /* When --enable-checking=fold, compute a digest of expr before
13259 and after the actual fold call to check that fold did not
13260 accidentally change the original expr.  */
13266 struct md5_ctx ctx;
13267 unsigned char checksum_before[16], checksum_after[16];
13270 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13271 md5_init_ctx (&ctx);
13272 fold_checksum_tree (expr, &ctx, ht);
13273 md5_finish_ctx (&ctx, checksum_before);
13276 ret = fold_1 (expr);
13278 md5_init_ctx (&ctx);
13279 fold_checksum_tree (expr, &ctx, ht);
13280 md5_finish_ctx (&ctx, checksum_after);
13283 if (memcmp (checksum_before, checksum_after, 16))
13284 fold_check_failed (expr, ret);
13290 print_fold_checksum (const_tree expr)
13292 struct md5_ctx ctx;
13293 unsigned char checksum[16], cnt;
13296 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13297 md5_init_ctx (&ctx);
13298 fold_checksum_tree (expr, &ctx, ht);
13299 md5_finish_ctx (&ctx, checksum);
13301 for (cnt = 0; cnt < 16; ++cnt)
13302 fprintf (stderr, "%02x", checksum[cnt]);
13303 putc ('\n', stderr);
13307 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
13309 internal_error ("fold check: original tree changed by fold");
13313 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx, htab_t ht)
13316 enum tree_code code;
13317 struct tree_function_decl buf;
13322 gcc_assert ((sizeof (struct tree_exp) + 5 * sizeof (tree)
13323 <= sizeof (struct tree_function_decl))
13324 && sizeof (struct tree_type) <= sizeof (struct tree_function_decl));
13327 slot = (const void **) htab_find_slot (ht, expr, INSERT);
13331 code = TREE_CODE (expr);
13332 if (TREE_CODE_CLASS (code) == tcc_declaration
13333 && DECL_ASSEMBLER_NAME_SET_P (expr))
13335 /* Allow DECL_ASSEMBLER_NAME to be modified. */
13336 memcpy ((char *) &buf, expr, tree_size (expr));
13337 SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
13338 expr = (tree) &buf;
13340 else if (TREE_CODE_CLASS (code) == tcc_type
13341 && (TYPE_POINTER_TO (expr) || TYPE_REFERENCE_TO (expr)
13342 || TYPE_CACHED_VALUES_P (expr)
13343 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)))
13345 /* Allow these fields to be modified. */
13347 memcpy ((char *) &buf, expr, tree_size (expr));
13348 expr = tmp = (tree) &buf;
13349 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
13350 TYPE_POINTER_TO (tmp) = NULL;
13351 TYPE_REFERENCE_TO (tmp) = NULL;
13352 if (TYPE_CACHED_VALUES_P (tmp))
13354 TYPE_CACHED_VALUES_P (tmp) = 0;
13355 TYPE_CACHED_VALUES (tmp) = NULL;
13358 md5_process_bytes (expr, tree_size (expr), ctx);
13359 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
13360 if (TREE_CODE_CLASS (code) != tcc_type
13361 && TREE_CODE_CLASS (code) != tcc_declaration
13362 && code != TREE_LIST
13363 && code != SSA_NAME)
13364 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
13365 switch (TREE_CODE_CLASS (code))
13371 md5_process_bytes (TREE_STRING_POINTER (expr),
13372 TREE_STRING_LENGTH (expr), ctx);
13375 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
13376 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
13379 fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
13385 case tcc_exceptional:
13389 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
13390 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
13391 expr = TREE_CHAIN (expr);
13392 goto recursive_label;
13395 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
13396 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
13402 case tcc_expression:
13403 case tcc_reference:
13404 case tcc_comparison:
13407 case tcc_statement:
13409 len = TREE_OPERAND_LENGTH (expr);
13410 for (i = 0; i < len; ++i)
13411 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
13413 case tcc_declaration:
13414 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
13415 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
13416 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
13418 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
13419 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
13420 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
13421 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
13422 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
13424 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
13425 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
13427 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
13429 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
13430 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
13431 fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
13435 if (TREE_CODE (expr) == ENUMERAL_TYPE)
13436 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
13437 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
13438 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
13439 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
13440 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
13441 if (INTEGRAL_TYPE_P (expr)
13442 || SCALAR_FLOAT_TYPE_P (expr))
13444 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
13445 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
13447 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
13448 if (TREE_CODE (expr) == RECORD_TYPE
13449 || TREE_CODE (expr) == UNION_TYPE
13450 || TREE_CODE (expr) == QUAL_UNION_TYPE)
13451 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
13452 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
13459 /* Helper function for outputting the checksum of a tree T. When
13460 debugging with gdb, you can "define mynext" to be "next" followed
13461 by "call debug_fold_checksum (op0)", then just trace down till the outputs differ.  */
13465 debug_fold_checksum (const_tree t)
13468 unsigned char checksum[16];
13469 struct md5_ctx ctx;
13470 htab_t ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13472 md5_init_ctx (&ctx);
13473 fold_checksum_tree (t, &ctx, ht);
13474 md5_finish_ctx (&ctx, checksum);
13477 for (i = 0; i < 16; i++)
13478 fprintf (stderr, "%d ", checksum[i]);
13480 fprintf (stderr, "\n");
13485 /* Fold a unary tree expression with code CODE of type TYPE with an
13486 operand OP0. Return a folded expression if successful. Otherwise,
13487 return a tree expression with code CODE of type TYPE with an operand OP0.  */
13491 fold_build1_stat (enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
13494 #ifdef ENABLE_FOLD_CHECKING
13495 unsigned char checksum_before[16], checksum_after[16];
13496 struct md5_ctx ctx;
13499 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13500 md5_init_ctx (&ctx);
13501 fold_checksum_tree (op0, &ctx, ht);
13502 md5_finish_ctx (&ctx, checksum_before);
13506 tem = fold_unary (code, type, op0);
13508 tem = build1_stat (code, type, op0 PASS_MEM_STAT);
13510 #ifdef ENABLE_FOLD_CHECKING
13511 md5_init_ctx (&ctx);
13512 fold_checksum_tree (op0, &ctx, ht);
13513 md5_finish_ctx (&ctx, checksum_after);
13516 if (memcmp (checksum_before, checksum_after, 16))
13517 fold_check_failed (op0, tem);
13522 /* Fold a binary tree expression with code CODE of type TYPE with
13523 operands OP0 and OP1. Return a folded expression if successful.
13524 Otherwise, return a tree expression with code CODE of type TYPE
13525 with operands OP0 and OP1. */
13528 fold_build2_stat (enum tree_code code, tree type, tree op0, tree op1
13532 #ifdef ENABLE_FOLD_CHECKING
13533 unsigned char checksum_before_op0[16],
13534 checksum_before_op1[16],
13535 checksum_after_op0[16],
13536 checksum_after_op1[16];
13537 struct md5_ctx ctx;
13540 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13541 md5_init_ctx (&ctx);
13542 fold_checksum_tree (op0, &ctx, ht);
13543 md5_finish_ctx (&ctx, checksum_before_op0);
13546 md5_init_ctx (&ctx);
13547 fold_checksum_tree (op1, &ctx, ht);
13548 md5_finish_ctx (&ctx, checksum_before_op1);
13552 tem = fold_binary (code, type, op0, op1);
13554 tem = build2_stat (code, type, op0, op1 PASS_MEM_STAT);
13556 #ifdef ENABLE_FOLD_CHECKING
13557 md5_init_ctx (&ctx);
13558 fold_checksum_tree (op0, &ctx, ht);
13559 md5_finish_ctx (&ctx, checksum_after_op0);
13562 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
13563 fold_check_failed (op0, tem);
13565 md5_init_ctx (&ctx);
13566 fold_checksum_tree (op1, &ctx, ht);
13567 md5_finish_ctx (&ctx, checksum_after_op1);
13570 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
13571 fold_check_failed (op1, tem);
13576 /* Fold a ternary tree expression with code CODE of type TYPE with
13577 operands OP0, OP1, and OP2. Return a folded expression if
13578 successful. Otherwise, return a tree expression with code CODE of
13579 type TYPE with operands OP0, OP1, and OP2. */
13582 fold_build3_stat (enum tree_code code, tree type, tree op0, tree op1, tree op2
13586 #ifdef ENABLE_FOLD_CHECKING
13587 unsigned char checksum_before_op0[16],
13588 checksum_before_op1[16],
13589 checksum_before_op2[16],
13590 checksum_after_op0[16],
13591 checksum_after_op1[16],
13592 checksum_after_op2[16];
13593 struct md5_ctx ctx;
13596 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13597 md5_init_ctx (&ctx);
13598 fold_checksum_tree (op0, &ctx, ht);
13599 md5_finish_ctx (&ctx, checksum_before_op0);
13602 md5_init_ctx (&ctx);
13603 fold_checksum_tree (op1, &ctx, ht);
13604 md5_finish_ctx (&ctx, checksum_before_op1);
13607 md5_init_ctx (&ctx);
13608 fold_checksum_tree (op2, &ctx, ht);
13609 md5_finish_ctx (&ctx, checksum_before_op2);
13613 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
13614 tem = fold_ternary (code, type, op0, op1, op2);
13616 tem = build3_stat (code, type, op0, op1, op2 PASS_MEM_STAT);
13618 #ifdef ENABLE_FOLD_CHECKING
13619 md5_init_ctx (&ctx);
13620 fold_checksum_tree (op0, &ctx, ht);
13621 md5_finish_ctx (&ctx, checksum_after_op0);
13624 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
13625 fold_check_failed (op0, tem);
13627 md5_init_ctx (&ctx);
13628 fold_checksum_tree (op1, &ctx, ht);
13629 md5_finish_ctx (&ctx, checksum_after_op1);
13632 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
13633 fold_check_failed (op1, tem);
13635 md5_init_ctx (&ctx);
13636 fold_checksum_tree (op2, &ctx, ht);
13637 md5_finish_ctx (&ctx, checksum_after_op2);
13640 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
13641 fold_check_failed (op2, tem);
13646 /* Fold a CALL_EXPR expression of type TYPE with function operand FN,
13647 NARGS arguments in ARGARRAY, and a null static chain.
13648 Return a folded expression if successful. Otherwise, return a CALL_EXPR
13649 of type TYPE from the given operands as constructed by build_call_array. */
13652 fold_build_call_array (tree type, tree fn, int nargs, tree *argarray)
13655 #ifdef ENABLE_FOLD_CHECKING
13656 unsigned char checksum_before_fn[16],
13657 checksum_before_arglist[16],
13658 checksum_after_fn[16],
13659 checksum_after_arglist[16];
13660 struct md5_ctx ctx;
13664 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13665 md5_init_ctx (&ctx);
13666 fold_checksum_tree (fn, &ctx, ht);
13667 md5_finish_ctx (&ctx, checksum_before_fn);
13670 md5_init_ctx (&ctx);
13671 for (i = 0; i < nargs; i++)
13672 fold_checksum_tree (argarray[i], &ctx, ht);
13673 md5_finish_ctx (&ctx, checksum_before_arglist);
13677 tem = fold_builtin_call_array (type, fn, nargs, argarray);
13679 #ifdef ENABLE_FOLD_CHECKING
13680 md5_init_ctx (&ctx);
13681 fold_checksum_tree (fn, &ctx, ht);
13682 md5_finish_ctx (&ctx, checksum_after_fn);
13685 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
13686 fold_check_failed (fn, tem);
13688 md5_init_ctx (&ctx);
13689 for (i = 0; i < nargs; i++)
13690 fold_checksum_tree (argarray[i], &ctx, ht);
13691 md5_finish_ctx (&ctx, checksum_after_arglist);
13694 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
13695 fold_check_failed (NULL_TREE, tem);
13700 /* Perform constant folding and related simplification of initializer
13701 expression EXPR. These behave identically to "fold_buildN" but ignore
13702 potential run-time traps and exceptions that fold must preserve. */
13704 #define START_FOLD_INIT \
13705 int saved_signaling_nans = flag_signaling_nans;\
13706 int saved_trapping_math = flag_trapping_math;\
13707 int saved_rounding_math = flag_rounding_math;\
13708 int saved_trapv = flag_trapv;\
13709 int saved_folding_initializer = folding_initializer;\
13710 flag_signaling_nans = 0;\
13711 flag_trapping_math = 0;\
13712 flag_rounding_math = 0;\
13714 folding_initializer = 1;
13716 #define END_FOLD_INIT \
13717 flag_signaling_nans = saved_signaling_nans;\
13718 flag_trapping_math = saved_trapping_math;\
13719 flag_rounding_math = saved_rounding_math;\
13720 flag_trapv = saved_trapv;\
13721 folding_initializer = saved_folding_initializer;
13724 fold_build1_initializer (enum tree_code code, tree type, tree op)
13729 result = fold_build1 (code, type, op);
13736 fold_build2_initializer (enum tree_code code, tree type, tree op0, tree op1)
13741 result = fold_build2 (code, type, op0, op1);
13748 fold_build3_initializer (enum tree_code code, tree type, tree op0, tree op1,
13754 result = fold_build3 (code, type, op0, op1, op2);
13761 fold_build_call_array_initializer (tree type, tree fn,
13762 int nargs, tree *argarray)
13767 result = fold_build_call_array (type, fn, nargs, argarray);
13773 #undef START_FOLD_INIT
13774 #undef END_FOLD_INIT
13776 /* Determine if first argument is a multiple of second argument. Return 0 if
13777 it is not, or we cannot easily determine it to be.
13779 An example of the sort of thing we care about (at this point; this routine
13780 could surely be made more general, and expanded to do what the *_DIV_EXPR's
13781 fold cases do now) is discovering that
13783 SAVE_EXPR (I) * SAVE_EXPR (J * 8) is a multiple of SAVE_EXPR (J * 8)
13789 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
13791 This code also handles discovering that
13793 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
13795 is a multiple of 8 so we don't have to worry about dealing with a
13796 possible remainder.
13798 Note that we *look* inside a SAVE_EXPR only to determine how it was
13799 calculated; it is not safe for fold to do much of anything else with the
13800 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
13801 at run time. For example, the latter example above *cannot* be implemented
13802 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
13803 evaluation time of the original SAVE_EXPR is not necessarily the same at
13804 the time the new expression is evaluated. The only optimization of this
13805 sort that would be valid is changing
13807 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8) divided by 8 to
13811 SAVE_EXPR (I) * SAVE_EXPR (J)
13813 (where the same SAVE_EXPR (J) is used in the original and the
13814 transformed version). */
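/* For example (illustrative only):

     multiple_of_p (sizetype, size_int (24), size_int (8))

   returns 1 through the INTEGER_CST case below, because 24 % 8 == 0;
   a negative TOP or BOTTOM in an unsigned TYPE is rejected before the
   modulus is tried.  */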
13817 multiple_of_p (tree type, const_tree top, const_tree bottom)
13819 if (operand_equal_p (top, bottom, 0))
13822 if (TREE_CODE (type) != INTEGER_TYPE)
13825 switch (TREE_CODE (top))
13828 /* Bitwise and provides a power of two multiple. If the mask is
13829 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
13830 if (!integer_pow2p (bottom))
13835 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
13836 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
13840 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
13841 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
13844 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
13848 op1 = TREE_OPERAND (top, 1);
13849 /* const_binop may not detect overflow correctly,
13850 so check for it explicitly here. */
13851 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
13852 > TREE_INT_CST_LOW (op1)
13853 && TREE_INT_CST_HIGH (op1) == 0
13854 && 0 != (t1 = fold_convert (type,
13855 const_binop (LSHIFT_EXPR,
13858 && !TREE_OVERFLOW (t1))
13859 return multiple_of_p (type, t1, bottom);
13864 /* Can't handle conversions from non-integral or wider integral type. */
13865 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
13866 || (TYPE_PRECISION (type)
13867 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
/* ... fall through ...  */
13873 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
13876 if (TREE_CODE (bottom) != INTEGER_CST
13877 || integer_zerop (bottom)
13878 || (TYPE_UNSIGNED (type)
13879 && (tree_int_cst_sgn (top) < 0
13880 || tree_int_cst_sgn (bottom) < 0)))
13882 return integer_zerop (int_const_binop (TRUNC_MOD_EXPR,
13890 /* Return true if CODE or TYPE is known to be non-negative. */
13893 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
13895 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
13896 && truth_value_p (code))
/* Truth values evaluate to 0 or 1, which is nonnegative unless we
       have a signed:1 type (where the values are -1 and 0).  */
13903 /* Return true if (CODE OP0) is known to be non-negative. If the return
13904 value is based on the assumption that signed overflow is undefined,
13905 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13906 *STRICT_OVERFLOW_P. */
13909 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
13910 bool *strict_overflow_p)
13912 if (TYPE_UNSIGNED (type))
13918 /* We can't return 1 if flag_wrapv is set because
13919 ABS_EXPR<INT_MIN> = INT_MIN. */
13920 if (!INTEGRAL_TYPE_P (type))
13922 if (TYPE_OVERFLOW_UNDEFINED (type))
13924 *strict_overflow_p = true;
13929 case NON_LVALUE_EXPR:
13931 case FIX_TRUNC_EXPR:
13932 return tree_expr_nonnegative_warnv_p (op0,
13933 strict_overflow_p);
13937 tree inner_type = TREE_TYPE (op0);
13938 tree outer_type = type;
13940 if (TREE_CODE (outer_type) == REAL_TYPE)
13942 if (TREE_CODE (inner_type) == REAL_TYPE)
13943 return tree_expr_nonnegative_warnv_p (op0,
13944 strict_overflow_p);
13945 if (TREE_CODE (inner_type) == INTEGER_TYPE)
13947 if (TYPE_UNSIGNED (inner_type))
13949 return tree_expr_nonnegative_warnv_p (op0,
13950 strict_overflow_p);
13953 else if (TREE_CODE (outer_type) == INTEGER_TYPE)
13955 if (TREE_CODE (inner_type) == REAL_TYPE)
13956 return tree_expr_nonnegative_warnv_p (op0,
13957 strict_overflow_p);
13958 if (TREE_CODE (inner_type) == INTEGER_TYPE)
13959 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
13960 && TYPE_UNSIGNED (inner_type);
13966 return tree_simple_nonnegative_warnv_p (code, type);
/* We don't know the sign of `t', so be conservative and return false.  */
13973 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
13974 value is based on the assumption that signed overflow is undefined,
13975 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13976 *STRICT_OVERFLOW_P. */
13979 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
13980 tree op1, bool *strict_overflow_p)
13982 if (TYPE_UNSIGNED (type))
13987 case POINTER_PLUS_EXPR:
13989 if (FLOAT_TYPE_P (type))
13990 return (tree_expr_nonnegative_warnv_p (op0,
13992 && tree_expr_nonnegative_warnv_p (op1,
13993 strict_overflow_p));
13995 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
13996 both unsigned and at least 2 bits shorter than the result. */
13997 if (TREE_CODE (type) == INTEGER_TYPE
13998 && TREE_CODE (op0) == NOP_EXPR
13999 && TREE_CODE (op1) == NOP_EXPR)
14001 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
14002 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
14003 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
14004 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
14006 unsigned int prec = MAX (TYPE_PRECISION (inner1),
14007 TYPE_PRECISION (inner2)) + 1;
14008 return prec < TYPE_PRECISION (type);
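/* E.g. two 8-bit unsigned values zero-extended into a 32-bit signed
	     sum: the sum is at most 0xff + 0xff = 0x1fe, which needs only
	     MAX (8, 8) + 1 = 9 bits, so the sign bit of the result is
	     never set.  */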
14014 if (FLOAT_TYPE_P (type))
14016 /* x * x for floating point x is always non-negative. */
14017 if (operand_equal_p (op0, op1, 0))
14019 return (tree_expr_nonnegative_warnv_p (op0,
14021 && tree_expr_nonnegative_warnv_p (op1,
14022 strict_overflow_p));
/* zero_extend(x) * zero_extend(y) is non-negative if x and y are
	 both unsigned and their total number of bits is less than that of
	 the result.  */
14027 if (TREE_CODE (type) == INTEGER_TYPE
14028 && TREE_CODE (op0) == NOP_EXPR
14029 && TREE_CODE (op1) == NOP_EXPR)
14031 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
14032 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
14033 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
14034 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
14035 return TYPE_PRECISION (inner1) + TYPE_PRECISION (inner2)
14036 < TYPE_PRECISION (type);
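/* E.g. the product of two 8-bit unsigned values widened to a 32-bit
	     signed type is at most 0xff * 0xff = 0xfe01, which fits in
	     8 + 8 = 16 bits, so it can never set the sign bit either.  */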
14042 return (tree_expr_nonnegative_warnv_p (op0,
14044 || tree_expr_nonnegative_warnv_p (op1,
14045 strict_overflow_p));
14051 case TRUNC_DIV_EXPR:
14052 case CEIL_DIV_EXPR:
14053 case FLOOR_DIV_EXPR:
14054 case ROUND_DIV_EXPR:
14055 return (tree_expr_nonnegative_warnv_p (op0,
14057 && tree_expr_nonnegative_warnv_p (op1,
14058 strict_overflow_p));
14060 case TRUNC_MOD_EXPR:
14061 case CEIL_MOD_EXPR:
14062 case FLOOR_MOD_EXPR:
14063 case ROUND_MOD_EXPR:
14064 return tree_expr_nonnegative_warnv_p (op0,
14065 strict_overflow_p);
14067 return tree_simple_nonnegative_warnv_p (code, type);
/* We don't know the sign of `t', so be conservative and return false.  */
14074 /* Return true if T is known to be non-negative. If the return
14075 value is based on the assumption that signed overflow is undefined,
14076 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14077 *STRICT_OVERFLOW_P. */
14080 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
14082 if (TYPE_UNSIGNED (TREE_TYPE (t)))
14085 switch (TREE_CODE (t))
14088 return tree_int_cst_sgn (t) >= 0;
14091 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
14094 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
14097 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
14099 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
14100 strict_overflow_p));
14102 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
/* We don't know the sign of `t', so be conservative and return false.  */
14109 /* Return true if T is known to be non-negative. If the return
14110 value is based on the assumption that signed overflow is undefined,
14111 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14112 *STRICT_OVERFLOW_P. */
14115 tree_call_nonnegative_warnv_p (tree type, tree fndecl,
14116 tree arg0, tree arg1, bool *strict_overflow_p)
14118 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
14119 switch (DECL_FUNCTION_CODE (fndecl))
14121 CASE_FLT_FN (BUILT_IN_ACOS):
14122 CASE_FLT_FN (BUILT_IN_ACOSH):
14123 CASE_FLT_FN (BUILT_IN_CABS):
14124 CASE_FLT_FN (BUILT_IN_COSH):
14125 CASE_FLT_FN (BUILT_IN_ERFC):
14126 CASE_FLT_FN (BUILT_IN_EXP):
14127 CASE_FLT_FN (BUILT_IN_EXP10):
14128 CASE_FLT_FN (BUILT_IN_EXP2):
14129 CASE_FLT_FN (BUILT_IN_FABS):
14130 CASE_FLT_FN (BUILT_IN_FDIM):
14131 CASE_FLT_FN (BUILT_IN_HYPOT):
14132 CASE_FLT_FN (BUILT_IN_POW10):
14133 CASE_INT_FN (BUILT_IN_FFS):
14134 CASE_INT_FN (BUILT_IN_PARITY):
14135 CASE_INT_FN (BUILT_IN_POPCOUNT):
14136 case BUILT_IN_BSWAP32:
14137 case BUILT_IN_BSWAP64:
14141 CASE_FLT_FN (BUILT_IN_SQRT):
14142 /* sqrt(-0.0) is -0.0. */
14143 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
14145 return tree_expr_nonnegative_warnv_p (arg0,
14146 strict_overflow_p);
14148 CASE_FLT_FN (BUILT_IN_ASINH):
14149 CASE_FLT_FN (BUILT_IN_ATAN):
14150 CASE_FLT_FN (BUILT_IN_ATANH):
14151 CASE_FLT_FN (BUILT_IN_CBRT):
14152 CASE_FLT_FN (BUILT_IN_CEIL):
14153 CASE_FLT_FN (BUILT_IN_ERF):
14154 CASE_FLT_FN (BUILT_IN_EXPM1):
14155 CASE_FLT_FN (BUILT_IN_FLOOR):
14156 CASE_FLT_FN (BUILT_IN_FMOD):
14157 CASE_FLT_FN (BUILT_IN_FREXP):
14158 CASE_FLT_FN (BUILT_IN_LCEIL):
14159 CASE_FLT_FN (BUILT_IN_LDEXP):
14160 CASE_FLT_FN (BUILT_IN_LFLOOR):
14161 CASE_FLT_FN (BUILT_IN_LLCEIL):
14162 CASE_FLT_FN (BUILT_IN_LLFLOOR):
14163 CASE_FLT_FN (BUILT_IN_LLRINT):
14164 CASE_FLT_FN (BUILT_IN_LLROUND):
14165 CASE_FLT_FN (BUILT_IN_LRINT):
14166 CASE_FLT_FN (BUILT_IN_LROUND):
14167 CASE_FLT_FN (BUILT_IN_MODF):
14168 CASE_FLT_FN (BUILT_IN_NEARBYINT):
14169 CASE_FLT_FN (BUILT_IN_RINT):
14170 CASE_FLT_FN (BUILT_IN_ROUND):
14171 CASE_FLT_FN (BUILT_IN_SCALB):
14172 CASE_FLT_FN (BUILT_IN_SCALBLN):
14173 CASE_FLT_FN (BUILT_IN_SCALBN):
14174 CASE_FLT_FN (BUILT_IN_SIGNBIT):
14175 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
14176 CASE_FLT_FN (BUILT_IN_SINH):
14177 CASE_FLT_FN (BUILT_IN_TANH):
14178 CASE_FLT_FN (BUILT_IN_TRUNC):
14179 /* True if the 1st argument is nonnegative. */
14180 return tree_expr_nonnegative_warnv_p (arg0,
14181 strict_overflow_p);
14183 CASE_FLT_FN (BUILT_IN_FMAX):
14184 /* True if the 1st OR 2nd arguments are nonnegative. */
14185 return (tree_expr_nonnegative_warnv_p (arg0,
14187 || (tree_expr_nonnegative_warnv_p (arg1,
14188 strict_overflow_p)));
14190 CASE_FLT_FN (BUILT_IN_FMIN):
14191 /* True if the 1st AND 2nd arguments are nonnegative. */
14192 return (tree_expr_nonnegative_warnv_p (arg0,
14194 && (tree_expr_nonnegative_warnv_p (arg1,
14195 strict_overflow_p)));
14197 CASE_FLT_FN (BUILT_IN_COPYSIGN):
14198 /* True if the 2nd argument is nonnegative. */
14199 return tree_expr_nonnegative_warnv_p (arg1,
14200 strict_overflow_p);
14202 CASE_FLT_FN (BUILT_IN_POWI):
14203 /* True if the 1st argument is nonnegative or the second
14204 argument is an even integer. */
14205 if (TREE_CODE (arg1) == INTEGER_CST
14206 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
14208 return tree_expr_nonnegative_warnv_p (arg0,
14209 strict_overflow_p);
14211 CASE_FLT_FN (BUILT_IN_POW):
14212 /* True if the 1st argument is nonnegative or the second
14213 argument is an even integer valued real. */
14214 if (TREE_CODE (arg1) == REAL_CST)
14219 c = TREE_REAL_CST (arg1);
14220 n = real_to_integer (&c);
14223 REAL_VALUE_TYPE cint;
14224 real_from_integer (&cint, VOIDmode, n,
14225 n < 0 ? -1 : 0, 0);
14226 if (real_identical (&c, &cint))
14230 return tree_expr_nonnegative_warnv_p (arg0,
14231 strict_overflow_p);
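/* E.g. pow (x, 2.0): the exponent converts back to the even
	    integer 2, so the result is known nonnegative whatever the
	    sign of x.  */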
14236 return tree_simple_nonnegative_warnv_p (CALL_EXPR,
14240 /* Return true if T is known to be non-negative. If the return
14241 value is based on the assumption that signed overflow is undefined,
14242 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14243 *STRICT_OVERFLOW_P. */
14246 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
14248 enum tree_code code = TREE_CODE (t);
14249 if (TYPE_UNSIGNED (TREE_TYPE (t)))
14256 tree temp = TARGET_EXPR_SLOT (t);
14257 t = TARGET_EXPR_INITIAL (t);
14259 /* If the initializer is non-void, then it's a normal expression
14260 that will be assigned to the slot. */
14261 if (!VOID_TYPE_P (t))
14262 return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);
14264 /* Otherwise, the initializer sets the slot in some way. One common
14265 way is an assignment statement at the end of the initializer. */
14268 if (TREE_CODE (t) == BIND_EXPR)
14269 t = expr_last (BIND_EXPR_BODY (t));
14270 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
14271 || TREE_CODE (t) == TRY_CATCH_EXPR)
14272 t = expr_last (TREE_OPERAND (t, 0));
14273 else if (TREE_CODE (t) == STATEMENT_LIST)
14278 if (TREE_CODE (t) == MODIFY_EXPR
14279 && TREE_OPERAND (t, 0) == temp)
14280 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
14281 strict_overflow_p);
14288 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
14289 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
14291 return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
14292 get_callee_fndecl (t),
14295 strict_overflow_p);
14297 case COMPOUND_EXPR:
14299 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
14300 strict_overflow_p);
14302 return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
14303 strict_overflow_p);
14305 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
14306 strict_overflow_p);
14309 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
/* We don't know the sign of `t', so be conservative and return false.  */
14317 /* Return true if T is known to be non-negative. If the return
14318 value is based on the assumption that signed overflow is undefined,
14319 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14320 *STRICT_OVERFLOW_P. */
14323 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
14325 enum tree_code code;
14326 if (t == error_mark_node)
14329 code = TREE_CODE (t);
14330 switch (TREE_CODE_CLASS (code))
14333 case tcc_comparison:
14334 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
14336 TREE_OPERAND (t, 0),
14337 TREE_OPERAND (t, 1),
14338 strict_overflow_p);
14341 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
14343 TREE_OPERAND (t, 0),
14344 strict_overflow_p);
14347 case tcc_declaration:
14348 case tcc_reference:
14349 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
14357 case TRUTH_AND_EXPR:
14358 case TRUTH_OR_EXPR:
14359 case TRUTH_XOR_EXPR:
14360 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
14362 TREE_OPERAND (t, 0),
14363 TREE_OPERAND (t, 1),
14364 strict_overflow_p);
14365 case TRUTH_NOT_EXPR:
14366 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
14368 TREE_OPERAND (t, 0),
14369 strict_overflow_p);
14376 case WITH_SIZE_EXPR:
14380 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
14383 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p);
14387 /* Return true if `t' is known to be non-negative. Handle warnings
14388 about undefined signed overflow. */
14391 tree_expr_nonnegative_p (tree t)
14393 bool ret, strict_overflow_p;
14395 strict_overflow_p = false;
14396 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
14397 if (strict_overflow_p)
14398 fold_overflow_warning (("assuming signed overflow does not occur when "
14399 "determining that expression is always "
14401 WARN_STRICT_OVERFLOW_MISC);
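/* For instance, ABS_EXPR of a signed operand is only known nonnegative
   when signed overflow is undefined (ABS_EXPR <INT_MIN> is INT_MIN when
   it wraps), so a "true" answer derived that way is reported through
   fold_overflow_warning above.  */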
14406 /* Return true when (CODE OP0) is an address and is known to be nonzero.
14407 For floating point we further ensure that T is not denormal.
Similar logic is present in nonzero_address_p in rtlanal.c.
14410 If the return value is based on the assumption that signed overflow
14411 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
14412 change *STRICT_OVERFLOW_P. */
14415 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
14416 bool *strict_overflow_p)
14421 return tree_expr_nonzero_warnv_p (op0,
14422 strict_overflow_p);
14426 tree inner_type = TREE_TYPE (op0);
14427 tree outer_type = type;
14429 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
14430 && tree_expr_nonzero_warnv_p (op0,
14431 strict_overflow_p));
14435 case NON_LVALUE_EXPR:
14436 return tree_expr_nonzero_warnv_p (op0,
14437 strict_overflow_p);
14446 /* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
14447 For floating point we further ensure that T is not denormal.
Similar logic is present in nonzero_address_p in rtlanal.c.
14450 If the return value is based on the assumption that signed overflow
14451 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
14452 change *STRICT_OVERFLOW_P. */
14455 tree_binary_nonzero_warnv_p (enum tree_code code,
14458 tree op1, bool *strict_overflow_p)
14460 bool sub_strict_overflow_p;
14463 case POINTER_PLUS_EXPR:
14465 if (TYPE_OVERFLOW_UNDEFINED (type))
/* In the presence of negative values it is hard
	     to say anything.  */
14469 sub_strict_overflow_p = false;
14470 if (!tree_expr_nonnegative_warnv_p (op0,
14471 &sub_strict_overflow_p)
14472 || !tree_expr_nonnegative_warnv_p (op1,
14473 &sub_strict_overflow_p))
/* One of the operands must be positive and the other non-negative.  */
14476 /* We don't set *STRICT_OVERFLOW_P here: even if this value
14477 overflows, on a twos-complement machine the sum of two
14478 nonnegative numbers can never be zero. */
14479 return (tree_expr_nonzero_warnv_p (op0,
14481 || tree_expr_nonzero_warnv_p (op1,
14482 strict_overflow_p));
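/* Worked example for the comment above: with 32-bit int, INT_MAX + 1
	 wraps to INT_MIN, which is nonzero; the wrapped sum of a positive
	 and a nonnegative value always lands in [INT_MIN, -2], never 0.  */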
14487 if (TYPE_OVERFLOW_UNDEFINED (type))
14489 if (tree_expr_nonzero_warnv_p (op0,
14491 && tree_expr_nonzero_warnv_p (op1,
14492 strict_overflow_p))
14494 *strict_overflow_p = true;
14501 sub_strict_overflow_p = false;
14502 if (tree_expr_nonzero_warnv_p (op0,
14503 &sub_strict_overflow_p)
14504 && tree_expr_nonzero_warnv_p (op1,
14505 &sub_strict_overflow_p))
14507 if (sub_strict_overflow_p)
14508 *strict_overflow_p = true;
14513 sub_strict_overflow_p = false;
14514 if (tree_expr_nonzero_warnv_p (op0,
14515 &sub_strict_overflow_p))
14517 if (sub_strict_overflow_p)
14518 *strict_overflow_p = true;
14520 /* When both operands are nonzero, then MAX must be too. */
14521 if (tree_expr_nonzero_warnv_p (op1,
14522 strict_overflow_p))
14525 /* MAX where operand 0 is positive is positive. */
14526 return tree_expr_nonnegative_warnv_p (op0,
14527 strict_overflow_p);
14529 /* MAX where operand 1 is positive is positive. */
14530 else if (tree_expr_nonzero_warnv_p (op1,
14531 &sub_strict_overflow_p)
14532 && tree_expr_nonnegative_warnv_p (op1,
14533 &sub_strict_overflow_p))
14535 if (sub_strict_overflow_p)
14536 *strict_overflow_p = true;
14542 return (tree_expr_nonzero_warnv_p (op1,
14544 || tree_expr_nonzero_warnv_p (op0,
14545 strict_overflow_p));
14554 /* Return true when T is an address and is known to be nonzero.
14555 For floating point we further ensure that T is not denormal.
Similar logic is present in nonzero_address_p in rtlanal.c.
14558 If the return value is based on the assumption that signed overflow
14559 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
14560 change *STRICT_OVERFLOW_P. */
14563 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
14565 bool sub_strict_overflow_p;
14566 switch (TREE_CODE (t))
14569 return !integer_zerop (t);
14573 tree base = get_base_address (TREE_OPERAND (t, 0));
14578 /* Weak declarations may link to NULL. */
14579 if (VAR_OR_FUNCTION_DECL_P (base))
14580 return !DECL_WEAK (base);
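/* E.g. with "extern int x __attribute__ ((weak));", &x may compare
	   equal to 0 at run time, so only non-weak decls qualify.  */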
14582 /* Constants are never weak. */
14583 if (CONSTANT_CLASS_P (base))
14590 sub_strict_overflow_p = false;
14591 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
14592 &sub_strict_overflow_p)
14593 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
14594 &sub_strict_overflow_p))
14596 if (sub_strict_overflow_p)
14597 *strict_overflow_p = true;
14608 /* Return true when T is an address and is known to be nonzero.
14609 For floating point we further ensure that T is not denormal.
Similar logic is present in nonzero_address_p in rtlanal.c.
14612 If the return value is based on the assumption that signed overflow
14613 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
14614 change *STRICT_OVERFLOW_P. */
14617 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
14619 tree type = TREE_TYPE (t);
14620 enum tree_code code;
14622 /* Doing something useful for floating point would need more work. */
14623 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
14626 code = TREE_CODE (t);
14627 switch (TREE_CODE_CLASS (code))
14630 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
14631 strict_overflow_p);
14633 case tcc_comparison:
14634 return tree_binary_nonzero_warnv_p (code, type,
14635 TREE_OPERAND (t, 0),
14636 TREE_OPERAND (t, 1),
14637 strict_overflow_p);
14639 case tcc_declaration:
14640 case tcc_reference:
14641 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
14649 case TRUTH_NOT_EXPR:
14650 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
14651 strict_overflow_p);
14653 case TRUTH_AND_EXPR:
14654 case TRUTH_OR_EXPR:
14655 case TRUTH_XOR_EXPR:
14656 return tree_binary_nonzero_warnv_p (code, type,
14657 TREE_OPERAND (t, 0),
14658 TREE_OPERAND (t, 1),
14659 strict_overflow_p);
14666 case WITH_SIZE_EXPR:
14670 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
14672 case COMPOUND_EXPR:
14675 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
14676 strict_overflow_p);
14679 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
14680 strict_overflow_p);
14683 return alloca_call_p (t);
14691 /* Return true when T is an address and is known to be nonzero.
14692 Handle warnings about undefined signed overflow. */
14695 tree_expr_nonzero_p (tree t)
14697 bool ret, strict_overflow_p;
14699 strict_overflow_p = false;
14700 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
14701 if (strict_overflow_p)
14702 fold_overflow_warning (("assuming signed overflow does not occur when "
14703 "determining that expression is always "
14705 WARN_STRICT_OVERFLOW_MISC);
14709 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
14710 attempt to fold the expression to a constant without modifying TYPE,
14713 If the expression could be simplified to a constant, then return
14714 the constant. If the expression would not be simplified to a
14715 constant, then return NULL_TREE. */
14718 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
14720 tree tem = fold_binary (code, type, op0, op1);
14721 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
14724 /* Given the components of a unary expression CODE, TYPE and OP0,
14725 attempt to fold the expression to a constant without modifying
14728 If the expression could be simplified to a constant, then return
14729 the constant. If the expression would not be simplified to a
14730 constant, then return NULL_TREE. */
14733 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
14735 tree tem = fold_unary (code, type, op0);
14736 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
14739 /* If EXP represents referencing an element in a constant string
14740 (either via pointer arithmetic or array indexing), return the
14741 tree representing the value accessed, otherwise return NULL. */
14744 fold_read_from_constant_string (tree exp)
14746 if ((TREE_CODE (exp) == INDIRECT_REF
14747 || TREE_CODE (exp) == ARRAY_REF)
14748 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
14750 tree exp1 = TREE_OPERAND (exp, 0);
14754 if (TREE_CODE (exp) == INDIRECT_REF)
14755 string = string_constant (exp1, &index);
14758 tree low_bound = array_ref_low_bound (exp);
14759 index = fold_convert (sizetype, TREE_OPERAND (exp, 1));
/* Optimize the special case of a zero lower bound.

	 We convert the low_bound to sizetype to avoid some problems
	 with constant folding.  (E.g. suppose the lower bound is 1,
	 and its mode is QI.  Without the conversion, (ARRAY
	 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
	 +INDEX), which becomes (ARRAY+255+INDEX).  Oops!)  */
14768 if (! integer_zerop (low_bound))
14769 index = size_diffop (index, fold_convert (sizetype, low_bound));
14775 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
14776 && TREE_CODE (string) == STRING_CST
14777 && TREE_CODE (index) == INTEGER_CST
14778 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
14779 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
14781 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
14782 return build_int_cst_type (TREE_TYPE (exp),
14783 (TREE_STRING_POINTER (string)
14784 [TREE_INT_CST_LOW (index)]));
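/* For example, "abc"[1] (equivalently *("abc" + 1)) folds to the
   INTEGER_CST 'b' in the string's element type.  */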
/* Return the tree for neg (ARG0) when ARG0 is known to be either
   an integer, real, or fixed-point constant.

   TYPE is the type of the result.  */
14795 fold_negate_const (tree arg0, tree type)
14797 tree t = NULL_TREE;
14799 switch (TREE_CODE (arg0))
14803 unsigned HOST_WIDE_INT low;
14804 HOST_WIDE_INT high;
14805 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
14806 TREE_INT_CST_HIGH (arg0),
14808 t = force_fit_type_double (type, low, high, 1,
14809 (overflow | TREE_OVERFLOW (arg0))
14810 && !TYPE_UNSIGNED (type));
14815 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
14820 FIXED_VALUE_TYPE f;
14821 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
14822 &(TREE_FIXED_CST (arg0)), NULL,
14823 TYPE_SATURATING (type));
14824 t = build_fixed (type, f);
14825 /* Propagate overflow flags. */
14826 if (overflow_p | TREE_OVERFLOW (arg0))
14828 TREE_OVERFLOW (t) = 1;
14829 TREE_CONSTANT_OVERFLOW (t) = 1;
14831 else if (TREE_CONSTANT_OVERFLOW (arg0))
14832 TREE_CONSTANT_OVERFLOW (t) = 1;
14837 gcc_unreachable ();
/* Return the tree for abs (ARG0) when ARG0 is known to be either
   an integer or a real constant.

   TYPE is the type of the result.  */
14849 fold_abs_const (tree arg0, tree type)
14851 tree t = NULL_TREE;
14853 switch (TREE_CODE (arg0))
14856 /* If the value is unsigned, then the absolute value is
14857 the same as the ordinary value. */
14858 if (TYPE_UNSIGNED (type))
14860 /* Similarly, if the value is non-negative. */
14861 else if (INT_CST_LT (integer_minus_one_node, arg0))
/* If the value is negative, then the absolute value is
	 its negation.  */
14867 unsigned HOST_WIDE_INT low;
14868 HOST_WIDE_INT high;
14869 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
14870 TREE_INT_CST_HIGH (arg0),
14872 t = force_fit_type_double (type, low, high, -1,
14873 overflow | TREE_OVERFLOW (arg0));
14878 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
14879 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
14885 gcc_unreachable ();
14891 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
14892 constant. TYPE is the type of the result. */
14895 fold_not_const (tree arg0, tree type)
14897 tree t = NULL_TREE;
14899 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
14901 t = force_fit_type_double (type, ~TREE_INT_CST_LOW (arg0),
14902 ~TREE_INT_CST_HIGH (arg0), 0,
14903 TREE_OVERFLOW (arg0));
14908 /* Given CODE, a relational operator, the target type, TYPE and two
14909 constant operands OP0 and OP1, return the result of the
14910 relational operation. If the result is not a compile time
14911 constant, then return NULL_TREE. */
14914 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
14916 int result, invert;
14918 /* From here on, the only cases we handle are when the result is
14919 known to be a constant. */
14921 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
14923 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
14924 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
14926 /* Handle the cases where either operand is a NaN. */
14927 if (real_isnan (c0) || real_isnan (c1))
14937 case UNORDERED_EXPR:
14951 if (flag_trapping_math)
14957 gcc_unreachable ();
14960 return constant_boolean_node (result, type);
14963 return constant_boolean_node (real_compare (code, c0, c1), type);
14966 if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
14968 const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
14969 const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
14970 return constant_boolean_node (fixed_compare (code, c0, c1), type);
14973 /* Handle equality/inequality of complex constants. */
14974 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
14976 tree rcond = fold_relational_const (code, type,
14977 TREE_REALPART (op0),
14978 TREE_REALPART (op1));
14979 tree icond = fold_relational_const (code, type,
14980 TREE_IMAGPART (op0),
14981 TREE_IMAGPART (op1));
14982 if (code == EQ_EXPR)
14983 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
14984 else if (code == NE_EXPR)
14985 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
14990 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
14992 To compute GT, swap the arguments and do LT.
14993 To compute GE, do LT and invert the result.
14994 To compute LE, swap the arguments, do LT and invert the result.
14995 To compute NE, do EQ and invert the result.
14997 Therefore, the code below must handle only EQ and LT. */
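/* E.g. 2 > 3 is evaluated as 3 < 2 after the swap below, and 2 >= 3
     as the inversion of 2 < 3, so only the LT and EQ kernels are ever
     computed.  */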
14999 if (code == LE_EXPR || code == GT_EXPR)
15004 code = swap_tree_comparison (code);
/* Note that it is safe to invert for real values here because we
     have already handled the one case where it matters.  */
15011 if (code == NE_EXPR || code == GE_EXPR)
15014 code = invert_tree_comparison (code, false);
/* Compute a result for LT or EQ if args permit;
     otherwise return NULL_TREE.  */
15019 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
15021 if (code == EQ_EXPR)
15022 result = tree_int_cst_equal (op0, op1);
15023 else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
15024 result = INT_CST_LT_UNSIGNED (op0, op1);
15026 result = INT_CST_LT (op0, op1);
15033 return constant_boolean_node (result, type);
15036 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
indicated TYPE.  If no CLEANUP_POINT_EXPR is necessary, return EXPR
   itself.  */
15041 fold_build_cleanup_point_expr (tree type, tree expr)
15043 /* If the expression does not have side effects then we don't have to wrap
15044 it with a cleanup point expression. */
15045 if (!TREE_SIDE_EFFECTS (expr))
/* If the expression is a return, check whether the expression inside
     the return has no side effects, or whether the right-hand side of
     the modify expression inside the return does.  If either has no
     side effects, we don't need to wrap the expression in a cleanup
     point expression.  Note we don't check the left-hand side of the
     modify because it should always be a return decl.  */
15053 if (TREE_CODE (expr) == RETURN_EXPR)
15055 tree op = TREE_OPERAND (expr, 0);
15056 if (!op || !TREE_SIDE_EFFECTS (op))
15058 op = TREE_OPERAND (op, 1);
15059 if (!TREE_SIDE_EFFECTS (op))
15063 return build1 (CLEANUP_POINT_EXPR, type, expr);
15066 /* Given a pointer value OP0 and a type TYPE, return a simplified version
of an indirection through OP0, or NULL_TREE if no simplification is
   possible.  */
15071 fold_indirect_ref_1 (tree type, tree op0)
15077 subtype = TREE_TYPE (sub);
15078 if (!POINTER_TYPE_P (subtype))
15081 if (TREE_CODE (sub) == ADDR_EXPR)
15083 tree op = TREE_OPERAND (sub, 0);
15084 tree optype = TREE_TYPE (op);
/* *&CONST_DECL -> the value of the const decl.  */
15086 if (TREE_CODE (op) == CONST_DECL)
15087 return DECL_INITIAL (op);
15088 /* *&p => p; make sure to handle *&"str"[cst] here. */
15089 if (type == optype)
15091 tree fop = fold_read_from_constant_string (op);
15097 /* *(foo *)&fooarray => fooarray[0] */
15098 else if (TREE_CODE (optype) == ARRAY_TYPE
15099 && type == TREE_TYPE (optype))
15101 tree type_domain = TYPE_DOMAIN (optype);
15102 tree min_val = size_zero_node;
15103 if (type_domain && TYPE_MIN_VALUE (type_domain))
15104 min_val = TYPE_MIN_VALUE (type_domain);
15105 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
15107 /* *(foo *)&complexfoo => __real__ complexfoo */
15108 else if (TREE_CODE (optype) == COMPLEX_TYPE
15109 && type == TREE_TYPE (optype))
15110 return fold_build1 (REALPART_EXPR, type, op);
15111 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
15112 else if (TREE_CODE (optype) == VECTOR_TYPE
15113 && type == TREE_TYPE (optype))
15115 tree part_width = TYPE_SIZE (type);
15116 tree index = bitsize_int (0);
15117 return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
15121 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
15122 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
15123 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
15125 tree op00 = TREE_OPERAND (sub, 0);
15126 tree op01 = TREE_OPERAND (sub, 1);
15130 op00type = TREE_TYPE (op00);
15131 if (TREE_CODE (op00) == ADDR_EXPR
15132 && TREE_CODE (TREE_TYPE (op00type)) == VECTOR_TYPE
15133 && type == TREE_TYPE (TREE_TYPE (op00type)))
15135 HOST_WIDE_INT offset = tree_low_cst (op01, 0);
15136 tree part_width = TYPE_SIZE (type);
15137 unsigned HOST_WIDE_INT part_widthi = tree_low_cst (part_width, 0)/BITS_PER_UNIT;
15138 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
15139 tree index = bitsize_int (indexi);
15141 if (offset/part_widthi <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (op00type)))
15142 return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (op00, 0),
15143 part_width, index);
15149 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
15150 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
15151 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
15153 tree op00 = TREE_OPERAND (sub, 0);
15154 tree op01 = TREE_OPERAND (sub, 1);
15158 op00type = TREE_TYPE (op00);
15159 if (TREE_CODE (op00) == ADDR_EXPR
15160 && TREE_CODE (TREE_TYPE (op00type)) == COMPLEX_TYPE
15161 && type == TREE_TYPE (TREE_TYPE (op00type)))
15163 tree size = TYPE_SIZE_UNIT (type);
15164 if (tree_int_cst_equal (size, op01))
15165 return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (op00, 0));
15169 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
15170 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
15171 && type == TREE_TYPE (TREE_TYPE (subtype)))
15174 tree min_val = size_zero_node;
15175 sub = build_fold_indirect_ref (sub);
15176 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
15177 if (type_domain && TYPE_MIN_VALUE (type_domain))
15178 min_val = TYPE_MIN_VALUE (type_domain);
15179 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
/* Builds an expression for an indirection through T, simplifying some
   cases.  */
15189 build_fold_indirect_ref (tree t)
15191 tree type = TREE_TYPE (TREE_TYPE (t));
15192 tree sub = fold_indirect_ref_1 (type, t);
15197 return build1 (INDIRECT_REF, type, t);
15200 /* Given an INDIRECT_REF T, return either T or a simplified version. */
15203 fold_indirect_ref (tree t)
15205 tree sub = fold_indirect_ref_1 (TREE_TYPE (t), TREE_OPERAND (t, 0));
15213 /* Strip non-trapping, non-side-effecting tree nodes from an expression
15214 whose result is ignored. The type of the returned tree need not be
the same as that of the original expression.  */
15218 fold_ignored_result (tree t)
15220 if (!TREE_SIDE_EFFECTS (t))
15221 return integer_zero_node;
15224 switch (TREE_CODE_CLASS (TREE_CODE (t)))
15227 t = TREE_OPERAND (t, 0);
15231 case tcc_comparison:
15232 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
15233 t = TREE_OPERAND (t, 0);
15234 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
15235 t = TREE_OPERAND (t, 1);
15240 case tcc_expression:
15241 switch (TREE_CODE (t))
15243 case COMPOUND_EXPR:
15244 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
15246 t = TREE_OPERAND (t, 0);
15250 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
15251 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
15253 t = TREE_OPERAND (t, 0);
15266 /* Return the value of VALUE, rounded up to a multiple of DIVISOR.
15267 This can only be applied to objects of a sizetype. */
15270 round_up (tree value, int divisor)
15272 tree div = NULL_TREE;
15274 gcc_assert (divisor > 0);
/* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this when we are not given a const,
     because in that case, this check is more expensive than just
     doing it.  */
15282 if (TREE_CODE (value) != INTEGER_CST)
15284 div = build_int_cst (TREE_TYPE (value), divisor);
15286 if (multiple_of_p (TREE_TYPE (value), value, div))
15290 /* If divisor is a power of two, simplify this to bit manipulation. */
15291 if (divisor == (divisor & -divisor))
15293 if (TREE_CODE (value) == INTEGER_CST)
15295 unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (value);
15296 unsigned HOST_WIDE_INT high;
15299 if ((low & (divisor - 1)) == 0)
15302 overflow_p = TREE_OVERFLOW (value);
15303 high = TREE_INT_CST_HIGH (value);
15304 low &= ~(divisor - 1);
15313 return force_fit_type_double (TREE_TYPE (value), low, high,
15320 t = build_int_cst (TREE_TYPE (value), divisor - 1);
15321 value = size_binop (PLUS_EXPR, value, t);
15322 t = build_int_cst (TREE_TYPE (value), -divisor);
15323 value = size_binop (BIT_AND_EXPR, value, t);
15329 div = build_int_cst (TREE_TYPE (value), divisor);
15330 value = size_binop (CEIL_DIV_EXPR, value, div);
15331 value = size_binop (MULT_EXPR, value, div);
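/* For example (illustrative only): round_up (size_int (10), 8) takes
     the power-of-two path above, (10 + 7) & -8 == 16, while a divisor
     of 12 computes CEIL_DIV (10, 12) * 12 == 12.  */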
15337 /* Likewise, but round down. */
15340 round_down (tree value, int divisor)
15342 tree div = NULL_TREE;
15344 gcc_assert (divisor > 0);
/* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this when we are not given a const,
     because in that case, this check is more expensive than just
     doing it.  */
15352 if (TREE_CODE (value) != INTEGER_CST)
15354 div = build_int_cst (TREE_TYPE (value), divisor);
15356 if (multiple_of_p (TREE_TYPE (value), value, div))
15360 /* If divisor is a power of two, simplify this to bit manipulation. */
15361 if (divisor == (divisor & -divisor))
15365 t = build_int_cst (TREE_TYPE (value), -divisor);
15366 value = size_binop (BIT_AND_EXPR, value, t);
15371 div = build_int_cst (TREE_TYPE (value), divisor);
15372 value = size_binop (FLOOR_DIV_EXPR, value, div);
15373 value = size_binop (MULT_EXPR, value, div);
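/* Likewise round_down (size_int (10), 8) is 10 & -8 == 8, and with a
     divisor of 12 it is FLOOR_DIV (10, 12) * 12 == 0.  */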
/* Returns a pointer to the base of the object addressed by EXP and
   extracts the information about the offset of the access, storing it
   in PBITPOS and POFFSET.  */
15384 split_address_to_core_and_offset (tree exp,
15385 HOST_WIDE_INT *pbitpos, tree *poffset)
15388 enum machine_mode mode;
15389 int unsignedp, volatilep;
15390 HOST_WIDE_INT bitsize;
15392 if (TREE_CODE (exp) == ADDR_EXPR)
15394 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
15395 poffset, &mode, &unsignedp, &volatilep,
15397 core = fold_addr_expr (core);
15403 *poffset = NULL_TREE;
/* Returns true if the addresses of E1 and E2 differ by a constant, false
15410 otherwise. If they do, E1 - E2 is stored in *DIFF. */
15413 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
15416 HOST_WIDE_INT bitpos1, bitpos2;
15417 tree toffset1, toffset2, tdiff, type;
15419 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
15420 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
15422 if (bitpos1 % BITS_PER_UNIT != 0
15423 || bitpos2 % BITS_PER_UNIT != 0
15424 || !operand_equal_p (core1, core2, 0))
15427 if (toffset1 && toffset2)
15429 type = TREE_TYPE (toffset1);
15430 if (type != TREE_TYPE (toffset2))
15431 toffset2 = fold_convert (type, toffset2);
15433 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
15434 if (!cst_and_fits_in_hwi (tdiff))
15437 *diff = int_cst_value (tdiff);
15439 else if (toffset1 || toffset2)
/* If only one of the offsets is non-constant, the difference cannot
	 be a constant.  */
15448 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
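/* E.g. for e1 == &a[3] and e2 == &a[1] with 4-byte elements, both
     cores are `a', the offsets fold to constants, and *diff becomes 8.  */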
15452 /* Simplify the floating point expression EXP when the sign of the
result is not significant.  Return NULL_TREE if no simplification
   was possible.  */
15457 fold_strip_sign_ops (tree exp)
15461 switch (TREE_CODE (exp))
15465 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
15466 return arg0 ? arg0 : TREE_OPERAND (exp, 0);
15470 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
15472 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
15473 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
15474 if (arg0 != NULL_TREE || arg1 != NULL_TREE)
15475 return fold_build2 (TREE_CODE (exp), TREE_TYPE (exp),
15476 arg0 ? arg0 : TREE_OPERAND (exp, 0),
15477 arg1 ? arg1 : TREE_OPERAND (exp, 1));
15480 case COMPOUND_EXPR:
15481 arg0 = TREE_OPERAND (exp, 0);
15482 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
15484 return fold_build2 (COMPOUND_EXPR, TREE_TYPE (exp), arg0, arg1);
15488 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
15489 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
15491 return fold_build3 (COND_EXPR, TREE_TYPE (exp), TREE_OPERAND (exp, 0),
15492 arg0 ? arg0 : TREE_OPERAND (exp, 1),
15493 arg1 ? arg1 : TREE_OPERAND (exp, 2));
15498 const enum built_in_function fcode = builtin_mathfn_code (exp);
15501 CASE_FLT_FN (BUILT_IN_COPYSIGN):
15502 /* Strip copysign function call, return the 1st argument. */
15503 arg0 = CALL_EXPR_ARG (exp, 0);
15504 arg1 = CALL_EXPR_ARG (exp, 1);
15505 return omit_one_operand (TREE_TYPE (exp), arg0, arg1);
15508 /* Strip sign ops from the argument of "odd" math functions. */
15509 if (negate_mathfn_p (fcode))
15511 arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
15513 return build_call_expr (get_callee_fndecl (exp), 1, arg0);
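/* For example, when only the magnitude of the result matters (say as
   the argument of fabs), the MULT_EXPR case above rewrites
   (-x) * (-y) to x * y, and the COPYSIGN case returns its first
   argument outright.  */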