1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
4 Free Software Foundation, Inc.
This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
22 /*@@ This file should be rewritten to use an arbitrary precision
23 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
24 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
25 @@ The routines that translate from the ap rep should
26 @@ warn if precision et. al. is lost.
27 @@ This would also make life easier when this technology is used
28 @@ for cross-compilers. */
/* The entry points in this file are fold, size_int_wide, size_binop
   and force_fit_type_double.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   force_fit_type_double takes a constant, an overflowable flag and a
   prior overflow indicator.  It forces the value to fit the type and
   sets TREE_OVERFLOW.

   Note: Since the folders get called on non-gimple code as well as
   gimple code, we need to handle GIMPLE tuples as well as their
   corresponding tree equivalents.  */
52 #include "coretypes.h"
57 #include "fixed-value.h"
65 #include "langhooks.h"
/* Nonzero if we are folding constants inside an initializer; zero
   otherwise.  */
int folding_initializer = 0;
72 /* The following constants represent a bit based encoding of GCC's
73 comparison operators. This encoding simplifies transformations
74 on relational comparison operators, such as AND and OR. */
75 enum comparison_code {
94 static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
95 static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *);
96 static bool negate_mathfn_p (enum built_in_function);
97 static bool negate_expr_p (tree);
98 static tree negate_expr (tree);
99 static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
100 static tree associate_trees (tree, tree, enum tree_code, tree);
101 static tree const_binop (enum tree_code, tree, tree, int);
102 static enum comparison_code comparison_to_compcode (enum tree_code);
103 static enum tree_code compcode_to_comparison (enum comparison_code);
104 static tree combine_comparisons (enum tree_code, enum tree_code,
105 enum tree_code, tree, tree, tree);
106 static int truth_value_p (enum tree_code);
107 static int operand_equal_for_comparison_p (tree, tree, tree);
108 static int twoval_comparison_p (tree, tree *, tree *, int *);
109 static tree eval_subst (tree, tree, tree, tree, tree);
110 static tree pedantic_omit_one_operand (tree, tree, tree);
111 static tree distribute_bit_expr (enum tree_code, tree, tree, tree);
112 static tree make_bit_field_ref (tree, tree, int, int, int);
113 static tree optimize_bit_field_compare (enum tree_code, tree, tree, tree);
114 static tree decode_field_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
115 enum machine_mode *, int *, int *,
117 static int all_ones_mask_p (const_tree, int);
118 static tree sign_bit_p (tree, const_tree);
119 static int simple_operand_p (const_tree);
120 static tree range_binop (enum tree_code, tree, tree, int, tree, int);
121 static tree range_predecessor (tree);
122 static tree range_successor (tree);
123 static tree make_range (tree, int *, tree *, tree *, bool *);
124 static tree build_range_check (tree, tree, int, tree, tree);
125 static int merge_ranges (int *, tree *, tree *, int, tree, tree, int, tree,
127 static tree fold_range_test (enum tree_code, tree, tree, tree);
128 static tree fold_cond_expr_with_comparison (tree, tree, tree, tree);
129 static tree unextend (tree, int, int, tree);
130 static tree fold_truthop (enum tree_code, tree, tree, tree);
131 static tree optimize_minmax_comparison (enum tree_code, tree, tree, tree);
132 static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
133 static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
134 static tree fold_binary_op_with_conditional_arg (enum tree_code, tree,
137 static tree fold_mathfn_compare (enum built_in_function, enum tree_code,
139 static tree fold_inf_compare (enum tree_code, tree, tree, tree);
140 static tree fold_div_compare (enum tree_code, tree, tree, tree);
141 static bool reorder_operands_p (const_tree, const_tree);
142 static tree fold_negate_const (tree, tree);
143 static tree fold_not_const (tree, tree);
144 static tree fold_relational_const (enum tree_code, tree, tree, tree);
147 /* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
148 overflow. Suppose A, B and SUM have the same respective signs as A1, B1,
149 and SUM1. Then this yields nonzero if overflow occurred during the
152 Overflow occurs if A and B have the same sign, but A and SUM differ in
153 sign. Use `^' to test whether signs differ, and `< 0' to isolate the
155 #define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
/* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
   We do that by representing the two-word integer in 4 words, with only
   HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
   number.  The value of the word is LOWPART + HIGHPART * BASE.  */

#define LOWPART(x) \
  ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
#define HIGHPART(x) \
  ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
#define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)
168 /* Unpack a two-word integer into 4 words.
169 LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
170 WORDS points to the array of HOST_WIDE_INTs. */
173 encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
175 words[0] = LOWPART (low);
176 words[1] = HIGHPART (low);
177 words[2] = LOWPART (hi);
178 words[3] = HIGHPART (hi);
181 /* Pack an array of 4 words into a two-word integer.
182 WORDS points to the array of words.
183 The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces. */
186 decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
189 *low = words[0] + words[1] * BASE;
190 *hi = words[2] + words[3] * BASE;
193 /* Force the double-word integer L1, H1 to be within the range of the
194 integer type TYPE. Stores the properly truncated and sign-extended
195 double-word integer in *LV, *HV. Returns true if the operation
196 overflows, that is, argument and result are different. */
199 fit_double_type (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
200 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, const_tree type)
202 unsigned HOST_WIDE_INT low0 = l1;
203 HOST_WIDE_INT high0 = h1;
205 int sign_extended_type;
207 if (POINTER_TYPE_P (type)
208 || TREE_CODE (type) == OFFSET_TYPE)
211 prec = TYPE_PRECISION (type);
213 /* Size types *are* sign extended. */
214 sign_extended_type = (!TYPE_UNSIGNED (type)
215 || (TREE_CODE (type) == INTEGER_TYPE
216 && TYPE_IS_SIZETYPE (type)));
218 /* First clear all bits that are beyond the type's precision. */
219 if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
221 else if (prec > HOST_BITS_PER_WIDE_INT)
222 h1 &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
226 if (prec < HOST_BITS_PER_WIDE_INT)
227 l1 &= ~((HOST_WIDE_INT) (-1) << prec);
230 /* Then do sign extension if necessary. */
231 if (!sign_extended_type)
232 /* No sign extension */;
233 else if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
234 /* Correct width already. */;
235 else if (prec > HOST_BITS_PER_WIDE_INT)
237 /* Sign extend top half? */
238 if (h1 & ((unsigned HOST_WIDE_INT)1
239 << (prec - HOST_BITS_PER_WIDE_INT - 1)))
240 h1 |= (HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT);
242 else if (prec == HOST_BITS_PER_WIDE_INT)
244 if ((HOST_WIDE_INT)l1 < 0)
249 /* Sign extend bottom half? */
250 if (l1 & ((unsigned HOST_WIDE_INT)1 << (prec - 1)))
253 l1 |= (HOST_WIDE_INT)(-1) << prec;
260 /* If the value didn't fit, signal overflow. */
261 return l1 != low0 || h1 != high0;
264 /* We force the double-int HIGH:LOW to the range of the type TYPE by
265 sign or zero extending it.
266 OVERFLOWABLE indicates if we are interested
267 in overflow of the value, when >0 we are only interested in signed
268 overflow, for <0 we are interested in any overflow. OVERFLOWED
269 indicates whether overflow has already occurred. CONST_OVERFLOWED
270 indicates whether constant overflow has already occurred. We force
271 T's value to be within range of T's type (by setting to 0 or 1 all
272 the bits outside the type's range). We set TREE_OVERFLOWED if,
273 OVERFLOWED is nonzero,
274 or OVERFLOWABLE is >0 and signed overflow occurs
275 or OVERFLOWABLE is <0 and any overflow occurs
276 We return a new tree node for the extended double-int. The node
277 is shared if no overflow flags are set. */
280 force_fit_type_double (tree type, unsigned HOST_WIDE_INT low,
281 HOST_WIDE_INT high, int overflowable,
284 int sign_extended_type;
287 /* Size types *are* sign extended. */
288 sign_extended_type = (!TYPE_UNSIGNED (type)
289 || (TREE_CODE (type) == INTEGER_TYPE
290 && TYPE_IS_SIZETYPE (type)));
292 overflow = fit_double_type (low, high, &low, &high, type);
294 /* If we need to set overflow flags, return a new unshared node. */
295 if (overflowed || overflow)
299 || (overflowable > 0 && sign_extended_type))
301 tree t = make_node (INTEGER_CST);
302 TREE_INT_CST_LOW (t) = low;
303 TREE_INT_CST_HIGH (t) = high;
304 TREE_TYPE (t) = type;
305 TREE_OVERFLOW (t) = 1;
310 /* Else build a shared node. */
311 return build_int_cst_wide (type, low, high);
314 /* Add two doubleword integers with doubleword result.
315 Return nonzero if the operation overflows according to UNSIGNED_P.
316 Each argument is given as two `HOST_WIDE_INT' pieces.
317 One argument is L1 and H1; the other, L2 and H2.
318 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
321 add_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
322 unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
323 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
326 unsigned HOST_WIDE_INT l;
330 h = h1 + h2 + (l < l1);
336 return (unsigned HOST_WIDE_INT) h < (unsigned HOST_WIDE_INT) h1;
338 return OVERFLOW_SUM_SIGN (h1, h2, h);
341 /* Negate a doubleword integer with doubleword result.
342 Return nonzero if the operation overflows, assuming it's signed.
343 The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
344 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
347 neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
348 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
354 return (*hv & h1) < 0;
364 /* Multiply two doubleword integers with doubleword result.
365 Return nonzero if the operation overflows according to UNSIGNED_P.
366 Each argument is given as two `HOST_WIDE_INT' pieces.
367 One argument is L1 and H1; the other, L2 and H2.
368 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
371 mul_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
372 unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
373 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
376 HOST_WIDE_INT arg1[4];
377 HOST_WIDE_INT arg2[4];
378 HOST_WIDE_INT prod[4 * 2];
379 unsigned HOST_WIDE_INT carry;
381 unsigned HOST_WIDE_INT toplow, neglow;
382 HOST_WIDE_INT tophigh, neghigh;
384 encode (arg1, l1, h1);
385 encode (arg2, l2, h2);
387 memset (prod, 0, sizeof prod);
389 for (i = 0; i < 4; i++)
392 for (j = 0; j < 4; j++)
395 /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000. */
396 carry += arg1[i] * arg2[j];
397 /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF. */
399 prod[k] = LOWPART (carry);
400 carry = HIGHPART (carry);
405 decode (prod, lv, hv);
406 decode (prod + 4, &toplow, &tophigh);
408 /* Unsigned overflow is immediate. */
410 return (toplow | tophigh) != 0;
412 /* Check for signed overflow by calculating the signed representation of the
413 top half of the result; it should agree with the low half's sign bit. */
416 neg_double (l2, h2, &neglow, &neghigh);
417 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
421 neg_double (l1, h1, &neglow, &neghigh);
422 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
424 return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
427 /* Shift the doubleword integer in L1, H1 left by COUNT places
428 keeping only PREC bits of result.
429 Shift right if COUNT is negative.
430 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
431 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
434 lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
435 HOST_WIDE_INT count, unsigned int prec,
436 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
438 unsigned HOST_WIDE_INT signmask;
442 rshift_double (l1, h1, -count, prec, lv, hv, arith);
446 if (SHIFT_COUNT_TRUNCATED)
449 if (count >= 2 * HOST_BITS_PER_WIDE_INT)
451 /* Shifting by the host word size is undefined according to the
452 ANSI standard, so we must handle this as a special case. */
456 else if (count >= HOST_BITS_PER_WIDE_INT)
458 *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
463 *hv = (((unsigned HOST_WIDE_INT) h1 << count)
464 | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
468 /* Sign extend all bits that are beyond the precision. */
470 signmask = -((prec > HOST_BITS_PER_WIDE_INT
471 ? ((unsigned HOST_WIDE_INT) *hv
472 >> (prec - HOST_BITS_PER_WIDE_INT - 1))
473 : (*lv >> (prec - 1))) & 1);
475 if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
477 else if (prec >= HOST_BITS_PER_WIDE_INT)
479 *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
480 *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
485 *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
486 *lv |= signmask << prec;
490 /* Shift the doubleword integer in L1, H1 right by COUNT places
491 keeping only PREC bits of result. COUNT must be positive.
492 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
493 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
496 rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
497 HOST_WIDE_INT count, unsigned int prec,
498 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
501 unsigned HOST_WIDE_INT signmask;
504 ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
507 if (SHIFT_COUNT_TRUNCATED)
510 if (count >= 2 * HOST_BITS_PER_WIDE_INT)
512 /* Shifting by the host word size is undefined according to the
513 ANSI standard, so we must handle this as a special case. */
517 else if (count >= HOST_BITS_PER_WIDE_INT)
520 *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
524 *hv = (unsigned HOST_WIDE_INT) h1 >> count;
526 | ((unsigned HOST_WIDE_INT) h1 << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
529 /* Zero / sign extend all bits that are beyond the precision. */
531 if (count >= (HOST_WIDE_INT)prec)
536 else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
538 else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
540 *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
541 *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
546 *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
547 *lv |= signmask << (prec - count);
551 /* Rotate the doubleword integer in L1, H1 left by COUNT places
552 keeping only PREC bits of result.
553 Rotate right if COUNT is negative.
554 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
557 lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
558 HOST_WIDE_INT count, unsigned int prec,
559 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
561 unsigned HOST_WIDE_INT s1l, s2l;
562 HOST_WIDE_INT s1h, s2h;
568 lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
569 rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
574 /* Rotate the doubleword integer in L1, H1 left by COUNT places
575 keeping only PREC bits of result. COUNT must be positive.
576 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
579 rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
580 HOST_WIDE_INT count, unsigned int prec,
581 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
583 unsigned HOST_WIDE_INT s1l, s2l;
584 HOST_WIDE_INT s1h, s2h;
590 rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
591 lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
596 /* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
597 for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
598 CODE is a tree code for a kind of division, one of
599 TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
601 It controls how the quotient is rounded to an integer.
602 Return nonzero if the operation overflows.
603 UNS nonzero says do unsigned division. */
606 div_and_round_double (enum tree_code code, int uns,
607 unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
608 HOST_WIDE_INT hnum_orig,
609 unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
610 HOST_WIDE_INT hden_orig,
611 unsigned HOST_WIDE_INT *lquo,
612 HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
616 HOST_WIDE_INT num[4 + 1]; /* extra element for scaling. */
617 HOST_WIDE_INT den[4], quo[4];
619 unsigned HOST_WIDE_INT work;
620 unsigned HOST_WIDE_INT carry = 0;
621 unsigned HOST_WIDE_INT lnum = lnum_orig;
622 HOST_WIDE_INT hnum = hnum_orig;
623 unsigned HOST_WIDE_INT lden = lden_orig;
624 HOST_WIDE_INT hden = hden_orig;
627 if (hden == 0 && lden == 0)
628 overflow = 1, lden = 1;
630 /* Calculate quotient sign and convert operands to unsigned. */
636 /* (minimum integer) / (-1) is the only overflow case. */
637 if (neg_double (lnum, hnum, &lnum, &hnum)
638 && ((HOST_WIDE_INT) lden & hden) == -1)
644 neg_double (lden, hden, &lden, &hden);
648 if (hnum == 0 && hden == 0)
649 { /* single precision */
651 /* This unsigned division rounds toward zero. */
657 { /* trivial case: dividend < divisor */
658 /* hden != 0 already checked. */
665 memset (quo, 0, sizeof quo);
667 memset (num, 0, sizeof num); /* to zero 9th element */
668 memset (den, 0, sizeof den);
670 encode (num, lnum, hnum);
671 encode (den, lden, hden);
673 /* Special code for when the divisor < BASE. */
674 if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
676 /* hnum != 0 already checked. */
677 for (i = 4 - 1; i >= 0; i--)
679 work = num[i] + carry * BASE;
680 quo[i] = work / lden;
686 /* Full double precision division,
687 with thanks to Don Knuth's "Seminumerical Algorithms". */
688 int num_hi_sig, den_hi_sig;
689 unsigned HOST_WIDE_INT quo_est, scale;
691 /* Find the highest nonzero divisor digit. */
692 for (i = 4 - 1;; i--)
699 /* Insure that the first digit of the divisor is at least BASE/2.
700 This is required by the quotient digit estimation algorithm. */
702 scale = BASE / (den[den_hi_sig] + 1);
704 { /* scale divisor and dividend */
706 for (i = 0; i <= 4 - 1; i++)
708 work = (num[i] * scale) + carry;
709 num[i] = LOWPART (work);
710 carry = HIGHPART (work);
715 for (i = 0; i <= 4 - 1; i++)
717 work = (den[i] * scale) + carry;
718 den[i] = LOWPART (work);
719 carry = HIGHPART (work);
720 if (den[i] != 0) den_hi_sig = i;
727 for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
729 /* Guess the next quotient digit, quo_est, by dividing the first
730 two remaining dividend digits by the high order quotient digit.
731 quo_est is never low and is at most 2 high. */
732 unsigned HOST_WIDE_INT tmp;
734 num_hi_sig = i + den_hi_sig + 1;
735 work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
736 if (num[num_hi_sig] != den[den_hi_sig])
737 quo_est = work / den[den_hi_sig];
741 /* Refine quo_est so it's usually correct, and at most one high. */
742 tmp = work - quo_est * den[den_hi_sig];
744 && (den[den_hi_sig - 1] * quo_est
745 > (tmp * BASE + num[num_hi_sig - 2])))
748 /* Try QUO_EST as the quotient digit, by multiplying the
749 divisor by QUO_EST and subtracting from the remaining dividend.
750 Keep in mind that QUO_EST is the I - 1st digit. */
753 for (j = 0; j <= den_hi_sig; j++)
755 work = quo_est * den[j] + carry;
756 carry = HIGHPART (work);
757 work = num[i + j] - LOWPART (work);
758 num[i + j] = LOWPART (work);
759 carry += HIGHPART (work) != 0;
762 /* If quo_est was high by one, then num[i] went negative and
763 we need to correct things. */
764 if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
767 carry = 0; /* add divisor back in */
768 for (j = 0; j <= den_hi_sig; j++)
770 work = num[i + j] + den[j] + carry;
771 carry = HIGHPART (work);
772 num[i + j] = LOWPART (work);
775 num [num_hi_sig] += carry;
778 /* Store the quotient digit. */
783 decode (quo, lquo, hquo);
786 /* If result is negative, make it so. */
788 neg_double (*lquo, *hquo, lquo, hquo);
790 /* Compute trial remainder: rem = num - (quo * den) */
791 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
792 neg_double (*lrem, *hrem, lrem, hrem);
793 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
798 case TRUNC_MOD_EXPR: /* round toward zero */
799 case EXACT_DIV_EXPR: /* for this one, it shouldn't matter */
803 case FLOOR_MOD_EXPR: /* round toward negative infinity */
804 if (quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio < 0 && rem != 0 */
807 add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
815 case CEIL_MOD_EXPR: /* round toward positive infinity */
816 if (!quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio > 0 && rem != 0 */
818 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
826 case ROUND_MOD_EXPR: /* round to closest integer */
828 unsigned HOST_WIDE_INT labs_rem = *lrem;
829 HOST_WIDE_INT habs_rem = *hrem;
830 unsigned HOST_WIDE_INT labs_den = lden, ltwice;
831 HOST_WIDE_INT habs_den = hden, htwice;
833 /* Get absolute values. */
835 neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
837 neg_double (lden, hden, &labs_den, &habs_den);
839 /* If (2 * abs (lrem) >= abs (lden)) */
840 mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
841 labs_rem, habs_rem, <wice, &htwice);
843 if (((unsigned HOST_WIDE_INT) habs_den
844 < (unsigned HOST_WIDE_INT) htwice)
845 || (((unsigned HOST_WIDE_INT) habs_den
846 == (unsigned HOST_WIDE_INT) htwice)
847 && (labs_den < ltwice)))
851 add_double (*lquo, *hquo,
852 (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
855 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
867 /* Compute true remainder: rem = num - (quo * den) */
868 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
869 neg_double (*lrem, *hrem, lrem, hrem);
870 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
874 /* If ARG2 divides ARG1 with zero remainder, carries out the division
875 of type CODE and returns the quotient.
876 Otherwise returns NULL_TREE. */
879 div_if_zero_remainder (enum tree_code code, const_tree arg1, const_tree arg2)
881 unsigned HOST_WIDE_INT int1l, int2l;
882 HOST_WIDE_INT int1h, int2h;
883 unsigned HOST_WIDE_INT quol, reml;
884 HOST_WIDE_INT quoh, remh;
885 tree type = TREE_TYPE (arg1);
886 int uns = TYPE_UNSIGNED (type);
888 int1l = TREE_INT_CST_LOW (arg1);
889 int1h = TREE_INT_CST_HIGH (arg1);
890 /* &obj[0] + -128 really should be compiled as &obj[-8] rather than
891 &obj[some_exotic_number]. */
892 if (POINTER_TYPE_P (type))
895 type = signed_type_for (type);
896 fit_double_type (int1l, int1h, &int1l, &int1h,
900 fit_double_type (int1l, int1h, &int1l, &int1h, type);
901 int2l = TREE_INT_CST_LOW (arg2);
902 int2h = TREE_INT_CST_HIGH (arg2);
904 div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
905 &quol, &quoh, &reml, &remh);
906 if (remh != 0 || reml != 0)
909 return build_int_cst_wide (type, quol, quoh);
912 /* This is nonzero if we should defer warnings about undefined
913 overflow. This facility exists because these warnings are a
914 special case. The code to estimate loop iterations does not want
915 to issue any warnings, since it works with expressions which do not
916 occur in user code. Various bits of cleanup code call fold(), but
917 only use the result if it has certain characteristics (e.g., is a
918 constant); that code only wants to issue a warning if the result is
921 static int fold_deferring_overflow_warnings;
923 /* If a warning about undefined overflow is deferred, this is the
924 warning. Note that this may cause us to turn two warnings into
925 one, but that is fine since it is sufficient to only give one
926 warning per expression. */
928 static const char* fold_deferred_overflow_warning;
930 /* If a warning about undefined overflow is deferred, this is the
931 level at which the warning should be emitted. */
933 static enum warn_strict_overflow_code fold_deferred_overflow_code;
935 /* Start deferring overflow warnings. We could use a stack here to
936 permit nested calls, but at present it is not necessary. */
939 fold_defer_overflow_warnings (void)
941 ++fold_deferring_overflow_warnings;
944 /* Stop deferring overflow warnings. If there is a pending warning,
945 and ISSUE is true, then issue the warning if appropriate. STMT is
946 the statement with which the warning should be associated (used for
947 location information); STMT may be NULL. CODE is the level of the
948 warning--a warn_strict_overflow_code value. This function will use
949 the smaller of CODE and the deferred code when deciding whether to
950 issue the warning. CODE may be zero to mean to always use the
954 fold_undefer_overflow_warnings (bool issue, const_tree stmt, int code)
959 gcc_assert (fold_deferring_overflow_warnings > 0);
960 --fold_deferring_overflow_warnings;
961 if (fold_deferring_overflow_warnings > 0)
963 if (fold_deferred_overflow_warning != NULL
965 && code < (int) fold_deferred_overflow_code)
966 fold_deferred_overflow_code = code;
970 warnmsg = fold_deferred_overflow_warning;
971 fold_deferred_overflow_warning = NULL;
973 if (!issue || warnmsg == NULL)
976 if (stmt != NULL_TREE && TREE_NO_WARNING (stmt))
979 /* Use the smallest code level when deciding to issue the
981 if (code == 0 || code > (int) fold_deferred_overflow_code)
982 code = fold_deferred_overflow_code;
984 if (!issue_strict_overflow_warning (code))
987 if (stmt == NULL_TREE || !expr_has_location (stmt))
988 locus = input_location;
990 locus = expr_location (stmt);
991 warning (OPT_Wstrict_overflow, "%H%s", &locus, warnmsg);
994 /* Stop deferring overflow warnings, ignoring any deferred
998 fold_undefer_and_ignore_overflow_warnings (void)
1000 fold_undefer_overflow_warnings (false, NULL_TREE, 0);
1003 /* Whether we are deferring overflow warnings. */
1006 fold_deferring_overflow_warnings_p (void)
1008 return fold_deferring_overflow_warnings > 0;
1011 /* This is called when we fold something based on the fact that signed
1012 overflow is undefined. */
1015 fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
1017 gcc_assert (!flag_wrapv && !flag_trapv);
1018 if (fold_deferring_overflow_warnings > 0)
1020 if (fold_deferred_overflow_warning == NULL
1021 || wc < fold_deferred_overflow_code)
1023 fold_deferred_overflow_warning = gmsgid;
1024 fold_deferred_overflow_code = wc;
1027 else if (issue_strict_overflow_warning (wc))
1028 warning (OPT_Wstrict_overflow, gmsgid);
1031 /* Return true if the built-in mathematical function specified by CODE
1032 is odd, i.e. -f(x) == f(-x). */
1035 negate_mathfn_p (enum built_in_function code)
1039 CASE_FLT_FN (BUILT_IN_ASIN):
1040 CASE_FLT_FN (BUILT_IN_ASINH):
1041 CASE_FLT_FN (BUILT_IN_ATAN):
1042 CASE_FLT_FN (BUILT_IN_ATANH):
1043 CASE_FLT_FN (BUILT_IN_CASIN):
1044 CASE_FLT_FN (BUILT_IN_CASINH):
1045 CASE_FLT_FN (BUILT_IN_CATAN):
1046 CASE_FLT_FN (BUILT_IN_CATANH):
1047 CASE_FLT_FN (BUILT_IN_CBRT):
1048 CASE_FLT_FN (BUILT_IN_CPROJ):
1049 CASE_FLT_FN (BUILT_IN_CSIN):
1050 CASE_FLT_FN (BUILT_IN_CSINH):
1051 CASE_FLT_FN (BUILT_IN_CTAN):
1052 CASE_FLT_FN (BUILT_IN_CTANH):
1053 CASE_FLT_FN (BUILT_IN_ERF):
1054 CASE_FLT_FN (BUILT_IN_LLROUND):
1055 CASE_FLT_FN (BUILT_IN_LROUND):
1056 CASE_FLT_FN (BUILT_IN_ROUND):
1057 CASE_FLT_FN (BUILT_IN_SIN):
1058 CASE_FLT_FN (BUILT_IN_SINH):
1059 CASE_FLT_FN (BUILT_IN_TAN):
1060 CASE_FLT_FN (BUILT_IN_TANH):
1061 CASE_FLT_FN (BUILT_IN_TRUNC):
1064 CASE_FLT_FN (BUILT_IN_LLRINT):
1065 CASE_FLT_FN (BUILT_IN_LRINT):
1066 CASE_FLT_FN (BUILT_IN_NEARBYINT):
1067 CASE_FLT_FN (BUILT_IN_RINT):
1068 return !flag_rounding_math;
1076 /* Check whether we may negate an integer constant T without causing
1080 may_negate_without_overflow_p (const_tree t)
1082 unsigned HOST_WIDE_INT val;
1086 gcc_assert (TREE_CODE (t) == INTEGER_CST);
1088 type = TREE_TYPE (t);
1089 if (TYPE_UNSIGNED (type))
1092 prec = TYPE_PRECISION (type);
1093 if (prec > HOST_BITS_PER_WIDE_INT)
1095 if (TREE_INT_CST_LOW (t) != 0)
1097 prec -= HOST_BITS_PER_WIDE_INT;
1098 val = TREE_INT_CST_HIGH (t);
1101 val = TREE_INT_CST_LOW (t);
1102 if (prec < HOST_BITS_PER_WIDE_INT)
1103 val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
1104 return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
1107 /* Determine whether an expression T can be cheaply negated using
1108 the function negate_expr without introducing undefined overflow. */
1111 negate_expr_p (tree t)
1118 type = TREE_TYPE (t);
1120 STRIP_SIGN_NOPS (t);
1121 switch (TREE_CODE (t))
1124 if (TYPE_OVERFLOW_WRAPS (type))
1127 /* Check that -CST will not overflow type. */
1128 return may_negate_without_overflow_p (t);
1130 return (INTEGRAL_TYPE_P (type)
1131 && TYPE_OVERFLOW_WRAPS (type));
1139 return negate_expr_p (TREE_REALPART (t))
1140 && negate_expr_p (TREE_IMAGPART (t));
1143 return negate_expr_p (TREE_OPERAND (t, 0))
1144 && negate_expr_p (TREE_OPERAND (t, 1));
1147 return negate_expr_p (TREE_OPERAND (t, 0));
1150 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
1151 || HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
1153 /* -(A + B) -> (-B) - A. */
1154 if (negate_expr_p (TREE_OPERAND (t, 1))
1155 && reorder_operands_p (TREE_OPERAND (t, 0),
1156 TREE_OPERAND (t, 1)))
1158 /* -(A + B) -> (-A) - B. */
1159 return negate_expr_p (TREE_OPERAND (t, 0));
1162 /* We can't turn -(A-B) into B-A when we honor signed zeros. */
1163 return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
1164 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
1165 && reorder_operands_p (TREE_OPERAND (t, 0),
1166 TREE_OPERAND (t, 1));
1169 if (TYPE_UNSIGNED (TREE_TYPE (t)))
1175 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
1176 return negate_expr_p (TREE_OPERAND (t, 1))
1177 || negate_expr_p (TREE_OPERAND (t, 0));
1180 case TRUNC_DIV_EXPR:
1181 case ROUND_DIV_EXPR:
1182 case FLOOR_DIV_EXPR:
1184 case EXACT_DIV_EXPR:
1185 /* In general we can't negate A / B, because if A is INT_MIN and
1186 B is 1, we may turn this into INT_MIN / -1 which is undefined
1187 and actually traps on some architectures. But if overflow is
1188 undefined, we can negate, because - (INT_MIN / 1) is an
1190 if (INTEGRAL_TYPE_P (TREE_TYPE (t))
1191 && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
1193 return negate_expr_p (TREE_OPERAND (t, 1))
1194 || negate_expr_p (TREE_OPERAND (t, 0));
1197 /* Negate -((double)float) as (double)(-float). */
1198 if (TREE_CODE (type) == REAL_TYPE)
1200 tree tem = strip_float_extensions (t);
1202 return negate_expr_p (tem);
1207 /* Negate -f(x) as f(-x). */
1208 if (negate_mathfn_p (builtin_mathfn_code (t)))
1209 return negate_expr_p (CALL_EXPR_ARG (t, 0));
1213 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
1214 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
1216 tree op1 = TREE_OPERAND (t, 1);
1217 if (TREE_INT_CST_HIGH (op1) == 0
1218 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
1219 == TREE_INT_CST_LOW (op1))
1230 /* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
1231 simplification is possible.
1232 If negate_expr_p would return true for T, NULL_TREE will never be
/* NOTE(review): this listing is a sampled excerpt -- gaps in the embedded
   line numbers show that the return type, braces, case labels and the
   closing "default/return NULL_TREE" have been elided.  Code lines below
   are kept byte-identical; only comments are added.  Confirm against the
   full source.  */
1236 fold_negate_expr (tree t)
1238 tree type = TREE_TYPE (t);
1241 switch (TREE_CODE (t))
/* Presumably the BIT_NOT_EXPR arm (case label elided):  */
1243 /* Convert - (~A) to A + 1. */
1245 if (INTEGRAL_TYPE_P (type))
1246 return fold_build2 (PLUS_EXPR, type, TREE_OPERAND (t, 0),
1247 build_int_cst (type, 1));
/* Constant arms: fold the negation directly; keep the result only when it
   does not introduce a new overflow on a type where overflow traps.  */
1251 tem = fold_negate_const (t, type);
1252 if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
1253 || !TYPE_OVERFLOW_TRAPS (type))
1258 tem = fold_negate_const (t, type);
1259 /* Two's complement FP formats, such as c4x, may overflow. */
1260 if (!TREE_OVERFLOW (tem) || !flag_trapping_math)
1265 tem = fold_negate_const (t, type);
/* Complex constant: negate real and imaginary parts; only rebuild when
   both parts folded back to constants.  */
1270 tree rpart = negate_expr (TREE_REALPART (t));
1271 tree ipart = negate_expr (TREE_IMAGPART (t));
1273 if ((TREE_CODE (rpart) == REAL_CST
1274 && TREE_CODE (ipart) == REAL_CST)
1275 || (TREE_CODE (rpart) == INTEGER_CST
1276 && TREE_CODE (ipart) == INTEGER_CST))
1277 return build_complex (type, rpart, ipart);
1282 if (negate_expr_p (t))
1283 return fold_build2 (COMPLEX_EXPR, type,
1284 fold_negate_expr (TREE_OPERAND (t, 0)),
1285 fold_negate_expr (TREE_OPERAND (t, 1)))
1289 if (negate_expr_p (t))
1290 return fold_build1 (CONJ_EXPR, type,
1291 fold_negate_expr (TREE_OPERAND (t, 0)));
/* -(-A) -> A (NEGATE_EXPR arm; label elided).  */
1295 return TREE_OPERAND (t, 0);
/* PLUS_EXPR arm: only safe when sign-dependent rounding and signed zeros
   need not be honored.  */
1298 if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
1299 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
1301 /* -(A + B) -> (-B) - A. */
1302 if (negate_expr_p (TREE_OPERAND (t, 1))
1303 && reorder_operands_p (TREE_OPERAND (t, 0),
1304 TREE_OPERAND (t, 1)))
1306 tem = negate_expr (TREE_OPERAND (t, 1));
1307 return fold_build2 (MINUS_EXPR, type,
1308 tem, TREE_OPERAND (t, 0));
1311 /* -(A + B) -> (-A) - B. */
1312 if (negate_expr_p (TREE_OPERAND (t, 0)))
1314 tem = negate_expr (TREE_OPERAND (t, 0));
1315 return fold_build2 (MINUS_EXPR, type,
1316 tem, TREE_OPERAND (t, 1));
1322 /* - (A - B) -> B - A */
1323 if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
1324 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
1325 && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
1326 return fold_build2 (MINUS_EXPR, type,
1327 TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
1331 if (TYPE_UNSIGNED (type))
/* Multiplication-like arm: push the negation into whichever operand can
   absorb it.  */
1337 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
1339 tem = TREE_OPERAND (t, 1);
1340 if (negate_expr_p (tem))
1341 return fold_build2 (TREE_CODE (t), type,
1342 TREE_OPERAND (t, 0), negate_expr (tem));
1343 tem = TREE_OPERAND (t, 0);
1344 if (negate_expr_p (tem))
1345 return fold_build2 (TREE_CODE (t), type,
1346 negate_expr (tem), TREE_OPERAND (t, 1));
1350 case TRUNC_DIV_EXPR:
1351 case ROUND_DIV_EXPR:
1352 case FLOOR_DIV_EXPR:
1354 case EXACT_DIV_EXPR:
1355 /* In general we can't negate A / B, because if A is INT_MIN and
1356 B is 1, we may turn this into INT_MIN / -1 which is undefined
1357 and actually traps on some architectures. But if overflow is
1358 undefined, we can negate, because - (INT_MIN / 1) is an
1360 if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
1362 const char * const warnmsg = G_("assuming signed overflow does not "
1363 "occur when negating a division");
1364 tem = TREE_OPERAND (t, 1);
1365 if (negate_expr_p (tem))
/* Warn only when the transformation actually relies on undefined signed
   overflow (non-constant divisor, or divisor 1).  */
1367 if (INTEGRAL_TYPE_P (type)
1368 && (TREE_CODE (tem) != INTEGER_CST
1369 || integer_onep (tem)))
1370 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
1371 return fold_build2 (TREE_CODE (t), type,
1372 TREE_OPERAND (t, 0), negate_expr (tem));
1374 tem = TREE_OPERAND (t, 0);
1375 if (negate_expr_p (tem))
1377 if (INTEGRAL_TYPE_P (type)
1378 && (TREE_CODE (tem) != INTEGER_CST
1379 || tree_int_cst_equal (tem, TYPE_MIN_VALUE (type))))
1380 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
1381 return fold_build2 (TREE_CODE (t), type,
1382 negate_expr (tem), TREE_OPERAND (t, 1));
1388 /* Convert -((double)float) into (double)(-float). */
1389 if (TREE_CODE (type) == REAL_TYPE)
1391 tem = strip_float_extensions (t);
1392 if (tem != t && negate_expr_p (tem))
1393 return fold_convert (type, negate_expr (tem));
1398 /* Negate -f(x) as f(-x). */
1399 if (negate_mathfn_p (builtin_mathfn_code (t))
1400 && negate_expr_p (CALL_EXPR_ARG (t, 0)))
1404 fndecl = get_callee_fndecl (t);
1405 arg = negate_expr (CALL_EXPR_ARG (t, 0));
1406 return build_call_expr (fndecl, 1, arg);
1411 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
1412 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
1414 tree op1 = TREE_OPERAND (t, 1);
/* Only when the shift count is exactly precision-1, i.e. the sign-bit
   extraction idiom; flipping the signedness then flips the result sign.  */
1415 if (TREE_INT_CST_HIGH (op1) == 0
1416 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
1417 == TREE_INT_CST_LOW (op1))
1419 tree ntype = TYPE_UNSIGNED (type)
1420 ? signed_type_for (type)
1421 : unsigned_type_for (type);
1422 tree temp = fold_convert (ntype, TREE_OPERAND (t, 0));
1423 temp = fold_build2 (RSHIFT_EXPR, ntype, temp, op1);
1424 return fold_convert (type, temp);
1436 /* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T can not be
1437 negated in a simpler way. Also allow for T to be NULL_TREE, in which case
1438 return NULL_TREE. */
/* NOTE(review): embedded-number gaps show elided lines (return type, local
   declarations, the NULL_TREE guard around lines 1442-1447, and the check
   that presumably gates line 1453).  Code lines kept byte-identical.  */
1441 negate_expr (tree t)
/* Remember the original type before stripping sign-preserving conversions,
   so the result can be converted back at the end.  */
1448 type = TREE_TYPE (t);
1449 STRIP_SIGN_NOPS (t);
1451 tem = fold_negate_expr (t);
/* Fall back to an explicit NEGATE_EXPR -- presumably guarded by a
   "tem == NULL_TREE" test on the elided line 1452; confirm.  */
1453 tem = build1 (NEGATE_EXPR, TREE_TYPE (t), t);
1454 return fold_convert (type, tem);
1457 /* Split a tree IN into a constant, literal and variable parts that could be
1458 combined with CODE to make IN. "constant" means an expression with
1459 TREE_CONSTANT but that isn't an actual constant. CODE must be a
1460 commutative arithmetic operation. Store the constant part into *CONP,
1461 the literal in *LITP and return the variable part. If a part isn't
1462 present, set it to null. If the tree does not decompose in this way,
1463 return the entire tree as the variable part and the other parts as null.
1465 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR. In that
1466 case, we negate an operand that was subtracted. Except if it is a
1467 literal for which we use *MINUS_LITP instead.
1469 If NEGATE_P is true, we are negating all of IN, again except a literal
1470 for which we use *MINUS_LITP instead.
1472 If IN is itself a literal or constant, return it as appropriate.
1474 Note that we do not guarantee that any of the three values will be the
1475 same type as IN, but they will have the same signedness and mode. */
/* NOTE(review): sampled listing -- braces, the initial out-parameter
   clearing, "var" declaration/return and several guard lines are elided.
   Code lines kept byte-identical.  */
1478 split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
1479 tree *minus_litp, int negate_p)
1487 /* Strip any conversions that don't change the machine mode or signedness. */
1488 STRIP_SIGN_NOPS (in);
/* Case 1: IN is itself a literal.  */
1490 if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
1491 || TREE_CODE (in) == FIXED_CST)
/* Case 2: IN is a binary op we may decompose -- either the requested CODE,
   or the opposite of +/- when association is permitted for the type.  */
1493 else if (TREE_CODE (in) == code
1494 || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
1495 && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
1496 /* We can associate addition and subtraction together (even
1497 though the C standard doesn't say so) for integers because
1498 the value is not affected. For reals, the value might be
1499 affected, so we can't. */
1500 && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
1501 || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
1503 tree op0 = TREE_OPERAND (in, 0);
1504 tree op1 = TREE_OPERAND (in, 1);
/* neg1_p records that op1 was subtracted; the neg_*_p flags remember which
   extracted part inherits that pending negation.  */
1505 int neg1_p = TREE_CODE (in) == MINUS_EXPR;
1506 int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;
1508 /* First see if either of the operands is a literal, then a constant. */
1509 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
1510 || TREE_CODE (op0) == FIXED_CST)
1511 *litp = op0, op0 = 0;
1512 else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
1513 || TREE_CODE (op1) == FIXED_CST)
1514 *litp = op1, neg_litp_p = neg1_p, op1 = 0;
1516 if (op0 != 0 && TREE_CONSTANT (op0))
1517 *conp = op0, op0 = 0;
1518 else if (op1 != 0 && TREE_CONSTANT (op1))
1519 *conp = op1, neg_conp_p = neg1_p, op1 = 0;
1521 /* If we haven't dealt with either operand, this is not a case we can
1522 decompose. Otherwise, VAR is either of the ones remaining, if any. */
1523 if (op0 != 0 && op1 != 0)
1528 var = op1, neg_var_p = neg1_p;
1530 /* Now do any needed negations. */
1532 *minus_litp = *litp, *litp = 0;
1534 *conp = negate_expr (*conp);
1536 var = negate_expr (var);
/* Case 3: IN is TREE_CONSTANT but not a literal -- it becomes *CONP.  */
1538 else if (TREE_CONSTANT (in))
/* Whole-expression negation (NEGATE_P); guards on the elided lines
   1544-1545 apparently select between these branches -- confirm.  */
1546 *minus_litp = *litp, *litp = 0;
1547 else if (*minus_litp)
1548 *litp = *minus_litp, *minus_litp = 0;
1549 *conp = negate_expr (*conp);
1550 var = negate_expr (var);
1556 /* Re-associate trees split by the above function. T1 and T2 are either
1557 expressions to associate or null. Return the new expression, if any. If
1558 we build an operation, do it in TYPE and with CODE. */
/* NOTE(review): sampled listing -- the return type, braces and the early
   null-handling of T1/T2 (lines 1562-1567) are elided.  Code lines kept
   byte-identical.  */
1561 associate_trees (tree t1, tree t2, enum tree_code code, tree type)
1568 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
1569 try to fold this since we will have infinite recursion. But do
1570 deal with any NEGATE_EXPRs. */
1571 if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
1572 || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
1574 if (code == PLUS_EXPR)
/* a + (-b) and (-a) + b are rebuilt as subtractions with build2 (not
   fold_build2) precisely to avoid the recursion noted above.  */
1576 if (TREE_CODE (t1) == NEGATE_EXPR)
1577 return build2 (MINUS_EXPR, type, fold_convert (type, t2),
1578 fold_convert (type, TREE_OPERAND (t1, 0)));
1579 else if (TREE_CODE (t2) == NEGATE_EXPR)
1580 return build2 (MINUS_EXPR, type, fold_convert (type, t1),
1581 fold_convert (type, TREE_OPERAND (t2, 0)));
1582 else if (integer_zerop (t2))
1583 return fold_convert (type, t1);
1585 else if (code == MINUS_EXPR)
1587 if (integer_zerop (t2))
1588 return fold_convert (type, t1);
1591 return build2 (code, type, fold_convert (type, t1),
1592 fold_convert (type, t2));
/* Common case: neither operand re-associates, so full folding is safe.  */
1595 return fold_build2 (code, type, fold_convert (type, t1),
1596 fold_convert (type, t2));
1599 /* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
1600 for use in int_const_binop, size_binop and size_diffop. */
/* NOTE(review): sampled listing -- the return type, braces and the early
   "return false" bodies (and apparently special sizetype handling on the
   elided lines 1608-1621) are missing.  Code lines kept byte-identical.  */
1603 int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
/* Both types must be integer or pointer types.  */
1605 if (TREE_CODE (type1) != INTEGER_TYPE && !POINTER_TYPE_P (type1))
1607 if (TREE_CODE (type2) != INTEGER_TYPE && !POINTER_TYPE_P (type2))
/* Otherwise equivalence means same signedness, precision and mode.  */
1622 return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
1623 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
1624 && TYPE_MODE (type1) == TYPE_MODE (type2);
1628 /* Combine two integer constants ARG1 and ARG2 under operation CODE
1629 to produce a new constant. Return NULL_TREE if we don't know how
1630 to evaluate CODE at compile-time.
1632 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
/* NOTE(review): sampled listing -- the return type, braces, many case
   labels, "hi"/"overflow"/"t" declarations and several "break;"/"return"
   lines are elided.  Code lines kept byte-identical.  The arithmetic works
   on a double-word (low/high HOST_WIDE_INT pair) representation.  */
1635 int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2, int notrunc)
1637 unsigned HOST_WIDE_INT int1l, int2l;
1638 HOST_WIDE_INT int1h, int2h;
1639 unsigned HOST_WIDE_INT low;
1641 unsigned HOST_WIDE_INT garbagel;
1642 HOST_WIDE_INT garbageh;
1644 tree type = TREE_TYPE (arg1);
1645 int uns = TYPE_UNSIGNED (type);
1647 = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
/* Unpack both operands into low/high word pairs.  */
1650 int1l = TREE_INT_CST_LOW (arg1);
1651 int1h = TREE_INT_CST_HIGH (arg1);
1652 int2l = TREE_INT_CST_LOW (arg2);
1653 int2h = TREE_INT_CST_HIGH (arg2);
/* Bitwise arms (case labels elided): OR, XOR, AND word-by-word.  */
1658 low = int1l | int2l, hi = int1h | int2h;
1662 low = int1l ^ int2l, hi = int1h ^ int2h;
1666 low = int1l & int2l, hi = int1h & int2h;
1672 /* It's unclear from the C standard whether shifts can overflow.
1673 The following code ignores overflow; perhaps a C standard
1674 interpretation ruling is needed. */
1675 lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
1682 lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
1687 overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
/* Subtraction is negate-then-add; OVERFLOW_SUM_SIGN detects the carry.  */
1691 neg_double (int2l, int2h, &low, &hi);
1692 add_double (int1l, int1h, low, hi, &low, &hi);
1693 overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
1697 overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
1700 case TRUNC_DIV_EXPR:
1701 case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
1702 case EXACT_DIV_EXPR:
1703 /* This is a shortcut for a common special case. */
1704 if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1705 && !TREE_OVERFLOW (arg1)
1706 && !TREE_OVERFLOW (arg2)
1707 && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1709 if (code == CEIL_DIV_EXPR)
1712 low = int1l / int2l, hi = 0;
1716 /* ... fall through ... */
1718 case ROUND_DIV_EXPR:
/* Division by zero is not folded (elided line presumably returns).  */
1719 if (int2h == 0 && int2l == 0)
1721 if (int2h == 0 && int2l == 1)
1723 low = int1l, hi = int1h;
1726 if (int1l == int2l && int1h == int2h
1727 && ! (int1l == 0 && int1h == 0))
1732 overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
1733 &low, &hi, &garbagel, &garbageh);
1736 case TRUNC_MOD_EXPR:
1737 case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
1738 /* This is a shortcut for a common special case. */
1739 if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1740 && !TREE_OVERFLOW (arg1)
1741 && !TREE_OVERFLOW (arg2)
1742 && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1744 if (code == CEIL_MOD_EXPR)
1746 low = int1l % int2l, hi = 0;
1750 /* ... fall through ... */
1752 case ROUND_MOD_EXPR:
1753 if (int2h == 0 && int2l == 0)
/* For modulus, the quotient outputs are the "garbage" slots.  */
1755 overflow = div_and_round_double (code, uns,
1756 int1l, int1h, int2l, int2h,
1757 &garbagel, &garbageh, &low, &hi);
/* MIN/MAX arm: compare double-word values, unsigned vs. signed.  */
1763 low = (((unsigned HOST_WIDE_INT) int1h
1764 < (unsigned HOST_WIDE_INT) int2h)
1765 || (((unsigned HOST_WIDE_INT) int1h
1766 == (unsigned HOST_WIDE_INT) int2h)
1769 low = (int1h < int2h
1770 || (int1h == int2h && int1l < int2l));
1772 if (low == (code == MIN_EXPR))
1773 low = int1l, hi = int1h;
1775 low = int2l, hi = int2h;
/* NOTRUNC path: build the raw wide constant and set overflow by hand.  */
1784 t = build_int_cst_wide (TREE_TYPE (arg1), low, hi);
1786 /* Propagate overflow flags ourselves. */
1787 if (((!uns || is_sizetype) && overflow)
1788 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
1791 TREE_OVERFLOW (t) = 1;
/* Truncating path: force the value to fit the type, propagating
   overflow.  */
1795 t = force_fit_type_double (TREE_TYPE (arg1), low, hi, 1,
1796 ((!uns || is_sizetype) && overflow)
1797 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
1802 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1803 constant. We assume ARG1 and ARG2 have the same data type, or at least
1804 are the same kind of constant and the same machine mode. Return zero if
1805 combining the constants is not allowed in the current operating mode.
1807 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
/* NOTE(review): sampled listing -- the return type, braces, switch
   statements, several case labels and many "return NULL_TREE"/"break;"
   lines are elided.  Code lines kept byte-identical.  Dispatches on the
   constant kind of ARG1: INTEGER_CST, REAL_CST, FIXED_CST, COMPLEX_CST.  */
1810 const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
1812 /* Sanity check for the recursive cases. */
1819 if (TREE_CODE (arg1) == INTEGER_CST)
1820 return int_const_binop (code, arg1, arg2, notrunc);
1822 if (TREE_CODE (arg1) == REAL_CST)
1824 enum machine_mode mode;
1827 REAL_VALUE_TYPE value;
1828 REAL_VALUE_TYPE result;
1832 /* The following codes are handled by real_arithmetic. */
1847 d1 = TREE_REAL_CST (arg1);
1848 d2 = TREE_REAL_CST (arg2);
1850 type = TREE_TYPE (arg1);
1851 mode = TYPE_MODE (type);
1853 /* Don't perform operation if we honor signaling NaNs and
1854 either operand is a NaN. */
1855 if (HONOR_SNANS (mode)
1856 && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
1859 /* Don't perform operation if it would raise a division
1860 by zero exception. */
1861 if (code == RDIV_EXPR
1862 && REAL_VALUES_EQUAL (d2, dconst0)
1863 && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
1866 /* If either operand is a NaN, just return it. Otherwise, set up
1867 for floating-point trap; we return an overflow. */
1868 if (REAL_VALUE_ISNAN (d1))
1870 else if (REAL_VALUE_ISNAN (d2))
/* Compute in infinite precision, then round to the target mode; INEXACT
   records whether rounding lost information.  */
1873 inexact = real_arithmetic (&value, code, &d1, &d2);
1874 real_convert (&result, mode, &value);
1876 /* Don't constant fold this floating point operation if
1877 the result has overflowed and flag_trapping_math. */
1878 if (flag_trapping_math
1879 && MODE_HAS_INFINITIES (mode)
1880 && REAL_VALUE_ISINF (result)
1881 && !REAL_VALUE_ISINF (d1)
1882 && !REAL_VALUE_ISINF (d2))
1885 /* Don't constant fold this floating point operation if the
1886 result may dependent upon the run-time rounding mode and
1887 flag_rounding_math is set, or if GCC's software emulation
1888 is unable to accurately represent the result. */
1889 if ((flag_rounding_math
1890 || (REAL_MODE_FORMAT_COMPOSITE_P (mode)
1891 && !flag_unsafe_math_optimizations))
1892 && (inexact || !real_identical (&result, &value)))
1895 t = build_real (type, result);
1897 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
1901 if (TREE_CODE (arg1) == FIXED_CST)
1903 FIXED_VALUE_TYPE f1;
1904 FIXED_VALUE_TYPE f2;
1905 FIXED_VALUE_TYPE result;
1910 /* The following codes are handled by fixed_arithmetic. */
1916 case TRUNC_DIV_EXPR:
1917 f2 = TREE_FIXED_CST (arg2);
/* Shift arms (labels elided): the count is an INTEGER_CST, so build F2
   directly from its words.  */
1922 f2.data.high = TREE_INT_CST_HIGH (arg2);
1923 f2.data.low = TREE_INT_CST_LOW (arg2);
1931 f1 = TREE_FIXED_CST (arg1);
1932 type = TREE_TYPE (arg1);
1933 sat_p = TYPE_SATURATING (type);
1934 overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
1935 t = build_fixed (type, result);
1936 /* Propagate overflow flags. */
1937 if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
1939 TREE_OVERFLOW (t) = 1;
1940 TREE_CONSTANT_OVERFLOW (t) = 1;
1942 else if (TREE_CONSTANT_OVERFLOW (arg1) | TREE_CONSTANT_OVERFLOW (arg2))
1943 TREE_CONSTANT_OVERFLOW (t) = 1;
1947 if (TREE_CODE (arg1) == COMPLEX_CST)
1949 tree type = TREE_TYPE (arg1);
1950 tree r1 = TREE_REALPART (arg1);
1951 tree i1 = TREE_IMAGPART (arg1);
1952 tree r2 = TREE_REALPART (arg2);
1953 tree i2 = TREE_IMAGPART (arg2);
/* Componentwise arms (PLUS/MINUS; labels elided).  */
1960 real = const_binop (code, r1, r2, notrunc);
1961 imag = const_binop (code, i1, i2, notrunc);
/* Complex multiplication: (r1+i1*i)(r2+i2*i).  */
1965 real = const_binop (MINUS_EXPR,
1966 const_binop (MULT_EXPR, r1, r2, notrunc),
1967 const_binop (MULT_EXPR, i1, i2, notrunc),
1969 imag = const_binop (PLUS_EXPR,
1970 const_binop (MULT_EXPR, r1, i2, notrunc),
1971 const_binop (MULT_EXPR, i1, r2, notrunc),
/* Complex division by |r2+i2*i|^2 (the textbook formula).  */
1978 = const_binop (PLUS_EXPR,
1979 const_binop (MULT_EXPR, r2, r2, notrunc),
1980 const_binop (MULT_EXPR, i2, i2, notrunc),
1983 = const_binop (PLUS_EXPR,
1984 const_binop (MULT_EXPR, r1, r2, notrunc),
1985 const_binop (MULT_EXPR, i1, i2, notrunc),
1988 = const_binop (MINUS_EXPR,
1989 const_binop (MULT_EXPR, i1, r2, notrunc),
1990 const_binop (MULT_EXPR, r1, i2, notrunc),
1993 if (INTEGRAL_TYPE_P (TREE_TYPE (r1)))
1994 code = TRUNC_DIV_EXPR;
1996 real = const_binop (code, t1, magsquared, notrunc);
1997 imag = const_binop (code, t2, magsquared, notrunc);
2006 return build_complex (type, real, imag);
2012 /* Create a size type INT_CST node with NUMBER sign extended. KIND
2013 indicates which particular sizetype to create. */
/* NOTE(review): return type and braces elided in this sampled listing.
   KIND indexes sizetype_tab to select sizetype/bitsizetype/etc.  */
2016 size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
2018 return build_int_cst (sizetype_tab[(int) kind], number);
2021 /* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE
2022 is a tree code. The type of the result is taken from the operands.
2023 Both must be equivalent integer types, ala int_binop_types_match_p.
2024 If the operands are constant, so is the result. */
/* NOTE(review): return type, braces and the "return arg..." bodies of the
   fast-path identities are elided in this sampled listing.  Code lines
   kept byte-identical.  */
2027 size_binop (enum tree_code code, tree arg0, tree arg1)
2029 tree type = TREE_TYPE (arg0);
2031 if (arg0 == error_mark_node || arg1 == error_mark_node)
2032 return error_mark_node;
/* Both operand types must be equivalent (signedness/precision/mode).  */
2034 gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
2037 /* Handle the special case of two integer constants faster. */
2038 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2040 /* And some specific cases even faster than that. */
/* Identities 0+x, x+0, x-0, 1*x short-circuit int_const_binop; the
   overflow checks keep a tainted operand from being returned clean.  */
2041 if (code == PLUS_EXPR)
2043 if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
2045 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
2048 else if (code == MINUS_EXPR)
2050 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
2053 else if (code == MULT_EXPR)
2055 if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
2059 /* Handle general case of two integer constants. */
2060 return int_const_binop (code, arg0, arg1, 0);
/* Non-constant operands: fall back to the generic folder.  */
2063 return fold_build2 (code, type, arg0, arg1);
2066 /* Given two values, either both of sizetype or both of bitsizetype,
2067 compute the difference between the two values. Return the value
2068 in signed type corresponding to the type of the operands. */
/* NOTE(review): return type, braces and the "ctype = ssizetype" assignment
   after the "type == sizetype" test are elided in this sampled listing.
   Code lines kept byte-identical.  */
2071 size_diffop (tree arg0, tree arg1)
2073 tree type = TREE_TYPE (arg0);
2076 gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
2079 /* If the type is already signed, just do the simple thing. */
2080 if (!TYPE_UNSIGNED (type))
2081 return size_binop (MINUS_EXPR, arg0, arg1);
/* Pick the signed counterpart of the (unsigned) operand type.  */
2083 if (type == sizetype)
2085 else if (type == bitsizetype)
2086 ctype = sbitsizetype;
2088 ctype = signed_type_for (type);
2090 /* If either operand is not a constant, do the conversions to the signed
2091 type and subtract. The hardware will do the right thing with any
2092 overflow in the subtraction. */
2093 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
2094 return size_binop (MINUS_EXPR, fold_convert (ctype, arg0),
2095 fold_convert (ctype, arg1));
2097 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
2098 Otherwise, subtract the other way, convert to CTYPE (we know that can't
2099 overflow) and negate (which can't either). Special-case a result
2100 of zero while we're here. */
2101 if (tree_int_cst_equal (arg0, arg1))
2102 return build_int_cst (ctype, 0);
2103 else if (tree_int_cst_lt (arg1, arg0))
2104 return fold_convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
2106 return size_binop (MINUS_EXPR, build_int_cst (ctype, 0),
2107 fold_convert (ctype, size_binop (MINUS_EXPR,
2111 /* A subroutine of fold_convert_const handling conversions of an
2112 INTEGER_CST to another integer type. */
/* NOTE(review): return type, braces, the "tree t;" declaration and the
   final "return t;" are elided in this sampled listing.  Code lines kept
   byte-identical.  */
2115 fold_convert_const_int_from_int (tree type, const_tree arg1)
2119 /* Given an integer constant, make new constant with new type,
2120 appropriately sign-extended or truncated. */
/* The third argument computes the "overflowable" flag; the final argument
   sign-extends and folds in any pre-existing overflow on ARG1.  */
2121 t = force_fit_type_double (type, TREE_INT_CST_LOW (arg1),
2122 TREE_INT_CST_HIGH (arg1),
2123 /* Don't set the overflow when
2124 converting from a pointer, */
2125 !POINTER_TYPE_P (TREE_TYPE (arg1))
2126 /* or to a sizetype with same signedness
2127 and the precision is unchanged.
2128 ??? sizetype is always sign-extended,
2129 but its signedness depends on the
2130 frontend. Thus we see spurious overflows
2131 here if we do not check this. */
2132 && !((TYPE_PRECISION (TREE_TYPE (arg1))
2133 == TYPE_PRECISION (type))
2134 && (TYPE_UNSIGNED (TREE_TYPE (arg1))
2135 == TYPE_UNSIGNED (type))
2136 && ((TREE_CODE (TREE_TYPE (arg1)) == INTEGER_TYPE
2137 && TYPE_IS_SIZETYPE (TREE_TYPE (arg1)))
2138 || (TREE_CODE (type) == INTEGER_TYPE
2139 && TYPE_IS_SIZETYPE (type)))),
/* A negative value converted signed->unsigned counts as overflow.  */
2140 (TREE_INT_CST_HIGH (arg1) < 0
2141 && (TYPE_UNSIGNED (type)
2142 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
2143 | TREE_OVERFLOW (arg1));
2148 /* A subroutine of fold_convert_const handling conversions a REAL_CST
2149 to an integer type. */
/* NOTE(review): return type, braces, the "overflow"/"t"/"r" declarations,
   other FIX_* case labels and the saturating "goto"/assignment bodies are
   elided in this sampled listing.  Code lines kept byte-identical.  */
2152 fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
2157 /* The following code implements the floating point to integer
2158 conversion rules required by the Java Language Specification,
2159 that IEEE NaNs are mapped to zero and values that overflow
2160 the target precision saturate, i.e. values greater than
2161 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
2162 are mapped to INT_MIN. These semantics are allowed by the
2163 C and C++ standards that simply state that the behavior of
2164 FP-to-integer conversion is unspecified upon overflow. */
2166 HOST_WIDE_INT high, low;
2168 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
2172 case FIX_TRUNC_EXPR:
2173 real_trunc (&r, VOIDmode, &x);
2180 /* If R is NaN, return zero and show we have an overflow. */
2181 if (REAL_VALUE_ISNAN (r))
2188 /* See if R is less than the lower bound or greater than the
/* Saturate at TYPE's minimum when R underflows it.  */
2193 tree lt = TYPE_MIN_VALUE (type);
2194 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
2195 if (REAL_VALUES_LESS (r, l))
2198 high = TREE_INT_CST_HIGH (lt);
2199 low = TREE_INT_CST_LOW (lt);
/* Saturate at TYPE's maximum when R overflows it.  */
2205 tree ut = TYPE_MAX_VALUE (type);
2208 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
2209 if (REAL_VALUES_LESS (u, r))
2212 high = TREE_INT_CST_HIGH (ut);
2213 low = TREE_INT_CST_LOW (ut);
/* In-range value: convert the (already truncated/rounded) real.  */
2219 REAL_VALUE_TO_INT (&low, &high, r);
2221 t = force_fit_type_double (type, low, high, -1,
2222 overflow | TREE_OVERFLOW (arg1));
2226 /* A subroutine of fold_convert_const handling conversions of a
2227 FIXED_CST to an integer type. */
2230 fold_convert_const_int_from_fixed (tree type, const_tree arg1)
2233 double_int temp, temp_trunc;
2236 /* Right shift FIXED_CST to temp by fbit. */
2237 temp = TREE_FIXED_CST (arg1).data;
2238 mode = TREE_FIXED_CST (arg1).mode;
2239 if (GET_MODE_FBIT (mode) < 2 * HOST_BITS_PER_WIDE_INT)
2241 lshift_double (temp.low, temp.high,
2242 - GET_MODE_FBIT (mode), 2 * HOST_BITS_PER_WIDE_INT,
2243 &temp.low, &temp.high, SIGNED_FIXED_POINT_MODE_P (mode));
2245 /* Left shift temp to temp_trunc by fbit. */
2246 lshift_double (temp.low, temp.high,
2247 GET_MODE_FBIT (mode), 2 * HOST_BITS_PER_WIDE_INT,
2248 &temp_trunc.low, &temp_trunc.high,
2249 SIGNED_FIXED_POINT_MODE_P (mode));
2256 temp_trunc.high = 0;
2259 /* If FIXED_CST is negative, we need to round the value toward 0.
2260 By checking if the fractional bits are not zero to add 1 to temp. */
2261 if (SIGNED_FIXED_POINT_MODE_P (mode) && temp_trunc.high < 0
2262 && !double_int_equal_p (TREE_FIXED_CST (arg1).data, temp_trunc))
2267 temp = double_int_add (temp, one);
2270 /* Given a fixed-point constant, make new constant with new type,
2271 appropriately sign-extended or truncated. */
2272 t = force_fit_type_double (type, temp.low, temp.high, -1,
2274 && (TYPE_UNSIGNED (type)
2275 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
2276 | TREE_OVERFLOW (arg1));
2281 /* A subroutine of fold_convert_const handling conversions a REAL_CST
2282 to another floating point type. */
/* NOTE(review): return type, braces, "tree t;" and the final "return t;"
   are elided in this sampled listing.  Code lines kept byte-identical.  */
2285 fold_convert_const_real_from_real (tree type, const_tree arg1)
2287 REAL_VALUE_TYPE value;
/* Round ARG1's value to the target mode and carry the overflow bit.  */
2290 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
2291 t = build_real (type, value);
2293 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2297 /* A subroutine of fold_convert_const handling conversions a FIXED_CST
2298 to a floating point type. */
/* NOTE(review): return type, braces, "tree t;" and the final "return t;"
   are elided in this sampled listing.  Code lines kept byte-identical.  */
2301 fold_convert_const_real_from_fixed (tree type, const_tree arg1)
2303 REAL_VALUE_TYPE value;
/* Convert the fixed-point value to a real in the target mode and carry
   both overflow flags over to the new node.  */
2306 real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
2307 t = build_real (type, value);
2309 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2310 TREE_CONSTANT_OVERFLOW (t)
2311 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
2315 /* A subroutine of fold_convert_const handling conversions a FIXED_CST
2316 to another fixed-point type. */
/* NOTE(review): return type, braces, the "tree t;"/"bool overflow_p;"
   declarations and the final "return t;" are elided in this sampled
   listing.  Code lines kept byte-identical.  */
2319 fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
2321 FIXED_VALUE_TYPE value;
/* fixed_convert reports overflow; saturating types clamp instead.  */
2325 overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
2326 TYPE_SATURATING (type));
2327 t = build_fixed (type, value);
2329 /* Propagate overflow flags. */
2330 if (overflow_p | TREE_OVERFLOW (arg1))
2332 TREE_OVERFLOW (t) = 1;
2333 TREE_CONSTANT_OVERFLOW (t) = 1;
2335 else if (TREE_CONSTANT_OVERFLOW (arg1))
2336 TREE_CONSTANT_OVERFLOW (t) = 1;
2340 /* A subroutine of fold_convert_const handling conversions an INTEGER_CST
2341 to a fixed-point type. */
/* NOTE(review): return type, braces, the "tree t;"/"bool overflow_p;"
   declarations and the final "return t;" are elided in this sampled
   listing.  Code lines kept byte-identical.  */
2344 fold_convert_const_fixed_from_int (tree type, const_tree arg1)
2346 FIXED_VALUE_TYPE value;
/* Convert the integer constant's words, honoring the source signedness
   and target saturation.  */
2350 overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type),
2351 TREE_INT_CST (arg1),
2352 TYPE_UNSIGNED (TREE_TYPE (arg1)),
2353 TYPE_SATURATING (type));
2354 t = build_fixed (type, value);
2356 /* Propagate overflow flags. */
2357 if (overflow_p | TREE_OVERFLOW (arg1))
2359 TREE_OVERFLOW (t) = 1;
2360 TREE_CONSTANT_OVERFLOW (t) = 1;
2362 else if (TREE_CONSTANT_OVERFLOW (arg1))
2363 TREE_CONSTANT_OVERFLOW (t) = 1;
2367 /* A subroutine of fold_convert_const handling conversions a REAL_CST
2368 to a fixed-point type. */
/* NOTE(review): return type, braces, the "tree t;"/"bool overflow_p;"
   declarations and the final "return t;" are elided in this sampled
   listing.  Code lines kept byte-identical.  */
2371 fold_convert_const_fixed_from_real (tree type, const_tree arg1)
2373 FIXED_VALUE_TYPE value;
/* Convert the real constant, clamping if the target type saturates.  */
2377 overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
2378 &TREE_REAL_CST (arg1),
2379 TYPE_SATURATING (type));
2380 t = build_fixed (type, value);
2382 /* Propagate overflow flags. */
2383 if (overflow_p | TREE_OVERFLOW (arg1))
2385 TREE_OVERFLOW (t) = 1;
2386 TREE_CONSTANT_OVERFLOW (t) = 1;
2388 else if (TREE_CONSTANT_OVERFLOW (arg1))
2389 TREE_CONSTANT_OVERFLOW (t) = 1;
2393 /* Attempt to fold type conversion operation CODE of expression ARG1 to
2394 type TYPE. If no simplification can be done return NULL_TREE. */
/* NOTE(review): return type, braces, the "return arg1;" under the
   same-type test and the final "return NULL_TREE;" are elided in this
   sampled listing.  Code lines kept byte-identical.  Pure dispatcher:
   pick the subroutine by (target type kind, constant kind).  */
2397 fold_convert_const (enum tree_code code, tree type, tree arg1)
2399 if (TREE_TYPE (arg1) == type)
2402 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
2404 if (TREE_CODE (arg1) == INTEGER_CST)
2405 return fold_convert_const_int_from_int (type, arg1);
2406 else if (TREE_CODE (arg1) == REAL_CST)
2407 return fold_convert_const_int_from_real (code, type, arg1);
2408 else if (TREE_CODE (arg1) == FIXED_CST)
2409 return fold_convert_const_int_from_fixed (type, arg1);
2411 else if (TREE_CODE (type) == REAL_TYPE)
2413 if (TREE_CODE (arg1) == INTEGER_CST)
2414 return build_real_from_int_cst (type, arg1);
2415 else if (TREE_CODE (arg1) == REAL_CST)
2416 return fold_convert_const_real_from_real (type, arg1);
2417 else if (TREE_CODE (arg1) == FIXED_CST)
2418 return fold_convert_const_real_from_fixed (type, arg1);
2420 else if (TREE_CODE (type) == FIXED_POINT_TYPE)
2422 if (TREE_CODE (arg1) == FIXED_CST)
2423 return fold_convert_const_fixed_from_fixed (type, arg1);
2424 else if (TREE_CODE (arg1) == INTEGER_CST)
2425 return fold_convert_const_fixed_from_int (type, arg1);
2426 else if (TREE_CODE (arg1) == REAL_CST)
2427 return fold_convert_const_fixed_from_real (type, arg1);
2432 /* Construct a vector of zero elements of vector type TYPE. */
/* NOTE(review): return type, braces and the "elem"/"list"/"units"/"i"
   declarations are elided in this sampled listing.  Code lines kept
   byte-identical.  */
2435 build_zero_vector (tree type)
/* One zero constant of the element type, replicated UNITS times into a
   TREE_LIST consumed by build_vector.  */
2440 elem = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2441 units = TYPE_VECTOR_SUBPARTS (type);
2444 for (i = 0; i < units; i++)
2445 list = tree_cons (NULL_TREE, elem, list);
2446 return build_vector (type, list);
2449 /* Returns true, if ARG is convertible to TYPE using a NOP_EXPR. */
/* NOTE(review): return type, braces, several case labels (the REAL_TYPE /
   FIXED_POINT_TYPE group and the default arm) and the "return false"
   bodies are elided in this sampled listing.  Code lines kept
   byte-identical.  */
2452 fold_convertible_p (const_tree type, const_tree arg)
2454 tree orig = TREE_TYPE (arg);
2459 if (TREE_CODE (arg) == ERROR_MARK
2460 || TREE_CODE (type) == ERROR_MARK
2461 || TREE_CODE (orig) == ERROR_MARK)
2464 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2467 switch (TREE_CODE (type))
2469 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2470 case POINTER_TYPE: case REFERENCE_TYPE:
2472 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2473 || TREE_CODE (orig) == OFFSET_TYPE)
/* Vector->integral is NOP-convertible only at equal bit size.  */
2475 return (TREE_CODE (orig) == VECTOR_TYPE
2476 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2479 case FIXED_POINT_TYPE:
/* For the remaining kinds the tree codes must simply match.  */
2483 return TREE_CODE (type) == TREE_CODE (orig);
2490 /* Convert expression ARG to type TYPE. Used by the middle-end for
2491 simple conversions in preference to calling the front-end's convert. */
/* NOTE(review): excerpted text — interior lines are elided per the embedded
   line-number gaps.  Dispatches on the target TYPE's tree code and, within
   each case, on the source type ORIG and constant-ness of ARG.  */
2494 fold_convert (tree type, tree arg)
2496 tree orig = TREE_TYPE (arg);
/* Propagate error nodes unchanged.  */
2502 if (TREE_CODE (arg) == ERROR_MARK
2503 || TREE_CODE (type) == ERROR_MARK
2504 || TREE_CODE (orig) == ERROR_MARK)
2505 return error_mark_node;
2507 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2508 return fold_build1 (NOP_EXPR, type, arg);
2510 switch (TREE_CODE (type))
2512 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2513 case POINTER_TYPE: case REFERENCE_TYPE:
/* Constant-fold integer constants up front when possible.  */
2515 if (TREE_CODE (arg) == INTEGER_CST)
2517 tem = fold_convert_const (NOP_EXPR, type, arg);
2518 if (tem != NULL_TREE)
2521 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2522 || TREE_CODE (orig) == OFFSET_TYPE)
2523 return fold_build1 (NOP_EXPR, type, arg);
/* Complex -> scalar: take the real part, then convert that.  */
2524 if (TREE_CODE (orig) == COMPLEX_TYPE)
2526 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2527 return fold_convert (type, tem);
/* Only a same-sized vector source remains legal here.  */
2529 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
2530 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2531 return fold_build1 (NOP_EXPR, type, arg);
/* (Presumably the REAL_TYPE case — target case label elided.)  */
2534 if (TREE_CODE (arg) == INTEGER_CST)
2536 tem = fold_convert_const (FLOAT_EXPR, type, arg);
2537 if (tem != NULL_TREE)
2540 else if (TREE_CODE (arg) == REAL_CST)
2542 tem = fold_convert_const (NOP_EXPR, type, arg);
2543 if (tem != NULL_TREE)
2546 else if (TREE_CODE (arg) == FIXED_CST)
2548 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2549 if (tem != NULL_TREE)
2553 switch (TREE_CODE (orig))
2556 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2557 case POINTER_TYPE: case REFERENCE_TYPE:
2558 return fold_build1 (FLOAT_EXPR, type, arg);
2561 return fold_build1 (NOP_EXPR, type, arg);
2563 case FIXED_POINT_TYPE:
2564 return fold_build1 (FIXED_CONVERT_EXPR, type, arg);
/* Complex source: convert its real part.  */
2567 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2568 return fold_convert (type, tem);
2574 case FIXED_POINT_TYPE:
2575 if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
2576 || TREE_CODE (arg) == REAL_CST)
2578 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2579 if (tem != NULL_TREE)
2583 switch (TREE_CODE (orig))
2585 case FIXED_POINT_TYPE:
2590 return fold_build1 (FIXED_CONVERT_EXPR, type, arg);
2593 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2594 return fold_convert (type, tem);
/* (Presumably the COMPLEX_TYPE case — case label elided.)  */
2601 switch (TREE_CODE (orig))
2604 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2605 case POINTER_TYPE: case REFERENCE_TYPE:
2607 case FIXED_POINT_TYPE:
/* Scalar -> complex: real part is the converted scalar, imag part zero.  */
2608 return build2 (COMPLEX_EXPR, type,
2609 fold_convert (TREE_TYPE (type), arg),
2610 fold_convert (TREE_TYPE (type), integer_zero_node));
2615 if (TREE_CODE (arg) == COMPLEX_EXPR)
2617 rpart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 0));
2618 ipart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 1));
2619 return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
/* ARG is used twice below (real and imag part), so wrap in SAVE_EXPR
   to evaluate it only once.  */
2622 arg = save_expr (arg);
2623 rpart = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2624 ipart = fold_build1 (IMAGPART_EXPR, TREE_TYPE (orig), arg);
2625 rpart = fold_convert (TREE_TYPE (type), rpart);
2626 ipart = fold_convert (TREE_TYPE (type), ipart);
2627 return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
/* (Presumably the VECTOR_TYPE case — case label elided.)  */
2635 if (integer_zerop (arg))
2636 return build_zero_vector (type);
2637 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2638 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2639 || TREE_CODE (orig) == VECTOR_TYPE);
2640 return fold_build1 (VIEW_CONVERT_EXPR, type, arg);
/* Conversion to void: fold away the ignored result.  */
2643 tem = fold_ignored_result (arg);
2644 if (TREE_CODE (tem) == GIMPLE_MODIFY_STMT)
2646 return fold_build1 (NOP_EXPR, type, tem);
2653 /* Return false if expr can be assumed not to be an lvalue, true
/* (Excerpt: comment continuation and some case labels elided.)  */
2657 maybe_lvalue_p (const_tree x)
2659 /* We only need to wrap lvalue tree codes. */
2660 switch (TREE_CODE (x))
2671 case ALIGN_INDIRECT_REF:
2672 case MISALIGNED_INDIRECT_REF:
2674 case ARRAY_RANGE_REF:
2680 case PREINCREMENT_EXPR:
2681 case PREDECREMENT_EXPR:
2683 case TRY_CATCH_EXPR:
2684 case WITH_CLEANUP_EXPR:
2687 case GIMPLE_MODIFY_STMT:
2696 /* Assume the worst for front-end tree codes. */
/* Codes beyond NUM_TREE_CODES are language-specific; treat them as
   possibly-lvalue since we cannot inspect them here.  */
2697 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2705 /* Return an expr equal to X but certainly not valid as an lvalue. */
2710 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
/* Only wrap codes that could be lvalues; everything else is returned
   as-is (elided early-return presumed between these lines).  */
2715 if (! maybe_lvalue_p (x))
2717 return build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
2720 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2721 Zero means allow extended lvalues. */
/* Global flag set by the front end; read-only here.  */
2723 int pedantic_lvalues;
2725 /* When pedantic, return an expr equal to X but certainly not valid as a
2726 pedantic lvalue. Otherwise, return X. */
2729 pedantic_non_lvalue (tree x)
2731 if (pedantic_lvalues)
2732 return non_lvalue (x);
2737 /* Given a tree comparison code, return the code that is the logical inverse
2738 of the given code. It is not safe to do this for floating-point
2739 comparisons, except for NE_EXPR and EQ_EXPR, so we receive a machine mode
2740 as well: if reversing the comparison is unsafe, return ERROR_MARK. */
2743 invert_tree_comparison (enum tree_code code, bool honor_nans)
/* With trapping math and NaNs, inversion could change trap behavior;
   (the bail-out body is elided in this excerpt).  */
2745 if (honor_nans && flag_trapping_math)
/* Each ordered comparison inverts to its unordered counterpart when
   NaNs must be honored (GT -> UNLE, GE -> UNLT, LT -> UNGE, LE -> UNGT);
   the case labels are elided here.  */
2755 return honor_nans ? UNLE_EXPR : LE_EXPR;
2757 return honor_nans ? UNLT_EXPR : LT_EXPR;
2759 return honor_nans ? UNGE_EXPR : GE_EXPR;
2761 return honor_nans ? UNGT_EXPR : GT_EXPR;
2775 return UNORDERED_EXPR;
2776 case UNORDERED_EXPR:
2777 return ORDERED_EXPR;
2783 /* Similar, but return the comparison that results if the operands are
2784 swapped. This is safe for floating-point. */
2787 swap_tree_comparison (enum tree_code code)
/* (Excerpt: switch body largely elided; only this label survives.)  */
2794 case UNORDERED_EXPR:
2820 /* Convert a comparison tree code from an enum tree_code representation
2821 into a compcode bit-based encoding. This function is the inverse of
2822 compcode_to_comparison. */
2824 static enum comparison_code
2825 comparison_to_compcode (enum tree_code code)
/* (Excerpt: most case labels elided; the returns map each tree code to
   its COMPCODE_* bitmask equivalent.)  */
2842 return COMPCODE_ORD;
2843 case UNORDERED_EXPR:
2844 return COMPCODE_UNORD;
2846 return COMPCODE_UNLT;
2848 return COMPCODE_UNEQ;
2850 return COMPCODE_UNLE;
2852 return COMPCODE_UNGT;
2854 return COMPCODE_LTGT;
2856 return COMPCODE_UNGE;
2862 /* Convert a compcode bit-based encoding of a comparison operator back
2863 to GCC's enum tree_code representation. This function is the
2864 inverse of comparison_to_compcode. */
2866 static enum tree_code
2867 compcode_to_comparison (enum comparison_code code)
/* (Excerpt: most case labels elided.)  */
2884 return ORDERED_EXPR;
2885 case COMPCODE_UNORD:
2886 return UNORDERED_EXPR;
2904 /* Return a tree for the comparison which is the combination of
2905 doing the AND or OR (depending on CODE) of the two operations LCODE
2906 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2907 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2908 if this makes the transformation invalid. */
2911 combine_comparisons (enum tree_code code, enum tree_code lcode,
2912 enum tree_code rcode, tree truth_type,
2913 tree ll_arg, tree lr_arg)
2915 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2916 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2917 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2918 enum comparison_code compcode;
/* AND combines by bit-intersection, OR by bit-union of the compcodes.  */
2922 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2923 compcode = lcompcode & rcompcode;
2926 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2927 compcode = lcompcode | rcompcode;
2936 /* Eliminate unordered comparisons, as well as LTGT and ORD
2937 which are not used unless the mode has NaNs. */
2938 compcode &= ~COMPCODE_UNORD;
2939 if (compcode == COMPCODE_LTGT)
2940 compcode = COMPCODE_NE;
2941 else if (compcode == COMPCODE_ORD)
2942 compcode = COMPCODE_TRUE;
2944 else if (flag_trapping_math)
2946 /* Check that the original operation and the optimized ones will trap
2947 under the same condition. */
/* A comparison traps on NaN operands when it is an ordered comparison
   other than EQ/ORD (those never signal).  */
2948 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2949 && (lcompcode != COMPCODE_EQ)
2950 && (lcompcode != COMPCODE_ORD);
2951 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2952 && (rcompcode != COMPCODE_EQ)
2953 && (rcompcode != COMPCODE_ORD);
2954 bool trap = (compcode & COMPCODE_UNORD) == 0
2955 && (compcode != COMPCODE_EQ)
2956 && (compcode != COMPCODE_ORD);
2958 /* In a short-circuited boolean expression the LHS might be
2959 such that the RHS, if evaluated, will never trap. For
2960 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2961 if neither x nor y is NaN. (This is a mixed blessing: for
2962 example, the expression above will never trap, hence
2963 optimizing it to x < y would be invalid). */
2964 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2965 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2968 /* If the comparison was short-circuited, and only the RHS
2969 trapped, we may now generate a spurious trap. */
2971 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2974 /* If we changed the conditions that cause a trap, we lose. */
2975 if ((ltrap || rtrap) != trap)
/* Degenerate compcodes fold to constants; otherwise rebuild the
   combined comparison on the shared operands.  */
2979 if (compcode == COMPCODE_TRUE)
2980 return constant_boolean_node (true, truth_type);
2981 else if (compcode == COMPCODE_FALSE)
2982 return constant_boolean_node (false, truth_type);
2984 return fold_build2 (compcode_to_comparison (compcode),
2985 truth_type, ll_arg, lr_arg);
2988 /* Return nonzero if CODE is a tree code that represents a truth value. */
2991 truth_value_p (enum tree_code code)
/* True for all comparison-class codes plus the logical connectives.  */
2993 return (TREE_CODE_CLASS (code) == tcc_comparison
2994 || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
2995 || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
2996 || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
2999 /* Return nonzero if two operands (typically of the same tree node)
3000 are necessarily equal. If either argument has side-effects this
3001 function returns zero. FLAGS modifies behavior as follows:
3003 If OEP_ONLY_CONST is set, only return nonzero for constants.
3004 This function tests whether the operands are indistinguishable;
3005 it does not test whether they are equal using C's == operation.
3006 The distinction is important for IEEE floating point, because
3007 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
3008 (2) two NaNs may be indistinguishable, but NaN!=NaN.
3010 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
3011 even though it may hold multiple values during a function.
3012 This is because a GCC tree node guarantees that nothing else is
3013 executed between the evaluation of its "operands" (which may often
3014 be evaluated in arbitrary order). Hence if the operands themselves
3015 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
3016 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
3017 unset means assuming isochronic (or instantaneous) tree equivalence.
3018 Unless comparing arbitrary expression trees, such as from different
3019 statements, this flag can usually be left unset.
3021 If OEP_PURE_SAME is set, then pure functions with identical arguments
3022 are considered the same. It is used when the caller has other ways
3023 to ensure that global memory is unchanged in between. */
/* NOTE(review): excerpted text — interior lines are elided throughout,
   per the embedded line-number gaps.  */
3026 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
3028 /* If either is ERROR_MARK, they aren't equal. */
3029 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK)
3032 /* If both types don't have the same signedness, then we can't consider
3033 them equal. We must check this before the STRIP_NOPS calls
3034 because they may change the signedness of the arguments. */
3035 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1)))
3038 /* If both types don't have the same precision, then it is not safe
3040 if (TYPE_PRECISION (TREE_TYPE (arg0)) != TYPE_PRECISION (TREE_TYPE (arg1)))
3046 /* In case both args are comparisons but with different comparison
3047 code, try to swap the comparison operands of one arg to produce
3048 a match and compare that variant. */
3049 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3050 && COMPARISON_CLASS_P (arg0)
3051 && COMPARISON_CLASS_P (arg1))
3053 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
3055 if (TREE_CODE (arg0) == swap_code)
3056 return operand_equal_p (TREE_OPERAND (arg0, 0),
3057 TREE_OPERAND (arg1, 1), flags)
3058 && operand_equal_p (TREE_OPERAND (arg0, 1),
3059 TREE_OPERAND (arg1, 0), flags);
3062 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3063 /* This is needed for conversions and for COMPONENT_REF.
3064 Might as well play it safe and always test this. */
3065 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
3066 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
3067 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
3070 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
3071 We don't care about side effects in that case because the SAVE_EXPR
3072 takes care of that for us. In all other cases, two expressions are
3073 equal if they have no side effects. If we have two identical
3074 expressions with side effects that should be treated the same due
3075 to the only side effects being identical SAVE_EXPR's, that will
3076 be detected in the recursive calls below. */
3077 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
3078 && (TREE_CODE (arg0) == SAVE_EXPR
3079 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
3082 /* Next handle constant cases, those for which we can return 1 even
3083 if ONLY_CONST is set. */
3084 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
3085 switch (TREE_CODE (arg0))
3088 return tree_int_cst_equal (arg0, arg1);
3091 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
3092 TREE_FIXED_CST (arg1));
/* Real constants: bitwise-identical values are equal; additionally,
   +0.0 and -0.0 compare equal when signed zeros are not honored.  */
3095 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
3096 TREE_REAL_CST (arg1)))
3100 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
3102 /* If we do not distinguish between signed and unsigned zero,
3103 consider them equal. */
3104 if (real_zerop (arg0) && real_zerop (arg1))
/* Vector constants: compare element lists pairwise.  */
3113 v1 = TREE_VECTOR_CST_ELTS (arg0);
3114 v2 = TREE_VECTOR_CST_ELTS (arg1);
3117 if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
3120 v1 = TREE_CHAIN (v1);
3121 v2 = TREE_CHAIN (v2);
3128 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
3130 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
3134 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
3135 && ! memcmp (TREE_STRING_POINTER (arg0),
3136 TREE_STRING_POINTER (arg1),
3137 TREE_STRING_LENGTH (arg0)));
3140 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
/* Past the constant cases: with OEP_ONLY_CONST nothing further counts.  */
3146 if (flags & OEP_ONLY_CONST)
3149 /* Define macros to test an operand from arg0 and arg1 for equality and a
3150 variant that allows null and views null as being different from any
3151 non-null value. In the latter case, if either is null, the both
3152 must be; otherwise, do the normal comparison. */
3153 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
3154 TREE_OPERAND (arg1, N), flags)
3156 #define OP_SAME_WITH_NULL(N) \
3157 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
3158 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
3160 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
3163 /* Two conversions are equal only if signedness and modes match. */
3164 switch (TREE_CODE (arg0))
3168 case FIX_TRUNC_EXPR:
3169 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
3170 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
3180 case tcc_comparison:
3182 if (OP_SAME (0) && OP_SAME (1))
3185 /* For commutative ops, allow the other order. */
3186 return (commutative_tree_code (TREE_CODE (arg0))
3187 && operand_equal_p (TREE_OPERAND (arg0, 0),
3188 TREE_OPERAND (arg1, 1), flags)
3189 && operand_equal_p (TREE_OPERAND (arg0, 1),
3190 TREE_OPERAND (arg1, 0), flags));
3193 /* If either of the pointer (or reference) expressions we are
3194 dereferencing contain a side effect, these cannot be equal. */
3195 if (TREE_SIDE_EFFECTS (arg0)
3196 || TREE_SIDE_EFFECTS (arg1))
3199 switch (TREE_CODE (arg0))
3202 case ALIGN_INDIRECT_REF:
3203 case MISALIGNED_INDIRECT_REF:
3209 case ARRAY_RANGE_REF:
3210 /* Operands 2 and 3 may be null.
3211 Compare the array index by value if it is constant first as we
3212 may have different types but same value here. */
3214 && (tree_int_cst_equal (TREE_OPERAND (arg0, 1),
3215 TREE_OPERAND (arg1, 1))
3217 && OP_SAME_WITH_NULL (2)
3218 && OP_SAME_WITH_NULL (3));
3221 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
3222 may be NULL when we're called to compare MEM_EXPRs. */
3223 return OP_SAME_WITH_NULL (0)
3225 && OP_SAME_WITH_NULL (2);
3228 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
3234 case tcc_expression:
3235 switch (TREE_CODE (arg0))
3238 case TRUTH_NOT_EXPR:
3241 case TRUTH_ANDIF_EXPR:
3242 case TRUTH_ORIF_EXPR:
/* Short-circuit forms are order-sensitive: operands must match
   positionally.  */
3243 return OP_SAME (0) && OP_SAME (1);
3245 case TRUTH_AND_EXPR:
3247 case TRUTH_XOR_EXPR:
3248 if (OP_SAME (0) && OP_SAME (1))
3251 /* Otherwise take into account this is a commutative operation. */
3252 return (operand_equal_p (TREE_OPERAND (arg0, 0),
3253 TREE_OPERAND (arg1, 1), flags)
3254 && operand_equal_p (TREE_OPERAND (arg0, 1),
3255 TREE_OPERAND (arg1, 0), flags));
3262 switch (TREE_CODE (arg0))
3265 /* If the CALL_EXPRs call different functions, then they
3266 clearly can not be equal. */
3267 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
3272 unsigned int cef = call_expr_flags (arg0);
3273 if (flags & OEP_PURE_SAME)
3274 cef &= ECF_CONST | ECF_PURE;
3281 /* Now see if all the arguments are the same. */
3283 const_call_expr_arg_iterator iter0, iter1;
3285 for (a0 = first_const_call_expr_arg (arg0, &iter0),
3286 a1 = first_const_call_expr_arg (arg1, &iter1);
3288 a0 = next_const_call_expr_arg (&iter0),
3289 a1 = next_const_call_expr_arg (&iter1))
3290 if (! operand_equal_p (a0, a1, flags))
3293 /* If we get here and both argument lists are exhausted
3294 then the CALL_EXPRs are equal. */
3295 return ! (a0 || a1);
3301 case tcc_declaration:
3302 /* Consider __builtin_sqrt equal to sqrt. */
3303 return (TREE_CODE (arg0) == FUNCTION_DECL
3304 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
3305 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
3306 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
3313 #undef OP_SAME_WITH_NULL
3316 /* Similar to operand_equal_p, but see if ARG0 might have been made by
3317 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
3319 When in doubt, return 0. */
3322 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
3324 int unsignedp1, unsignedpo;
3325 tree primarg0, primarg1, primother;
3326 unsigned int correct_width;
/* Trivially equal operands need no shortening analysis.  */
3328 if (operand_equal_p (arg0, arg1, 0))
/* Only integral comparisons can have been shortened.  */
3331 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
3332 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
3335 /* Discard any conversions that don't change the modes of ARG0 and ARG1
3336 and see if the inner values are the same. This removes any
3337 signedness comparison, which doesn't matter here. */
3338 primarg0 = arg0, primarg1 = arg1;
3339 STRIP_NOPS (primarg0);
3340 STRIP_NOPS (primarg1);
3341 if (operand_equal_p (primarg0, primarg1, 0))
3344 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
3345 actual comparison operand, ARG0.
3347 First throw away any conversions to wider types
3348 already present in the operands. */
3350 primarg1 = get_narrower (arg1, &unsignedp1);
3351 primother = get_narrower (other, &unsignedpo);
3353 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
3354 if (unsignedp1 == unsignedpo
3355 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
3356 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
3358 tree type = TREE_TYPE (arg0);
3360 /* Make sure shorter operand is extended the right way
3361 to match the longer operand. */
3362 primarg1 = fold_convert (signed_or_unsigned_type_for
3363 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
3365 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
3372 /* See if ARG is an expression that is either a comparison or is performing
3373 arithmetic on comparisons. The comparisons must only be comparing
3374 two different values, which will be stored in *CVAL1 and *CVAL2; if
3375 they are nonzero it means that some operands have already been found.
3376 No variables may be used anywhere else in the expression except in the
3377 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
3378 the expression and save_expr needs to be called with CVAL1 and CVAL2.
3380 If this is true, return 1. Otherwise, return zero. */
3383 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
3385 enum tree_code code = TREE_CODE (arg);
3386 enum tree_code_class class = TREE_CODE_CLASS (code);
3388 /* We can handle some of the tcc_expression cases here. */
/* Reclassify a few expression codes so the switch below treats them
   like unary/binary operators (reassignment bodies elided).  */
3389 if (class == tcc_expression && code == TRUTH_NOT_EXPR)
3391 else if (class == tcc_expression
3392 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
3393 || code == COMPOUND_EXPR))
3396 else if (class == tcc_expression && code == SAVE_EXPR
3397 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
3399 /* If we've already found a CVAL1 or CVAL2, this expression is
3400 two complex to handle. */
3401 if (*cval1 || *cval2)
/* Unary: recurse into the single operand.  */
3411 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
/* Binary: both operands must qualify.  */
3414 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
3415 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3416 cval1, cval2, save_p));
3421 case tcc_expression:
3422 if (code == COND_EXPR)
3423 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
3424 cval1, cval2, save_p)
3425 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3426 cval1, cval2, save_p)
3427 && twoval_comparison_p (TREE_OPERAND (arg, 2),
3428 cval1, cval2, save_p));
3431 case tcc_comparison:
3432 /* First see if we can handle the first operand, then the second. For
3433 the second operand, we know *CVAL1 can't be zero. It must be that
3434 one side of the comparison is each of the values; test for the
3435 case where this isn't true by failing if the two operands
3438 if (operand_equal_p (TREE_OPERAND (arg, 0),
3439 TREE_OPERAND (arg, 1), 0))
/* Record or match operand 0 against the two tracked values.  */
3443 *cval1 = TREE_OPERAND (arg, 0);
3444 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
3446 else if (*cval2 == 0)
3447 *cval2 = TREE_OPERAND (arg, 0);
3448 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
/* Same again for operand 1.  */
3453 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
3455 else if (*cval2 == 0)
3456 *cval2 = TREE_OPERAND (arg, 1);
3457 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
3469 /* ARG is a tree that is known to contain just arithmetic operations and
3470 comparisons. Evaluate the operations in the tree substituting NEW0 for
3471 any occurrence of OLD0 as an operand of a comparison and likewise for
/* (Comment continuation for OLD1/NEW1 elided in this excerpt.)  */
3475 eval_subst (tree arg, tree old0, tree new0, tree old1, tree new1)
3477 tree type = TREE_TYPE (arg);
3478 enum tree_code code = TREE_CODE (arg);
3479 enum tree_code_class class = TREE_CODE_CLASS (code);
3481 /* We can handle some of the tcc_expression cases here. */
3482 if (class == tcc_expression && code == TRUTH_NOT_EXPR)
3484 else if (class == tcc_expression
3485 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
/* Unary: rebuild with the substituted operand.  */
3491 return fold_build1 (code, type,
3492 eval_subst (TREE_OPERAND (arg, 0),
3493 old0, new0, old1, new1));
/* Binary: rebuild with both operands substituted.  */
3496 return fold_build2 (code, type,
3497 eval_subst (TREE_OPERAND (arg, 0),
3498 old0, new0, old1, new1),
3499 eval_subst (TREE_OPERAND (arg, 1),
3500 old0, new0, old1, new1));
3502 case tcc_expression:
3506 return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);
3509 return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);
/* COND_EXPR-style ternary: substitute in all three operands.  */
3512 return fold_build3 (code, type,
3513 eval_subst (TREE_OPERAND (arg, 0),
3514 old0, new0, old1, new1),
3515 eval_subst (TREE_OPERAND (arg, 1),
3516 old0, new0, old1, new1),
3517 eval_subst (TREE_OPERAND (arg, 2),
3518 old0, new0, old1, new1));
3522 /* Fall through - ??? */
3524 case tcc_comparison:
3526 tree arg0 = TREE_OPERAND (arg, 0);
3527 tree arg1 = TREE_OPERAND (arg, 1);
3529 /* We need to check both for exact equality and tree equality. The
3530 former will be true if the operand has a side-effect. In that
3531 case, we know the operand occurred exactly once. */
3533 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
3535 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
3538 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
3540 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
3543 return fold_build2 (code, type, arg0, arg1);
3551 /* Return a tree for the case when the result of an expression is RESULT
3552 converted to TYPE and OMITTED was previously an operand of the expression
3553 but is now not needed (e.g., we folded OMITTED * 0).
3555 If OMITTED has side effects, we must evaluate it. Otherwise, just do
3556 the conversion of RESULT to TYPE. */
3559 omit_one_operand (tree type, tree result, tree omitted)
3561 tree t = fold_convert (type, result);
3563 /* If the resulting operand is an empty statement, just return the omitted
3564 statement casted to void. */
3565 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3566 return build1 (NOP_EXPR, void_type_node, fold_ignored_result (omitted));
/* Side effects in OMITTED must still execute: sequence them before T.  */
3568 if (TREE_SIDE_EFFECTS (omitted))
3569 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
3571 return non_lvalue (t);
3574 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
3577 pedantic_omit_one_operand (tree type, tree result, tree omitted)
3579 tree t = fold_convert (type, result);
3581 /* If the resulting operand is an empty statement, just return the omitted
3582 statement casted to void. */
3583 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3584 return build1 (NOP_EXPR, void_type_node, fold_ignored_result (omitted));
/* Preserve OMITTED's side effects by sequencing them before T.  */
3586 if (TREE_SIDE_EFFECTS (omitted))
3587 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
3589 return pedantic_non_lvalue (t);
3592 /* Return a tree for the case when the result of an expression is RESULT
3593 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3594 of the expression but are now not needed.
3596 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3597 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3598 evaluated before OMITTED2. Otherwise, if neither has side effects,
3599 just do the conversion of RESULT to TYPE. */
3602 omit_two_operands (tree type, tree result, tree omitted1, tree omitted2)
3604 tree t = fold_convert (type, result);
/* Wrap innermost-first so OMITTED1 ends up evaluated before OMITTED2.  */
3606 if (TREE_SIDE_EFFECTS (omitted2))
3607 t = build2 (COMPOUND_EXPR, type, omitted2, t);
3608 if (TREE_SIDE_EFFECTS (omitted1))
3609 t = build2 (COMPOUND_EXPR, type, omitted1, t);
/* Only strip lvalue-ness when no COMPOUND_EXPR wrapper was added.  */
3611 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue (t) : t;
3615 /* Return a simplified tree node for the truth-negation of ARG. This
3616 never alters ARG itself. We assume that ARG is an operation that
3617 returns a truth value (0 or 1).
3619 FIXME: one would think we would fold the result, but it causes
3620 problems with the dominator optimizer. */
/* NOTE(review): excerpted text — several case labels and returns are
   elided per the embedded line-number gaps.  */
3623 fold_truth_not_expr (tree arg)
3625 tree type = TREE_TYPE (arg);
3626 enum tree_code code = TREE_CODE (arg);
3628 /* If this is a comparison, we can simply invert it, except for
3629 floating-point non-equality comparisons, in which case we just
3630 enclose a TRUTH_NOT_EXPR around what we have. */
3632 if (TREE_CODE_CLASS (code) == tcc_comparison)
3634 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
/* Inverting FP orderings with trapping math could change which inputs
   trap, so bail out for those codes.  */
3635 if (FLOAT_TYPE_P (op_type)
3636 && flag_trapping_math
3637 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3638 && code != NE_EXPR && code != EQ_EXPR)
3642 code = invert_tree_comparison (code,
3643 HONOR_NANS (TYPE_MODE (op_type)));
3644 if (code == ERROR_MARK)
3647 return build2 (code, type,
3648 TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
/* Constant: !c is a constant boolean.  */
3655 return constant_boolean_node (integer_zerop (arg), type);
/* De Morgan: !(a && b) == !a || !b, and dually for OR.  */
3657 case TRUTH_AND_EXPR:
3658 return build2 (TRUTH_OR_EXPR, type,
3659 invert_truthvalue (TREE_OPERAND (arg, 0)),
3660 invert_truthvalue (TREE_OPERAND (arg, 1)));
3663 return build2 (TRUTH_AND_EXPR, type,
3664 invert_truthvalue (TREE_OPERAND (arg, 0)),
3665 invert_truthvalue (TREE_OPERAND (arg, 1)));
3667 case TRUTH_XOR_EXPR:
3668 /* Here we can invert either operand. We invert the first operand
3669 unless the second operand is a TRUTH_NOT_EXPR in which case our
3670 result is the XOR of the first operand with the inside of the
3671 negation of the second operand. */
3673 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3674 return build2 (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3675 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3677 return build2 (TRUTH_XOR_EXPR, type,
3678 invert_truthvalue (TREE_OPERAND (arg, 0)),
3679 TREE_OPERAND (arg, 1));
/* De Morgan on short-circuit forms preserves evaluation order.  */
3681 case TRUTH_ANDIF_EXPR:
3682 return build2 (TRUTH_ORIF_EXPR, type,
3683 invert_truthvalue (TREE_OPERAND (arg, 0)),
3684 invert_truthvalue (TREE_OPERAND (arg, 1)));
3686 case TRUTH_ORIF_EXPR:
3687 return build2 (TRUTH_ANDIF_EXPR, type,
3688 invert_truthvalue (TREE_OPERAND (arg, 0)),
3689 invert_truthvalue (TREE_OPERAND (arg, 1)));
3691 case TRUTH_NOT_EXPR:
3692 return TREE_OPERAND (arg, 0);
/* COND_EXPR: invert each arm, leaving void-typed (throw) arms alone.  */
3696 tree arg1 = TREE_OPERAND (arg, 1);
3697 tree arg2 = TREE_OPERAND (arg, 2);
3698 /* A COND_EXPR may have a throw as one operand, which
3699 then has void type. Just leave void operands
3701 return build3 (COND_EXPR, type, TREE_OPERAND (arg, 0),
3702 VOID_TYPE_P (TREE_TYPE (arg1))
3703 ? arg1 : invert_truthvalue (arg1),
3704 VOID_TYPE_P (TREE_TYPE (arg2))
3705 ? arg2 : invert_truthvalue (arg2));
/* COMPOUND_EXPR: keep the first (side-effect) operand, invert the value.  */
3709 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
3710 invert_truthvalue (TREE_OPERAND (arg, 1)));
3712 case NON_LVALUE_EXPR:
3713 return invert_truthvalue (TREE_OPERAND (arg, 0));
3716 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3717 return build1 (TRUTH_NOT_EXPR, type, arg);
3721 return build1 (TREE_CODE (arg), type,
3722 invert_truthvalue (TREE_OPERAND (arg, 0)));
/* BIT_AND with 1: !(x & 1) becomes x == 0.  */
3725 if (!integer_onep (TREE_OPERAND (arg, 1)))
3727 return build2 (EQ_EXPR, type, arg,
3728 build_int_cst (type, 0));
3731 return build1 (TRUTH_NOT_EXPR, type, arg);
3733 case CLEANUP_POINT_EXPR:
3734 return build1 (CLEANUP_POINT_EXPR, type,
3735 invert_truthvalue (TREE_OPERAND (arg, 0)));
3744 /* Return a simplified tree node for the truth-negation of ARG. This
3745 never alters ARG itself. We assume that ARG is an operation that
3746 returns a truth value (0 or 1).
3748 FIXME: one would think we would fold the result, but it causes
3749 problems with the dominator optimizer. */
3752 invert_truthvalue (tree arg)
3756 if (TREE_CODE (arg) == ERROR_MARK)
/* Try the smart inversion first; fall back to wrapping in TRUTH_NOT_EXPR
   when fold_truth_not_expr cannot simplify (null-check elided).  */
3759 tem = fold_truth_not_expr (arg);
3761 tem = build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg), arg);
3766 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3767 operands are another bit-wise operation with a common input. If so,
3768 distribute the bit operations to save an operation and possibly two if
3769 constants are involved. For example, convert
3770 (A | B) & (A | C) into A | (B & C)
3771 Further simplification will occur if B and C are constants.
3773 If this optimization cannot be done, 0 will be returned. */
3776 distribute_bit_expr (enum tree_code code, tree type, tree arg0, tree arg1)
/* Both operands must be the same inner bit-op (AND or IOR), and that
   inner code must differ from CODE for distribution to apply.  */
3781 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3782 || TREE_CODE (arg0) == code
3783 || (TREE_CODE (arg0) != BIT_AND_EXPR
3784 && TREE_CODE (arg0) != BIT_IOR_EXPR))
/* Find the shared operand in any of the four positions; LEFT/RIGHT are
   the two non-shared operands.  */
3787 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3789 common = TREE_OPERAND (arg0, 0);
3790 left = TREE_OPERAND (arg0, 1);
3791 right = TREE_OPERAND (arg1, 1);
3793 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3795 common = TREE_OPERAND (arg0, 0);
3796 left = TREE_OPERAND (arg0, 1);
3797 right = TREE_OPERAND (arg1, 0);
3799 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3801 common = TREE_OPERAND (arg0, 1);
3802 left = TREE_OPERAND (arg0, 0);
3803 right = TREE_OPERAND (arg1, 1);
3805 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3807 common = TREE_OPERAND (arg0, 1);
3808 left = TREE_OPERAND (arg0, 0);
3809 right = TREE_OPERAND (arg1, 0);
/* Rebuild as COMMON inner-op (LEFT code RIGHT).  */
3814 return fold_build2 (TREE_CODE (arg0), type, common,
3815 fold_build2 (code, type, left, right));
3818 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3819 with code CODE. This optimization is unsafe. */
3821 distribute_real_division (enum tree_code code, tree type, tree arg0, tree arg1)
3823 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3824 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3826 /* (A / C) +- (B / C) -> (A +- B) / C. */
/* (Condition line partly elided; requires mul0 == mul1 per the
   surviving operand_equal_p test on the shared divisor.)  */
3828 && operand_equal_p (TREE_OPERAND (arg0, 1),
3829 TREE_OPERAND (arg1, 1), 0))
3830 return fold_build2 (mul0 ? MULT_EXPR : RDIV_EXPR, type,
3831 fold_build2 (code, type,
3832 TREE_OPERAND (arg0, 0),
3833 TREE_OPERAND (arg1, 0)),
3834 TREE_OPERAND (arg0, 1));
3836 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3837 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3838 TREE_OPERAND (arg1, 0), 0)
3839 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3840 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3842 REAL_VALUE_TYPE r0, r1;
3843 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3844 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
/* Take reciprocals of the constant divisors (guards elided), combine
   them with CODE, and multiply A by the folded constant.  */
3846 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3848 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3849 real_arithmetic (&r0, code, &r0, &r1);
3850 return fold_build2 (MULT_EXPR, type,
3851 TREE_OPERAND (arg0, 0),
3852 build_real (type, r0));
3858 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3859 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3862 make_bit_field_ref (tree inner, tree type, int bitsize, int bitpos,
/* If the reference covers the whole of an integral or pointer object,
   no BIT_FIELD_REF is needed -- a plain conversion suffices.  */
3869 tree size = TYPE_SIZE (TREE_TYPE (inner));
3870 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3871 || POINTER_TYPE_P (TREE_TYPE (inner)))
3872 && host_integerp (size, 0)
3873 && tree_low_cst (size, 0) == bitsize)
3874 return fold_convert (type, inner);
/* Otherwise build the explicit bit-field extraction node.  */
3877 result = build3 (BIT_FIELD_REF, type, inner,
3878 size_int (bitsize), bitsize_int (bitpos));
3880 BIT_FIELD_REF_UNSIGNED (result) = unsignedp;
3885 /* Optimize a bit-field compare.
3887 There are two cases: First is a compare against a constant and the
3888 second is a comparison of two items where the fields are at the same
3889 bit position relative to the start of a chunk (byte, halfword, word)
3890 large enough to contain it. In these cases we can avoid the shift
3891 implicit in bitfield extractions.
3893 For constants, we emit a compare of the shifted constant with the
3894 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3895 compared. For two fields at the same position, we do the ANDs with the
3896 similar mask and compare the result of the ANDs.
3898 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3899 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3900 are the left and right operands of the comparison, respectively.
3902 If the optimization described above can be done, we return the resulting
3903 tree. Otherwise we return zero. */
/* Optimize an EQ_EXPR/NE_EXPR bit-field comparison LHS CODE RHS (see the
   block comment above for the two supported cases).  Returns the folded
   tree, or (on an elided path) 0 when the optimization doesn't apply.  */
3906 optimize_bit_field_compare (enum tree_code code, tree compare_type,
3909 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3910 tree type = TREE_TYPE (lhs);
3911 tree signed_type, unsigned_type;
3912 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3913 enum machine_mode lmode, rmode, nmode;
3914 int lunsignedp, runsignedp;
3915 int lvolatilep = 0, rvolatilep = 0;
3916 tree linner, rinner = NULL_TREE;
3920 /* Get all the information about the extractions being done. If the bit size
3921 is the same as the size of the underlying object, we aren't doing an
3922 extraction at all and so can do nothing. We also don't want to
3923 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3924 then will no longer be able to replace it. */
3925 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3926 &lunsignedp, &lvolatilep, false);
3927 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3928 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
3933 /* If this is not a constant, we can only do something if bit positions,
3934 sizes, and signedness are the same. */
3935 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3936 &runsignedp, &rvolatilep, false);
3938 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3939 || lunsignedp != runsignedp || offset != 0
3940 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
3944 /* See if we can find a mode to refer to this field. We should be able to,
3945 but fail if we can't. */
3946 nmode = get_best_mode (lbitsize, lbitpos,
3947 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3948 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3949 TYPE_ALIGN (TREE_TYPE (rinner))),
3950 word_mode, lvolatilep || rvolatilep);
3951 if (nmode == VOIDmode)
3954 /* Set signed and unsigned types of the precision of this mode for the
3956 signed_type = lang_hooks.types.type_for_mode (nmode, 0);
3957 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3959 /* Compute the bit position and size for the new reference and our offset
3960 within it. If the new reference is the same size as the original, we
3961 won't optimize anything, so return zero. */
3962 nbitsize = GET_MODE_BITSIZE (nmode);
3963 nbitpos = lbitpos & ~ (nbitsize - 1);
3965 if (nbitsize == lbitsize)
3968 if (BYTES_BIG_ENDIAN)
3969 lbitpos = nbitsize - lbitsize - lbitpos;
3971 /* Make the mask to be used against the extracted field. */
3972 mask = build_int_cst_type (unsigned_type, -1);
3973 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
3974 mask = const_binop (RSHIFT_EXPR, mask,
3975 size_int (nbitsize - lbitsize - lbitpos), 0);
3978 /* If not comparing with constant, just rework the comparison
3980 return fold_build2 (code, compare_type,
3981 fold_build2 (BIT_AND_EXPR, unsigned_type,
3982 make_bit_field_ref (linner,
3987 fold_build2 (BIT_AND_EXPR, unsigned_type,
3988 make_bit_field_ref (rinner,
3994 /* Otherwise, we are handling the constant case. See if the constant is too
3995 big for the field. Warn and return a tree for 0 (false) if so. We do
3996 this not only for its own sake, but to avoid having to test for this
3997 error case below. If we didn't, we might generate wrong code.
3999 For unsigned fields, the constant shifted right by the field length should
4000 be all zero. For signed fields, the high-order bits should agree with
4005 if (! integer_zerop (const_binop (RSHIFT_EXPR,
4006 fold_convert (unsigned_type, rhs),
4007 size_int (lbitsize), 0)))
4009 warning (0, "comparison is always %d due to width of bit-field",
4011 return constant_boolean_node (code == NE_EXPR, compare_type);
4016 tree tem = const_binop (RSHIFT_EXPR, fold_convert (signed_type, rhs),
4017 size_int (lbitsize - 1), 0);
4018 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
4020 warning (0, "comparison is always %d due to width of bit-field",
4022 return constant_boolean_node (code == NE_EXPR, compare_type);
4026 /* Single-bit compares should always be against zero. */
4027 if (lbitsize == 1 && ! integer_zerop (rhs))
4029 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
4030 rhs = build_int_cst (type, 0);
4033 /* Make a new bitfield reference, shift the constant over the
4034 appropriate number of bits and mask it with the computed mask
4035 (in case this was a signed field). If we changed it, make a new one. */
4036 lhs = make_bit_field_ref (linner, unsigned_type, nbitsize, nbitpos, 1);
4039 TREE_SIDE_EFFECTS (lhs) = 1;
4040 TREE_THIS_VOLATILE (lhs) = 1;
4043 rhs = const_binop (BIT_AND_EXPR,
4044 const_binop (LSHIFT_EXPR,
4045 fold_convert (unsigned_type, rhs),
4046 size_int (lbitpos), 0),
4049 return build2 (code, compare_type,
4050 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask),
4054 /* Subroutine for fold_truthop: decode a field reference.
4056 If EXP is a comparison reference, we return the innermost reference.
4058 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
4059 set to the starting bit number.
4061 If the innermost field can be completely contained in a mode-sized
4062 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
4064 *PVOLATILEP is set to 1 if any expression encountered is volatile;
4065 otherwise it is not changed.
4067 *PUNSIGNEDP is set to the signedness of the field.
4069 *PMASK is set to the mask used. This is either contained in a
4070 BIT_AND_EXPR or derived from the width of the field.
4072 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
4074 Return 0 if this is not a component reference or is one that we can't
4075 do anything with. */
/* See the block comment above for the full contract: decode EXP as a
   (possibly masked) bit-field reference, returning the inner object and
   filling in position/size/mode/signedness/mask outputs.  */
4078 decode_field_reference (tree exp, HOST_WIDE_INT *pbitsize,
4079 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
4080 int *punsignedp, int *pvolatilep,
4081 tree *pmask, tree *pand_mask)
4083 tree outer_type = 0;
4085 tree mask, inner, offset;
4087 unsigned int precision;
4089 /* All the optimizations using this function assume integer fields.
4090 There are problems with FP fields since the type_for_size call
4091 below can fail for, e.g., XFmode. */
4092 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
4095 /* We are interested in the bare arrangement of bits, so strip everything
4096 that doesn't affect the machine mode. However, record the type of the
4097 outermost expression if it may matter below. */
4098 if (TREE_CODE (exp) == NOP_EXPR
4099 || TREE_CODE (exp) == CONVERT_EXPR
4100 || TREE_CODE (exp) == NON_LVALUE_EXPR)
4101 outer_type = TREE_TYPE (exp);
4104 if (TREE_CODE (exp) == BIT_AND_EXPR)
4106 and_mask = TREE_OPERAND (exp, 1);
4107 exp = TREE_OPERAND (exp, 0);
4108 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
4109 if (TREE_CODE (and_mask) != INTEGER_CST)
4113 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
4114 punsignedp, pvolatilep, false);
4115 if ((inner == exp && and_mask == 0)
4116 || *pbitsize < 0 || offset != 0
4117 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
4120 /* If the number of bits in the reference is the same as the bitsize of
4121 the outer type, then the outer type gives the signedness. Otherwise
4122 (in case of a small bitfield) the signedness is unchanged. */
4123 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
4124 *punsignedp = TYPE_UNSIGNED (outer_type);
4126 /* Compute the mask to access the bitfield. */
4127 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
4128 precision = TYPE_PRECISION (unsigned_type);
/* All-ones value, shifted up then back down so only the low *PBITSIZE
   bits remain set.  */
4130 mask = build_int_cst_type (unsigned_type, -1);
4132 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
4133 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
4135 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
4137 mask = fold_build2 (BIT_AND_EXPR, unsigned_type,
4138 fold_convert (unsigned_type, and_mask), mask);
4141 *pand_mask = and_mask;
4145 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
/* Return nonzero iff MASK equals a mask of SIZE low-order one bits
   (built here as all-ones shifted up and back down by PRECISION - SIZE).  */
4149 all_ones_mask_p (const_tree mask, int size)
4151 tree type = TREE_TYPE (mask);
4152 unsigned int precision = TYPE_PRECISION (type);
/* Build in the signed variant of TYPE so the right-shift below is
   arithmetic, matching the historical behavior of this check.  */
4155 tmask = build_int_cst_type (signed_type_for (type), -1);
4158 tree_int_cst_equal (mask,
4159 const_binop (RSHIFT_EXPR,
4160 const_binop (LSHIFT_EXPR, tmask,
4161 size_int (precision - size),
4163 size_int (precision - size), 0));
4166 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
4167 represents the sign bit of EXP's type. If EXP represents a sign
4168 or zero extension, also test VAL against the unextended type.
4169 The return value is the (sub)expression whose sign bit is VAL,
4170 or NULL_TREE otherwise. */
/* Test whether VAL is exactly the sign bit of EXP's type (see the block
   comment above); recurses through NOP_EXPR extensions from narrower
   types.  Returns the matching subexpression or NULL_TREE.  */
4173 sign_bit_p (tree exp, const_tree val)
4175 unsigned HOST_WIDE_INT mask_lo, lo;
4176 HOST_WIDE_INT mask_hi, hi;
4180 /* Tree EXP must have an integral type. */
4181 t = TREE_TYPE (exp);
4182 if (! INTEGRAL_TYPE_P (t))
4185 /* Tree VAL must be an integer constant. */
4186 if (TREE_CODE (val) != INTEGER_CST
4187 || TREE_OVERFLOW (val))
/* Constants are stored as a high/low HOST_WIDE_INT pair; compute the
   sign-bit pattern and a mask of the valid bits in each half.  */
4190 width = TYPE_PRECISION (t);
4191 if (width > HOST_BITS_PER_WIDE_INT)
4193 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
4196 mask_hi = ((unsigned HOST_WIDE_INT) -1
4197 >> (2 * HOST_BITS_PER_WIDE_INT - width));
4203 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
4206 mask_lo = ((unsigned HOST_WIDE_INT) -1
4207 >> (HOST_BITS_PER_WIDE_INT - width));
4210 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
4211 treat VAL as if it were unsigned. */
4212 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
4213 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
4216 /* Handle extension from a narrower type. */
4217 if (TREE_CODE (exp) == NOP_EXPR
4218 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
4219 return sign_bit_p (TREE_OPERAND (exp, 0), val);
4224 /* Subroutine for fold_truthop: determine if an operand is simple enough
4225 to be evaluated unconditionally. */
/* Return nonzero iff EXP is cheap and side-effect-free enough to be
   evaluated unconditionally: a constant, an SSA name, or a non-volatile,
   non-addressable local (or register) decl.  */
4228 simple_operand_p (const_tree exp)
4230 /* Strip any conversions that don't change the machine mode. */
4233 return (CONSTANT_CLASS_P (exp)
4234 || TREE_CODE (exp) == SSA_NAME
4236 && ! TREE_ADDRESSABLE (exp)
4237 && ! TREE_THIS_VOLATILE (exp)
4238 && ! DECL_NONLOCAL (exp)
4239 /* Don't regard global variables as simple. They may be
4240 allocated in ways unknown to the compiler (shared memory,
4241 #pragma weak, etc). */
4242 && ! TREE_PUBLIC (exp)
4243 && ! DECL_EXTERNAL (exp)
4244 /* Loading a static variable is unduly expensive, but global
4245 registers aren't expensive. */
4246 && (! TREE_STATIC (exp) || DECL_REGISTER (exp)));
4249 /* The following functions are subroutines to fold_range_test and allow it to
4250 try to change a logical combination of comparisons into a range test.
4253 X == 2 || X == 3 || X == 4 || X == 5
4257 (unsigned) (X - 2) <= 3
4259 We describe each set of comparisons as being either inside or outside
4260 a range, using a variable named like IN_P, and then describe the
4261 range with a lower and upper bound. If one of the bounds is omitted,
4262 it represents either the highest or lowest value of the type.
4264 In the comments below, we represent a range by two numbers in brackets
4265 preceded by a "+" to designate being inside that range, or a "-" to
4266 designate being outside that range, so the condition can be inverted by
4267 flipping the prefix. An omitted bound is represented by a "-". For
4268 example, "- [-, 10]" means being outside the range starting at the lowest
4269 possible value and ending at 10, in other words, being greater than 10.
4270 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
4273 We set up things so that the missing bounds are handled in a consistent
4274 manner so neither a missing bound nor "true" and "false" need to be
4275 handled using a special case. */
4277 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
4278 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
4279 and UPPER1_P are nonzero if the respective argument is an upper bound
4280 and zero for a lower. TYPE, if nonzero, is the type of the result; it
4281 must be specified for a comparison. ARG1 will be converted to ARG0's
4282 type if both are specified. */
/* Apply CODE to ARG0/ARG1 treating a null argument as an omitted
   (infinite) range bound -- see the comment above for the full contract.
   Returns an INTEGER_CST result, a boolean node for comparisons, or 0.  */
4285 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
4286 tree arg1, int upper1_p)
4292 /* If neither arg represents infinity, do the normal operation.
4293 Else, if not a comparison, return infinity. Else handle the special
4294 comparison rules. Note that most of the cases below won't occur, but
4295 are handled for consistency. */
4297 if (arg0 != 0 && arg1 != 0)
4299 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
4300 arg0, fold_convert (TREE_TYPE (arg0), arg1));
4302 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
4305 if (TREE_CODE_CLASS (code) != tcc_comparison)
4308 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
4309 for neither. In real maths, we cannot assume open ended ranges are
4310 the same. But, this is computer arithmetic, where numbers are finite.
4311 We can therefore make the transformation of any unbounded range with
4312 the value Z, Z being greater than any representable number. This permits
4313 us to treat unbounded ranges as equal. */
4314 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
4315 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
/* Switch cases per comparison code (labels elided in this chunk).  */
4319 result = sgn0 == sgn1;
4322 result = sgn0 != sgn1;
4325 result = sgn0 < sgn1;
4328 result = sgn0 <= sgn1;
4331 result = sgn0 > sgn1;
4334 result = sgn0 >= sgn1;
4340 return constant_boolean_node (result, type);
4343 /* Given EXP, a logical expression, set the range it is testing into
4344 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
4345 actually being tested. *PLOW and *PHIGH will be made of the same
4346 type as the returned expression. If EXP is not a comparison, we
4347 will most likely not be returning a useful value and range. Set
4348 *STRICT_OVERFLOW_P to true if the return value is only valid
4349 because signed overflow is undefined; otherwise, do not change
4350 *STRICT_OVERFLOW_P. */
/* Decode the logical expression EXP into a range test (see the block
   comment above): sets *PIN_P/*PLOW/*PHIGH and returns the expression
   actually tested.  Iterates, refining the range as it strips
   comparisons, negations, +/- constants, and conversions.  */
4353 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
4354 bool *strict_overflow_p)
4356 enum tree_code code;
4357 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
4358 tree exp_type = NULL_TREE, arg0_type = NULL_TREE;
4360 tree low, high, n_low, n_high;
4362 /* Start with simply saying "EXP != 0" and then look at the code of EXP
4363 and see if we can refine the range. Some of the cases below may not
4364 happen, but it doesn't seem worth worrying about this. We "continue"
4365 the outer loop when we've changed something; otherwise we "break"
4366 the switch, which will "break" the while. */
4369 low = high = build_int_cst (TREE_TYPE (exp), 0);
4373 code = TREE_CODE (exp);
4374 exp_type = TREE_TYPE (exp);
4376 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
4378 if (TREE_OPERAND_LENGTH (exp) > 0)
4379 arg0 = TREE_OPERAND (exp, 0);
4380 if (TREE_CODE_CLASS (code) == tcc_comparison
4381 || TREE_CODE_CLASS (code) == tcc_unary
4382 || TREE_CODE_CLASS (code) == tcc_binary)
4383 arg0_type = TREE_TYPE (arg0);
4384 if (TREE_CODE_CLASS (code) == tcc_binary
4385 || TREE_CODE_CLASS (code) == tcc_comparison
4386 || (TREE_CODE_CLASS (code) == tcc_expression
4387 && TREE_OPERAND_LENGTH (exp) > 1))
4388 arg1 = TREE_OPERAND (exp, 1);
4393 case TRUTH_NOT_EXPR:
4394 in_p = ! in_p, exp = arg0;
4397 case EQ_EXPR: case NE_EXPR:
4398 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
4399 /* We can only do something if the range is testing for zero
4400 and if the second operand is an integer constant. Note that
4401 saying something is "in" the range we make is done by
4402 complementing IN_P since it will set in the initial case of
4403 being not equal to zero; "out" is leaving it alone. */
4404 if (low == 0 || high == 0
4405 || ! integer_zerop (low) || ! integer_zerop (high)
4406 || TREE_CODE (arg1) != INTEGER_CST)
4411 case NE_EXPR: /* - [c, c] */
4414 case EQ_EXPR: /* + [c, c] */
4415 in_p = ! in_p, low = high = arg1;
4417 case GT_EXPR: /* - [-, c] */
4418 low = 0, high = arg1;
4420 case GE_EXPR: /* + [c, -] */
4421 in_p = ! in_p, low = arg1, high = 0;
4423 case LT_EXPR: /* - [c, -] */
4424 low = arg1, high = 0;
4426 case LE_EXPR: /* + [-, c] */
4427 in_p = ! in_p, low = 0, high = arg1;
4433 /* If this is an unsigned comparison, we also know that EXP is
4434 greater than or equal to zero. We base the range tests we make
4435 on that fact, so we record it here so we can parse existing
4436 range tests. We test arg0_type since often the return type
4437 of, e.g. EQ_EXPR, is boolean. */
4438 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
4440 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4442 build_int_cst (arg0_type, 0),
4446 in_p = n_in_p, low = n_low, high = n_high;
4448 /* If the high bound is missing, but we have a nonzero low
4449 bound, reverse the range so it goes from zero to the low bound
4451 if (high == 0 && low && ! integer_zerop (low))
4454 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
4455 integer_one_node, 0);
4456 low = build_int_cst (arg0_type, 0);
4464 /* (-x) IN [a,b] -> x in [-b, -a] */
4465 n_low = range_binop (MINUS_EXPR, exp_type,
4466 build_int_cst (exp_type, 0),
4468 n_high = range_binop (MINUS_EXPR, exp_type,
4469 build_int_cst (exp_type, 0),
4471 low = n_low, high = n_high;
/* Rewrite ~x as -x - 1 so the negation case above can handle it.  */
4477 exp = build2 (MINUS_EXPR, exp_type, negate_expr (arg0),
4478 build_int_cst (exp_type, 1));
4481 case PLUS_EXPR: case MINUS_EXPR:
4482 if (TREE_CODE (arg1) != INTEGER_CST)
4485 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
4486 move a constant to the other side. */
4487 if (!TYPE_UNSIGNED (arg0_type)
4488 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4491 /* If EXP is signed, any overflow in the computation is undefined,
4492 so we don't worry about it so long as our computations on
4493 the bounds don't overflow. For unsigned, overflow is defined
4494 and this is exactly the right thing. */
4495 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4496 arg0_type, low, 0, arg1, 0);
4497 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4498 arg0_type, high, 1, arg1, 0);
4499 if ((n_low != 0 && TREE_OVERFLOW (n_low))
4500 || (n_high != 0 && TREE_OVERFLOW (n_high)))
4503 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
4504 *strict_overflow_p = true;
4506 /* Check for an unsigned range which has wrapped around the maximum
4507 value thus making n_high < n_low, and normalize it. */
4508 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
4510 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
4511 integer_one_node, 0);
4512 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
4513 integer_one_node, 0);
4515 /* If the range is of the form +/- [ x+1, x ], we won't
4516 be able to normalize it. But then, it represents the
4517 whole range or the empty set, so make it
4519 if (tree_int_cst_equal (n_low, low)
4520 && tree_int_cst_equal (n_high, high))
4526 low = n_low, high = n_high;
4531 case NOP_EXPR: case NON_LVALUE_EXPR: case CONVERT_EXPR:
4532 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4535 if (! INTEGRAL_TYPE_P (arg0_type)
4536 || (low != 0 && ! int_fits_type_p (low, arg0_type))
4537 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4540 n_low = low, n_high = high;
4543 n_low = fold_convert (arg0_type, n_low);
4546 n_high = fold_convert (arg0_type, n_high);
4549 /* If we're converting arg0 from an unsigned type, to exp,
4550 a signed type, we will be doing the comparison as unsigned.
4551 The tests above have already verified that LOW and HIGH
4554 So we have to ensure that we will handle large unsigned
4555 values the same way that the current signed bounds treat
4558 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4562 /* For fixed-point modes, we need to pass the saturating flag
4563 as the 2nd parameter. */
4564 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
4565 equiv_type = lang_hooks.types.type_for_mode
4566 (TYPE_MODE (arg0_type),
4567 TYPE_SATURATING (arg0_type));
4569 equiv_type = lang_hooks.types.type_for_mode
4570 (TYPE_MODE (arg0_type), 1);
4572 /* A range without an upper bound is, naturally, unbounded.
4573 Since convert would have cropped a very large value, use
4574 the max value for the destination type. */
4576 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4577 : TYPE_MAX_VALUE (arg0_type);
4579 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4580 high_positive = fold_build2 (RSHIFT_EXPR, arg0_type,
4581 fold_convert (arg0_type,
4583 build_int_cst (arg0_type, 1));
4585 /* If the low bound is specified, "and" the range with the
4586 range for which the original unsigned value will be
4590 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4591 1, n_low, n_high, 1,
4592 fold_convert (arg0_type,
4597 in_p = (n_in_p == in_p);
4601 /* Otherwise, "or" the range with the range of the input
4602 that will be interpreted as negative. */
4603 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4604 0, n_low, n_high, 1,
4605 fold_convert (arg0_type,
4610 in_p = (in_p != n_in_p);
4615 low = n_low, high = n_high;
4625 /* If EXP is a constant, we can evaluate whether this is true or false. */
4626 if (TREE_CODE (exp) == INTEGER_CST)
4628 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4630 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4636 *pin_p = in_p, *plow = low, *phigh = high;
4640 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4641 type, TYPE, return an expression to test if EXP is in (or out of, depending
4642 on IN_P) the range. Return 0 if the test couldn't be created. */
/* Build an expression of TYPE testing whether EXP is in (IN_P) or out of
   the range [LOW, HIGH]; either bound may be null (unbounded).  Returns
   0 when no test can be built.  Recurses after normalizing the range to
   start at zero so a single unsigned comparison suffices.  */
4645 build_range_check (tree type, tree exp, int in_p, tree low, tree high)
4647 tree etype = TREE_TYPE (exp);
4650 #ifdef HAVE_canonicalize_funcptr_for_compare
4651 /* Disable this optimization for function pointer expressions
4652 on targets that require function pointer canonicalization. */
4653 if (HAVE_canonicalize_funcptr_for_compare
4654 && TREE_CODE (etype) == POINTER_TYPE
4655 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
/* "Out of range" is just the inversion of the "in range" test.  */
4661 value = build_range_check (type, exp, 1, low, high);
4663 return invert_truthvalue (value);
4668 if (low == 0 && high == 0)
4669 return build_int_cst (type, 1);
4672 return fold_build2 (LE_EXPR, type, exp,
4673 fold_convert (etype, high));
4676 return fold_build2 (GE_EXPR, type, exp,
4677 fold_convert (etype, low));
4679 if (operand_equal_p (low, high, 0))
4680 return fold_build2 (EQ_EXPR, type, exp,
4681 fold_convert (etype, low));
4683 if (integer_zerop (low))
4685 if (! TYPE_UNSIGNED (etype))
4687 etype = unsigned_type_for (etype);
4688 high = fold_convert (etype, high);
4689 exp = fold_convert (etype, exp);
4691 return build_range_check (type, exp, 1, 0, high);
4694 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4695 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4697 unsigned HOST_WIDE_INT lo;
4701 prec = TYPE_PRECISION (etype);
4702 if (prec <= HOST_BITS_PER_WIDE_INT)
4705 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
4709 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
4710 lo = (unsigned HOST_WIDE_INT) -1;
4713 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
4715 if (TYPE_UNSIGNED (etype))
4717 etype = signed_type_for (etype);
4718 exp = fold_convert (etype, exp);
4720 return fold_build2 (GT_EXPR, type, exp,
4721 build_int_cst (etype, 0));
4725 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4726 This requires wrap-around arithmetics for the type of the expression. */
4727 switch (TREE_CODE (etype))
4730 /* There is no requirement that LOW be within the range of ETYPE
4731 if the latter is a subtype. It must, however, be within the base
4732 type of ETYPE. So be sure we do the subtraction in that type. */
4733 if (TREE_TYPE (etype))
4734 etype = TREE_TYPE (etype);
4739 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4740 TYPE_UNSIGNED (etype));
4747 /* If we don't have wrap-around arithmetics upfront, try to force it. */
4748 if (TREE_CODE (etype) == INTEGER_TYPE
4749 && !TYPE_OVERFLOW_WRAPS (etype))
4751 tree utype, minv, maxv;
4753 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4754 for the type in question, as we rely on this here. */
4755 utype = unsigned_type_for (etype);
4756 maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
4757 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4758 integer_one_node, 1);
4759 minv = fold_convert (utype, TYPE_MIN_VALUE (etype));
4761 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4768 high = fold_convert (etype, high);
4769 low = fold_convert (etype, low);
4770 exp = fold_convert (etype, exp);
4772 value = const_binop (MINUS_EXPR, high, low, 0);
/* Pointers use POINTER_PLUS_EXPR with a sizetype (negated) offset.  */
4775 if (POINTER_TYPE_P (etype))
4777 if (value != 0 && !TREE_OVERFLOW (value))
4779 low = fold_convert (sizetype, low);
4780 low = fold_build1 (NEGATE_EXPR, sizetype, low);
4781 return build_range_check (type,
4782 fold_build2 (POINTER_PLUS_EXPR, etype, exp, low),
4783 1, build_int_cst (etype, 0), value);
4788 if (value != 0 && !TREE_OVERFLOW (value))
4789 return build_range_check (type,
4790 fold_build2 (MINUS_EXPR, etype, exp, low),
4791 1, build_int_cst (etype, 0), value);
4796 /* Return the predecessor of VAL in its type, handling the infinite case. */
/* Return VAL - 1 in VAL's type; for an integral type's minimum value the
   predecessor does not exist (the elided branch presumably returns 0).  */
4799 range_predecessor (tree val)
4801 tree type = TREE_TYPE (val);
4803 if (INTEGRAL_TYPE_P (type)
4804 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4807 return range_binop (MINUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4810 /* Return the successor of VAL in its type, handling the infinite case. */
/* Return VAL + 1 in VAL's type; for an integral type's maximum value the
   successor does not exist (the elided branch presumably returns 0).  */
4813 range_successor (tree val)
4815 tree type = TREE_TYPE (val);
4817 if (INTEGRAL_TYPE_P (type)
4818 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4821 return range_binop (PLUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4824 /* Given two ranges, see if we can merge them into one. Return 1 if we
4825 can, 0 if we can't. Set the output range into the specified parameters. */
/* Merge the two ranges (IN0_P, LOW0, HIGH0) and (IN1_P, LOW1, HIGH1)
   into one, storing it through *PIN_P/*PLOW/*PHIGH; returns 1 on
   success, 0 if no single range describes the combination (see the
   comment above).  Null bounds mean the type's extreme values.  */
4828 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4829 tree high0, int in1_p, tree low1, tree high1)
4837 int lowequal = ((low0 == 0 && low1 == 0)
4838 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4839 low0, 0, low1, 0)));
4840 int highequal = ((high0 == 0 && high1 == 0)
4841 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4842 high0, 1, high1, 1)));
4844 /* Make range 0 be the range that starts first, or ends last if they
4845 start at the same value. Swap them if it isn't. */
4846 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4849 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4850 high1, 1, high0, 1))))
4852 temp = in0_p, in0_p = in1_p, in1_p = temp;
4853 tem = low0, low0 = low1, low1 = tem;
4854 tem = high0, high0 = high1, high1 = tem;
4857 /* Now flag two cases, whether the ranges are disjoint or whether the
4858 second range is totally subsumed in the first. Note that the tests
4859 below are simplified by the ones above. */
4860 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4861 high0, 1, low1, 0));
4862 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4863 high1, 1, high0, 1));
4865 /* We now have four cases, depending on whether we are including or
4866 excluding the two ranges. */
4869 /* If they don't overlap, the result is false. If the second range
4870 is a subset it is the result. Otherwise, the range is from the start
4871 of the second to the end of the first. */
4873 in_p = 0, low = high = 0;
4875 in_p = 1, low = low1, high = high1;
4877 in_p = 1, low = low1, high = high0;
4880 else if (in0_p && ! in1_p)
4882 /* If they don't overlap, the result is the first range. If they are
4883 equal, the result is false. If the second range is a subset of the
4884 first, and the ranges begin at the same place, we go from just after
4885 the end of the second range to the end of the first. If the second
4886 range is not a subset of the first, or if it is a subset and both
4887 ranges end at the same place, the range starts at the start of the
4888 first range and ends just before the second range.
4889 Otherwise, we can't describe this as a single range. */
4891 in_p = 1, low = low0, high = high0;
4892 else if (lowequal && highequal)
4893 in_p = 0, low = high = 0;
4894 else if (subset && lowequal)
4896 low = range_successor (high1);
4901 /* We are in the weird situation where high0 > high1 but
4902 high1 has no successor. Punt. */
4906 else if (! subset || highequal)
4909 high = range_predecessor (low1);
4913 /* low0 < low1 but low1 has no predecessor. Punt. */
4921 else if (! in0_p && in1_p)
4923 /* If they don't overlap, the result is the second range. If the second
4924 is a subset of the first, the result is false. Otherwise,
4925 the range starts just after the first range and ends at the
4926 end of the second. */
4928 in_p = 1, low = low1, high = high1;
4929 else if (subset || highequal)
4930 in_p = 0, low = high = 0;
4933 low = range_successor (high0);
4938 /* high1 > high0 but high0 has no successor. Punt. */
4946 /* The case where we are excluding both ranges. Here the complex case
4947 is if they don't overlap. In that case, the only time we have a
4948 range is if they are adjacent. If the second is a subset of the
4949 first, the result is the first. Otherwise, the range to exclude
4950 starts at the beginning of the first range and ends at the end of the
4954 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4955 range_successor (high0),
4957 in_p = 0, low = low0, high = high1;
4960 /* Canonicalize - [min, x] into - [-, x]. */
4961 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4962 switch (TREE_CODE (TREE_TYPE (low0)))
4965 if (TYPE_PRECISION (TREE_TYPE (low0))
4966 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4970 if (tree_int_cst_equal (low0,
4971 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4975 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4976 && integer_zerop (low0))
4983 /* Canonicalize - [x, max] into - [x, -]. */
4984 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4985 switch (TREE_CODE (TREE_TYPE (high1)))
4988 if (TYPE_PRECISION (TREE_TYPE (high1))
4989 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4993 if (tree_int_cst_equal (high1,
4994 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4998 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4999 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
5001 integer_one_node, 1)))
5008 /* The ranges might be also adjacent between the maximum and
5009 minimum values of the given type. For
5010 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
5011 return + [x + 1, y - 1]. */
5012 if (low0 == 0 && high1 == 0)
5014 low = range_successor (high0);
5015 high = range_predecessor (low1);
5016 if (low == 0 || high == 0)
5026 in_p = 0, low = low0, high = high0;
5028 in_p = 0, low = low0, high = high1;
5031 *pin_p = in_p, *plow = low, *phigh = high;
5036 /* Subroutine of fold, looking inside expressions of the form
5037 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
5038 of the COND_EXPR. This function is being used also to optimize
5039 A op B ? C : A, by reversing the comparison first.
5041 Return a folded expression whose code is not a COND_EXPR
5042 anymore, or NULL_TREE if no folding opportunity is found. */
5045 fold_cond_expr_with_comparison (tree type, tree arg0, tree arg1, tree arg2)
/* Cache the comparison code, its two operands, and the type of ARG1;
   all of the transformations below key off these values.  */
5047 enum tree_code comp_code = TREE_CODE (arg0);
5048 tree arg00 = TREE_OPERAND (arg0, 0);
5049 tree arg01 = TREE_OPERAND (arg0, 1);
5050 tree arg1_type = TREE_TYPE (arg1);
5056 /* If we have A op 0 ? A : -A, consider applying the following
5059 A == 0? A : -A same as -A
5060 A != 0? A : -A same as A
5061 A >= 0? A : -A same as abs (A)
5062 A > 0? A : -A same as abs (A)
5063 A <= 0? A : -A same as -abs (A)
5064 A < 0? A : -A same as -abs (A)
5066 None of these transformations work for modes with signed
5067 zeros. If A is +/-0, the first two transformations will
5068 change the sign of the result (from +0 to -0, or vice
5069 versa). The last four will fix the sign of the result,
5070 even though the original expressions could be positive or
5071 negative, depending on the sign of A.
5073 Note that all these transformations are correct if A is
5074 NaN, since the two alternatives (A and -A) are also NaNs. */
5075 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
5076 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
5077 ? real_zerop (arg01)
5078 : integer_zerop (arg01))
5079 && ((TREE_CODE (arg2) == NEGATE_EXPR
5080 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
5081 /* In the case that A is of the form X-Y, '-A' (arg2) may
5082 have already been folded to Y-X, check for that. */
5083 || (TREE_CODE (arg1) == MINUS_EXPR
5084 && TREE_CODE (arg2) == MINUS_EXPR
5085 && operand_equal_p (TREE_OPERAND (arg1, 0),
5086 TREE_OPERAND (arg2, 1), 0)
5087 && operand_equal_p (TREE_OPERAND (arg1, 1),
5088 TREE_OPERAND (arg2, 0), 0))))
/* The A op 0 ? A : -A pattern matched; the code below dispatches on
   comp_code to pick the matching identity from the table above.  */
5093 tem = fold_convert (arg1_type, arg1);
5094 return pedantic_non_lvalue (fold_convert (type, negate_expr (tem)));
5097 return pedantic_non_lvalue (fold_convert (type, arg1));
/* NOTE(review): when flag_trapping_math is set the abs() folding path is
   presumably abandoned (the guarded statement is not visible in this
   extract) -- confirm against the full file.  */
5100 if (flag_trapping_math)
/* abs() of a signed value needs a signed type; convert an unsigned
   operand before building the ABS_EXPR.  */
5105 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
5106 arg1 = fold_convert (signed_type_for
5107 (TREE_TYPE (arg1)), arg1);
5108 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
5109 return pedantic_non_lvalue (fold_convert (type, tem));
5112 if (flag_trapping_math)
5116 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
5117 arg1 = fold_convert (signed_type_for
5118 (TREE_TYPE (arg1)), arg1);
5119 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
/* -abs(A) case: negate the absolute value just built.  */
5120 return negate_expr (fold_convert (type, tem));
5122 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5126 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
5127 A == 0 ? A : 0 is always 0 unless A is -0. Note that
5128 both transformations are correct when A is NaN: A != 0
5129 is then true, and A == 0 is false. */
5131 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
5132 && integer_zerop (arg01) && integer_zerop (arg2))
5134 if (comp_code == NE_EXPR)
5135 return pedantic_non_lvalue (fold_convert (type, arg1));
5136 else if (comp_code == EQ_EXPR)
5137 return build_int_cst (type, 0);
5140 /* Try some transformations of A op B ? A : B.
5142 A == B? A : B same as B
5143 A != B? A : B same as A
5144 A >= B? A : B same as max (A, B)
5145 A > B? A : B same as max (B, A)
5146 A <= B? A : B same as min (A, B)
5147 A < B? A : B same as min (B, A)
5149 As above, these transformations don't work in the presence
5150 of signed zeros. For example, if A and B are zeros of
5151 opposite sign, the first two transformations will change
5152 the sign of the result. In the last four, the original
5153 expressions give different results for (A=+0, B=-0) and
5154 (A=-0, B=+0), but the transformed expressions do not.
5156 The first two transformations are correct if either A or B
5157 is a NaN. In the first transformation, the condition will
5158 be false, and B will indeed be chosen. In the case of the
5159 second transformation, the condition A != B will be true,
5160 and A will be chosen.
5162 The conversions to max() and min() are not correct if B is
5163 a number and A is not. The conditions in the original
5164 expressions will be false, so all four give B. The min()
5165 and max() versions would give a NaN instead. */
5166 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
5167 && operand_equal_for_comparison_p (arg01, arg2, arg00)
5168 /* Avoid these transformations if the COND_EXPR may be used
5169 as an lvalue in the C++ front-end. PR c++/19199. */
5171 || (strcmp (lang_hooks.name, "GNU C++") != 0
5172 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
5173 || ! maybe_lvalue_p (arg1)
5174 || ! maybe_lvalue_p (arg2)))
5176 tree comp_op0 = arg00;
5177 tree comp_op1 = arg01;
5178 tree comp_type = TREE_TYPE (comp_op0);
5180 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
5181 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
5191 return pedantic_non_lvalue (fold_convert (type, arg2));
5193 return pedantic_non_lvalue (fold_convert (type, arg1));
5198 /* In C++ a ?: expression can be an lvalue, so put the
5199 operand which will be used if they are equal first
5200 so that we can convert this back to the
5201 corresponding COND_EXPR. */
/* min()/max() rewrites are only NaN-safe when the mode has no NaNs.  */
5202 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
5204 comp_op0 = fold_convert (comp_type, comp_op0);
5205 comp_op1 = fold_convert (comp_type, comp_op1);
5206 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
5207 ? fold_build2 (MIN_EXPR, comp_type, comp_op0, comp_op1)
5208 : fold_build2 (MIN_EXPR, comp_type, comp_op1, comp_op0);
5209 return pedantic_non_lvalue (fold_convert (type, tem));
5216 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
5218 comp_op0 = fold_convert (comp_type, comp_op0);
5219 comp_op1 = fold_convert (comp_type, comp_op1);
5220 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
5221 ? fold_build2 (MAX_EXPR, comp_type, comp_op0, comp_op1)
5222 : fold_build2 (MAX_EXPR, comp_type, comp_op1, comp_op0);
5223 return pedantic_non_lvalue (fold_convert (type, tem));
5227 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
5228 return pedantic_non_lvalue (fold_convert (type, arg2));
5231 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
5232 return pedantic_non_lvalue (fold_convert (type, arg1));
5235 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5240 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
5241 we might still be able to simplify this. For example,
5242 if C1 is one less or one more than C2, this might have started
5243 out as a MIN or MAX and been transformed by this function.
5244 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
5246 if (INTEGRAL_TYPE_P (type)
5247 && TREE_CODE (arg01) == INTEGER_CST
5248 && TREE_CODE (arg2) == INTEGER_CST)
5252 /* We can replace A with C1 in this case. */
5253 arg1 = fold_convert (type, arg01);
5254 return fold_build3 (COND_EXPR, type, arg0, arg1, arg2);
5257 /* If C1 is C2 + 1, this is min(A, C2). */
/* The TYPE_MAX_VALUE / TYPE_MIN_VALUE guards below avoid wrapping when
   forming C2 +/- 1 with const_binop.  */
5258 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
5260 && operand_equal_p (arg01,
5261 const_binop (PLUS_EXPR, arg2,
5262 build_int_cst (type, 1), 0),
5264 return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
5266 fold_convert (type, arg1),
5271 /* If C1 is C2 - 1, this is min(A, C2). */
5272 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
5274 && operand_equal_p (arg01,
5275 const_binop (MINUS_EXPR, arg2,
5276 build_int_cst (type, 1), 0),
5278 return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
5280 fold_convert (type, arg1),
5285 /* If C1 is C2 - 1, this is max(A, C2). */
5286 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
5288 && operand_equal_p (arg01,
5289 const_binop (MINUS_EXPR, arg2,
5290 build_int_cst (type, 1), 0),
5292 return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
5294 fold_convert (type, arg1),
5299 /* If C1 is C2 + 1, this is max(A, C2). */
5300 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
5302 && operand_equal_p (arg01,
5303 const_binop (PLUS_EXPR, arg2,
5304 build_int_cst (type, 1), 0),
5306 return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
5308 fold_convert (type, arg1),
5322 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
5323 #define LOGICAL_OP_NON_SHORT_CIRCUIT (BRANCH_COST >= 2)
5326 /* EXP is some logical combination of boolean tests. See if we can
5327 merge it into some range test. Return the new tree if so. */
5330 fold_range_test (enum tree_code code, tree type, tree op0, tree op1)
5332 int or_op = (code == TRUTH_ORIF_EXPR
5333 || code == TRUTH_OR_EXPR)
5334 int in0_p, in1_p, in_p;
5335 tree low0, low1, low, high0, high1, high;
5336 bool strict_overflow_p = false;
/* Decompose each operand into a range test: LHS/RHS is the tested
   expression, [lowN, highN] the bounds, inN_p whether the test is
   "inside" or "outside" the range.  */
5337 tree lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
5338 tree rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
/* Warning text issued if the simplification relied on signed overflow
   being undefined (see fold_overflow_warning calls below).  */
5340 const char * const warnmsg = G_("assuming signed overflow does not occur "
5341 "when simplifying range test");
5343 /* If this is an OR operation, invert both sides; we will invert
5344 again at the end. */
5346 in0_p = ! in0_p, in1_p = ! in1_p;
5348 /* If both expressions are the same, if we can merge the ranges, and we
5349 can build the range test, return it or it inverted. If one of the
5350 ranges is always true or always false, consider it to be the same
5351 expression as the other. */
5352 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
5353 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
5355 && 0 != (tem = (build_range_check (type,
5357 : rhs != 0 ? rhs : integer_zero_node,
5360 if (strict_overflow_p)
5361 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
5362 return or_op ? invert_truthvalue (tem) : tem;
5365 /* On machines where the branch cost is expensive, if this is a
5366 short-circuited branch and the underlying object on both sides
5367 is the same, make a non-short-circuit operation. */
5368 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
5369 && lhs != 0 && rhs != 0
5370 && (code == TRUTH_ANDIF_EXPR
5371 || code == TRUTH_ORIF_EXPR)
5372 && operand_equal_p (lhs, rhs, 0))
5374 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
5375 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
5376 which cases we can't do this. */
5377 if (simple_operand_p (lhs))
5378 return build2 (code == TRUTH_ANDIF_EXPR
5379 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5382 else if (lang_hooks.decls.global_bindings_p () == 0
5383 && ! CONTAINS_PLACEHOLDER_P (lhs))
/* Wrap the shared subexpression in a SAVE_EXPR so it is evaluated
   only once even though both rebuilt range checks reference it.  */
5385 tree common = save_expr (lhs);
5387 if (0 != (lhs = build_range_check (type, common,
5388 or_op ? ! in0_p : in0_p,
5390 && (0 != (rhs = build_range_check (type, common,
5391 or_op ? ! in1_p : in1_p,
5394 if (strict_overflow_p)
5395 fold_overflow_warning (warnmsg,
5396 WARN_STRICT_OVERFLOW_COMPARISON);
5397 return build2 (code == TRUTH_ANDIF_EXPR
5398 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5407 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
5408 bit value. Arrange things so the extra bits will be set to zero if and
5409 only if C is signed-extended to its full width. If MASK is nonzero,
5410 it is an INTEGER_CST that should be AND'ed with the extra bits. */
5413 unextend (tree c, int p, int unsignedp, tree mask)
5415 tree type = TREE_TYPE (c);
5416 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
/* Nothing to do when the value already occupies the whole mode, or when
   it is unsigned (no sign bit to propagate).  */
5419 if (p == modesize || unsignedp)
5422 /* We work by getting just the sign bit into the low-order bit, then
5423 into the high-order bit, then sign-extend. We then XOR that value
/* Shift the sign bit of the P-bit value down to bit 0 and isolate it;
   TEMP is now 0 or 1.  */
5425 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
5426 temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
5428 /* We must use a signed type in order to get an arithmetic right shift.
5429 However, we must also avoid introducing accidental overflows, so that
5430 a subsequent call to integer_zerop will work. Hence we must
5431 do the type conversion here. At this point, the constant is either
5432 zero or one, and the conversion to a signed type can never overflow.
5433 We could get an overflow if this conversion is done anywhere else. */
5434 if (TYPE_UNSIGNED (type))
5435 temp = fold_convert (signed_type_for (type), temp);
/* Move the bit to the top of the mode, then arithmetic-shift it back so
   it smears across all bits above position P-1 (the sign extension).  */
5437 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
5438 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
5440 temp = const_binop (BIT_AND_EXPR, temp,
5441 fold_convert (TREE_TYPE (c), mask), 0);
5442 /* If necessary, convert the type back to match the type of C. */
5443 if (TYPE_UNSIGNED (type))
5444 temp = fold_convert (type, temp);
5446 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
5449 /* Find ways of folding logical expressions of LHS and RHS:
5450 Try to merge two comparisons to the same innermost item.
5451 Look for range tests like "ch >= '0' && ch <= '9'".
5452 Look for combinations of simple terms on machines with expensive branches
5453 and evaluate the RHS unconditionally.
5455 For example, if we have p->a == 2 && p->b == 4 and we can make an
5456 object large enough to span both A and B, we can do this with a comparison
5457 against the object ANDed with the a mask.
5459 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5460 operations to do this with one comparison.
5462 We check for both normal comparisons and the BIT_AND_EXPRs made this by
5463 function and the one above.
5465 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5466 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5468 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5471 We return the simplified tree or 0 if no optimization is possible. */
5474 fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs)
5476 /* If this is the "or" of two comparisons, we can do something if
5477 the comparisons are NE_EXPR. If this is the "and", we can do something
5478 if the comparisons are EQ_EXPR. I.e.,
5479 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5481 WANTED_CODE is this operation code. For single bit fields, we can
5482 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5483 comparison for one-bit fields. */
/* Naming convention for the locals below: the first letter is l/r for the
   left/right comparison, the second l/r for that comparison's left/right
   operand (so "lr_" describes the right operand of the left comparison).  */
5485 enum tree_code wanted_code;
5486 enum tree_code lcode, rcode;
5487 tree ll_arg, lr_arg, rl_arg, rr_arg;
5488 tree ll_inner, lr_inner, rl_inner, rr_inner;
5489 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5490 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5491 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5492 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5493 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5494 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5495 enum machine_mode lnmode, rnmode;
5496 tree ll_mask, lr_mask, rl_mask, rr_mask;
5497 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5498 tree l_const, r_const;
5499 tree lntype, rntype, result;
5500 int first_bit, end_bit;
/* Remember the original operands and code so we can detect below whether
   anything was actually rewritten.  */
5502 tree orig_lhs = lhs, orig_rhs = rhs;
5503 enum tree_code orig_code = code;
5505 /* Start by getting the comparison codes. Fail if anything is volatile.
5506 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5507 it were surrounded with a NE_EXPR. */
5509 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5512 lcode = TREE_CODE (lhs);
5513 rcode = TREE_CODE (rhs);
5515 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5517 lhs = build2 (NE_EXPR, truth_type, lhs,
5518 build_int_cst (TREE_TYPE (lhs), 0));
5522 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5524 rhs = build2 (NE_EXPR, truth_type, rhs,
5525 build_int_cst (TREE_TYPE (rhs), 0));
5529 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5530 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5533 ll_arg = TREE_OPERAND (lhs, 0);
5534 lr_arg = TREE_OPERAND (lhs, 1);
5535 rl_arg = TREE_OPERAND (rhs, 0);
5536 rr_arg = TREE_OPERAND (rhs, 1);
5538 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5539 if (simple_operand_p (ll_arg)
5540 && simple_operand_p (lr_arg))
5543 if (operand_equal_p (ll_arg, rl_arg, 0)
5544 && operand_equal_p (lr_arg, rr_arg, 0))
5546 result = combine_comparisons (code, lcode, rcode,
5547 truth_type, ll_arg, lr_arg);
/* Same operands but swapped in the right comparison: canonicalize by
   swapping RCODE before combining.  */
5551 else if (operand_equal_p (ll_arg, rr_arg, 0)
5552 && operand_equal_p (lr_arg, rl_arg, 0))
5554 result = combine_comparisons (code, lcode,
5555 swap_tree_comparison (rcode),
5556 truth_type, ll_arg, lr_arg);
/* From here on only the non-short-circuit forms matter.  */
5562 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5563 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5565 /* If the RHS can be evaluated unconditionally and its operands are
5566 simple, it wins to evaluate the RHS unconditionally on machines
5567 with expensive branches. In this case, this isn't a comparison
5568 that can be merged. Avoid doing this if the RHS is a floating-point
5569 comparison since those can trap. */
5571 if (BRANCH_COST >= 2
5572 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5573 && simple_operand_p (rl_arg)
5574 && simple_operand_p (rr_arg))
5576 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5577 if (code == TRUTH_OR_EXPR
5578 && lcode == NE_EXPR && integer_zerop (lr_arg)
5579 && rcode == NE_EXPR && integer_zerop (rr_arg)
5580 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
5581 return build2 (NE_EXPR, truth_type,
5582 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5584 build_int_cst (TREE_TYPE (ll_arg), 0));
5586 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5587 if (code == TRUTH_AND_EXPR
5588 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5589 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5590 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
5591 return build2 (EQ_EXPR, truth_type,
5592 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5594 build_int_cst (TREE_TYPE (ll_arg), 0));
5596 if (LOGICAL_OP_NON_SHORT_CIRCUIT)
/* Only rebuild if something actually changed; otherwise fall through to
   the field-merging code below.  */
5598 if (code != orig_code || lhs != orig_lhs || rhs != orig_rhs)
5599 return build2 (code, truth_type, lhs, rhs);
5604 /* See if the comparisons can be merged. Then get all the parameters for
5607 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5608 || (rcode != EQ_EXPR && rcode != NE_EXPR))
/* Decompose each of the four comparison operands into a bit-field
   reference (inner object, bit size/position, mode, signedness, masks).  */
5612 ll_inner = decode_field_reference (ll_arg,
5613 &ll_bitsize, &ll_bitpos, &ll_mode,
5614 &ll_unsignedp, &volatilep, &ll_mask,
5616 lr_inner = decode_field_reference (lr_arg,
5617 &lr_bitsize, &lr_bitpos, &lr_mode,
5618 &lr_unsignedp, &volatilep, &lr_mask,
5620 rl_inner = decode_field_reference (rl_arg,
5621 &rl_bitsize, &rl_bitpos, &rl_mode,
5622 &rl_unsignedp, &volatilep, &rl_mask,
5624 rr_inner = decode_field_reference (rr_arg,
5625 &rr_bitsize, &rr_bitpos, &rr_mode,
5626 &rr_unsignedp, &volatilep, &rr_mask,
5629 /* It must be true that the inner operation on the lhs of each
5630 comparison must be the same if we are to be able to do anything.
5631 Then see if we have constants. If not, the same must be true for
5633 if (volatilep || ll_inner == 0 || rl_inner == 0
5634 || ! operand_equal_p (ll_inner, rl_inner, 0))
5637 if (TREE_CODE (lr_arg) == INTEGER_CST
5638 && TREE_CODE (rr_arg) == INTEGER_CST)
5639 l_const = lr_arg, r_const = rr_arg;
5640 else if (lr_inner == 0 || rr_inner == 0
5641 || ! operand_equal_p (lr_inner, rr_inner, 0))
5644 l_const = r_const = 0;
5646 /* If either comparison code is not correct for our logical operation,
5647 fail. However, we can convert a one-bit comparison against zero into
5648 the opposite comparison against that bit being set in the field. */
5650 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5651 if (lcode != wanted_code)
5653 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5655 /* Make the left operand unsigned, since we are only interested
5656 in the value of one bit. Otherwise we are doing the wrong
5665 /* This is analogous to the code for l_const above. */
5666 if (rcode != wanted_code)
5668 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5677 /* See if we can find a mode that contains both fields being compared on
5678 the left. If we can't, fail. Otherwise, update all constants and masks
5679 to be relative to a field of that size. */
5680 first_bit = MIN (ll_bitpos, rl_bitpos);
5681 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5682 lnmode = get_best_mode (end_bit - first_bit, first_bit,
5683 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5685 if (lnmode == VOIDmode)
5688 lnbitsize = GET_MODE_BITSIZE (lnmode);
/* Align the combined field's start position down to the mode size.  */
5689 lnbitpos = first_bit & ~ (lnbitsize - 1);
5690 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5691 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5693 if (BYTES_BIG_ENDIAN)
5695 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5696 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5699 ll_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, ll_mask),
5700 size_int (xll_bitpos), 0);
5701 rl_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, rl_mask),
5702 size_int (xrl_bitpos), 0);
/* Re-express the left constant relative to the combined field; if bits
   fall outside its mask the comparison has a known result.  */
5706 l_const = fold_convert (lntype, l_const);
5707 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5708 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
5709 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5710 fold_build1 (BIT_NOT_EXPR,
5714 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5716 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5721 r_const = fold_convert (lntype, r_const);
5722 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5723 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
5724 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5725 fold_build1 (BIT_NOT_EXPR,
5729 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5731 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5735 /* If the right sides are not constant, do the same for it. Also,
5736 disallow this optimization if a size or signedness mismatch occurs
5737 between the left and right sides. */
5740 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5741 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5742 /* Make sure the two fields on the right
5743 correspond to the left without being swapped. */
5744 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5747 first_bit = MIN (lr_bitpos, rr_bitpos);
5748 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5749 rnmode = get_best_mode (end_bit - first_bit, first_bit,
5750 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5752 if (rnmode == VOIDmode)
5755 rnbitsize = GET_MODE_BITSIZE (rnmode);
5756 rnbitpos = first_bit & ~ (rnbitsize - 1);
5757 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5758 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5760 if (BYTES_BIG_ENDIAN)
5762 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5763 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5766 lr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, lr_mask),
5767 size_int (xlr_bitpos), 0);
5768 rr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, rr_mask),
5769 size_int (xrr_bitpos), 0);
5771 /* Make a mask that corresponds to both fields being compared.
5772 Do this for both items being compared. If the operands are the
5773 same size and the bits being compared are in the same position
5774 then we can do this by masking both and comparing the masked
5776 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
5777 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
5778 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5780 lhs = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
5781 ll_unsignedp || rl_unsignedp);
/* Skip the AND when the mask covers the whole field -- it is a no-op.  */
5782 if (! all_ones_mask_p (ll_mask, lnbitsize))
5783 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5785 rhs = make_bit_field_ref (lr_inner, rntype, rnbitsize, rnbitpos,
5786 lr_unsignedp || rr_unsignedp);
5787 if (! all_ones_mask_p (lr_mask, rnbitsize))
5788 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5790 return build2 (wanted_code, truth_type, lhs, rhs);
5793 /* There is still another way we can do something: If both pairs of
5794 fields being compared are adjacent, we may be able to make a wider
5795 field containing them both.
5797 Note that we still must mask the lhs/rhs expressions. Furthermore,
5798 the mask must be shifted to account for the shift done by
5799 make_bit_field_ref. */
5800 if ((ll_bitsize + ll_bitpos == rl_bitpos
5801 && lr_bitsize + lr_bitpos == rr_bitpos)
5802 || (ll_bitpos == rl_bitpos + rl_bitsize
5803 && lr_bitpos == rr_bitpos + rr_bitsize))
5807 lhs = make_bit_field_ref (ll_inner, lntype, ll_bitsize + rl_bitsize,
5808 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5809 rhs = make_bit_field_ref (lr_inner, rntype, lr_bitsize + rr_bitsize,
5810 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5812 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5813 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
5814 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5815 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);
5817 /* Convert to the smaller type before masking out unwanted bits. */
5819 if (lntype != rntype)
5821 if (lnbitsize > rnbitsize)
5823 lhs = fold_convert (rntype, lhs);
5824 ll_mask = fold_convert (rntype, ll_mask);
5827 else if (lnbitsize < rnbitsize)
5829 rhs = fold_convert (lntype, rhs);
5830 lr_mask = fold_convert (lntype, lr_mask);
5835 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5836 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5838 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5839 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5841 return build2 (wanted_code, truth_type, lhs, rhs);
5847 /* Handle the case of comparisons with constants. If there is something in
5848 common between the masks, those bits of the constants must be the same.
5849 If not, the condition is always false. Test for this to avoid generating
5850 incorrect code below. */
5851 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
5852 if (! integer_zerop (result)
5853 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
5854 const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
5856 if (wanted_code == NE_EXPR)
5858 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5859 return constant_boolean_node (true, truth_type);
5863 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5864 return constant_boolean_node (false, truth_type);
5868 /* Construct the expression we will return. First get the component
5869 reference we will make. Unless the mask is all ones the width of
5870 that field, perform the mask operation. Then compare with the
5872 result = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
5873 ll_unsignedp || rl_unsignedp);
5875 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
5876 if (! all_ones_mask_p (ll_mask, lnbitsize))
5877 result = build2 (BIT_AND_EXPR, lntype, result, ll_mask);
5879 return build2 (wanted_code, truth_type, result,
5880 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
5883 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5887 optimize_minmax_comparison (enum tree_code code, tree type, tree op0, tree op1)
5890 enum tree_code op_code;
5891 tree comp_const = op1;
/* consts_equal / consts_lt record how the min/max constant compares to
   the constant we are comparing against (set just below).  */
5893 int consts_equal, consts_lt;
5896 STRIP_SIGN_NOPS (arg0);
5898 op_code = TREE_CODE (arg0);
5899 minmax_const = TREE_OPERAND (arg0, 1);
5900 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5901 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5902 inner = TREE_OPERAND (arg0, 0);
5904 /* If something does not permit us to optimize, return the original tree. */
5905 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5906 || TREE_CODE (comp_const) != INTEGER_CST
5907 || TREE_OVERFLOW (comp_const)
5908 || TREE_CODE (minmax_const) != INTEGER_CST
5909 || TREE_OVERFLOW (minmax_const))
5912 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5913 and GT_EXPR, doing the rest with recursive calls using logical
5917 case NE_EXPR: case LT_EXPR: case LE_EXPR:
/* NE/LT/LE are handled by inverting the comparison, recursing on the
   inverse, and inverting the result.  */
5919 tree tem = optimize_minmax_comparison (invert_tree_comparison (code, false),
5922 return invert_truthvalue (tem);
/* GE is expressed as (== || >) via two recursive calls.  */
5928 fold_build2 (TRUTH_ORIF_EXPR, type,
5929 optimize_minmax_comparison
5930 (EQ_EXPR, type, arg0, comp_const),
5931 optimize_minmax_comparison
5932 (GT_EXPR, type, arg0, comp_const));
5935 if (op_code == MAX_EXPR && consts_equal)
5936 /* MAX (X, 0) == 0 -> X <= 0 */
5937 return fold_build2 (LE_EXPR, type, inner, comp_const);
5939 else if (op_code == MAX_EXPR && consts_lt)
5940 /* MAX (X, 0) == 5 -> X == 5 */
5941 return fold_build2 (EQ_EXPR, type, inner, comp_const);
5943 else if (op_code == MAX_EXPR)
5944 /* MAX (X, 0) == -1 -> false */
5945 return omit_one_operand (type, integer_zero_node, inner);
5947 else if (consts_equal)
5948 /* MIN (X, 0) == 0 -> X >= 0 */
5949 return fold_build2 (GE_EXPR, type, inner, comp_const);
5952 /* MIN (X, 0) == 5 -> false */
5953 return omit_one_operand (type, integer_zero_node, inner);
5956 /* MIN (X, 0) == -1 -> X == -1 */
5957 return fold_build2 (EQ_EXPR, type, inner, comp_const);
5960 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5961 /* MAX (X, 0) > 0 -> X > 0
5962 MAX (X, 0) > 5 -> X > 5 */
5963 return fold_build2 (GT_EXPR, type, inner, comp_const);
5965 else if (op_code == MAX_EXPR)
5966 /* MAX (X, 0) > -1 -> true */
5967 return omit_one_operand (type, integer_one_node, inner);
5969 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5970 /* MIN (X, 0) > 0 -> false
5971 MIN (X, 0) > 5 -> false */
5972 return omit_one_operand (type, integer_zero_node, inner);
5975 /* MIN (X, 0) > -1 -> X > -1 */
5976 return fold_build2 (GT_EXPR, type, inner, comp_const);
5983 /* T is an integer expression that is being multiplied, divided, or taken a
5984 modulus (CODE says which and what kind of divide or modulus) by a
5985 constant C. See if we can eliminate that operation by folding it with
5986 other operations already in T. WIDE_TYPE, if non-null, is a type that
5987 should be used for the computation if wider than our type.
5989 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5990 (X * 2) + (Y * 4). We must, however, be assured that either the original
5991 expression would not overflow or that overflow is undefined for the type
5992 in the language in question.
5994 We also canonicalize (X + 7) * 4 into X * 4 + 28 in the hope that either
5995 the machine has a multiply-accumulate insn or that this is part of an
5996 addressing calculation.
5998 If we return a non-null expression, it is an equivalent form of the
5999 original computation, but need not be in the original type.
6001 We set *STRICT_OVERFLOW_P to true if the return values depends on
6002 signed overflow being undefined. Otherwise we do not change
6003 *STRICT_OVERFLOW_P. */
6006 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
6007 bool *strict_overflow_p)
6009 /* To avoid exponential search depth, refuse to allow recursion past
6010 three levels. Beyond that (1) it's highly unlikely that we'll find
6011 something interesting and (2) we've probably processed it before
6012 when we built the inner expression. */
/* This wrapper only enforces the depth limit; extract_muldiv_1 below does
   the actual simplification.  */
6021 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
/* Worker for extract_muldiv.  T, C, CODE, WIDE_TYPE and STRICT_OVERFLOW_P
   are as documented for extract_muldiv above.  NOTE(review): gaps in the
   embedded numbering indicate that the return type, braces, the switch
   statement header, several case labels and break/return statements were
   lost in extraction.  */
6028 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
6029 bool *strict_overflow_p)
6031 tree type = TREE_TYPE (t);
6032 enum tree_code tcode = TREE_CODE (t);
/* Do the arithmetic in the wider of TYPE and WIDE_TYPE (if given).  */
6033 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
6034 > GET_MODE_SIZE (TYPE_MODE (type)))
6035 ? wide_type : type);
6037 int same_p = tcode == code;
6038 tree op0 = NULL_TREE, op1 = NULL_TREE;
6039 bool sub_strict_overflow_p;
6041 /* Don't deal with constants of zero here; they confuse the code below. */
6042 if (integer_zerop (c))
6045 if (TREE_CODE_CLASS (tcode) == tcc_unary)
6046 op0 = TREE_OPERAND (t, 0);
6048 if (TREE_CODE_CLASS (tcode) == tcc_binary)
6049 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
6051 /* Note that we need not handle conditional operations here since fold
6052 already handles those cases. So just do arithmetic here. */
/* NOTE(review): a `switch (tcode)` and its first case label (presumably
   INTEGER_CST) are not visible here -- confirm against upstream.  */
6056 /* For a constant, we can always simplify if we are a multiply
6057 or (for divide and modulus) if it is a multiple of our constant. */
6058 if (code == MULT_EXPR
6059 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
6060 return const_binop (code, fold_convert (ctype, t),
6061 fold_convert (ctype, c), 0);
6064 case CONVERT_EXPR: case NON_LVALUE_EXPR: case NOP_EXPR:
6065 /* If op0 is an expression ... */
6066 if ((COMPARISON_CLASS_P (op0)
6067 || UNARY_CLASS_P (op0)
6068 || BINARY_CLASS_P (op0)
6069 || VL_EXP_CLASS_P (op0)
6070 || EXPRESSION_CLASS_P (op0))
6071 /* ... and is unsigned, and its type is smaller than ctype,
6072 then we cannot pass through as widening. */
6073 && ((TYPE_UNSIGNED (TREE_TYPE (op0))
6074 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
6075 && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
6076 && (GET_MODE_SIZE (TYPE_MODE (ctype))
6077 > GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0)))))
6078 /* ... or this is a truncation (t is narrower than op0),
6079 then we cannot pass through this narrowing. */
6080 || (GET_MODE_SIZE (TYPE_MODE (type))
6081 < GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0))))
6082 /* ... or signedness changes for division or modulus,
6083 then we cannot pass through this conversion. */
6084 || (code != MULT_EXPR
6085 && (TYPE_UNSIGNED (ctype)
6086 != TYPE_UNSIGNED (TREE_TYPE (op0))))
6087 /* ... or has undefined overflow while the converted to
6088 type has not, we cannot do the operation in the inner type
6089 as that would introduce undefined overflow. */
6090 || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
6091 && !TYPE_OVERFLOW_UNDEFINED (type))))
6094 /* Pass the constant down and see if we can make a simplification. If
6095 we can, replace this expression with the inner simplification for
6096 possible later conversion to our or some other type. */
6097 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
6098 && TREE_CODE (t2) == INTEGER_CST
6099 && !TREE_OVERFLOW (t2)
6100 && (0 != (t1 = extract_muldiv (op0, t2, code,
6102 ? ctype : NULL_TREE,
6103 strict_overflow_p))))
/* Presumably case ABS_EXPR begins here (label lost in extraction).  */
6108 /* If widening the type changes it from signed to unsigned, then we
6109 must avoid building ABS_EXPR itself as unsigned. */
6110 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
6112 tree cstype = (*signed_type_for) (ctype);
6113 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
6116 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
6117 return fold_convert (ctype, t1);
6121 /* If the constant is negative, we cannot simplify this. */
6122 if (tree_int_cst_sgn (c) == -1)
6126 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
6128 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
6131 case MIN_EXPR: case MAX_EXPR:
6132 /* If widening the type changes the signedness, then we can't perform
6133 this optimization as that changes the result. */
6134 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
6137 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
6138 sub_strict_overflow_p = false;
6139 if ((t1 = extract_muldiv (op0, c, code, wide_type,
6140 &sub_strict_overflow_p)) != 0
6141 && (t2 = extract_muldiv (op1, c, code, wide_type,
6142 &sub_strict_overflow_p)) != 0)
/* MIN/MAX swap when multiplying or dividing by a negative constant.  */
6144 if (tree_int_cst_sgn (c) < 0)
6145 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
6146 if (sub_strict_overflow_p)
6147 *strict_overflow_p = true;
6148 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6149 fold_convert (ctype, t2));
6153 case LSHIFT_EXPR: case RSHIFT_EXPR:
6154 /* If the second operand is constant, this is a multiplication
6155 or floor division, by a power of two, so we can treat it that
6156 way unless the multiplier or divisor overflows. Signed
6157 left-shift overflow is implementation-defined rather than
6158 undefined in C90, so do not convert signed left shift into
6160 if (TREE_CODE (op1) == INTEGER_CST
6161 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
6162 /* const_binop may not detect overflow correctly,
6163 so check for it explicitly here. */
6164 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
6165 && TREE_INT_CST_HIGH (op1) == 0
6166 && 0 != (t1 = fold_convert (ctype,
6167 const_binop (LSHIFT_EXPR,
6170 && !TREE_OVERFLOW (t1))
6171 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
6172 ? MULT_EXPR : FLOOR_DIV_EXPR,
6173 ctype, fold_convert (ctype, op0), t1),
6174 c, code, wide_type, strict_overflow_p);
6177 case PLUS_EXPR: case MINUS_EXPR:
6178 /* See if we can eliminate the operation on both sides. If we can, we
6179 can return a new PLUS or MINUS. If we can't, the only remaining
6180 cases where we can do anything are if the second operand is a
6182 sub_strict_overflow_p = false;
6183 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
6184 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
6185 if (t1 != 0 && t2 != 0
6186 && (code == MULT_EXPR
6187 /* If not multiplication, we can only do this if both operands
6188 are divisible by c. */
6189 || (multiple_of_p (ctype, op0, c)
6190 && multiple_of_p (ctype, op1, c))))
6192 if (sub_strict_overflow_p)
6193 *strict_overflow_p = true;
6194 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6195 fold_convert (ctype, t2));
6198 /* If this was a subtraction, negate OP1 and set it to be an addition.
6199 This simplifies the logic below. */
6200 if (tcode == MINUS_EXPR)
6201 tcode = PLUS_EXPR, op1 = negate_expr (op1);
6203 if (TREE_CODE (op1) != INTEGER_CST)
6206 /* If either OP1 or C are negative, this optimization is not safe for
6207 some of the division and remainder types while for others we need
6208 to change the code. */
6209 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
6211 if (code == CEIL_DIV_EXPR)
6212 code = FLOOR_DIV_EXPR;
6213 else if (code == FLOOR_DIV_EXPR)
6214 code = CEIL_DIV_EXPR;
6215 else if (code != MULT_EXPR
6216 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
6220 /* If it's a multiply or a division/modulus operation of a multiple
6221 of our constant, do the operation and verify it doesn't overflow. */
6222 if (code == MULT_EXPR
6223 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
6225 op1 = const_binop (code, fold_convert (ctype, op1),
6226 fold_convert (ctype, c), 0);
6227 /* We allow the constant to overflow with wrapping semantics. */
6229 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
6235 /* If we have an unsigned type is not a sizetype, we cannot widen
6236 the operation since it will change the result if the original
6237 computation overflowed. */
6238 if (TYPE_UNSIGNED (ctype)
6239 && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
6243 /* If we were able to eliminate our operation from the first side,
6244 apply our operation to the second side and reform the PLUS. */
6245 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
6246 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
6248 /* The last case is if we are a multiply. In that case, we can
6249 apply the distributive law to commute the multiply and addition
6250 if the multiplication of the constants doesn't overflow. */
6251 if (code == MULT_EXPR)
6252 return fold_build2 (tcode, ctype,
6253 fold_build2 (code, ctype,
6254 fold_convert (ctype, op0),
6255 fold_convert (ctype, c)),
/* Presumably case MULT_EXPR begins here (label lost in extraction).  */
6261 /* We have a special case here if we are doing something like
6262 (C * 8) % 4 since we know that's zero. */
6263 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
6264 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR
6265 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
6266 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
6267 return omit_one_operand (type, integer_zero_node, op0);
6269 /* ... fall through ... */
6271 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
6272 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
6273 /* If we can extract our operation from the LHS, do so and return a
6274 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
6275 do something only if the second operand is a constant. */
6277 && (t1 = extract_muldiv (op0, c, code, wide_type,
6278 strict_overflow_p)) != 0)
6279 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6280 fold_convert (ctype, op1));
6281 else if (tcode == MULT_EXPR && code == MULT_EXPR
6282 && (t1 = extract_muldiv (op1, c, code, wide_type,
6283 strict_overflow_p)) != 0)
6284 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6285 fold_convert (ctype, t1));
6286 else if (TREE_CODE (op1) != INTEGER_CST)
6289 /* If these are the same operation types, we can associate them
6290 assuming no overflow. */
6292 && 0 != (t1 = const_binop (MULT_EXPR, fold_convert (ctype, op1),
6293 fold_convert (ctype, c), 0))
6294 && !TREE_OVERFLOW (t1))
6295 return fold_build2 (tcode, ctype, fold_convert (ctype, op0), t1);
6297 /* If these operations "cancel" each other, we have the main
6298 optimizations of this pass, which occur when either constant is a
6299 multiple of the other, in which case we replace this with either an
6300 operation or CODE or TCODE.
6302 If we have an unsigned type that is not a sizetype, we cannot do
6303 this since it will change the result if the original computation
6305 if ((TYPE_OVERFLOW_UNDEFINED (ctype)
6306 || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
6307 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
6308 || (tcode == MULT_EXPR
6309 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
6310 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
6311 && code != MULT_EXPR)))
6313 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
6315 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6316 *strict_overflow_p = true;
6317 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6318 fold_convert (ctype,
6319 const_binop (TRUNC_DIV_EXPR,
6322 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
6324 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6325 *strict_overflow_p = true;
6326 return fold_build2 (code, ctype, fold_convert (ctype, op0),
6327 fold_convert (ctype,
6328 const_binop (TRUNC_DIV_EXPR,
6341 /* Return a node which has the indicated constant VALUE (either 0 or
6342 1), and is of the indicated TYPE. */
6345 constant_boolean_node (int value, tree type)
6347 if (type == integer_type_node)
6348 return value ? integer_one_node : integer_zero_node;
6349 else if (type == boolean_type_node)
6350 return value ? boolean_true_node : boolean_false_node;
6352 return build_int_cst (type, value);
6356 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
6357 Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
6358 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
6359 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
6360 COND is the first argument to CODE; otherwise (as in the example
6361 given here), it is the second argument. TYPE is the type of the
6362 original expression. Return NULL_TREE if no simplification is
/* NOTE(review): the return type line, braces and several guard lines
   (including the cond_first_p tests that pick the operand order below)
   were lost in extraction -- confirm against upstream fold-const.c.  */
6366 fold_binary_op_with_conditional_arg (enum tree_code code,
6367 tree type, tree op0, tree op1,
6368 tree cond, tree arg, int cond_first_p)
6370 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
6371 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
6372 tree test, true_value, false_value;
6373 tree lhs = NULL_TREE;
6374 tree rhs = NULL_TREE;
6376 /* This transformation is only worthwhile if we don't have to wrap
6377 arg in a SAVE_EXPR, and the operation can be simplified on at least
6378 one of the branches once its pushed inside the COND_EXPR. */
6379 if (!TREE_CONSTANT (arg))
6382 if (TREE_CODE (cond) == COND_EXPR)
6384 test = TREE_OPERAND (cond, 0);
6385 true_value = TREE_OPERAND (cond, 1);
6386 false_value = TREE_OPERAND (cond, 2);
6387 /* If this operand throws an expression, then it does not make
6388 sense to try to perform a logical or arithmetic operation
6390 if (VOID_TYPE_P (TREE_TYPE (true_value)))
6392 if (VOID_TYPE_P (TREE_TYPE (false_value)))
/* COND is a bare comparison: treat it as `cond ? true : false'.  */
6397 tree testtype = TREE_TYPE (cond);
6399 true_value = constant_boolean_node (true, testtype);
6400 false_value = constant_boolean_node (false, testtype);
/* Push ARG into both arms of the conditional, building each arm with
   the operand order dictated by COND_FIRST_P.  */
6403 arg = fold_convert (arg_type, arg);
6406 true_value = fold_convert (cond_type, true_value);
6408 lhs = fold_build2 (code, type, true_value, arg);
6410 lhs = fold_build2 (code, type, arg, true_value);
6414 false_value = fold_convert (cond_type, false_value);
6416 rhs = fold_build2 (code, type, false_value, arg);
6418 rhs = fold_build2 (code, type, arg, false_value);
6421 test = fold_build3 (COND_EXPR, type, test, lhs, rhs);
6422 return fold_convert (type, test);
6426 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6428 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6429 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6430 ADDEND is the same as X.
6432 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6433 and finite. The problematic cases are when X is zero, and its mode
6434 has signed zeros. In the case of rounding towards -infinity,
6435 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6436 modes, X + 0 is not the same as X because -0 + 0 is 0. */
6439 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
6441 if (!real_zerop (addend))
6444 /* Don't allow the fold with -fsignaling-nans. */
6445 if (HONOR_SNANS (TYPE_MODE (type)))
6448 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6449 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
6452 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6453 if (TREE_CODE (addend) == REAL_CST
6454 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6457 /* The mode has signed zeros, and we have to honor their sign.
6458 In this situation, there is only one case we can return true for.
6459 X - 0 is the same as X unless rounding towards -infinity is
6461 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
6464 /* Subroutine of fold() that checks comparisons of built-in math
6465 functions against real constants.
6467 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
6468 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
6469 is the type of the result and ARG0 and ARG1 are the operands of the
6470 comparison. ARG1 must be a TREE_REAL_CST.
6472 The function returns the constant folded tree if a simplification
6473 can be made, and NULL_TREE otherwise. */
/* NOTE(review): the return type, braces and some declarations (c, c2)
   were lost in extraction -- confirm against upstream fold-const.c.  */
6476 fold_mathfn_compare (enum built_in_function fcode, enum tree_code code,
6477 tree type, tree arg0, tree arg1)
/* Only sqrt-family builtins are handled in the portion visible here.  */
6481 if (BUILTIN_SQRT_P (fcode))
6483 tree arg = CALL_EXPR_ARG (arg0, 0);
6484 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
6486 c = TREE_REAL_CST (arg1);
6487 if (REAL_VALUE_NEGATIVE (c))
6489 /* sqrt(x) < y is always false, if y is negative. */
6490 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
6491 return omit_one_operand (type, integer_zero_node, arg);
6493 /* sqrt(x) > y is always true, if y is negative and we
6494 don't care about NaNs, i.e. negative values of x. */
6495 if (code == NE_EXPR || !HONOR_NANS (mode))
6496 return omit_one_operand (type, integer_one_node, arg);
6498 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
6499 return fold_build2 (GE_EXPR, type, arg,
6500 build_real (TREE_TYPE (arg), dconst0));
6502 else if (code == GT_EXPR || code == GE_EXPR)
/* Compare against c*c, computed in the comparison's mode.  */
6506 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6507 real_convert (&c2, mode, &c2);
6509 if (REAL_VALUE_ISINF (c2))
6511 /* sqrt(x) > y is x == +Inf, when y is very large. */
6512 if (HONOR_INFINITIES (mode))
6513 return fold_build2 (EQ_EXPR, type, arg,
6514 build_real (TREE_TYPE (arg), c2));
6516 /* sqrt(x) > y is always false, when y is very large
6517 and we don't care about infinities. */
6518 return omit_one_operand (type, integer_zero_node, arg);
6521 /* sqrt(x) > c is the same as x > c*c. */
6522 return fold_build2 (code, type, arg,
6523 build_real (TREE_TYPE (arg), c2));
6525 else if (code == LT_EXPR || code == LE_EXPR)
6529 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6530 real_convert (&c2, mode, &c2);
6532 if (REAL_VALUE_ISINF (c2))
6534 /* sqrt(x) < y is always true, when y is a very large
6535 value and we don't care about NaNs or Infinities. */
6536 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
6537 return omit_one_operand (type, integer_one_node, arg);
6539 /* sqrt(x) < y is x != +Inf when y is very large and we
6540 don't care about NaNs. */
6541 if (! HONOR_NANS (mode))
6542 return fold_build2 (NE_EXPR, type, arg,
6543 build_real (TREE_TYPE (arg), c2));
6545 /* sqrt(x) < y is x >= 0 when y is very large and we
6546 don't care about Infinities. */
6547 if (! HONOR_INFINITIES (mode))
6548 return fold_build2 (GE_EXPR, type, arg,
6549 build_real (TREE_TYPE (arg), dconst0));
6551 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
6552 if (lang_hooks.decls.global_bindings_p () != 0
6553 || CONTAINS_PLACEHOLDER_P (arg))
6556 arg = save_expr (arg);
6557 return fold_build2 (TRUTH_ANDIF_EXPR, type,
6558 fold_build2 (GE_EXPR, type, arg,
6559 build_real (TREE_TYPE (arg),
6561 fold_build2 (NE_EXPR, type, arg,
6562 build_real (TREE_TYPE (arg),
6566 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
6567 if (! HONOR_NANS (mode))
6568 return fold_build2 (code, type, arg,
6569 build_real (TREE_TYPE (arg), c2));
6571 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
6572 if (lang_hooks.decls.global_bindings_p () == 0
6573 && ! CONTAINS_PLACEHOLDER_P (arg))
6575 arg = save_expr (arg);
6576 return fold_build2 (TRUTH_ANDIF_EXPR, type,
6577 fold_build2 (GE_EXPR, type, arg,
6578 build_real (TREE_TYPE (arg),
6580 fold_build2 (code, type, arg,
6581 build_real (TREE_TYPE (arg),
6590 /* Subroutine of fold() that optimizes comparisons against Infinities,
6591 either +Inf or -Inf.
6593 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6594 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6595 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6597 The function returns the constant folded tree if a simplification
6598 can be made, and NULL_TREE otherwise. */
/* NOTE(review): the return type, braces, a `switch (code)` and its case
   labels were lost in extraction -- the groups below correspond to the
   individual comparison codes.  Confirm against upstream fold-const.c.  */
6601 fold_inf_compare (enum tree_code code, tree type, tree arg0, tree arg1)
6603 enum machine_mode mode;
6604 REAL_VALUE_TYPE max;
6608 mode = TYPE_MODE (TREE_TYPE (arg0));
6610 /* For negative infinity swap the sense of the comparison. */
6611 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
6613 code = swap_tree_comparison (code);
6618 /* x > +Inf is always false, if with ignore sNANs. */
6619 if (HONOR_SNANS (mode))
6621 return omit_one_operand (type, integer_zero_node, arg0);
6624 /* x <= +Inf is always true, if we don't case about NaNs. */
6625 if (! HONOR_NANS (mode))
6626 return omit_one_operand (type, integer_one_node, arg0);
6628 /* x <= +Inf is the same as x == x, i.e. isfinite(x). */
6629 if (lang_hooks.decls.global_bindings_p () == 0
6630 && ! CONTAINS_PLACEHOLDER_P (arg0))
6632 arg0 = save_expr (arg0);
6633 return fold_build2 (EQ_EXPR, type, arg0, arg0);
6639 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6640 real_maxval (&max, neg, mode);
6641 return fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
6642 arg0, build_real (TREE_TYPE (arg0), max));
6645 /* x < +Inf is always equal to x <= DBL_MAX. */
6646 real_maxval (&max, neg, mode);
6647 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
6648 arg0, build_real (TREE_TYPE (arg0), max));
6651 /* x != +Inf is always equal to !(x > DBL_MAX). */
6652 real_maxval (&max, neg, mode);
6653 if (! HONOR_NANS (mode))
6654 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
6655 arg0, build_real (TREE_TYPE (arg0), max));
/* With NaNs, express x != +Inf as !(x > DBL_MAX).  */
6657 temp = fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
6658 arg0, build_real (TREE_TYPE (arg0), max));
6659 return fold_build1 (TRUTH_NOT_EXPR, type, temp);
6668 /* Subroutine of fold() that optimizes comparisons of a division by
6669 a nonzero integer constant against an integer constant, i.e.
6672 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6673 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6674 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6676 The function returns the constant folded tree if a simplification
6677 can be made, and NULL_TREE otherwise. */
/* NOTE(review): the return type, braces and the `switch' statements on
   tree_int_cst_sgn and on CODE (whose case labels are missing below)
   were lost in extraction -- confirm against upstream fold-const.c.
   The idea: X/C1 op C2 becomes a range check LO <= X <= HI.  */
6680 fold_div_compare (enum tree_code code, tree type, tree arg0, tree arg1)
6682 tree prod, tmp, hi, lo;
6683 tree arg00 = TREE_OPERAND (arg0, 0);
6684 tree arg01 = TREE_OPERAND (arg0, 1);
6685 unsigned HOST_WIDE_INT lpart;
6686 HOST_WIDE_INT hpart;
6687 bool unsigned_p = TYPE_UNSIGNED (TREE_TYPE (arg0));
6691 /* We have to do this the hard way to detect unsigned overflow.
6692 prod = int_const_binop (MULT_EXPR, arg01, arg1, 0); */
6693 overflow = mul_double_with_sign (TREE_INT_CST_LOW (arg01),
6694 TREE_INT_CST_HIGH (arg01),
6695 TREE_INT_CST_LOW (arg1),
6696 TREE_INT_CST_HIGH (arg1),
6697 &lpart, &hpart, unsigned_p);
6698 prod = force_fit_type_double (TREE_TYPE (arg00), lpart, hpart,
6700 neg_overflow = false;
/* Unsigned case: range is [prod, prod + (arg01 - 1)].  */
6704 tmp = int_const_binop (MINUS_EXPR, arg01,
6705 build_int_cst (TREE_TYPE (arg01), 1), 0);
6708 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0). */
6709 overflow = add_double_with_sign (TREE_INT_CST_LOW (prod),
6710 TREE_INT_CST_HIGH (prod),
6711 TREE_INT_CST_LOW (tmp),
6712 TREE_INT_CST_HIGH (tmp),
6713 &lpart, &hpart, unsigned_p);
6714 hi = force_fit_type_double (TREE_TYPE (arg00), lpart, hpart,
6715 -1, overflow | TREE_OVERFLOW (prod));
6717 else if (tree_int_cst_sgn (arg01) >= 0)
6719 tmp = int_const_binop (MINUS_EXPR, arg01,
6720 build_int_cst (TREE_TYPE (arg01), 1), 0);
6721 switch (tree_int_cst_sgn (arg1))
6724 neg_overflow = true;
6725 lo = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6730 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6735 hi = int_const_binop (PLUS_EXPR, prod, tmp, 0);
6745 /* A negative divisor reverses the relational operators. */
6746 code = swap_tree_comparison (code);
6748 tmp = int_const_binop (PLUS_EXPR, arg01,
6749 build_int_cst (TREE_TYPE (arg01), 1), 0);
6750 switch (tree_int_cst_sgn (arg1))
6753 hi = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6758 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6763 neg_overflow = true;
6764 lo = int_const_binop (PLUS_EXPR, prod, tmp, 0);
/* Presumably `switch (code)' with case EQ_EXPR here (lost).  */
6776 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6777 return omit_one_operand (type, integer_zero_node, arg00);
6778 if (TREE_OVERFLOW (hi))
6779 return fold_build2 (GE_EXPR, type, arg00, lo);
6780 if (TREE_OVERFLOW (lo))
6781 return fold_build2 (LE_EXPR, type, arg00, hi);
6782 return build_range_check (type, arg00, 1, lo, hi);
6785 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6786 return omit_one_operand (type, integer_one_node, arg00);
6787 if (TREE_OVERFLOW (hi))
6788 return fold_build2 (LT_EXPR, type, arg00, lo);
6789 if (TREE_OVERFLOW (lo))
6790 return fold_build2 (GT_EXPR, type, arg00, hi);
6791 return build_range_check (type, arg00, 0, lo, hi);
6794 if (TREE_OVERFLOW (lo))
6796 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6797 return omit_one_operand (type, tmp, arg00);
6799 return fold_build2 (LT_EXPR, type, arg00, lo);
6802 if (TREE_OVERFLOW (hi))
6804 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6805 return omit_one_operand (type, tmp, arg00);
6807 return fold_build2 (LE_EXPR, type, arg00, hi);
6810 if (TREE_OVERFLOW (hi))
6812 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6813 return omit_one_operand (type, tmp, arg00);
6815 return fold_build2 (GT_EXPR, type, arg00, hi);
6818 if (TREE_OVERFLOW (lo))
6820 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6821 return omit_one_operand (type, tmp, arg00);
6823 return fold_build2 (GE_EXPR, type, arg00, lo);
6833 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6834 equality/inequality test, then return a simplified form of the test
6835 using a sign testing. Otherwise return NULL. TYPE is the desired
/* NOTE(review): the `result_type' parameter line, return type, braces
   and the final `return NULL_TREE;' were lost in extraction.  */
6839 fold_single_bit_test_into_sign_test (enum tree_code code, tree arg0, tree arg1,
6842 /* If this is testing a single bit, we can optimize the test. */
6843 if ((code == NE_EXPR || code == EQ_EXPR)
6844 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6845 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6847 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6848 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6849 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6851 if (arg00 != NULL_TREE
6852 /* This is only a win if casting to a signed type is cheap,
6853 i.e. when arg00's type is not a partial mode. */
6854 && TYPE_PRECISION (TREE_TYPE (arg00))
6855 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
6857 tree stype = signed_type_for (TREE_TYPE (arg00));
6858 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6859 result_type, fold_convert (stype, arg00),
6860 build_int_cst (stype, 0));
6867 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6868 equality/inequality test, then return a simplified form of
6869 the test using shifts and logical operations. Otherwise return
6870 NULL. TYPE is the desired result type. */
/* NOTE(review): the `result_type' parameter, return type, braces and
   some declarations (tem, one, ops_unsigned) were lost in extraction.  */
6873 fold_single_bit_test (enum tree_code code, tree arg0, tree arg1,
6876 /* If this is testing a single bit, we can optimize the test. */
6877 if ((code == NE_EXPR || code == EQ_EXPR)
6878 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6879 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6881 tree inner = TREE_OPERAND (arg0, 0);
6882 tree type = TREE_TYPE (arg0);
6883 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6884 enum machine_mode operand_mode = TYPE_MODE (type);
6886 tree signed_type, unsigned_type, intermediate_type;
6889 /* First, see if we can fold the single bit test into a sign-bit
6891 tem = fold_single_bit_test_into_sign_test (code, arg0, arg1,
6896 /* Otherwise we have (A & C) != 0 where C is a single bit,
6897 convert that into ((A >> C2) & 1). Where C2 = log2(C).
6898 Similarly for (A & C) == 0. */
6900 /* If INNER is a right shift of a constant and it plus BITNUM does
6901 not overflow, adjust BITNUM and INNER. */
6902 if (TREE_CODE (inner) == RSHIFT_EXPR
6903 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6904 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
6905 && bitnum < TYPE_PRECISION (type)
6906 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
6907 bitnum - TYPE_PRECISION (type)))
6909 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
6910 inner = TREE_OPERAND (inner, 0);
6913 /* If we are going to be able to omit the AND below, we must do our
6914 operations as unsigned. If we must use the AND, we have a choice.
6915 Normally unsigned is faster, but for some machines signed is. */
6916 #ifdef LOAD_EXTEND_OP
6917 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6918 && !flag_syntax_only) ? 0 : 1;
/* The #else arm setting ops_unsigned = 1 and #endif are not visible
   here (extraction gap).  */
6923 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6924 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6925 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6926 inner = fold_convert (intermediate_type, inner);
6929 inner = build2 (RSHIFT_EXPR, intermediate_type,
6930 inner, size_int (bitnum));
6932 one = build_int_cst (intermediate_type, 1);
6934 if (code == EQ_EXPR)
6935 inner = fold_build2 (BIT_XOR_EXPR, intermediate_type, inner, one);
6937 /* Put the AND last so it can combine with more things. */
6938 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6940 /* Make sure to return the proper type. */
6941 inner = fold_convert (result_type, inner);
6948 /* Check whether we are allowed to reorder operands arg0 and arg1,
6949 such that the evaluation of arg1 occurs before arg0. */
6952 reorder_operands_p (const_tree arg0, const_tree arg1)
6954 if (! flag_evaluation_order)
6956 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6958 return ! TREE_SIDE_EFFECTS (arg0)
6959 && ! TREE_SIDE_EFFECTS (arg1);
6962 /* Test whether it is preferable two swap two operands, ARG0 and
6963 ARG1, for example because ARG0 is an integer constant and ARG1
6964 isn't. If REORDER is true, only recommend swapping if we can
6965 evaluate the operands in reverse order. */
/* NOTE(review): the return type, braces and the interleaved
   `return 0;'/`return 1;' lines after each paired test were lost in
   extraction -- the pattern is: constant ARG1 means "don't swap",
   constant ARG0 means "swap".  Confirm against upstream.  */
6968 tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
6970 STRIP_SIGN_NOPS (arg0);
6971 STRIP_SIGN_NOPS (arg1);
6973 if (TREE_CODE (arg1) == INTEGER_CST)
6975 if (TREE_CODE (arg0) == INTEGER_CST)
6978 if (TREE_CODE (arg1) == REAL_CST)
6980 if (TREE_CODE (arg0) == REAL_CST)
6983 if (TREE_CODE (arg1) == FIXED_CST)
6985 if (TREE_CODE (arg0) == FIXED_CST)
6988 if (TREE_CODE (arg1) == COMPLEX_CST)
6990 if (TREE_CODE (arg0) == COMPLEX_CST)
6993 if (TREE_CONSTANT (arg1))
6995 if (TREE_CONSTANT (arg0))
7001 if (reorder && flag_evaluation_order
7002 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
7005 /* It is preferable to swap two SSA_NAME to ensure a canonical form
7006 for commutative and comparison operators. Ensuring a canonical
7007 form allows the optimizers to find additional redundancies without
7008 having to explicitly check for both orderings. */
7009 if (TREE_CODE (arg0) == SSA_NAME
7010 && TREE_CODE (arg1) == SSA_NAME
7011 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
7014 /* Put SSA_NAMEs last. */
7015 if (TREE_CODE (arg1) == SSA_NAME)
7017 if (TREE_CODE (arg0) == SSA_NAME)
7020 /* Put variables last. */
7029 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
7030 ARG0 is extended to a wider type. */
/* NOTE(review): the return type, braces, some declarations (min, max,
   above, below) and the switch over CODE near the end were lost in
   extraction -- confirm against upstream fold-const.c.  */
7033 fold_widened_comparison (enum tree_code code, tree type, tree arg0, tree arg1)
7035 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
7037 tree shorter_type, outer_type;
7041 if (arg0_unw == arg0)
7043 shorter_type = TREE_TYPE (arg0_unw);
7045 #ifdef HAVE_canonicalize_funcptr_for_compare
7046 /* Disable this optimization if we're casting a function pointer
7047 type on targets that require function pointer canonicalization. */
7048 if (HAVE_canonicalize_funcptr_for_compare
7049 && TREE_CODE (shorter_type) == POINTER_TYPE
7050 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
7054 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
7057 arg1_unw = get_unwidened (arg1, NULL_TREE);
7059 /* If possible, express the comparison in the shorter mode. */
7060 if ((code == EQ_EXPR || code == NE_EXPR
7061 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
7062 && (TREE_TYPE (arg1_unw) == shorter_type
7063 || (TYPE_PRECISION (shorter_type)
7064 >= TYPE_PRECISION (TREE_TYPE (arg1_unw)))
7065 || (TREE_CODE (arg1_unw) == INTEGER_CST
7066 && (TREE_CODE (shorter_type) == INTEGER_TYPE
7067 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
7068 && int_fits_type_p (arg1_unw, shorter_type))))
7069 return fold_build2 (code, type, arg0_unw,
7070 fold_convert (shorter_type, arg1_unw));
7072 if (TREE_CODE (arg1_unw) != INTEGER_CST
7073 || TREE_CODE (shorter_type) != INTEGER_TYPE
7074 || !int_fits_type_p (arg1_unw, shorter_type))
7077 /* If we are comparing with the integer that does not fit into the range
7078 of the shorter type, the result is known. */
7079 outer_type = TREE_TYPE (arg1_unw);
7080 min = lower_bound_in_type (outer_type, shorter_type);
7081 max = upper_bound_in_type (outer_type, shorter_type);
7083 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
7085 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
/* The remaining lines dispatch on CODE; each pair of omit_one_operand
   calls folds the comparison to a known 0/1 result depending on whether
   the constant lies above or below the shorter type's range.  */
7092 return omit_one_operand (type, integer_zero_node, arg0);
7097 return omit_one_operand (type, integer_one_node, arg0);
7103 return omit_one_operand (type, integer_one_node, arg0);
7105 return omit_one_operand (type, integer_zero_node, arg0);
7110 return omit_one_operand (type, integer_zero_node, arg0);
7112 return omit_one_operand (type, integer_one_node, arg0);
7121 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
7122 ARG0 just the signedness is changed. */
/* NOTE(review): sampled listing -- elided lines include the function
   header/braces and the early-return bodies of the guard conditions.  */
7125 fold_sign_changed_comparison (enum tree_code code, tree type,
7126 tree arg0, tree arg1)
7129 tree inner_type, outer_type;
/* ARG0 must be a conversion so we can look through it.  */
7131 if (TREE_CODE (arg0) != NOP_EXPR
7132 && TREE_CODE (arg0) != CONVERT_EXPR)
7135 outer_type = TREE_TYPE (arg0);
7136 arg0_inner = TREE_OPERAND (arg0, 0);
7137 inner_type = TREE_TYPE (arg0_inner);
7139 #ifdef HAVE_canonicalize_funcptr_for_compare
7140 /* Disable this optimization if we're casting a function pointer
7141 type on targets that require function pointer canonicalization. */
7142 if (HAVE_canonicalize_funcptr_for_compare
7143 && TREE_CODE (inner_type) == POINTER_TYPE
7144 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
/* Only a pure sign change qualifies: precisions must match.  */
7148 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
/* ARG1 must be a constant, or a conversion from the same inner type.  */
7151 if (TREE_CODE (arg1) != INTEGER_CST
7152 && !((TREE_CODE (arg1) == NOP_EXPR
7153 || TREE_CODE (arg1) == CONVERT_EXPR)
7154 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
7157 if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
/* Re-express ARG1 in the inner type: refit a constant, else convert.  */
7162 if (TREE_CODE (arg1) == INTEGER_CST)
7163 arg1 = force_fit_type_double (inner_type, TREE_INT_CST_LOW (arg1),
7164 TREE_INT_CST_HIGH (arg1), 0,
7165 TREE_OVERFLOW (arg1));
7167 arg1 = fold_convert (inner_type, arg1);
7169 return fold_build2 (code, type, arg0_inner, arg1);
7172 /* Tries to replace &a[idx] p+ s * delta with &a[idx + delta], if s is
7173 step of the array. Reconstructs s and delta in the case of s * delta
7174 being an integer constant (and thus already folded).
7175 ADDR is the address. MULT is the multiplicative expression.
7176 If the function succeeds, the new address expression is returned. Otherwise
7177 NULL_TREE is returned. */
/* NOTE(review): sampled listing -- failure returns and several
   assignments are elided where the numbering skips.  */
7180 try_move_mult_to_index (tree addr, tree op1)
7182 tree s, delta, step;
7183 tree ref = TREE_OPERAND (addr, 0), pref;
7188 /* Strip the nops that might be added when converting op1 to sizetype. */
7191 /* Canonicalize op1 into a possibly non-constant delta
7192 and an INTEGER_CST s. */
7193 if (TREE_CODE (op1) == MULT_EXPR)
7195 tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);
7200 if (TREE_CODE (arg0) == INTEGER_CST)
7205 else if (TREE_CODE (arg1) == INTEGER_CST)
7213 else if (TREE_CODE (op1) == INTEGER_CST)
7220 /* Simulate we are delta * 1. */
7222 s = integer_one_node;
/* Walk inward through the reference looking for an ARRAY_REF whose
   element size matches S (or divides DELTA).  */
7225 for (;; ref = TREE_OPERAND (ref, 0))
7227 if (TREE_CODE (ref) == ARRAY_REF)
7229 /* Remember if this was a multi-dimensional array. */
7230 if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
7233 itype = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
7237 step = array_ref_element_size (ref);
7238 if (TREE_CODE (step) != INTEGER_CST)
7243 if (! tree_int_cst_equal (step, s))
7248 /* Try if delta is a multiple of step. */
7249 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, delta, step);
7255 /* Only fold here if we can verify we do not overflow one
7256 dimension of a multi-dimensional array. */
/* Need a constant index and a usable integral domain with a known
   constant upper bound to prove the new index stays in range.  */
7261 if (TREE_CODE (TREE_OPERAND (ref, 1)) != INTEGER_CST
7262 || !INTEGRAL_TYPE_P (itype)
7263 || !TYPE_MAX_VALUE (itype)
7264 || TREE_CODE (TYPE_MAX_VALUE (itype)) != INTEGER_CST)
7267 tmp = fold_binary (PLUS_EXPR, itype,
7268 fold_convert (itype,
7269 TREE_OPERAND (ref, 1)),
7270 fold_convert (itype, delta));
7272 || TREE_CODE (tmp) != INTEGER_CST
7273 || tree_int_cst_lt (TYPE_MAX_VALUE (itype), tmp))
/* Ran out of handled components without finding a matching array.  */
7282 if (!handled_component_p (ref))
7286 /* We found the suitable array reference. So copy everything up to it,
7287 and replace the index. */
7289 pref = TREE_OPERAND (addr, 0);
7290 ret = copy_node (pref);
/* Copy the component chain node by node down to the ARRAY_REF.  */
7295 pref = TREE_OPERAND (pref, 0);
7296 TREE_OPERAND (pos, 0) = copy_node (pref);
7297 pos = TREE_OPERAND (pos, 0);
/* Rewrite the index of the copied ARRAY_REF as idx + delta.  */
7300 TREE_OPERAND (pos, 1) = fold_build2 (PLUS_EXPR, itype,
7301 fold_convert (itype,
7302 TREE_OPERAND (pos, 1)),
7303 fold_convert (itype, delta));
7305 return fold_build1 (ADDR_EXPR, TREE_TYPE (addr), ret);
7309 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
7310 means A >= Y && A != MAX, but in this case we know that
7311 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
/* NOTE(review): sampled listing; the NULL_TREE failure returns for the
   guard conditions are elided.  */
7314 fold_to_nonsharp_ineq_using_bound (tree ineq, tree bound)
7316 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
/* Extract A from the bounding comparison, whichever side it is on.  */
7318 if (TREE_CODE (bound) == LT_EXPR)
7319 a = TREE_OPERAND (bound, 0);
7320 else if (TREE_CODE (bound) == GT_EXPR)
7321 a = TREE_OPERAND (bound, 1);
7325 typea = TREE_TYPE (a);
7326 if (!INTEGRAL_TYPE_P (typea)
7327 && !POINTER_TYPE_P (typea))
/* Extract A1 (the "A + 1" side) and Y from INEQ.  */
7330 if (TREE_CODE (ineq) == LT_EXPR)
7332 a1 = TREE_OPERAND (ineq, 1);
7333 y = TREE_OPERAND (ineq, 0);
7335 else if (TREE_CODE (ineq) == GT_EXPR)
7337 a1 = TREE_OPERAND (ineq, 0);
7338 y = TREE_OPERAND (ineq, 1);
7343 if (TREE_TYPE (a1) != typea)
7346 if (POINTER_TYPE_P (typea))
7348 /* Convert the pointer types into integer before taking the difference. */
7349 tree ta = fold_convert (ssizetype, a);
7350 tree ta1 = fold_convert (ssizetype, a1);
7351 diff = fold_binary (MINUS_EXPR, ssizetype, ta1, ta);
7354 diff = fold_binary (MINUS_EXPR, typea, a1, a);
/* Only fold when A1 really is A + 1.  */
7356 if (!diff || !integer_onep (diff))
7359 return fold_build2 (GE_EXPR, type, a, y);
7362 /* Fold a sum or difference of at least one multiplication.
7363 Returns the folded tree or NULL if no simplification could be made. */
/* NOTE(review): sampled listing -- else-branches assigning arg00/arg01
   (and arg10/arg11) from non-MULT operands are partially elided.  */
7366 fold_plusminus_mult_expr (enum tree_code code, tree type, tree arg0, tree arg1)
7368 tree arg00, arg01, arg10, arg11;
7369 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
7371 /* (A * C) +- (B * C) -> (A+-B) * C.
7372 (A * C) +- A -> A * (C+-1).
7373 We are most concerned about the case where C is a constant,
7374 but other combinations show up during loop reduction. Since
7375 it is not difficult, try all four possibilities. */
/* Decompose ARG0 into arg00 * arg01.  */
7377 if (TREE_CODE (arg0) == MULT_EXPR)
7379 arg00 = TREE_OPERAND (arg0, 0);
7380 arg01 = TREE_OPERAND (arg0, 1);
7382 else if (TREE_CODE (arg0) == INTEGER_CST)
7384 arg00 = build_one_cst (type);
7389 /* We cannot generate constant 1 for fract. */
7390 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7393 arg01 = build_one_cst (type);
/* Likewise decompose ARG1 into arg10 * arg11.  */
7395 if (TREE_CODE (arg1) == MULT_EXPR)
7397 arg10 = TREE_OPERAND (arg1, 0);
7398 arg11 = TREE_OPERAND (arg1, 1);
7400 else if (TREE_CODE (arg1) == INTEGER_CST)
7402 arg10 = build_one_cst (type);
7407 /* We cannot generate constant 1 for fract. */
7408 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7411 arg11 = build_one_cst (type);
/* Look for a common factor SAME among the four operands.  */
7415 if (operand_equal_p (arg01, arg11, 0))
7416 same = arg01, alt0 = arg00, alt1 = arg10;
7417 else if (operand_equal_p (arg00, arg10, 0))
7418 same = arg00, alt0 = arg01, alt1 = arg11;
7419 else if (operand_equal_p (arg00, arg11, 0))
7420 same = arg00, alt0 = arg01, alt1 = arg10;
7421 else if (operand_equal_p (arg01, arg10, 0))
7422 same = arg01, alt0 = arg00, alt1 = arg11;
7424 /* No identical multiplicands; see if we can find a common
7425 power-of-two factor in non-power-of-two multiplies. This
7426 can help in multi-dimensional array access. */
7427 else if (host_integerp (arg01, 0)
7428 && host_integerp (arg11, 0))
7430 HOST_WIDE_INT int01, int11, tmp;
7433 int01 = TREE_INT_CST_LOW (arg01);
7434 int11 = TREE_INT_CST_LOW (arg11);
7436 /* Move min of absolute values to int11. */
7437 if ((int01 >= 0 ? int01 : -int01)
7438 < (int11 >= 0 ? int11 : -int11))
7440 tmp = int01, int01 = int11, int11 = tmp;
7441 alt0 = arg00, arg00 = arg10, arg10 = alt0;
/* If the smaller constant is a power of two dividing the larger,
   factor it out: arg00 * (int01/int11) becomes the new multiplicand.  */
7448 if (exact_log2 (abs (int11)) > 0 && int01 % int11 == 0)
7450 alt0 = fold_build2 (MULT_EXPR, TREE_TYPE (arg00), arg00,
7451 build_int_cst (TREE_TYPE (arg00),
7456 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
/* Fold to (alt0 +- alt1) * same.  */
7461 return fold_build2 (MULT_EXPR, type,
7462 fold_build2 (code, type,
7463 fold_convert (type, alt0),
7464 fold_convert (type, alt1)),
7465 fold_convert (type, same));
7470 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7471 specified by EXPR into the buffer PTR of length LEN bytes.
7472 Return the number of bytes placed in the buffer, or zero
upon failure (NOTE(review): the tail of this comment and the
"return 0"/"return total_bytes" lines are elided in this listing).  */
7476 native_encode_int (const_tree expr, unsigned char *ptr, int len)
7478 tree type = TREE_TYPE (expr);
7479 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7480 int byte, offset, word, words;
7481 unsigned char value;
/* Fail if the constant does not fit in the caller's buffer.  */
7483 if (total_bytes > len)
7485 words = total_bytes / UNITS_PER_WORD;
7487 for (byte = 0; byte < total_bytes; byte++)
7489 int bitpos = byte * BITS_PER_UNIT;
/* Pick the byte out of the low or high HOST_WIDE_INT half.  */
7490 if (bitpos < HOST_BITS_PER_WIDE_INT)
7491 value = (unsigned char) (TREE_INT_CST_LOW (expr) >> bitpos);
7493 value = (unsigned char) (TREE_INT_CST_HIGH (expr)
7494 >> (bitpos - HOST_BITS_PER_WIDE_INT));
/* Map the logical byte index to the target's word/byte order.  */
7496 if (total_bytes > UNITS_PER_WORD)
7498 word = byte / UNITS_PER_WORD;
7499 if (WORDS_BIG_ENDIAN)
7500 word = (words - 1) - word;
7501 offset = word * UNITS_PER_WORD;
7502 if (BYTES_BIG_ENDIAN)
7503 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7505 offset += byte % UNITS_PER_WORD;
7508 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7509 ptr[offset] = value;
7515 /* Subroutine of native_encode_expr. Encode the REAL_CST
7516 specified by EXPR into the buffer PTR of length LEN bytes.
7517 Return the number of bytes placed in the buffer, or zero
upon failure (NOTE(review): comment tail and return statements are
elided in this sampled listing).  */
7521 native_encode_real (const_tree expr, unsigned char *ptr, int len)
7523 tree type = TREE_TYPE (expr);
7524 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7525 int byte, offset, word, words, bitpos;
7526 unsigned char value;
7528 /* There are always 32 bits in each long, no matter the size of
7529 the hosts long. We handle floating point representations with
up to 192 bits -- presumably; the tmp[] declaration is elided here.  */
7533 if (total_bytes > len)
7535 words = 32 / UNITS_PER_WORD;
/* Fill tmp[] with the target image of the real constant.  */
7537 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7539 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7540 bitpos += BITS_PER_UNIT)
/* Byte index within the current 32-bit group.  */
7542 byte = (bitpos / BITS_PER_UNIT) & 3;
7543 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
/* Reorder bytes within each 32-bit group per target endianness.  */
7545 if (UNITS_PER_WORD < 4)
7547 word = byte / UNITS_PER_WORD;
7548 if (WORDS_BIG_ENDIAN)
7549 word = (words - 1) - word;
7550 offset = word * UNITS_PER_WORD;
7551 if (BYTES_BIG_ENDIAN)
7552 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7554 offset += byte % UNITS_PER_WORD;
7557 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7558 ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)] = value;
7563 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7564 specified by EXPR into the buffer PTR of length LEN bytes.
7565 Return the number of bytes placed in the buffer, or zero
upon failure (NOTE(review): declarations and the zero-size failure
checks between these lines are elided in this sampled listing).  */
7569 native_encode_complex (const_tree expr, unsigned char *ptr, int len)
/* Encode the real part at the start of the buffer...  */
7574 part = TREE_REALPART (expr);
7575 rsize = native_encode_expr (part, ptr, len);
/* ...then the imaginary part immediately after it.  */
7578 part = TREE_IMAGPART (expr);
7579 isize = native_encode_expr (part, ptr+rsize, len-rsize);
7582 return rsize + isize;
7586 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7587 specified by EXPR into the buffer PTR of length LEN bytes.
7588 Return the number of bytes placed in the buffer, or zero
upon failure (NOTE(review): sampled listing; offset initialization,
loop braces and the final return are elided).  */
7592 native_encode_vector (const_tree expr, unsigned char *ptr, int len)
7594 int i, size, offset, count;
7595 tree itype, elem, elements;
7598 elements = TREE_VECTOR_CST_ELTS (expr);
7599 count = TYPE_VECTOR_SUBPARTS (TREE_TYPE (expr));
7600 itype = TREE_TYPE (TREE_TYPE (expr));
7601 size = GET_MODE_SIZE (TYPE_MODE (itype));
/* Encode each element in order; elements beyond the explicit list
   (ELEMENTS exhausted) are zero-filled below.  */
7602 for (i = 0; i < count; i++)
7606 elem = TREE_VALUE (elements);
7607 elements = TREE_CHAIN (elements);
/* Each element must encode to exactly SIZE bytes, else fail.  */
7614 if (native_encode_expr (elem, ptr+offset, len-offset) != size)
7619 if (offset + size > len)
7621 memset (ptr+offset, 0, size);
7629 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7630 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7631 buffer PTR of length LEN bytes. Return the number of bytes
7632 placed in the buffer, or zero upon failure. */
/* Dispatcher over the constant kind; the case labels and the default
   "return 0" are elided in this sampled listing.  */
7635 native_encode_expr (const_tree expr, unsigned char *ptr, int len)
7637 switch (TREE_CODE (expr))
7640 return native_encode_int (expr, ptr, len);
7643 return native_encode_real (expr, ptr, len);
7646 return native_encode_complex (expr, ptr, len);
7649 return native_encode_vector (expr, ptr, len);
7657 /* Subroutine of native_interpret_expr. Interpret the contents of
7658 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7659 If the buffer cannot be interpreted, return NULL_TREE. */
/* Inverse of native_encode_int: reassemble LO/HI from target-ordered
   bytes.  NOTE(review): failure returns are elided in this listing.  */
7662 native_interpret_int (tree type, const unsigned char *ptr, int len)
7664 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7665 int byte, offset, word, words;
7666 unsigned char value;
7667 unsigned int HOST_WIDE_INT lo = 0;
7668 HOST_WIDE_INT hi = 0;
7670 if (total_bytes > len)
/* The host pair (lo, hi) can hold at most 2*HOST_BITS_PER_WIDE_INT bits.  */
7672 if (total_bytes * BITS_PER_UNIT > 2 * HOST_BITS_PER_WIDE_INT)
7674 words = total_bytes / UNITS_PER_WORD;
7676 for (byte = 0; byte < total_bytes; byte++)
7678 int bitpos = byte * BITS_PER_UNIT;
/* Same word/byte-order mapping as native_encode_int, in reverse.  */
7679 if (total_bytes > UNITS_PER_WORD)
7681 word = byte / UNITS_PER_WORD;
7682 if (WORDS_BIG_ENDIAN)
7683 word = (words - 1) - word;
7684 offset = word * UNITS_PER_WORD;
7685 if (BYTES_BIG_ENDIAN)
7686 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7688 offset += byte % UNITS_PER_WORD;
7691 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7692 value = ptr[offset];
/* Accumulate the byte into the low or high host word.  */
7694 if (bitpos < HOST_BITS_PER_WIDE_INT)
7695 lo |= (unsigned HOST_WIDE_INT) value << bitpos;
7697 hi |= (unsigned HOST_WIDE_INT) value
7698 << (bitpos - HOST_BITS_PER_WIDE_INT);
7701 return build_int_cst_wide_type (type, lo, hi);
7705 /* Subroutine of native_interpret_expr. Interpret the contents of
7706 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7707 If the buffer cannot be interpreted, return NULL_TREE. */
/* Inverse of native_encode_real.  NOTE(review): the tmp[]/r
   declarations and failure returns are elided in this listing.  */
7710 native_interpret_real (tree type, const unsigned char *ptr, int len)
7712 enum machine_mode mode = TYPE_MODE (type);
7713 int total_bytes = GET_MODE_SIZE (mode);
7714 int byte, offset, word, words, bitpos;
7715 unsigned char value;
7716 /* There are always 32 bits in each long, no matter the size of
7717 the hosts long. We handle floating point representations with
up to 192 bits -- presumably; see the elided array declaration.  */
7722 total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
/* 24 bytes = 192 bits is the largest representation handled.  */
7723 if (total_bytes > len || total_bytes > 24)
7725 words = 32 / UNITS_PER_WORD;
7727 memset (tmp, 0, sizeof (tmp));
7728 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7729 bitpos += BITS_PER_UNIT)
7731 byte = (bitpos / BITS_PER_UNIT) & 3;
/* Same byte-order mapping as native_encode_real, in reverse.  */
7732 if (UNITS_PER_WORD < 4)
7734 word = byte / UNITS_PER_WORD;
7735 if (WORDS_BIG_ENDIAN)
7736 word = (words - 1) - word;
7737 offset = word * UNITS_PER_WORD;
7738 if (BYTES_BIG_ENDIAN)
7739 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7741 offset += byte % UNITS_PER_WORD;
7744 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7745 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7747 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
/* Convert the target image back into a host REAL_VALUE_TYPE.  */
7750 real_from_target (&r, tmp, mode);
7751 return build_real (type, r);
7755 /* Subroutine of native_interpret_expr. Interpret the contents of
7756 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7757 If the buffer cannot be interpreted, return NULL_TREE. */
/* NOTE(review): length checks and NULL_TREE failure returns between
   these lines are elided in this sampled listing.  */
7760 native_interpret_complex (tree type, const unsigned char *ptr, int len)
7762 tree etype, rpart, ipart;
7765 etype = TREE_TYPE (type);
7766 size = GET_MODE_SIZE (TYPE_MODE (etype));
/* Real part occupies the first SIZE bytes, imaginary part the next.  */
7769 rpart = native_interpret_expr (etype, ptr, size);
7772 ipart = native_interpret_expr (etype, ptr+size, size);
7775 return build_complex (type, rpart, ipart);
7779 /* Subroutine of native_interpret_expr. Interpret the contents of
7780 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7781 If the buffer cannot be interpreted, return NULL_TREE. */
/* NOTE(review): declarations and the per-element failure return are
   elided in this sampled listing.  */
7784 native_interpret_vector (tree type, const unsigned char *ptr, int len)
7786 tree etype, elem, elements;
7789 etype = TREE_TYPE (type);
7790 size = GET_MODE_SIZE (TYPE_MODE (etype));
7791 count = TYPE_VECTOR_SUBPARTS (type);
7792 if (size * count > len)
7795 elements = NULL_TREE;
/* Build the element list back-to-front so it ends up in order.  */
7796 for (i = count - 1; i >= 0; i--)
7798 elem = native_interpret_expr (etype, ptr+(i*size), size);
7801 elements = tree_cons (NULL_TREE, elem, elements);
7803 return build_vector (type, elements);
7807 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7808 the buffer PTR of length LEN as a constant of type TYPE. For
7809 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7810 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7811 return NULL_TREE. */
/* Dispatcher over the type's tree code; case labels and the default
   "return NULL_TREE" are elided in this sampled listing.  */
7814 native_interpret_expr (tree type, const unsigned char *ptr, int len)
7816 switch (TREE_CODE (type))
7821 return native_interpret_int (type, ptr, len);
7824 return native_interpret_real (type, ptr, len);
7827 return native_interpret_complex (type, ptr, len);
7830 return native_interpret_vector (type, ptr, len);
7838 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7839 TYPE at compile-time. If we're unable to perform the conversion
7840 return NULL_TREE. */
7843 fold_view_convert_expr (tree type, tree expr)
7845 /* We support up to 512-bit values (for V8DFmode). */
7846 unsigned char buffer[64];
7849 /* Check that the host and target are sane. */
/* The byte-level encode/interpret round trip assumes 8-bit bytes on
   both host and target.  */
7850 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
/* Serialize EXPR to target bytes, then reinterpret them as TYPE.
   A zero LEN (encode failure) is handled in an elided check.  */
7853 len = native_encode_expr (expr, buffer, sizeof (buffer));
7857 return native_interpret_expr (type, buffer, len);
7860 /* Build an expression for the address of T. Folds away INDIRECT_REF
7861 to avoid confusing the gimplify process. When IN_FOLD is true
7862 avoid modifications of T. */
/* NOTE(review): sampled listing; braces, the `base` declaration, and
   the branch structure around the two ADDR_EXPR builds are elided.  */
7865 build_fold_addr_expr_with_type_1 (tree t, tree ptrtype, bool in_fold)
7867 /* The size of the object is not relevant when talking about its address. */
7868 if (TREE_CODE (t) == WITH_SIZE_EXPR)
7869 t = TREE_OPERAND (t, 0);
7871 /* Note: doesn't apply to ALIGN_INDIRECT_REF */
7872 if (TREE_CODE (t) == INDIRECT_REF
7873 || TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
/* &*p folds to p, with a NOP cast if the pointer type differs.  */
7875 t = TREE_OPERAND (t, 0);
7877 if (TREE_TYPE (t) != ptrtype)
7878 t = build1 (NOP_EXPR, ptrtype, t);
/* Otherwise find the innermost base object...  */
7884 while (handled_component_p (base))
7885 base = TREE_OPERAND (base, 0);
/* ...mark it addressable (only when !IN_FOLD, per the elided guard --
   TODO confirm against the full source).  */
7888 TREE_ADDRESSABLE (base) = 1;
7890 t = build1 (ADDR_EXPR, ptrtype, t);
7893 t = build1 (ADDR_EXPR, ptrtype, t);
7898 /* Build an expression for the address of T with type PTRTYPE. This
7899 function modifies the input parameter 'T' by sometimes setting the
7900 TREE_ADDRESSABLE flag. */
/* Thin wrapper: IN_FOLD == false allows T to be modified.  */
7903 build_fold_addr_expr_with_type (tree t, tree ptrtype)
7905 return build_fold_addr_expr_with_type_1 (t, ptrtype, false);
7908 /* Build an expression for the address of T. This function modifies
7909 the input parameter 'T' by sometimes setting the TREE_ADDRESSABLE
7910 flag. When called from fold functions, use fold_addr_expr instead. */
/* Convenience wrapper that derives PTRTYPE from T's own type; the
   final `false` argument (IN_FOLD) sits on an elided line.  */
7913 build_fold_addr_expr (tree t)
7915 return build_fold_addr_expr_with_type_1 (t,
7916 build_pointer_type (TREE_TYPE (t)),
7920 /* Same as build_fold_addr_expr, builds an expression for the address
7921 of T, but avoids touching the input node 't'. Fold functions
7922 should use this version. */
/* IN_FOLD == true keeps T unmodified, as required inside fold.  */
7925 fold_addr_expr (tree t)
7927 tree ptrtype = build_pointer_type (TREE_TYPE (t));
7929 return build_fold_addr_expr_with_type_1 (t, ptrtype, true);
7932 /* Fold a unary expression of code CODE and type TYPE with operand
7933 OP0. Return the folded expression if folding is successful.
7934 Otherwise, return NULL_TREE. */
7937 fold_unary (enum tree_code code, tree type, tree op0)
7941 enum tree_code_class kind = TREE_CODE_CLASS (code);
7943 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7944 && TREE_CODE_LENGTH (code) == 1);
7949 if (code == NOP_EXPR || code == CONVERT_EXPR
7950 || code == FLOAT_EXPR || code == ABS_EXPR)
7952 /* Don't use STRIP_NOPS, because signedness of argument type
7954 STRIP_SIGN_NOPS (arg0);
7958 /* Strip any conversions that don't change the mode. This
7959 is safe for every expression, except for a comparison
7960 expression because its signedness is derived from its
7963 Note that this is done as an internal manipulation within
7964 the constant folder, in order to find the simplest
7965 representation of the arguments so that their form can be
7966 studied. In any cases, the appropriate type conversions
7967 should be put back in the tree that will get out of the
7973 if (TREE_CODE_CLASS (code) == tcc_unary)
7975 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7976 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7977 fold_build1 (code, type, TREE_OPERAND (arg0, 1)));
7978 else if (TREE_CODE (arg0) == COND_EXPR)
7980 tree arg01 = TREE_OPERAND (arg0, 1);
7981 tree arg02 = TREE_OPERAND (arg0, 2);
7982 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7983 arg01 = fold_build1 (code, type, arg01);
7984 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7985 arg02 = fold_build1 (code, type, arg02);
7986 tem = fold_build3 (COND_EXPR, type, TREE_OPERAND (arg0, 0),
7989 /* If this was a conversion, and all we did was to move into
7990 inside the COND_EXPR, bring it back out. But leave it if
7991 it is a conversion from integer to integer and the
7992 result precision is no wider than a word since such a
7993 conversion is cheap and may be optimized away by combine,
7994 while it couldn't if it were outside the COND_EXPR. Then return
7995 so we don't get into an infinite recursion loop taking the
7996 conversion out and then back in. */
7998 if ((code == NOP_EXPR || code == CONVERT_EXPR
7999 || code == NON_LVALUE_EXPR)
8000 && TREE_CODE (tem) == COND_EXPR
8001 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
8002 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
8003 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
8004 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
8005 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
8006 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
8007 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
8009 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
8010 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
8011 || flag_syntax_only))
8012 tem = build1 (code, type,
8014 TREE_TYPE (TREE_OPERAND
8015 (TREE_OPERAND (tem, 1), 0)),
8016 TREE_OPERAND (tem, 0),
8017 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
8018 TREE_OPERAND (TREE_OPERAND (tem, 2), 0)));
8021 else if (COMPARISON_CLASS_P (arg0))
8023 if (TREE_CODE (type) == BOOLEAN_TYPE)
8025 arg0 = copy_node (arg0);
8026 TREE_TYPE (arg0) = type;
8029 else if (TREE_CODE (type) != INTEGER_TYPE)
8030 return fold_build3 (COND_EXPR, type, arg0,
8031 fold_build1 (code, type,
8033 fold_build1 (code, type,
8034 integer_zero_node));
8041 /* Re-association barriers around constants and other re-association
8042 barriers can be removed. */
8043 if (CONSTANT_CLASS_P (op0)
8044 || TREE_CODE (op0) == PAREN_EXPR)
8045 return fold_convert (type, op0);
8051 case FIX_TRUNC_EXPR:
8052 if (TREE_TYPE (op0) == type)
8055 /* If we have (type) (a CMP b) and type is an integral type, return
8056 new expression involving the new type. */
8057 if (COMPARISON_CLASS_P (op0) && INTEGRAL_TYPE_P (type))
8058 return fold_build2 (TREE_CODE (op0), type, TREE_OPERAND (op0, 0),
8059 TREE_OPERAND (op0, 1));
8061 /* Handle cases of two conversions in a row. */
8062 if (TREE_CODE (op0) == NOP_EXPR
8063 || TREE_CODE (op0) == CONVERT_EXPR)
8065 tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
8066 tree inter_type = TREE_TYPE (op0);
8067 int inside_int = INTEGRAL_TYPE_P (inside_type);
8068 int inside_ptr = POINTER_TYPE_P (inside_type);
8069 int inside_float = FLOAT_TYPE_P (inside_type);
8070 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
8071 unsigned int inside_prec = TYPE_PRECISION (inside_type);
8072 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
8073 int inter_int = INTEGRAL_TYPE_P (inter_type);
8074 int inter_ptr = POINTER_TYPE_P (inter_type);
8075 int inter_float = FLOAT_TYPE_P (inter_type);
8076 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
8077 unsigned int inter_prec = TYPE_PRECISION (inter_type);
8078 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
8079 int final_int = INTEGRAL_TYPE_P (type);
8080 int final_ptr = POINTER_TYPE_P (type);
8081 int final_float = FLOAT_TYPE_P (type);
8082 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
8083 unsigned int final_prec = TYPE_PRECISION (type);
8084 int final_unsignedp = TYPE_UNSIGNED (type);
8086 /* In addition to the cases of two conversions in a row
8087 handled below, if we are converting something to its own
8088 type via an object of identical or wider precision, neither
8089 conversion is needed. */
8090 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
8091 && (((inter_int || inter_ptr) && final_int)
8092 || (inter_float && final_float))
8093 && inter_prec >= final_prec)
8094 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
8096 /* Likewise, if the intermediate and final types are either both
8097 float or both integer, we don't need the middle conversion if
8098 it is wider than the final type and doesn't change the signedness
8099 (for integers). Avoid this if the final type is a pointer
8100 since then we sometimes need the inner conversion. Likewise if
8101 the outer has a precision not equal to the size of its mode. */
8102 if (((inter_int && inside_int)
8103 || (inter_float && inside_float)
8104 || (inter_vec && inside_vec))
8105 && inter_prec >= inside_prec
8106 && (inter_float || inter_vec
8107 || inter_unsignedp == inside_unsignedp)
8108 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
8109 && TYPE_MODE (type) == TYPE_MODE (inter_type))
8111 && (! final_vec || inter_prec == inside_prec))
8112 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
8114 /* If we have a sign-extension of a zero-extended value, we can
8115 replace that by a single zero-extension. */
8116 if (inside_int && inter_int && final_int
8117 && inside_prec < inter_prec && inter_prec < final_prec
8118 && inside_unsignedp && !inter_unsignedp)
8119 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
8121 /* Two conversions in a row are not needed unless:
8122 - some conversion is floating-point (overstrict for now), or
8123 - some conversion is a vector (overstrict for now), or
8124 - the intermediate type is narrower than both initial and
8126 - the intermediate type and innermost type differ in signedness,
8127 and the outermost type is wider than the intermediate, or
8128 - the initial type is a pointer type and the precisions of the
8129 intermediate and final types differ, or
8130 - the final type is a pointer type and the precisions of the
8131 initial and intermediate types differ.
8132 - the initial type is a pointer to an array and the final type
8134 if (! inside_float && ! inter_float && ! final_float
8135 && ! inside_vec && ! inter_vec && ! final_vec
8136 && (inter_prec >= inside_prec || inter_prec >= final_prec)
8137 && ! (inside_int && inter_int
8138 && inter_unsignedp != inside_unsignedp
8139 && inter_prec < final_prec)
8140 && ((inter_unsignedp && inter_prec > inside_prec)
8141 == (final_unsignedp && final_prec > inter_prec))
8142 && ! (inside_ptr && inter_prec != final_prec)
8143 && ! (final_ptr && inside_prec != inter_prec)
8144 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
8145 && TYPE_MODE (type) == TYPE_MODE (inter_type))
8146 && ! (inside_ptr && final_ptr
8147 && TREE_CODE (TREE_TYPE (inside_type)) == ARRAY_TYPE
8148 && TREE_CODE (TREE_TYPE (type)) != ARRAY_TYPE))
8149 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
8152 /* Handle (T *)&A.B.C for A being of type T and B and C
8153 living at offset zero. This occurs frequently in
8154 C++ upcasting and then accessing the base. */
8155 if (TREE_CODE (op0) == ADDR_EXPR
8156 && POINTER_TYPE_P (type)
8157 && handled_component_p (TREE_OPERAND (op0, 0)))
8159 HOST_WIDE_INT bitsize, bitpos;
8161 enum machine_mode mode;
8162 int unsignedp, volatilep;
8163 tree base = TREE_OPERAND (op0, 0);
8164 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
8165 &mode, &unsignedp, &volatilep, false);
8166 /* If the reference was to a (constant) zero offset, we can use
8167 the address of the base if it has the same base type
8168 as the result type. */
8169 if (! offset && bitpos == 0
8170 && TYPE_MAIN_VARIANT (TREE_TYPE (type))
8171 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
8172 return fold_convert (type, fold_addr_expr (base));
8175 if ((TREE_CODE (op0) == MODIFY_EXPR
8176 || TREE_CODE (op0) == GIMPLE_MODIFY_STMT)
8177 && TREE_CONSTANT (GENERIC_TREE_OPERAND (op0, 1))
8178 /* Detect assigning a bitfield. */
8179 && !(TREE_CODE (GENERIC_TREE_OPERAND (op0, 0)) == COMPONENT_REF
8181 (TREE_OPERAND (GENERIC_TREE_OPERAND (op0, 0), 1))))
8183 /* Don't leave an assignment inside a conversion
8184 unless assigning a bitfield. */
8185 tem = fold_build1 (code, type, GENERIC_TREE_OPERAND (op0, 1));
8186 /* First do the assignment, then return converted constant. */
8187 tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
8188 TREE_NO_WARNING (tem) = 1;
8189 TREE_USED (tem) = 1;
8193 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
8194 constants (if x has signed type, the sign bit cannot be set
8195 in c). This folds extension into the BIT_AND_EXPR. */
8196 if (INTEGRAL_TYPE_P (type)
8197 && TREE_CODE (type) != BOOLEAN_TYPE
8198 && TREE_CODE (op0) == BIT_AND_EXPR
8199 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
8202 tree and0 = TREE_OPERAND (and, 0), and1 = TREE_OPERAND (and, 1);
8205 if (TYPE_UNSIGNED (TREE_TYPE (and))
8206 || (TYPE_PRECISION (type)
8207 <= TYPE_PRECISION (TREE_TYPE (and))))
8209 else if (TYPE_PRECISION (TREE_TYPE (and1))
8210 <= HOST_BITS_PER_WIDE_INT
8211 && host_integerp (and1, 1))
8213 unsigned HOST_WIDE_INT cst;
8215 cst = tree_low_cst (and1, 1);
8216 cst &= (HOST_WIDE_INT) -1
8217 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
8218 change = (cst == 0);
8219 #ifdef LOAD_EXTEND_OP
8221 && !flag_syntax_only
8222 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
8225 tree uns = unsigned_type_for (TREE_TYPE (and0));
8226 and0 = fold_convert (uns, and0);
8227 and1 = fold_convert (uns, and1);
8233 tem = force_fit_type_double (type, TREE_INT_CST_LOW (and1),
8234 TREE_INT_CST_HIGH (and1), 0,
8235 TREE_OVERFLOW (and1));
8236 return fold_build2 (BIT_AND_EXPR, type,
8237 fold_convert (type, and0), tem);
8241 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
8242 when one of the new casts will fold away. Conservatively we assume
8243 that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST. */
8244 if (POINTER_TYPE_P (type)
8245 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
8246 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8247 || TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
8248 || TREE_CODE (TREE_OPERAND (arg0, 1)) == NOP_EXPR))
8250 tree arg00 = TREE_OPERAND (arg0, 0);
8251 tree arg01 = TREE_OPERAND (arg0, 1);
8253 return fold_build2 (TREE_CODE (arg0), type, fold_convert (type, arg00),
8254 fold_convert (sizetype, arg01));
8257 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
8258 of the same precision, and X is an integer type not narrower than
8259 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
8260 if (INTEGRAL_TYPE_P (type)
8261 && TREE_CODE (op0) == BIT_NOT_EXPR
8262 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
8263 && (TREE_CODE (TREE_OPERAND (op0, 0)) == NOP_EXPR
8264 || TREE_CODE (TREE_OPERAND (op0, 0)) == CONVERT_EXPR)
8265 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
8267 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
8268 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
8269 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
8270 return fold_build1 (BIT_NOT_EXPR, type, fold_convert (type, tem));
8273 tem = fold_convert_const (code, type, op0);
8274 return tem ? tem : NULL_TREE;
8276 case FIXED_CONVERT_EXPR:
8277 tem = fold_convert_const (code, type, arg0);
8278 return tem ? tem : NULL_TREE;
8280 case VIEW_CONVERT_EXPR:
8281 if (TREE_TYPE (op0) == type)
8283 if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
8284 return fold_build1 (VIEW_CONVERT_EXPR, type, TREE_OPERAND (op0, 0));
8286 /* For integral conversions with the same precision or pointer
8287 conversions use a NOP_EXPR instead. */
8288 if ((INTEGRAL_TYPE_P (type) && INTEGRAL_TYPE_P (TREE_TYPE (op0))
8289 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0))
8290 /* Do not muck with VIEW_CONVERT_EXPRs that convert from
8291 a sub-type to its base type as generated by the Ada FE. */
8292 && !TREE_TYPE (TREE_TYPE (op0)))
8293 || (POINTER_TYPE_P (type) && POINTER_TYPE_P (TREE_TYPE (op0))))
8294 return fold_convert (type, op0);
8296 /* Strip inner integral conversions that do not change the precision. */
8297 if ((TREE_CODE (op0) == NOP_EXPR
8298 || TREE_CODE (op0) == CONVERT_EXPR)
8299 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
8300 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0)))
8301 && (TYPE_PRECISION (TREE_TYPE (op0))
8302 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op0, 0)))))
8303 return fold_build1 (VIEW_CONVERT_EXPR, type, TREE_OPERAND (op0, 0));
8305 return fold_view_convert_expr (type, op0);
8308 tem = fold_negate_expr (arg0);
8310 return fold_convert (type, tem);
8314 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
8315 return fold_abs_const (arg0, type);
8316 else if (TREE_CODE (arg0) == NEGATE_EXPR)
8317 return fold_build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0));
8318 /* Convert fabs((double)float) into (double)fabsf(float). */
8319 else if (TREE_CODE (arg0) == NOP_EXPR
8320 && TREE_CODE (type) == REAL_TYPE)
8322 tree targ0 = strip_float_extensions (arg0);
8324 return fold_convert (type, fold_build1 (ABS_EXPR,
8328 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
8329 else if (TREE_CODE (arg0) == ABS_EXPR)
8331 else if (tree_expr_nonnegative_p (arg0))
8334 /* Strip sign ops from argument. */
8335 if (TREE_CODE (type) == REAL_TYPE)
8337 tem = fold_strip_sign_ops (arg0);
8339 return fold_build1 (ABS_EXPR, type, fold_convert (type, tem));
8344 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8345 return fold_convert (type, arg0);
8346 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8348 tree itype = TREE_TYPE (type);
8349 tree rpart = fold_convert (itype, TREE_OPERAND (arg0, 0));
8350 tree ipart = fold_convert (itype, TREE_OPERAND (arg0, 1));
8351 return fold_build2 (COMPLEX_EXPR, type, rpart, negate_expr (ipart));
8353 if (TREE_CODE (arg0) == COMPLEX_CST)
8355 tree itype = TREE_TYPE (type);
8356 tree rpart = fold_convert (itype, TREE_REALPART (arg0));
8357 tree ipart = fold_convert (itype, TREE_IMAGPART (arg0));
8358 return build_complex (type, rpart, negate_expr (ipart));
8360 if (TREE_CODE (arg0) == CONJ_EXPR)
8361 return fold_convert (type, TREE_OPERAND (arg0, 0));
8365 if (TREE_CODE (arg0) == INTEGER_CST)
8366 return fold_not_const (arg0, type);
8367 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
8368 return fold_convert (type, TREE_OPERAND (arg0, 0));
8369 /* Convert ~ (-A) to A - 1. */
8370 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
8371 return fold_build2 (MINUS_EXPR, type,
8372 fold_convert (type, TREE_OPERAND (arg0, 0)),
8373 build_int_cst (type, 1));
8374 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
8375 else if (INTEGRAL_TYPE_P (type)
8376 && ((TREE_CODE (arg0) == MINUS_EXPR
8377 && integer_onep (TREE_OPERAND (arg0, 1)))
8378 || (TREE_CODE (arg0) == PLUS_EXPR
8379 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
8380 return fold_build1 (NEGATE_EXPR, type,
8381 fold_convert (type, TREE_OPERAND (arg0, 0)));
8382 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
8383 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8384 && (tem = fold_unary (BIT_NOT_EXPR, type,
8386 TREE_OPERAND (arg0, 0)))))
8387 return fold_build2 (BIT_XOR_EXPR, type, tem,
8388 fold_convert (type, TREE_OPERAND (arg0, 1)));
8389 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8390 && (tem = fold_unary (BIT_NOT_EXPR, type,
8392 TREE_OPERAND (arg0, 1)))))
8393 return fold_build2 (BIT_XOR_EXPR, type,
8394 fold_convert (type, TREE_OPERAND (arg0, 0)), tem);
8395 /* Perform BIT_NOT_EXPR on each element individually. */
8396 else if (TREE_CODE (arg0) == VECTOR_CST)
8398 tree elements = TREE_VECTOR_CST_ELTS (arg0), elem, list = NULL_TREE;
8399 int count = TYPE_VECTOR_SUBPARTS (type), i;
8401 for (i = 0; i < count; i++)
8405 elem = TREE_VALUE (elements);
8406 elem = fold_unary (BIT_NOT_EXPR, TREE_TYPE (type), elem);
8407 if (elem == NULL_TREE)
8409 elements = TREE_CHAIN (elements);
8412 elem = build_int_cst (TREE_TYPE (type), -1);
8413 list = tree_cons (NULL_TREE, elem, list);
8416 return build_vector (type, nreverse (list));
8421 case TRUTH_NOT_EXPR:
8422 /* The argument to invert_truthvalue must have Boolean type. */
8423 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
8424 arg0 = fold_convert (boolean_type_node, arg0);
8426 /* Note that the operand of this must be an int
8427 and its values must be 0 or 1.
8428 ("true" is a fixed value perhaps depending on the language,
8429 but we don't handle values other than 1 correctly yet.) */
8430 tem = fold_truth_not_expr (arg0);
8433 return fold_convert (type, tem);
8436 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8437 return fold_convert (type, arg0);
8438 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8439 return omit_one_operand (type, TREE_OPERAND (arg0, 0),
8440 TREE_OPERAND (arg0, 1));
8441 if (TREE_CODE (arg0) == COMPLEX_CST)
8442 return fold_convert (type, TREE_REALPART (arg0));
8443 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8445 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8446 tem = fold_build2 (TREE_CODE (arg0), itype,
8447 fold_build1 (REALPART_EXPR, itype,
8448 TREE_OPERAND (arg0, 0)),
8449 fold_build1 (REALPART_EXPR, itype,
8450 TREE_OPERAND (arg0, 1)));
8451 return fold_convert (type, tem);
8453 if (TREE_CODE (arg0) == CONJ_EXPR)
8455 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8456 tem = fold_build1 (REALPART_EXPR, itype, TREE_OPERAND (arg0, 0));
8457 return fold_convert (type, tem);
8459 if (TREE_CODE (arg0) == CALL_EXPR)
8461 tree fn = get_callee_fndecl (arg0);
8462 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8463 switch (DECL_FUNCTION_CODE (fn))
8465 CASE_FLT_FN (BUILT_IN_CEXPI):
8466 fn = mathfn_built_in (type, BUILT_IN_COS);
8468 return build_call_expr (fn, 1, CALL_EXPR_ARG (arg0, 0));
8478 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8479 return fold_convert (type, integer_zero_node);
8480 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8481 return omit_one_operand (type, TREE_OPERAND (arg0, 1),
8482 TREE_OPERAND (arg0, 0));
8483 if (TREE_CODE (arg0) == COMPLEX_CST)
8484 return fold_convert (type, TREE_IMAGPART (arg0));
8485 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8487 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8488 tem = fold_build2 (TREE_CODE (arg0), itype,
8489 fold_build1 (IMAGPART_EXPR, itype,
8490 TREE_OPERAND (arg0, 0)),
8491 fold_build1 (IMAGPART_EXPR, itype,
8492 TREE_OPERAND (arg0, 1)));
8493 return fold_convert (type, tem);
8495 if (TREE_CODE (arg0) == CONJ_EXPR)
8497 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8498 tem = fold_build1 (IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
8499 return fold_convert (type, negate_expr (tem));
8501 if (TREE_CODE (arg0) == CALL_EXPR)
8503 tree fn = get_callee_fndecl (arg0);
8504 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8505 switch (DECL_FUNCTION_CODE (fn))
8507 CASE_FLT_FN (BUILT_IN_CEXPI):
8508 fn = mathfn_built_in (type, BUILT_IN_SIN);
8510 return build_call_expr (fn, 1, CALL_EXPR_ARG (arg0, 0));
8521 } /* switch (code) */
8524 /* Fold a binary expression of code CODE and type TYPE with operands
8525 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
8526 Return the folded expression if folding is successful. Otherwise,
8527 return NULL_TREE. */
/* Fold MIN/MAX absorption identities: COMPL_CODE is the dual of CODE
   (MAX for MIN and vice versa), so the four patterns below cover both
   MIN (MAX ...) and MAX (MIN ...) forms with one body.
   NOTE(review): reorder_operands_p guards the variants where the kept
   operand moves across the discarded one — presumably to avoid
   reordering side effects; confirm against its definition.  */
8530 fold_minmax (enum tree_code code, tree type, tree op0, tree op1)
8532 enum tree_code compl_code;
8534 if (code == MIN_EXPR)
8535 compl_code = MAX_EXPR;
8536 else if (code == MAX_EXPR)
8537 compl_code = MIN_EXPR;
8541 /* MIN (MAX (a, b), b) == b. */
8542 if (TREE_CODE (op0) == compl_code
8543 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
8544 return omit_one_operand (type, op1, TREE_OPERAND (op0, 0));
8546 /* MIN (MAX (b, a), b) == b. */
8547 if (TREE_CODE (op0) == compl_code
8548 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
8549 && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
8550 return omit_one_operand (type, op1, TREE_OPERAND (op0, 1));
8552 /* MIN (a, MAX (a, b)) == a. */
8553 if (TREE_CODE (op1) == compl_code
8554 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
8555 && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
8556 return omit_one_operand (type, op0, TREE_OPERAND (op1, 1));
8558 /* MIN (a, MAX (b, a)) == a. */
8559 if (TREE_CODE (op1) == compl_code
8560 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
8561 && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
8562 return omit_one_operand (type, op0, TREE_OPERAND (op1, 0));
8567 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8568 by changing CODE to reduce the magnitude of constants involved in
8569 ARG0 of the comparison.
8570 Returns a canonicalized comparison tree if a simplification was
8571 possible, otherwise returns NULL_TREE.
8572 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8573 valid if signed overflow is undefined. */
/* Reduce the magnitude of the constant in ARG0 of the comparison
   ARG0 CODE ARG1 by shifting the comparison code (e.g. CST <= x
   becomes CST-1 < x).  Handles two shapes of ARG0: a sole INTEGER_CST,
   or A +- CST.  Only valid when signed overflow is undefined; the A +- CST
   case records that fact in *STRICT_OVERFLOW_P.  */
8576 maybe_canonicalize_comparison_1 (enum tree_code code, tree type,
8577 tree arg0, tree arg1,
8578 bool *strict_overflow_p)
8580 enum tree_code code0 = TREE_CODE (arg0);
8581 tree t, cst0 = NULL_TREE;
8585 /* Match A +- CST code arg1 and CST code arg1. */
8586 if (!(((code0 == MINUS_EXPR
8587 || code0 == PLUS_EXPR)
8588 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8589 || code0 == INTEGER_CST))
8592 /* Identify the constant in arg0 and its sign. */
8593 if (code0 == INTEGER_CST)
8596 cst0 = TREE_OPERAND (arg0, 1);
8597 sgn0 = tree_int_cst_sgn (cst0);
8599 /* Overflowed constants and zero will cause problems. */
8600 if (integer_zerop (cst0)
8601 || TREE_OVERFLOW (cst0))
8604 /* See if we can reduce the magnitude of the constant in
8605 arg0 by changing the comparison code. */
8606 if (code0 == INTEGER_CST)
8608 /* CST <= arg1 -> CST-1 < arg1. */
8609 if (code == LE_EXPR && sgn0 == 1)
8611 /* -CST < arg1 -> -CST-1 <= arg1. */
8612 else if (code == LT_EXPR && sgn0 == -1)
8614 /* CST > arg1 -> CST-1 >= arg1. */
8615 else if (code == GT_EXPR && sgn0 == 1)
8617 /* -CST >= arg1 -> -CST-1 > arg1. */
8618 else if (code == GE_EXPR && sgn0 == -1)
8622 /* arg1 code' CST' might be more canonical. */
8627 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8629 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8631 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8632 else if (code == GT_EXPR
8633 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8635 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8636 else if (code == LE_EXPR
8637 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8639 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8640 else if (code == GE_EXPR
8641 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8645 *strict_overflow_p = true;
8648 /* Now build the constant reduced in magnitude.  Adds 1 for a
   negative constant, subtracts 1 for a positive one, so |CST|
   always shrinks.  */
8649 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8650 cst0, build_int_cst (TREE_TYPE (cst0), 1), 0);
8651 if (code0 != INTEGER_CST)
8652 t = fold_build2 (code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
8654 /* If swapping might yield to a more canonical form, do so. */
8656 return fold_build2 (swap_tree_comparison (code), type, arg1, t);
8658 return fold_build2 (code, type, t, arg1);
8661 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8662 overflow further. Try to decrease the magnitude of constants involved
8663 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8664 and put sole constants at the second argument position.
8665 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
/* Driver for maybe_canonicalize_comparison_1: first tries to
   canonicalize by simplifying ARG0, then retries with the operands
   and the comparison code swapped so a constant in ARG1 is handled
   too.  Emits the strict-overflow warning only when a transformation
   that relies on undefined signed overflow was actually performed.  */
8668 maybe_canonicalize_comparison (enum tree_code code, tree type,
8669 tree arg0, tree arg1)
8672 bool strict_overflow_p;
8673 const char * const warnmsg = G_("assuming signed overflow does not occur "
8674 "when reducing constant in comparison");
8676 /* In principle pointers also have undefined overflow behavior,
8677 but that causes problems elsewhere. */
8678 if (!TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8679 || POINTER_TYPE_P (TREE_TYPE (arg0)))
8682 /* Try canonicalization by simplifying arg0. */
8683 strict_overflow_p = false;
8684 t = maybe_canonicalize_comparison_1 (code, type, arg0, arg1,
8685 &strict_overflow_p);
8688 if (strict_overflow_p)
8689 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8693 /* Try canonicalization by simplifying arg1 using the swapped
8695 code = swap_tree_comparison (code);
8696 strict_overflow_p = false;
8697 t = maybe_canonicalize_comparison_1 (code, type, arg1, arg0,
8698 &strict_overflow_p);
8699 if (t && strict_overflow_p)
8700 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8704 /* Subroutine of fold_binary. This routine performs all of the
8705 transformations that are common to the equality/inequality
8706 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8707 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
8708 fold_binary should call fold_binary. Fold a comparison with
8709 tree code CODE and type TYPE with operands OP0 and OP1. Return
8710 the folded comparison or NULL_TREE. */
8713 fold_comparison (enum tree_code code, tree type, tree op0, tree op1)
8715 tree arg0, arg1, tem;
/* arg0/arg1 are the pattern-matching views of the operands with
   sign-preserving conversions stripped; op0/op1 are kept unmodified
   for rebuilding trees (see the swap below).  NOTE(review): the copy
   of op0/op1 into arg0/arg1 presumably happens just above these
   strips — confirm.  */
8720 STRIP_SIGN_NOPS (arg0);
8721 STRIP_SIGN_NOPS (arg1);
8723 tem = fold_relational_const (code, type, arg0, arg1);
8724 if (tem != NULL_TREE)
8727 /* If one arg is a real or integer constant, put it last. */
8728 if (tree_swap_operands_p (arg0, arg1, true))
8729 return fold_build2 (swap_tree_comparison (code), type, op1, op0);
8731 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1. */
8732 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8733 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8734 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8735 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
8736 && (TREE_CODE (arg1) == INTEGER_CST
8737 && !TREE_OVERFLOW (arg1)))
8739 tree const1 = TREE_OPERAND (arg0, 1);
8741 tree variable = TREE_OPERAND (arg0, 0);
/* lhs_add is true for X - C1, where moving C1 across the comparison
   means adding it to C2.  */
8744 lhs_add = TREE_CODE (arg0) != PLUS_EXPR;
8746 lhs = fold_build2 (lhs_add ? PLUS_EXPR : MINUS_EXPR,
8747 TREE_TYPE (arg1), const2, const1);
8749 /* If the constant operation overflowed this can be
8750 simplified as a comparison against INT_MAX/INT_MIN. */
8751 if (TREE_CODE (lhs) == INTEGER_CST
8752 && TREE_OVERFLOW (lhs))
8754 int const1_sgn = tree_int_cst_sgn (const1);
8755 enum tree_code code2 = code;
8757 /* Get the sign of the constant on the lhs if the
8758 operation were VARIABLE + CONST1. */
8759 if (TREE_CODE (arg0) == MINUS_EXPR)
8760 const1_sgn = -const1_sgn;
8762 /* The sign of the constant determines if we overflowed
8763 INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
8764 Canonicalize to the INT_MIN overflow by swapping the comparison
8766 if (const1_sgn == -1)
8767 code2 = swap_tree_comparison (code);
8769 /* We now can look at the canonicalized case
8770 VARIABLE + 1 CODE2 INT_MIN
8771 and decide on the result. */
8772 if (code2 == LT_EXPR
8774 || code2 == EQ_EXPR)
8775 return omit_one_operand (type, boolean_false_node, variable);
8776 else if (code2 == NE_EXPR
8778 || code2 == GT_EXPR)
8779 return omit_one_operand (type, boolean_true_node, variable);
/* No overflow (or a non-constant fold result of matching code):
   emit VARIABLE CMP (C2 -+ C1), warning that this assumed undefined
   signed overflow.  */
8782 if (TREE_CODE (lhs) == TREE_CODE (arg1)
8783 && (TREE_CODE (lhs) != INTEGER_CST
8784 || !TREE_OVERFLOW (lhs)))
8786 fold_overflow_warning (("assuming signed overflow does not occur "
8787 "when changing X +- C1 cmp C2 to "
8789 WARN_STRICT_OVERFLOW_COMPARISON);
8790 return fold_build2 (code, type, variable, lhs);
8794 /* For comparisons of pointers we can decompose it to a compile time
8795 comparison of the base objects and the offsets into the object.
8796 This requires at least one operand being an ADDR_EXPR or a
8797 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
8798 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8799 && (TREE_CODE (arg0) == ADDR_EXPR
8800 || TREE_CODE (arg1) == ADDR_EXPR
8801 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
8802 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
8804 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
8805 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
8806 enum machine_mode mode;
8807 int volatilep, unsignedp;
8808 bool indirect_base0 = false;
8810 /* Get base and offset for the access. Strip ADDR_EXPR for
8811 get_inner_reference, but put it back by stripping INDIRECT_REF
8812 off the base object if possible. */
8814 if (TREE_CODE (arg0) == ADDR_EXPR)
8816 base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
8817 &bitsize, &bitpos0, &offset0, &mode,
8818 &unsignedp, &volatilep, false);
8819 if (TREE_CODE (base0) == INDIRECT_REF)
8820 base0 = TREE_OPERAND (base0, 0);
8822 indirect_base0 = true;
8824 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
8826 base0 = TREE_OPERAND (arg0, 0);
8827 offset0 = TREE_OPERAND (arg0, 1);
8831 if (TREE_CODE (arg1) == ADDR_EXPR)
8833 base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
8834 &bitsize, &bitpos1, &offset1, &mode,
8835 &unsignedp, &volatilep, false);
8836 /* We have to make sure to have an indirect/non-indirect base1
8837 just the same as we did for base0. */
8838 if (TREE_CODE (base1) == INDIRECT_REF
8840 base1 = TREE_OPERAND (base1, 0);
8841 else if (!indirect_base0)
8844 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
8846 base1 = TREE_OPERAND (arg1, 0);
8847 offset1 = TREE_OPERAND (arg1, 1);
8849 else if (indirect_base0)
8852 /* If we have equivalent bases we might be able to simplify. */
8854 && operand_equal_p (base0, base1, 0))
8856 /* We can fold this expression to a constant if the non-constant
8857 offset parts are equal. */
8858 if (offset0 == offset1
8859 || (offset0 && offset1
8860 && operand_equal_p (offset0, offset1, 0)))
/* Equal variable offsets: the whole comparison reduces to comparing
   the constant bit positions.  */
8865 return build_int_cst (boolean_type_node, bitpos0 == bitpos1);
8867 return build_int_cst (boolean_type_node, bitpos0 != bitpos1);
8869 return build_int_cst (boolean_type_node, bitpos0 < bitpos1);
8871 return build_int_cst (boolean_type_node, bitpos0 <= bitpos1);
8873 return build_int_cst (boolean_type_node, bitpos0 >= bitpos1);
8875 return build_int_cst (boolean_type_node, bitpos0 > bitpos1);
8879 /* We can simplify the comparison to a comparison of the variable
8880 offset parts if the constant offset parts are equal.
8881 Be careful to use signed size type here because otherwise we
8882 mess with array offsets in the wrong way. This is possible
8883 because pointer arithmetic is restricted to retain within an
8884 object and overflow on pointer differences is undefined as of
8885 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
8886 else if (bitpos0 == bitpos1)
8888 tree signed_size_type_node;
8889 signed_size_type_node = signed_type_for (size_type_node);
8891 /* By converting to signed size type we cover middle-end pointer
8892 arithmetic which operates on unsigned pointer types of size
8893 type size and ARRAY_REF offsets which are properly sign or
8894 zero extended from their type in case it is narrower than
8896 if (offset0 == NULL_TREE)
8897 offset0 = build_int_cst (signed_size_type_node, 0);
8899 offset0 = fold_convert (signed_size_type_node, offset0);
8900 if (offset1 == NULL_TREE)
8901 offset1 = build_int_cst (signed_size_type_node, 0);
8903 offset1 = fold_convert (signed_size_type_node, offset1);
8905 return fold_build2 (code, type, offset0, offset1);
8910 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
8911 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
8912 the resulting offset is smaller in absolute value than the
8914 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8915 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8916 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8917 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
8918 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
8919 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
8920 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
8922 tree const1 = TREE_OPERAND (arg0, 1);
8923 tree const2 = TREE_OPERAND (arg1, 1);
8924 tree variable1 = TREE_OPERAND (arg0, 0);
8925 tree variable2 = TREE_OPERAND (arg1, 0);
8927 const char * const warnmsg = G_("assuming signed overflow does not "
8928 "occur when combining constants around "
8931 /* Put the constant on the side where it doesn't overflow and is
8932 of lower absolute value than before. */
8933 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8934 ? MINUS_EXPR : PLUS_EXPR,
8936 if (!TREE_OVERFLOW (cst)
8937 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2))
8939 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8940 return fold_build2 (code, type,
8942 fold_build2 (TREE_CODE (arg1), TREE_TYPE (arg1),
/* First attempt overflowed or grew: try folding the combined
   constant into the arg0 side instead.  */
8946 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8947 ? MINUS_EXPR : PLUS_EXPR,
8949 if (!TREE_OVERFLOW (cst)
8950 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1))
8952 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8953 return fold_build2 (code, type,
8954 fold_build2 (TREE_CODE (arg0), TREE_TYPE (arg0),
8960 /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
8961 signed arithmetic case. That form is created by the compiler
8962 often enough for folding it to be of value. One example is in
8963 computing loop trip counts after Operator Strength Reduction. */
8964 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8965 && TREE_CODE (arg0) == MULT_EXPR
8966 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8967 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
8968 && integer_zerop (arg1))
8970 tree const1 = TREE_OPERAND (arg0, 1);
8971 tree const2 = arg1; /* zero */
8972 tree variable1 = TREE_OPERAND (arg0, 0);
8973 enum tree_code cmp_code = code;
8975 gcc_assert (!integer_zerop (const1));
8977 fold_overflow_warning (("assuming signed overflow does not occur when "
8978 "eliminating multiplication in comparison "
8980 WARN_STRICT_OVERFLOW_COMPARISON);
8982 /* If const1 is negative we swap the sense of the comparison. */
8983 if (tree_int_cst_sgn (const1) < 0)
8984 cmp_code = swap_tree_comparison (cmp_code);
8986 return fold_build2 (cmp_code, type, variable1, const2);
8989 tem = maybe_canonicalize_comparison (code, type, op0, op1);
/* Floating-point-specific comparison folds.  */
8993 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
8995 tree targ0 = strip_float_extensions (arg0);
8996 tree targ1 = strip_float_extensions (arg1);
8997 tree newtype = TREE_TYPE (targ0);
8999 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
9000 newtype = TREE_TYPE (targ1);
9002 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
9003 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
9004 return fold_build2 (code, type, fold_convert (newtype, targ0),
9005 fold_convert (newtype, targ1));
9007 /* (-a) CMP (-b) -> b CMP a */
9008 if (TREE_CODE (arg0) == NEGATE_EXPR
9009 && TREE_CODE (arg1) == NEGATE_EXPR)
9010 return fold_build2 (code, type, TREE_OPERAND (arg1, 0),
9011 TREE_OPERAND (arg0, 0));
9013 if (TREE_CODE (arg1) == REAL_CST)
9015 REAL_VALUE_TYPE cst;
9016 cst = TREE_REAL_CST (arg1);
9018 /* (-a) CMP CST -> a swap(CMP) (-CST) */
9019 if (TREE_CODE (arg0) == NEGATE_EXPR)
9020 return fold_build2 (swap_tree_comparison (code), type,
9021 TREE_OPERAND (arg0, 0),
9022 build_real (TREE_TYPE (arg1),
9023 REAL_VALUE_NEGATE (cst)));
9025 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
9026 /* a CMP (-0) -> a CMP 0 */
9027 if (REAL_VALUE_MINUS_ZERO (cst))
9028 return fold_build2 (code, type, arg0,
9029 build_real (TREE_TYPE (arg1), dconst0));
9031 /* x != NaN is always true, other ops are always false. */
9032 if (REAL_VALUE_ISNAN (cst)
9033 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
9035 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
9036 return omit_one_operand (type, tem, arg0);
9039 /* Fold comparisons against infinity. */
9040 if (REAL_VALUE_ISINF (cst))
9042 tem = fold_inf_compare (code, type, arg0, arg1);
9043 if (tem != NULL_TREE)
9048 /* If this is a comparison of a real constant with a PLUS_EXPR
9049 or a MINUS_EXPR of a real constant, we can convert it into a
9050 comparison with a revised real constant as long as no overflow
9051 occurs when unsafe_math_optimizations are enabled. */
9052 if (flag_unsafe_math_optimizations
9053 && TREE_CODE (arg1) == REAL_CST
9054 && (TREE_CODE (arg0) == PLUS_EXPR
9055 || TREE_CODE (arg0) == MINUS_EXPR)
9056 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
9057 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
9058 ? MINUS_EXPR : PLUS_EXPR,
9059 arg1, TREE_OPERAND (arg0, 1), 0))
9060 && !TREE_OVERFLOW (tem))
9061 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
9063 /* Likewise, we can simplify a comparison of a real constant with
9064 a MINUS_EXPR whose first operand is also a real constant, i.e.
9065 (c1 - x) < c2 becomes x > c1-c2. Reordering is allowed on
9066 floating-point types only if -fassociative-math is set. */
9067 if (flag_associative_math
9068 && TREE_CODE (arg1) == REAL_CST
9069 && TREE_CODE (arg0) == MINUS_EXPR
9070 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
9071 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
9073 && !TREE_OVERFLOW (tem))
9074 return fold_build2 (swap_tree_comparison (code), type,
9075 TREE_OPERAND (arg0, 1), tem);
9077 /* Fold comparisons against built-in math functions. */
9078 if (TREE_CODE (arg1) == REAL_CST
9079 && flag_unsafe_math_optimizations
9080 && ! flag_errno_math)
9082 enum built_in_function fcode = builtin_mathfn_code (arg0);
9084 if (fcode != END_BUILTINS)
9086 tem = fold_mathfn_compare (fcode, code, type, arg0, arg1);
9087 if (tem != NULL_TREE)
/* Widened or sign-changed integer comparisons may be doable in the
   narrower / original-signedness type.  */
9093 if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
9094 && (TREE_CODE (arg0) == NOP_EXPR
9095 || TREE_CODE (arg0) == CONVERT_EXPR))
9097 /* If we are widening one operand of an integer comparison,
9098 see if the other operand is similarly being widened. Perhaps we
9099 can do the comparison in the narrower type. */
9100 tem = fold_widened_comparison (code, type, arg0, arg1);
9104 /* Or if we are changing signedness. */
9105 tem = fold_sign_changed_comparison (code, type, arg0, arg1);
9110 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
9111 constant, we can simplify it. */
9112 if (TREE_CODE (arg1) == INTEGER_CST
9113 && (TREE_CODE (arg0) == MIN_EXPR
9114 || TREE_CODE (arg0) == MAX_EXPR)
9115 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9117 tem = optimize_minmax_comparison (code, type, op0, op1);
9122 /* Simplify comparison of something with itself. (For IEEE
9123 floating-point, we can only do some of these simplifications.) */
9124 if (operand_equal_p (arg0, arg1, 0))
9129 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9130 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9131 return constant_boolean_node (1, type);
9136 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9137 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9138 return constant_boolean_node (1, type);
9139 return fold_build2 (EQ_EXPR, type, arg0, arg1);
9142 /* For NE, we can only do this simplification if integer
9143 or we don't honor IEEE floating point NaNs. */
9144 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
9145 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9147 /* ... fall through ... */
9150 return constant_boolean_node (0, type);
9156 /* If we are comparing an expression that just has comparisons
9157 of two integer values, arithmetic expressions of those comparisons,
9158 and constants, we can simplify it. There are only three cases
9159 to check: the two values can either be equal, the first can be
9160 greater, or the second can be greater. Fold the expression for
9161 those three values. Since each value must be 0 or 1, we have
9162 eight possibilities, each of which corresponds to the constant 0
9163 or 1 or one of the six possible comparisons.
9165 This handles common cases like (a > b) == 0 but also handles
9166 expressions like ((x > y) - (y > x)) > 0, which supposedly
9167 occur in macroized code. */
9169 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
9171 tree cval1 = 0, cval2 = 0;
9174 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
9175 /* Don't handle degenerate cases here; they should already
9176 have been handled anyway. */
9177 && cval1 != 0 && cval2 != 0
9178 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
9179 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
9180 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
9181 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
9182 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
9183 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
9184 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
9186 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
9187 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
9189 /* We can't just pass T to eval_subst in case cval1 or cval2
9190 was the same as ARG1. */
9193 = fold_build2 (code, type,
9194 eval_subst (arg0, cval1, maxval,
9198 = fold_build2 (code, type,
9199 eval_subst (arg0, cval1, maxval,
9203 = fold_build2 (code, type,
9204 eval_subst (arg0, cval1, minval,
9208 /* All three of these results should be 0 or 1. Confirm they are.
9209 Then use those values to select the proper code to use. */
9211 if (TREE_CODE (high_result) == INTEGER_CST
9212 && TREE_CODE (equal_result) == INTEGER_CST
9213 && TREE_CODE (low_result) == INTEGER_CST)
9215 /* Make a 3-bit mask with the high-order bit being the
9216 value for `>', the next for '=', and the low for '<'. */
9217 switch ((integer_onep (high_result) * 4)
9218 + (integer_onep (equal_result) * 2)
9219 + integer_onep (low_result))
9223 return omit_one_operand (type, integer_zero_node, arg0);
9244 return omit_one_operand (type, integer_one_node, arg0);
9248 return save_expr (build2 (code, type, cval1, cval2));
9249 return fold_build2 (code, type, cval1, cval2);
9254 /* Fold a comparison of the address of COMPONENT_REFs with the same
9255 type and component to a comparison of the address of the base
9256 object. In short, &x->a OP &y->a to x OP y and
9257 &x->a OP &y.a to x OP &y */
9258 if (TREE_CODE (arg0) == ADDR_EXPR
9259 && TREE_CODE (TREE_OPERAND (arg0, 0)) == COMPONENT_REF
9260 && TREE_CODE (arg1) == ADDR_EXPR
9261 && TREE_CODE (TREE_OPERAND (arg1, 0)) == COMPONENT_REF
9263 tree cref0 = TREE_OPERAND (arg0, 0);
9264 tree cref1 = TREE_OPERAND (arg1, 0);
9265 if (TREE_OPERAND (cref0, 1) == TREE_OPERAND (cref1, 1))
9267 tree op0 = TREE_OPERAND (cref0, 0);
9268 tree op1 = TREE_OPERAND (cref1, 0);
9269 return fold_build2 (code, type,
9270 fold_addr_expr (op0),
9271 fold_addr_expr (op1));
9275 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
9276 into a single range test. */
9277 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
9278 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
9279 && TREE_CODE (arg1) == INTEGER_CST
9280 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9281 && !integer_zerop (TREE_OPERAND (arg0, 1))
9282 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9283 && !TREE_OVERFLOW (arg1))
9285 tem = fold_div_compare (code, type, arg0, arg1);
9286 if (tem != NULL_TREE)
9290 /* Fold ~X op ~Y as Y op X. */
9291 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9292 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9294 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9295 return fold_build2 (code, type,
9296 fold_convert (cmp_type, TREE_OPERAND (arg1, 0)),
9297 TREE_OPERAND (arg0, 0));
9300 /* Fold ~X op C as X op' ~C, where op' is the swapped comparison. */
9301 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9302 && TREE_CODE (arg1) == INTEGER_CST)
9304 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9305 return fold_build2 (swap_tree_comparison (code), type,
9306 TREE_OPERAND (arg0, 0),
9307 fold_build1 (BIT_NOT_EXPR, cmp_type,
9308 fold_convert (cmp_type, arg1)));
9315 /* Subroutine of fold_binary.  Optimize complex multiplications of the
9316 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2).  The
9317 argument EXPR represents the expression "z" of type TYPE.  */
9320 fold_mult_zconjz (tree type, tree expr)
/* ITYPE is the element (real/imaginary component) type of the complex
   TYPE; all intermediate arithmetic below is done in ITYPE.  */
9322 tree itype = TREE_TYPE (type);
9323 tree rpart, ipart, tem;
/* Extract the real and imaginary parts of EXPR directly when EXPR
   already exposes them, avoiding redundant REALPART/IMAGPART trees.  */
9325 if (TREE_CODE (expr) == COMPLEX_EXPR)
9327 rpart = TREE_OPERAND (expr, 0);
9328 ipart = TREE_OPERAND (expr, 1);
9330 else if (TREE_CODE (expr) == COMPLEX_CST)
9332 rpart = TREE_REALPART (expr);
9333 ipart = TREE_IMAGPART (expr);
/* Fallback: wrap EXPR in save_expr so it is evaluated only once, then
   take its real and imaginary projections.  */
9337 expr = save_expr (expr);
9338 rpart = fold_build1 (REALPART_EXPR, itype, expr);
9339 ipart = fold_build1 (IMAGPART_EXPR, itype, expr);
/* Each part is multiplied by itself below, so protect both against
   double evaluation.  */
9342 rpart = save_expr (rpart);
9343 ipart = save_expr (ipart);
/* z * conj(z) == (r*r + i*i) + 0i; build the real sum, then the final
   complex value with a zero imaginary part.  */
9344 tem = fold_build2 (PLUS_EXPR, itype,
9345 fold_build2 (MULT_EXPR, itype, rpart, rpart),
9346 fold_build2 (MULT_EXPR, itype, ipart, ipart));
9347 return fold_build2 (COMPLEX_EXPR, type, tem,
9348 fold_convert (itype, integer_zero_node));
9352 /* Subroutine of fold_binary.  If P is the value of EXPR, computes
9353 power-of-two M and (arbitrary) N such that M divides (P-N).  This condition
9354 guarantees that P and N have the same least significant log2(M) bits.
9355 N is not otherwise constrained.  In particular, N is not normalized to
9356 0 <= N < M as is common.  In general, the precise value of P is unknown.
9357 M is chosen as large as possible such that constant N can be determined.
9359 Returns M and sets *RESIDUE to N.  */
9361 static unsigned HOST_WIDE_INT
9362 get_pointer_modulus_and_residue (tree expr, unsigned HOST_WIDE_INT *residue)
9364 enum tree_code code;
/* Case 1: the address of an object.  The modulus is the declared
   alignment of the underlying decl; the residue is the constant byte
   offset of the referenced component within it.  */
9368 code = TREE_CODE (expr);
9369 if (code == ADDR_EXPR)
9371 expr = TREE_OPERAND (expr, 0);
9372 if (handled_component_p (expr))
9374 HOST_WIDE_INT bitsize, bitpos;
9376 enum machine_mode mode;
9377 int unsignedp, volatilep;
/* Peel component refs down to the base object; BITPOS is the constant
   part of the offset in bits, OFFSET the variable part (if any).  */
9379 expr = get_inner_reference (expr, &bitsize, &bitpos, &offset,
9380 &mode, &unsignedp, &volatilep, false);
9381 *residue = bitpos / BITS_PER_UNIT;
9384 if (TREE_CODE (offset) == INTEGER_CST)
9385 *residue += TREE_INT_CST_LOW (offset);
9387 /* We don't handle more complicated offset expressions.  */
/* NOTE(review): this excerpt omits the lines handling a non-constant
   OFFSET (presumably giving up on the residue) — confirm in full file.  */
9393 return DECL_ALIGN_UNIT (expr);
/* Case 2: pointer arithmetic.  Recurse on the pointer operand, then
   refine the residue/modulus from the added index.  */
9395 else if (code == POINTER_PLUS_EXPR)
9398 unsigned HOST_WIDE_INT modulus;
9399 enum tree_code inner_code;
9401 op0 = TREE_OPERAND (expr, 0);
9403 modulus = get_pointer_modulus_and_residue (op0, residue);
9405 op1 = TREE_OPERAND (expr, 1);
9407 inner_code = TREE_CODE (op1);
/* Adding a constant shifts the residue without changing the modulus.  */
9408 if (inner_code == INTEGER_CST)
9410 *residue += TREE_INT_CST_LOW (op1);
/* Adding X * C keeps the residue but may shrink the modulus to the
   largest power of two dividing C.  */
9413 else if (inner_code == MULT_EXPR)
9415 op1 = TREE_OPERAND (op1, 1);
9416 if (TREE_CODE (op1) == INTEGER_CST)
9418 unsigned HOST_WIDE_INT align;
9420 /* Compute the greatest power-of-2 divisor of op1.  */
9421 align = TREE_INT_CST_LOW (op1);
/* NOTE(review): the line isolating the lowest set bit (e.g.
   "align &= -align;") is missing from this excerpt — confirm.  */
9424 /* If align is non-zero and less than *modulus, replace
9425 *modulus with align.  If align is 0, then either op1 is 0
9426 or the greatest power-of-2 divisor of op1 doesn't fit in an
9427 unsigned HOST_WIDE_INT.  In either case, no additional
9428 constraint is imposed.  */
9430 modulus = MIN (modulus, align);
9437 /* If we get here, we were unable to determine anything useful about the
9443 /* Fold a binary expression of code CODE and type TYPE with operands
9444 OP0 and OP1. Return the folded expression if folding is
9445 successful. Otherwise, return NULL_TREE. */
9448 fold_binary (enum tree_code code, tree type, tree op0, tree op1)
9450 enum tree_code_class kind = TREE_CODE_CLASS (code);
9451 tree arg0, arg1, tem;
9452 tree t1 = NULL_TREE;
9453 bool strict_overflow_p;
9455 gcc_assert ((IS_EXPR_CODE_CLASS (kind)
9456 || IS_GIMPLE_STMT_CODE_CLASS (kind))
9457 && TREE_CODE_LENGTH (code) == 2
9459 && op1 != NULL_TREE);
9464 /* Strip any conversions that don't change the mode. This is
9465 safe for every expression, except for a comparison expression
9466 because its signedness is derived from its operands. So, in
9467 the latter case, only strip conversions that don't change the
9470 Note that this is done as an internal manipulation within the
9471 constant folder, in order to find the simplest representation
9472 of the arguments so that their form can be studied. In any
9473 cases, the appropriate type conversions should be put back in
9474 the tree that will get out of the constant folder. */
9476 if (kind == tcc_comparison)
9478 STRIP_SIGN_NOPS (arg0);
9479 STRIP_SIGN_NOPS (arg1);
9487 /* Note that TREE_CONSTANT isn't enough: static var addresses are
9488 constant but we can't do arithmetic on them. */
9489 if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9490 || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
9491 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == FIXED_CST)
9492 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == INTEGER_CST)
9493 || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
9494 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST))
9496 if (kind == tcc_binary)
9498 /* Make sure type and arg0 have the same saturating flag. */
9499 gcc_assert (TYPE_SATURATING (type)
9500 == TYPE_SATURATING (TREE_TYPE (arg0)));
9501 tem = const_binop (code, arg0, arg1, 0);
9503 else if (kind == tcc_comparison)
9504 tem = fold_relational_const (code, type, arg0, arg1);
9508 if (tem != NULL_TREE)
9510 if (TREE_TYPE (tem) != type)
9511 tem = fold_convert (type, tem);
9516 /* If this is a commutative operation, and ARG0 is a constant, move it
9517 to ARG1 to reduce the number of tests below. */
9518 if (commutative_tree_code (code)
9519 && tree_swap_operands_p (arg0, arg1, true))
9520 return fold_build2 (code, type, op1, op0);
9522 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
9524 First check for cases where an arithmetic operation is applied to a
9525 compound, conditional, or comparison operation. Push the arithmetic
9526 operation inside the compound or conditional to see if any folding
9527 can then be done. Convert comparison to conditional for this purpose.
9528 This also optimizes non-constant cases that used to be done in
9531 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
9532 one of the operands is a comparison and the other is a comparison, a
9533 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
9534 code below would make the expression more complex. Change it to a
9535 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
9536 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
9538 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
9539 || code == EQ_EXPR || code == NE_EXPR)
9540 && ((truth_value_p (TREE_CODE (arg0))
9541 && (truth_value_p (TREE_CODE (arg1))
9542 || (TREE_CODE (arg1) == BIT_AND_EXPR
9543 && integer_onep (TREE_OPERAND (arg1, 1)))))
9544 || (truth_value_p (TREE_CODE (arg1))
9545 && (truth_value_p (TREE_CODE (arg0))
9546 || (TREE_CODE (arg0) == BIT_AND_EXPR
9547 && integer_onep (TREE_OPERAND (arg0, 1)))))))
9549 tem = fold_build2 (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
9550 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
9553 fold_convert (boolean_type_node, arg0),
9554 fold_convert (boolean_type_node, arg1));
9556 if (code == EQ_EXPR)
9557 tem = invert_truthvalue (tem);
9559 return fold_convert (type, tem);
9562 if (TREE_CODE_CLASS (code) == tcc_binary
9563 || TREE_CODE_CLASS (code) == tcc_comparison)
9565 if (TREE_CODE (arg0) == COMPOUND_EXPR)
9566 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
9567 fold_build2 (code, type,
9568 fold_convert (TREE_TYPE (op0),
9569 TREE_OPERAND (arg0, 1)),
9571 if (TREE_CODE (arg1) == COMPOUND_EXPR
9572 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9573 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
9574 fold_build2 (code, type, op0,
9575 fold_convert (TREE_TYPE (op1),
9576 TREE_OPERAND (arg1, 1))));
9578 if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
9580 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
9582 /*cond_first_p=*/1);
9583 if (tem != NULL_TREE)
9587 if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1))
9589 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
9591 /*cond_first_p=*/0);
9592 if (tem != NULL_TREE)
9599 case POINTER_PLUS_EXPR:
9600 /* 0 +p index -> (type)index */
9601 if (integer_zerop (arg0))
9602 return non_lvalue (fold_convert (type, arg1));
9604 /* PTR +p 0 -> PTR */
9605 if (integer_zerop (arg1))
9606 return non_lvalue (fold_convert (type, arg0));
9608 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
9609 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9610 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9611 return fold_convert (type, fold_build2 (PLUS_EXPR, sizetype,
9612 fold_convert (sizetype, arg1),
9613 fold_convert (sizetype, arg0)));
9615 /* index +p PTR -> PTR +p index */
9616 if (POINTER_TYPE_P (TREE_TYPE (arg1))
9617 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9618 return fold_build2 (POINTER_PLUS_EXPR, type,
9619 fold_convert (type, arg1),
9620 fold_convert (sizetype, arg0));
9622 /* (PTR +p B) +p A -> PTR +p (B + A) */
9623 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
9626 tree arg01 = fold_convert (sizetype, TREE_OPERAND (arg0, 1));
9627 tree arg00 = TREE_OPERAND (arg0, 0);
9628 inner = fold_build2 (PLUS_EXPR, sizetype,
9629 arg01, fold_convert (sizetype, arg1));
9630 return fold_convert (type,
9631 fold_build2 (POINTER_PLUS_EXPR,
9632 TREE_TYPE (arg00), arg00, inner));
9635 /* PTR_CST +p CST -> CST1 */
9636 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9637 return fold_build2 (PLUS_EXPR, type, arg0, fold_convert (type, arg1));
9639 /* Try replacing &a[i1] +p c * i2 with &a[i1 + i2], if c is step
9640 of the array. Loop optimizer sometimes produce this type of
9642 if (TREE_CODE (arg0) == ADDR_EXPR)
9644 tem = try_move_mult_to_index (arg0, fold_convert (sizetype, arg1));
9646 return fold_convert (type, tem);
9652 /* PTR + INT -> (INT)(PTR p+ INT) */
9653 if (POINTER_TYPE_P (TREE_TYPE (arg0))
9654 && INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
9655 return fold_convert (type, fold_build2 (POINTER_PLUS_EXPR,
9658 fold_convert (sizetype, arg1)));
9659 /* INT + PTR -> (INT)(PTR p+ INT) */
9660 if (POINTER_TYPE_P (TREE_TYPE (arg1))
9661 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9662 return fold_convert (type, fold_build2 (POINTER_PLUS_EXPR,
9665 fold_convert (sizetype, arg0)));
9666 /* A + (-B) -> A - B */
9667 if (TREE_CODE (arg1) == NEGATE_EXPR)
9668 return fold_build2 (MINUS_EXPR, type,
9669 fold_convert (type, arg0),
9670 fold_convert (type, TREE_OPERAND (arg1, 0)));
9671 /* (-A) + B -> B - A */
9672 if (TREE_CODE (arg0) == NEGATE_EXPR
9673 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
9674 return fold_build2 (MINUS_EXPR, type,
9675 fold_convert (type, arg1),
9676 fold_convert (type, TREE_OPERAND (arg0, 0)));
9678 if (INTEGRAL_TYPE_P (type))
9680 /* Convert ~A + 1 to -A. */
9681 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9682 && integer_onep (arg1))
9683 return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));
9686 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9687 && !TYPE_OVERFLOW_TRAPS (type))
9689 tree tem = TREE_OPERAND (arg0, 0);
9692 if (operand_equal_p (tem, arg1, 0))
9694 t1 = build_int_cst_type (type, -1);
9695 return omit_one_operand (type, t1, arg1);
9700 if (TREE_CODE (arg1) == BIT_NOT_EXPR
9701 && !TYPE_OVERFLOW_TRAPS (type))
9703 tree tem = TREE_OPERAND (arg1, 0);
9706 if (operand_equal_p (arg0, tem, 0))
9708 t1 = build_int_cst_type (type, -1);
9709 return omit_one_operand (type, t1, arg0);
9713 /* X + (X / CST) * -CST is X % CST. */
9714 if (TREE_CODE (arg1) == MULT_EXPR
9715 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
9716 && operand_equal_p (arg0,
9717 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
9719 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
9720 tree cst1 = TREE_OPERAND (arg1, 1);
9721 tree sum = fold_binary (PLUS_EXPR, TREE_TYPE (cst1), cst1, cst0);
9722 if (sum && integer_zerop (sum))
9723 return fold_convert (type,
9724 fold_build2 (TRUNC_MOD_EXPR,
9725 TREE_TYPE (arg0), arg0, cst0));
9729 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the
9730 same or one. Make sure type is not saturating.
9731 fold_plusminus_mult_expr will re-associate. */
9732 if ((TREE_CODE (arg0) == MULT_EXPR
9733 || TREE_CODE (arg1) == MULT_EXPR)
9734 && !TYPE_SATURATING (type)
9735 && (!FLOAT_TYPE_P (type) || flag_associative_math))
9737 tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1);
9742 if (! FLOAT_TYPE_P (type))
9744 if (integer_zerop (arg1))
9745 return non_lvalue (fold_convert (type, arg0));
9747 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
9748 with a constant, and the two constants have no bits in common,
9749 we should treat this as a BIT_IOR_EXPR since this may produce more
9751 if (TREE_CODE (arg0) == BIT_AND_EXPR
9752 && TREE_CODE (arg1) == BIT_AND_EXPR
9753 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9754 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9755 && integer_zerop (const_binop (BIT_AND_EXPR,
9756 TREE_OPERAND (arg0, 1),
9757 TREE_OPERAND (arg1, 1), 0)))
9759 code = BIT_IOR_EXPR;
9763 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
9764 (plus (plus (mult) (mult)) (foo)) so that we can
9765 take advantage of the factoring cases below. */
9766 if (((TREE_CODE (arg0) == PLUS_EXPR
9767 || TREE_CODE (arg0) == MINUS_EXPR)
9768 && TREE_CODE (arg1) == MULT_EXPR)
9769 || ((TREE_CODE (arg1) == PLUS_EXPR
9770 || TREE_CODE (arg1) == MINUS_EXPR)
9771 && TREE_CODE (arg0) == MULT_EXPR))
9773 tree parg0, parg1, parg, marg;
9774 enum tree_code pcode;
9776 if (TREE_CODE (arg1) == MULT_EXPR)
9777 parg = arg0, marg = arg1;
9779 parg = arg1, marg = arg0;
9780 pcode = TREE_CODE (parg);
9781 parg0 = TREE_OPERAND (parg, 0);
9782 parg1 = TREE_OPERAND (parg, 1);
9786 if (TREE_CODE (parg0) == MULT_EXPR
9787 && TREE_CODE (parg1) != MULT_EXPR)
9788 return fold_build2 (pcode, type,
9789 fold_build2 (PLUS_EXPR, type,
9790 fold_convert (type, parg0),
9791 fold_convert (type, marg)),
9792 fold_convert (type, parg1));
9793 if (TREE_CODE (parg0) != MULT_EXPR
9794 && TREE_CODE (parg1) == MULT_EXPR)
9795 return fold_build2 (PLUS_EXPR, type,
9796 fold_convert (type, parg0),
9797 fold_build2 (pcode, type,
9798 fold_convert (type, marg),
9805 /* See if ARG1 is zero and X + ARG1 reduces to X. */
9806 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
9807 return non_lvalue (fold_convert (type, arg0));
9809 /* Likewise if the operands are reversed. */
9810 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
9811 return non_lvalue (fold_convert (type, arg1));
9813 /* Convert X + -C into X - C. */
9814 if (TREE_CODE (arg1) == REAL_CST
9815 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
9817 tem = fold_negate_const (arg1, type);
9818 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
9819 return fold_build2 (MINUS_EXPR, type,
9820 fold_convert (type, arg0),
9821 fold_convert (type, tem));
9824 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
9825 to __complex__ ( x, y ). This is not the same for SNaNs or
9826 if signed zeros are involved. */
9827 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9828 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
9829 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
9831 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9832 tree arg0r = fold_unary (REALPART_EXPR, rtype, arg0);
9833 tree arg0i = fold_unary (IMAGPART_EXPR, rtype, arg0);
9834 bool arg0rz = false, arg0iz = false;
9835 if ((arg0r && (arg0rz = real_zerop (arg0r)))
9836 || (arg0i && (arg0iz = real_zerop (arg0i))))
9838 tree arg1r = fold_unary (REALPART_EXPR, rtype, arg1);
9839 tree arg1i = fold_unary (IMAGPART_EXPR, rtype, arg1);
9840 if (arg0rz && arg1i && real_zerop (arg1i))
9842 tree rp = arg1r ? arg1r
9843 : build1 (REALPART_EXPR, rtype, arg1);
9844 tree ip = arg0i ? arg0i
9845 : build1 (IMAGPART_EXPR, rtype, arg0);
9846 return fold_build2 (COMPLEX_EXPR, type, rp, ip);
9848 else if (arg0iz && arg1r && real_zerop (arg1r))
9850 tree rp = arg0r ? arg0r
9851 : build1 (REALPART_EXPR, rtype, arg0);
9852 tree ip = arg1i ? arg1i
9853 : build1 (IMAGPART_EXPR, rtype, arg1);
9854 return fold_build2 (COMPLEX_EXPR, type, rp, ip);
9859 if (flag_unsafe_math_optimizations
9860 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
9861 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
9862 && (tem = distribute_real_division (code, type, arg0, arg1)))
9865 /* Convert x+x into x*2.0. */
9866 if (operand_equal_p (arg0, arg1, 0)
9867 && SCALAR_FLOAT_TYPE_P (type))
9868 return fold_build2 (MULT_EXPR, type, arg0,
9869 build_real (type, dconst2));
9871 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
9872 We associate floats only if the user has specified
9873 -fassociative-math. */
9874 if (flag_associative_math
9875 && TREE_CODE (arg1) == PLUS_EXPR
9876 && TREE_CODE (arg0) != MULT_EXPR)
9878 tree tree10 = TREE_OPERAND (arg1, 0);
9879 tree tree11 = TREE_OPERAND (arg1, 1);
9880 if (TREE_CODE (tree11) == MULT_EXPR
9881 && TREE_CODE (tree10) == MULT_EXPR)
9884 tree0 = fold_build2 (PLUS_EXPR, type, arg0, tree10);
9885 return fold_build2 (PLUS_EXPR, type, tree0, tree11);
9888 /* Convert (b*c + d*e) + a into b*c + (d*e +a).
9889 We associate floats only if the user has specified
9890 -fassociative-math. */
9891 if (flag_associative_math
9892 && TREE_CODE (arg0) == PLUS_EXPR
9893 && TREE_CODE (arg1) != MULT_EXPR)
9895 tree tree00 = TREE_OPERAND (arg0, 0);
9896 tree tree01 = TREE_OPERAND (arg0, 1);
9897 if (TREE_CODE (tree01) == MULT_EXPR
9898 && TREE_CODE (tree00) == MULT_EXPR)
9901 tree0 = fold_build2 (PLUS_EXPR, type, tree01, arg1);
9902 return fold_build2 (PLUS_EXPR, type, tree00, tree0);
9908 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
9909 is a rotate of A by C1 bits. */
9910 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
9911 is a rotate of A by B bits. */
9913 enum tree_code code0, code1;
9915 code0 = TREE_CODE (arg0);
9916 code1 = TREE_CODE (arg1);
9917 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
9918 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
9919 && operand_equal_p (TREE_OPERAND (arg0, 0),
9920 TREE_OPERAND (arg1, 0), 0)
9921 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
9922 TYPE_UNSIGNED (rtype))
9923 /* Only create rotates in complete modes. Other cases are not
9924 expanded properly. */
9925 && TYPE_PRECISION (rtype) == GET_MODE_PRECISION (TYPE_MODE (rtype)))
9927 tree tree01, tree11;
9928 enum tree_code code01, code11;
9930 tree01 = TREE_OPERAND (arg0, 1);
9931 tree11 = TREE_OPERAND (arg1, 1);
9932 STRIP_NOPS (tree01);
9933 STRIP_NOPS (tree11);
9934 code01 = TREE_CODE (tree01);
9935 code11 = TREE_CODE (tree11);
9936 if (code01 == INTEGER_CST
9937 && code11 == INTEGER_CST
9938 && TREE_INT_CST_HIGH (tree01) == 0
9939 && TREE_INT_CST_HIGH (tree11) == 0
9940 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
9941 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
9942 return build2 (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
9943 code0 == LSHIFT_EXPR ? tree01 : tree11);
9944 else if (code11 == MINUS_EXPR)
9946 tree tree110, tree111;
9947 tree110 = TREE_OPERAND (tree11, 0);
9948 tree111 = TREE_OPERAND (tree11, 1);
9949 STRIP_NOPS (tree110);
9950 STRIP_NOPS (tree111);
9951 if (TREE_CODE (tree110) == INTEGER_CST
9952 && 0 == compare_tree_int (tree110,
9954 (TREE_TYPE (TREE_OPERAND
9956 && operand_equal_p (tree01, tree111, 0))
9957 return build2 ((code0 == LSHIFT_EXPR
9960 type, TREE_OPERAND (arg0, 0), tree01);
9962 else if (code01 == MINUS_EXPR)
9964 tree tree010, tree011;
9965 tree010 = TREE_OPERAND (tree01, 0);
9966 tree011 = TREE_OPERAND (tree01, 1);
9967 STRIP_NOPS (tree010);
9968 STRIP_NOPS (tree011);
9969 if (TREE_CODE (tree010) == INTEGER_CST
9970 && 0 == compare_tree_int (tree010,
9972 (TREE_TYPE (TREE_OPERAND
9974 && operand_equal_p (tree11, tree011, 0))
9975 return build2 ((code0 != LSHIFT_EXPR
9978 type, TREE_OPERAND (arg0, 0), tree11);
9984 /* In most languages, can't associate operations on floats through
9985 parentheses. Rather than remember where the parentheses were, we
9986 don't associate floats at all, unless the user has specified
9988 And, we need to make sure type is not saturating. */
9990 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
9991 && !TYPE_SATURATING (type))
9993 tree var0, con0, lit0, minus_lit0;
9994 tree var1, con1, lit1, minus_lit1;
9997 /* Split both trees into variables, constants, and literals. Then
9998 associate each group together, the constants with literals,
9999 then the result with variables. This increases the chances of
10000 literals being recombined later and of generating relocatable
10001 expressions for the sum of a constant and literal. */
10002 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
10003 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
10004 code == MINUS_EXPR);
10006 /* With undefined overflow we can only associate constants
10007 with one variable. */
10008 if ((POINTER_TYPE_P (type)
10009 || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
10015 if (TREE_CODE (tmp0) == NEGATE_EXPR)
10016 tmp0 = TREE_OPERAND (tmp0, 0);
10017 if (TREE_CODE (tmp1) == NEGATE_EXPR)
10018 tmp1 = TREE_OPERAND (tmp1, 0);
10019 /* The only case we can still associate with two variables
10020 is if they are the same, modulo negation. */
10021 if (!operand_equal_p (tmp0, tmp1, 0))
10025 /* Only do something if we found more than two objects. Otherwise,
10026 nothing has changed and we risk infinite recursion. */
10028 && (2 < ((var0 != 0) + (var1 != 0)
10029 + (con0 != 0) + (con1 != 0)
10030 + (lit0 != 0) + (lit1 != 0)
10031 + (minus_lit0 != 0) + (minus_lit1 != 0))))
10033 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
10034 if (code == MINUS_EXPR)
10037 var0 = associate_trees (var0, var1, code, type);
10038 con0 = associate_trees (con0, con1, code, type);
10039 lit0 = associate_trees (lit0, lit1, code, type);
10040 minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type);
10042 /* Preserve the MINUS_EXPR if the negative part of the literal is
10043 greater than the positive part. Otherwise, the multiplicative
10044 folding code (i.e extract_muldiv) may be fooled in case
10045 unsigned constants are subtracted, like in the following
10046 example: ((X*2 + 4) - 8U)/2. */
10047 if (minus_lit0 && lit0)
10049 if (TREE_CODE (lit0) == INTEGER_CST
10050 && TREE_CODE (minus_lit0) == INTEGER_CST
10051 && tree_int_cst_lt (lit0, minus_lit0))
10053 minus_lit0 = associate_trees (minus_lit0, lit0,
10059 lit0 = associate_trees (lit0, minus_lit0,
10067 return fold_convert (type,
10068 associate_trees (var0, minus_lit0,
10069 MINUS_EXPR, type));
10072 con0 = associate_trees (con0, minus_lit0,
10074 return fold_convert (type,
10075 associate_trees (var0, con0,
10080 con0 = associate_trees (con0, lit0, code, type);
10081 return fold_convert (type, associate_trees (var0, con0,
10089 /* Pointer simplifications for subtraction, simple reassociations. */
10090 if (POINTER_TYPE_P (TREE_TYPE (arg1)) && POINTER_TYPE_P (TREE_TYPE (arg0)))
10092 /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
10093 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
10094 && TREE_CODE (arg1) == POINTER_PLUS_EXPR)
10096 tree arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));
10097 tree arg01 = fold_convert (type, TREE_OPERAND (arg0, 1));
10098 tree arg10 = fold_convert (type, TREE_OPERAND (arg1, 0));
10099 tree arg11 = fold_convert (type, TREE_OPERAND (arg1, 1));
10100 return fold_build2 (PLUS_EXPR, type,
10101 fold_build2 (MINUS_EXPR, type, arg00, arg10),
10102 fold_build2 (MINUS_EXPR, type, arg01, arg11));
10104 /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1 simplifies. */
10105 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10107 tree arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));
10108 tree arg01 = fold_convert (type, TREE_OPERAND (arg0, 1));
10109 tree tmp = fold_binary (MINUS_EXPR, type, arg00, fold_convert (type, arg1));
10111 return fold_build2 (PLUS_EXPR, type, tmp, arg01);
10114 /* A - (-B) -> A + B */
10115 if (TREE_CODE (arg1) == NEGATE_EXPR)
10116 return fold_build2 (PLUS_EXPR, type, op0,
10117 fold_convert (type, TREE_OPERAND (arg1, 0)));
10118 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
10119 if (TREE_CODE (arg0) == NEGATE_EXPR
10120 && (FLOAT_TYPE_P (type)
10121 || INTEGRAL_TYPE_P (type))
10122 && negate_expr_p (arg1)
10123 && reorder_operands_p (arg0, arg1))
10124 return fold_build2 (MINUS_EXPR, type,
10125 fold_convert (type, negate_expr (arg1)),
10126 fold_convert (type, TREE_OPERAND (arg0, 0)));
10127 /* Convert -A - 1 to ~A. */
10128 if (INTEGRAL_TYPE_P (type)
10129 && TREE_CODE (arg0) == NEGATE_EXPR
10130 && integer_onep (arg1)
10131 && !TYPE_OVERFLOW_TRAPS (type))
10132 return fold_build1 (BIT_NOT_EXPR, type,
10133 fold_convert (type, TREE_OPERAND (arg0, 0)));
10135 /* Convert -1 - A to ~A. */
10136 if (INTEGRAL_TYPE_P (type)
10137 && integer_all_onesp (arg0))
10138 return fold_build1 (BIT_NOT_EXPR, type, op1);
10141 /* X - (X / CST) * CST is X % CST. */
10142 if (INTEGRAL_TYPE_P (type)
10143 && TREE_CODE (arg1) == MULT_EXPR
10144 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10145 && operand_equal_p (arg0,
10146 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0)
10147 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg1, 0), 1),
10148 TREE_OPERAND (arg1, 1), 0))
10149 return fold_convert (type,
10150 fold_build2 (TRUNC_MOD_EXPR, TREE_TYPE (arg0),
10151 arg0, TREE_OPERAND (arg1, 1)));
10153 if (! FLOAT_TYPE_P (type))
10155 if (integer_zerop (arg0))
10156 return negate_expr (fold_convert (type, arg1));
10157 if (integer_zerop (arg1))
10158 return non_lvalue (fold_convert (type, arg0));
10160 /* Fold A - (A & B) into ~B & A. */
10161 if (!TREE_SIDE_EFFECTS (arg0)
10162 && TREE_CODE (arg1) == BIT_AND_EXPR)
10164 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
10166 tree arg10 = fold_convert (type, TREE_OPERAND (arg1, 0));
10167 return fold_build2 (BIT_AND_EXPR, type,
10168 fold_build1 (BIT_NOT_EXPR, type, arg10),
10169 fold_convert (type, arg0));
10171 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10173 tree arg11 = fold_convert (type, TREE_OPERAND (arg1, 1));
10174 return fold_build2 (BIT_AND_EXPR, type,
10175 fold_build1 (BIT_NOT_EXPR, type, arg11),
10176 fold_convert (type, arg0));
10180 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
10181 any power of 2 minus 1. */
10182 if (TREE_CODE (arg0) == BIT_AND_EXPR
10183 && TREE_CODE (arg1) == BIT_AND_EXPR
10184 && operand_equal_p (TREE_OPERAND (arg0, 0),
10185 TREE_OPERAND (arg1, 0), 0))
10187 tree mask0 = TREE_OPERAND (arg0, 1);
10188 tree mask1 = TREE_OPERAND (arg1, 1);
10189 tree tem = fold_build1 (BIT_NOT_EXPR, type, mask0);
10191 if (operand_equal_p (tem, mask1, 0))
10193 tem = fold_build2 (BIT_XOR_EXPR, type,
10194 TREE_OPERAND (arg0, 0), mask1);
10195 return fold_build2 (MINUS_EXPR, type, tem, mask1);
10200 /* See if ARG1 is zero and X - ARG1 reduces to X. */
10201 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
10202 return non_lvalue (fold_convert (type, arg0));
10204 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
10205 ARG0 is zero and X + ARG0 reduces to X, since that would mean
10206 (-ARG1 + ARG0) reduces to -ARG1. */
10207 else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10208 return negate_expr (fold_convert (type, arg1));
10210 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
10211 __complex__ ( x, -y ). This is not the same for SNaNs or if
10212 signed zeros are involved. */
10213 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10214 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10215 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10217 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10218 tree arg0r = fold_unary (REALPART_EXPR, rtype, arg0);
10219 tree arg0i = fold_unary (IMAGPART_EXPR, rtype, arg0);
10220 bool arg0rz = false, arg0iz = false;
10221 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10222 || (arg0i && (arg0iz = real_zerop (arg0i))))
10224 tree arg1r = fold_unary (REALPART_EXPR, rtype, arg1);
10225 tree arg1i = fold_unary (IMAGPART_EXPR, rtype, arg1);
10226 if (arg0rz && arg1i && real_zerop (arg1i))
10228 tree rp = fold_build1 (NEGATE_EXPR, rtype,
10230 : build1 (REALPART_EXPR, rtype, arg1));
10231 tree ip = arg0i ? arg0i
10232 : build1 (IMAGPART_EXPR, rtype, arg0);
10233 return fold_build2 (COMPLEX_EXPR, type, rp, ip);
10235 else if (arg0iz && arg1r && real_zerop (arg1r))
10237 tree rp = arg0r ? arg0r
10238 : build1 (REALPART_EXPR, rtype, arg0);
10239 tree ip = fold_build1 (NEGATE_EXPR, rtype,
10241 : build1 (IMAGPART_EXPR, rtype, arg1));
10242 return fold_build2 (COMPLEX_EXPR, type, rp, ip);
10247 /* Fold &x - &x. This can happen from &x.foo - &x.
10248 This is unsafe for certain floats even in non-IEEE formats.
10249 In IEEE, it is unsafe because it does wrong for NaNs.
10250 Also note that operand_equal_p is always false if an operand
10253 if ((!FLOAT_TYPE_P (type) || !HONOR_NANS (TYPE_MODE (type)))
10254 && operand_equal_p (arg0, arg1, 0))
10255 return fold_convert (type, integer_zero_node);
10257 /* A - B -> A + (-B) if B is easily negatable. */
10258 if (negate_expr_p (arg1)
10259 && ((FLOAT_TYPE_P (type)
10260 /* Avoid this transformation if B is a positive REAL_CST. */
10261 && (TREE_CODE (arg1) != REAL_CST
10262 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
10263 || INTEGRAL_TYPE_P (type)))
10264 return fold_build2 (PLUS_EXPR, type,
10265 fold_convert (type, arg0),
10266 fold_convert (type, negate_expr (arg1)));
10268 /* Try folding difference of addresses. */
10270 HOST_WIDE_INT diff;
10272 if ((TREE_CODE (arg0) == ADDR_EXPR
10273 || TREE_CODE (arg1) == ADDR_EXPR)
10274 && ptr_difference_const (arg0, arg1, &diff))
10275 return build_int_cst_type (type, diff);
10278 /* Fold &a[i] - &a[j] to i-j. */
10279 if (TREE_CODE (arg0) == ADDR_EXPR
10280 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
10281 && TREE_CODE (arg1) == ADDR_EXPR
10282 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
10284 tree aref0 = TREE_OPERAND (arg0, 0);
10285 tree aref1 = TREE_OPERAND (arg1, 0);
10286 if (operand_equal_p (TREE_OPERAND (aref0, 0),
10287 TREE_OPERAND (aref1, 0), 0))
10289 tree op0 = fold_convert (type, TREE_OPERAND (aref0, 1));
10290 tree op1 = fold_convert (type, TREE_OPERAND (aref1, 1));
10291 tree esz = array_ref_element_size (aref0);
10292 tree diff = build2 (MINUS_EXPR, type, op0, op1);
10293 return fold_build2 (MULT_EXPR, type, diff,
10294 fold_convert (type, esz));
10299 if (flag_unsafe_math_optimizations
10300 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10301 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10302 && (tem = distribute_real_division (code, type, arg0, arg1)))
10305 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the
10306 same or one. Make sure type is not saturating.
10307 fold_plusminus_mult_expr will re-associate. */
10308 if ((TREE_CODE (arg0) == MULT_EXPR
10309 || TREE_CODE (arg1) == MULT_EXPR)
10310 && !TYPE_SATURATING (type)
10311 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10313 tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1);
10321 /* (-A) * (-B) -> A * B */
10322 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10323 return fold_build2 (MULT_EXPR, type,
10324 fold_convert (type, TREE_OPERAND (arg0, 0)),
10325 fold_convert (type, negate_expr (arg1)));
10326 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10327 return fold_build2 (MULT_EXPR, type,
10328 fold_convert (type, negate_expr (arg0)),
10329 fold_convert (type, TREE_OPERAND (arg1, 0)));
10331 if (! FLOAT_TYPE_P (type))
10333 if (integer_zerop (arg1))
10334 return omit_one_operand (type, arg1, arg0);
10335 if (integer_onep (arg1))
10336 return non_lvalue (fold_convert (type, arg0));
10337 /* Transform x * -1 into -x. Make sure to do the negation
10338 on the original operand with conversions not stripped
10339 because we can only strip non-sign-changing conversions. */
10340 if (integer_all_onesp (arg1))
10341 return fold_convert (type, negate_expr (op0));
10342 /* Transform x * -C into -x * C if x is easily negatable. */
10343 if (TREE_CODE (arg1) == INTEGER_CST
10344 && tree_int_cst_sgn (arg1) == -1
10345 && negate_expr_p (arg0)
10346 && (tem = negate_expr (arg1)) != arg1
10347 && !TREE_OVERFLOW (tem))
10348 return fold_build2 (MULT_EXPR, type,
10349 fold_convert (type, negate_expr (arg0)), tem);
10351 /* (a * (1 << b)) is (a << b) */
10352 if (TREE_CODE (arg1) == LSHIFT_EXPR
10353 && integer_onep (TREE_OPERAND (arg1, 0)))
10354 return fold_build2 (LSHIFT_EXPR, type, op0,
10355 TREE_OPERAND (arg1, 1));
10356 if (TREE_CODE (arg0) == LSHIFT_EXPR
10357 && integer_onep (TREE_OPERAND (arg0, 0)))
10358 return fold_build2 (LSHIFT_EXPR, type, op1,
10359 TREE_OPERAND (arg0, 1));
10361 strict_overflow_p = false;
10362 if (TREE_CODE (arg1) == INTEGER_CST
10363 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10364 &strict_overflow_p)))
10366 if (strict_overflow_p)
10367 fold_overflow_warning (("assuming signed overflow does not "
10368 "occur when simplifying "
10370 WARN_STRICT_OVERFLOW_MISC);
10371 return fold_convert (type, tem);
10374 /* Optimize z * conj(z) for integer complex numbers. */
10375 if (TREE_CODE (arg0) == CONJ_EXPR
10376 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10377 return fold_mult_zconjz (type, arg1);
10378 if (TREE_CODE (arg1) == CONJ_EXPR
10379 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10380 return fold_mult_zconjz (type, arg0);
10384 /* Maybe fold x * 0 to 0. The expressions aren't the same
10385 when x is NaN, since x * 0 is also NaN. Nor are they the
10386 same in modes with signed zeros, since multiplying a
10387 negative value by 0 gives -0, not +0. */
10388 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10389 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10390 && real_zerop (arg1))
10391 return omit_one_operand (type, arg1, arg0);
10392 /* In IEEE floating point, x*1 is not equivalent to x for snans. */
10393 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10394 && real_onep (arg1))
10395 return non_lvalue (fold_convert (type, arg0));
10397 /* Transform x * -1.0 into -x. */
10398 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10399 && real_minus_onep (arg1))
10400 return fold_convert (type, negate_expr (arg0));
10402 /* Convert (C1/X)*C2 into (C1*C2)/X. This transformation may change
10403 the result for floating point types due to rounding so it is applied
10404 only if -fassociative-math was specify. */
10405 if (flag_associative_math
10406 && TREE_CODE (arg0) == RDIV_EXPR
10407 && TREE_CODE (arg1) == REAL_CST
10408 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
10410 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
10413 return fold_build2 (RDIV_EXPR, type, tem,
10414 TREE_OPERAND (arg0, 1));
10417 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
10418 if (operand_equal_p (arg0, arg1, 0))
10420 tree tem = fold_strip_sign_ops (arg0);
10421 if (tem != NULL_TREE)
10423 tem = fold_convert (type, tem);
10424 return fold_build2 (MULT_EXPR, type, tem, tem);
10428 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
10429 This is not the same for NaNs or if signed zeros are
10431 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10432 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10433 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
10434 && TREE_CODE (arg1) == COMPLEX_CST
10435 && real_zerop (TREE_REALPART (arg1)))
10437 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10438 if (real_onep (TREE_IMAGPART (arg1)))
10439 return fold_build2 (COMPLEX_EXPR, type,
10440 negate_expr (fold_build1 (IMAGPART_EXPR,
10442 fold_build1 (REALPART_EXPR, rtype, arg0));
10443 else if (real_minus_onep (TREE_IMAGPART (arg1)))
10444 return fold_build2 (COMPLEX_EXPR, type,
10445 fold_build1 (IMAGPART_EXPR, rtype, arg0),
10446 negate_expr (fold_build1 (REALPART_EXPR,
10450 /* Optimize z * conj(z) for floating point complex numbers.
10451 Guarded by flag_unsafe_math_optimizations as non-finite
10452 imaginary components don't produce scalar results. */
10453 if (flag_unsafe_math_optimizations
10454 && TREE_CODE (arg0) == CONJ_EXPR
10455 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10456 return fold_mult_zconjz (type, arg1);
10457 if (flag_unsafe_math_optimizations
10458 && TREE_CODE (arg1) == CONJ_EXPR
10459 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10460 return fold_mult_zconjz (type, arg0);
10462 if (flag_unsafe_math_optimizations)
10464 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
10465 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
10467 /* Optimizations of root(...)*root(...). */
10468 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
10471 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10472 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10474 /* Optimize sqrt(x)*sqrt(x) as x. */
10475 if (BUILTIN_SQRT_P (fcode0)
10476 && operand_equal_p (arg00, arg10, 0)
10477 && ! HONOR_SNANS (TYPE_MODE (type)))
10480 /* Optimize root(x)*root(y) as root(x*y). */
10481 rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10482 arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
10483 return build_call_expr (rootfn, 1, arg);
10486 /* Optimize expN(x)*expN(y) as expN(x+y). */
10487 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
10489 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10490 tree arg = fold_build2 (PLUS_EXPR, type,
10491 CALL_EXPR_ARG (arg0, 0),
10492 CALL_EXPR_ARG (arg1, 0));
10493 return build_call_expr (expfn, 1, arg);
10496 /* Optimizations of pow(...)*pow(...). */
10497 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
10498 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
10499 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
10501 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10502 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10503 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10504 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10506 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
10507 if (operand_equal_p (arg01, arg11, 0))
10509 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10510 tree arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
10511 return build_call_expr (powfn, 2, arg, arg01);
10514 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
10515 if (operand_equal_p (arg00, arg10, 0))
10517 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10518 tree arg = fold_build2 (PLUS_EXPR, type, arg01, arg11);
10519 return build_call_expr (powfn, 2, arg00, arg);
10523 /* Optimize tan(x)*cos(x) as sin(x). */
10524 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
10525 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
10526 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
10527 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
10528 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
10529 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
10530 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
10531 CALL_EXPR_ARG (arg1, 0), 0))
10533 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
10535 if (sinfn != NULL_TREE)
10536 return build_call_expr (sinfn, 1, CALL_EXPR_ARG (arg0, 0));
10539 /* Optimize x*pow(x,c) as pow(x,c+1). */
10540 if (fcode1 == BUILT_IN_POW
10541 || fcode1 == BUILT_IN_POWF
10542 || fcode1 == BUILT_IN_POWL)
10544 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10545 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10546 if (TREE_CODE (arg11) == REAL_CST
10547 && !TREE_OVERFLOW (arg11)
10548 && operand_equal_p (arg0, arg10, 0))
10550 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
10554 c = TREE_REAL_CST (arg11);
10555 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
10556 arg = build_real (type, c);
10557 return build_call_expr (powfn, 2, arg0, arg);
10561 /* Optimize pow(x,c)*x as pow(x,c+1). */
10562 if (fcode0 == BUILT_IN_POW
10563 || fcode0 == BUILT_IN_POWF
10564 || fcode0 == BUILT_IN_POWL)
10566 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10567 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10568 if (TREE_CODE (arg01) == REAL_CST
10569 && !TREE_OVERFLOW (arg01)
10570 && operand_equal_p (arg1, arg00, 0))
10572 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10576 c = TREE_REAL_CST (arg01);
10577 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
10578 arg = build_real (type, c);
10579 return build_call_expr (powfn, 2, arg1, arg);
10583 /* Optimize x*x as pow(x,2.0), which is expanded as x*x. */
10584 if (! optimize_size
10585 && operand_equal_p (arg0, arg1, 0))
10587 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
10591 tree arg = build_real (type, dconst2);
10592 return build_call_expr (powfn, 2, arg0, arg);
10601 if (integer_all_onesp (arg1))
10602 return omit_one_operand (type, arg1, arg0);
10603 if (integer_zerop (arg1))
10604 return non_lvalue (fold_convert (type, arg0));
10605 if (operand_equal_p (arg0, arg1, 0))
10606 return non_lvalue (fold_convert (type, arg0));
10608 /* ~X | X is -1. */
10609 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10610 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10612 t1 = fold_convert (type, integer_zero_node);
10613 t1 = fold_unary (BIT_NOT_EXPR, type, t1);
10614 return omit_one_operand (type, t1, arg1);
10617 /* X | ~X is -1. */
10618 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10619 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10621 t1 = fold_convert (type, integer_zero_node);
10622 t1 = fold_unary (BIT_NOT_EXPR, type, t1);
10623 return omit_one_operand (type, t1, arg0);
10626 /* Canonicalize (X & C1) | C2. */
10627 if (TREE_CODE (arg0) == BIT_AND_EXPR
10628 && TREE_CODE (arg1) == INTEGER_CST
10629 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10631 unsigned HOST_WIDE_INT hi1, lo1, hi2, lo2, hi3, lo3, mlo, mhi;
10632 int width = TYPE_PRECISION (type), w;
10633 hi1 = TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1));
10634 lo1 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
10635 hi2 = TREE_INT_CST_HIGH (arg1);
10636 lo2 = TREE_INT_CST_LOW (arg1);
10638 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
10639 if ((hi1 & hi2) == hi1 && (lo1 & lo2) == lo1)
10640 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
10642 if (width > HOST_BITS_PER_WIDE_INT)
10644 mhi = (unsigned HOST_WIDE_INT) -1
10645 >> (2 * HOST_BITS_PER_WIDE_INT - width);
10651 mlo = (unsigned HOST_WIDE_INT) -1
10652 >> (HOST_BITS_PER_WIDE_INT - width);
10655 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
10656 if ((~(hi1 | hi2) & mhi) == 0 && (~(lo1 | lo2) & mlo) == 0)
10657 return fold_build2 (BIT_IOR_EXPR, type,
10658 TREE_OPERAND (arg0, 0), arg1);
10660 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
10661 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
10662 mode which allows further optimizations. */
10669 for (w = BITS_PER_UNIT;
10670 w <= width && w <= HOST_BITS_PER_WIDE_INT;
10673 unsigned HOST_WIDE_INT mask
10674 = (unsigned HOST_WIDE_INT) -1 >> (HOST_BITS_PER_WIDE_INT - w);
10675 if (((lo1 | lo2) & mask) == mask
10676 && (lo1 & ~mask) == 0 && hi1 == 0)
10683 if (hi3 != hi1 || lo3 != lo1)
10684 return fold_build2 (BIT_IOR_EXPR, type,
10685 fold_build2 (BIT_AND_EXPR, type,
10686 TREE_OPERAND (arg0, 0),
10687 build_int_cst_wide (type,
10692 /* (X & Y) | Y is (X, Y). */
10693 if (TREE_CODE (arg0) == BIT_AND_EXPR
10694 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10695 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
10696 /* (X & Y) | X is (Y, X). */
10697 if (TREE_CODE (arg0) == BIT_AND_EXPR
10698 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10699 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10700 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 1));
10701 /* X | (X & Y) is (Y, X). */
10702 if (TREE_CODE (arg1) == BIT_AND_EXPR
10703 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
10704 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
10705 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 1));
10706 /* X | (Y & X) is (Y, X). */
10707 if (TREE_CODE (arg1) == BIT_AND_EXPR
10708 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10709 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10710 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 0));
10712 t1 = distribute_bit_expr (code, type, arg0, arg1);
10713 if (t1 != NULL_TREE)
10716 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
10718 This results in more efficient code for machines without a NAND
10719 instruction. Combine will canonicalize to the first form
10720 which will allow use of NAND instructions provided by the
10721 backend if they exist. */
10722 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10723 && TREE_CODE (arg1) == BIT_NOT_EXPR)
10725 return fold_build1 (BIT_NOT_EXPR, type,
10726 build2 (BIT_AND_EXPR, type,
10727 TREE_OPERAND (arg0, 0),
10728 TREE_OPERAND (arg1, 0)));
10731 /* See if this can be simplified into a rotate first. If that
10732 is unsuccessful continue in the association code. */
10736 if (integer_zerop (arg1))
10737 return non_lvalue (fold_convert (type, arg0));
10738 if (integer_all_onesp (arg1))
10739 return fold_build1 (BIT_NOT_EXPR, type, op0);
10740 if (operand_equal_p (arg0, arg1, 0))
10741 return omit_one_operand (type, integer_zero_node, arg0);
10743 /* ~X ^ X is -1. */
10744 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10745 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10747 t1 = fold_convert (type, integer_zero_node);
10748 t1 = fold_unary (BIT_NOT_EXPR, type, t1);
10749 return omit_one_operand (type, t1, arg1);
10752 /* X ^ ~X is -1. */
10753 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10754 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10756 t1 = fold_convert (type, integer_zero_node);
10757 t1 = fold_unary (BIT_NOT_EXPR, type, t1);
10758 return omit_one_operand (type, t1, arg0);
10761 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
10762 with a constant, and the two constants have no bits in common,
10763 we should treat this as a BIT_IOR_EXPR since this may produce more
10764 simplifications. */
10765 if (TREE_CODE (arg0) == BIT_AND_EXPR
10766 && TREE_CODE (arg1) == BIT_AND_EXPR
10767 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10768 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
10769 && integer_zerop (const_binop (BIT_AND_EXPR,
10770 TREE_OPERAND (arg0, 1),
10771 TREE_OPERAND (arg1, 1), 0)))
10773 code = BIT_IOR_EXPR;
10777 /* (X | Y) ^ X -> Y & ~ X*/
10778 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10779 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10781 tree t2 = TREE_OPERAND (arg0, 1);
10782 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
10784 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
10785 fold_convert (type, t1));
10789 /* (Y | X) ^ X -> Y & ~ X*/
10790 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10791 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10793 tree t2 = TREE_OPERAND (arg0, 0);
10794 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
10796 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
10797 fold_convert (type, t1));
10801 /* X ^ (X | Y) -> Y & ~ X*/
10802 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10803 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
10805 tree t2 = TREE_OPERAND (arg1, 1);
10806 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
10808 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
10809 fold_convert (type, t1));
10813 /* X ^ (Y | X) -> Y & ~ X*/
10814 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10815 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
10817 tree t2 = TREE_OPERAND (arg1, 0);
10818 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
10820 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
10821 fold_convert (type, t1));
10825 /* Convert ~X ^ ~Y to X ^ Y. */
10826 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10827 && TREE_CODE (arg1) == BIT_NOT_EXPR)
10828 return fold_build2 (code, type,
10829 fold_convert (type, TREE_OPERAND (arg0, 0)),
10830 fold_convert (type, TREE_OPERAND (arg1, 0)));
10832 /* Convert ~X ^ C to X ^ ~C. */
10833 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10834 && TREE_CODE (arg1) == INTEGER_CST)
10835 return fold_build2 (code, type,
10836 fold_convert (type, TREE_OPERAND (arg0, 0)),
10837 fold_build1 (BIT_NOT_EXPR, type, arg1));
10839 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
10840 if (TREE_CODE (arg0) == BIT_AND_EXPR
10841 && integer_onep (TREE_OPERAND (arg0, 1))
10842 && integer_onep (arg1))
10843 return fold_build2 (EQ_EXPR, type, arg0,
10844 build_int_cst (TREE_TYPE (arg0), 0));
10846 /* Fold (X & Y) ^ Y as ~X & Y. */
10847 if (TREE_CODE (arg0) == BIT_AND_EXPR
10848 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10850 tem = fold_convert (type, TREE_OPERAND (arg0, 0));
10851 return fold_build2 (BIT_AND_EXPR, type,
10852 fold_build1 (BIT_NOT_EXPR, type, tem),
10853 fold_convert (type, arg1));
10855 /* Fold (X & Y) ^ X as ~Y & X. */
10856 if (TREE_CODE (arg0) == BIT_AND_EXPR
10857 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10858 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10860 tem = fold_convert (type, TREE_OPERAND (arg0, 1));
10861 return fold_build2 (BIT_AND_EXPR, type,
10862 fold_build1 (BIT_NOT_EXPR, type, tem),
10863 fold_convert (type, arg1));
10865 /* Fold X ^ (X & Y) as X & ~Y. */
10866 if (TREE_CODE (arg1) == BIT_AND_EXPR
10867 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10869 tem = fold_convert (type, TREE_OPERAND (arg1, 1));
10870 return fold_build2 (BIT_AND_EXPR, type,
10871 fold_convert (type, arg0),
10872 fold_build1 (BIT_NOT_EXPR, type, tem));
10874 /* Fold X ^ (Y & X) as ~Y & X. */
10875 if (TREE_CODE (arg1) == BIT_AND_EXPR
10876 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10877 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10879 tem = fold_convert (type, TREE_OPERAND (arg1, 0));
10880 return fold_build2 (BIT_AND_EXPR, type,
10881 fold_build1 (BIT_NOT_EXPR, type, tem),
10882 fold_convert (type, arg0));
10885 /* See if this can be simplified into a rotate first. If that
10886 is unsuccessful continue in the association code. */
10890 if (integer_all_onesp (arg1))
10891 return non_lvalue (fold_convert (type, arg0));
10892 if (integer_zerop (arg1))
10893 return omit_one_operand (type, arg1, arg0);
10894 if (operand_equal_p (arg0, arg1, 0))
10895 return non_lvalue (fold_convert (type, arg0));
10897 /* ~X & X is always zero. */
10898 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10899 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10900 return omit_one_operand (type, integer_zero_node, arg1);
10902 /* X & ~X is always zero. */
10903 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10904 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10905 return omit_one_operand (type, integer_zero_node, arg0);
10907 /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2). */
10908 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10909 && TREE_CODE (arg1) == INTEGER_CST
10910 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10912 tree tmp1 = fold_convert (TREE_TYPE (arg0), arg1);
10913 tree tmp2 = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
10914 TREE_OPERAND (arg0, 0), tmp1);
10915 tree tmp3 = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
10916 TREE_OPERAND (arg0, 1), tmp1);
10917 return fold_convert (type,
10918 fold_build2 (BIT_IOR_EXPR, TREE_TYPE (arg0),
10922 /* (X | Y) & Y is (X, Y). */
10923 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10924 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10925 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
10926 /* (X | Y) & X is (Y, X). */
10927 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10928 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10929 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10930 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 1));
10931 /* X & (X | Y) is (Y, X). */
10932 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10933 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
10934 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
10935 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 1));
10936 /* X & (Y | X) is (Y, X). */
10937 if (TREE_CODE (arg1) == BIT_IOR_EXPR
10938 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10939 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10940 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 0));
10942 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
10943 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10944 && integer_onep (TREE_OPERAND (arg0, 1))
10945 && integer_onep (arg1))
10947 tem = TREE_OPERAND (arg0, 0);
10948 return fold_build2 (EQ_EXPR, type,
10949 fold_build2 (BIT_AND_EXPR, TREE_TYPE (tem), tem,
10950 build_int_cst (TREE_TYPE (tem), 1)),
10951 build_int_cst (TREE_TYPE (tem), 0));
10953 /* Fold ~X & 1 as (X & 1) == 0. */
10954 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10955 && integer_onep (arg1))
10957 tem = TREE_OPERAND (arg0, 0);
10958 return fold_build2 (EQ_EXPR, type,
10959 fold_build2 (BIT_AND_EXPR, TREE_TYPE (tem), tem,
10960 build_int_cst (TREE_TYPE (tem), 1)),
10961 build_int_cst (TREE_TYPE (tem), 0));
10964 /* Fold (X ^ Y) & Y as ~X & Y. */
10965 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10966 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10968 tem = fold_convert (type, TREE_OPERAND (arg0, 0));
10969 return fold_build2 (BIT_AND_EXPR, type,
10970 fold_build1 (BIT_NOT_EXPR, type, tem),
10971 fold_convert (type, arg1));
10973 /* Fold (X ^ Y) & X as ~Y & X. */
10974 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10975 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10976 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10978 tem = fold_convert (type, TREE_OPERAND (arg0, 1));
10979 return fold_build2 (BIT_AND_EXPR, type,
10980 fold_build1 (BIT_NOT_EXPR, type, tem),
10981 fold_convert (type, arg1));
10983 /* Fold X & (X ^ Y) as X & ~Y. */
10984 if (TREE_CODE (arg1) == BIT_XOR_EXPR
10985 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10987 tem = fold_convert (type, TREE_OPERAND (arg1, 1));
10988 return fold_build2 (BIT_AND_EXPR, type,
10989 fold_convert (type, arg0),
10990 fold_build1 (BIT_NOT_EXPR, type, tem));
10992 /* Fold X & (Y ^ X) as ~Y & X. */
10993 if (TREE_CODE (arg1) == BIT_XOR_EXPR
10994 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10995 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10997 tem = fold_convert (type, TREE_OPERAND (arg1, 0));
10998 return fold_build2 (BIT_AND_EXPR, type,
10999 fold_build1 (BIT_NOT_EXPR, type, tem),
11000 fold_convert (type, arg0));
11003 t1 = distribute_bit_expr (code, type, arg0, arg1);
11004 if (t1 != NULL_TREE)
11006 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
11007 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
11008 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
11011 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
11013 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
11014 && (~TREE_INT_CST_LOW (arg1)
11015 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
11016 return fold_convert (type, TREE_OPERAND (arg0, 0));
11019 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
11021 This results in more efficient code for machines without a NOR
11022 instruction. Combine will canonicalize to the first form
11023 which will allow use of NOR instructions provided by the
11024 backend if they exist. */
11025 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11026 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11028 return fold_build1 (BIT_NOT_EXPR, type,
11029 build2 (BIT_IOR_EXPR, type,
11030 fold_convert (type,
11031 TREE_OPERAND (arg0, 0)),
11032 fold_convert (type,
11033 TREE_OPERAND (arg1, 0))));
11036 /* If arg0 is derived from the address of an object or function, we may
11037 be able to fold this expression using the object or function's
11039 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && host_integerp (arg1, 1))
11041 unsigned HOST_WIDE_INT modulus, residue;
11042 unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (arg1);
11044 modulus = get_pointer_modulus_and_residue (arg0, &residue);
11046 /* This works because modulus is a power of 2. If this weren't the
11047 case, we'd have to replace it by its greatest power-of-2
11048 divisor: modulus & -modulus. */
11050 return build_int_cst (type, residue & low);
11053 /* Fold (X << C1) & C2 into (X << C1) & (C2 | ((1 << C1) - 1))
11054 (X >> C1) & C2 into (X >> C1) & (C2 | ~((type) -1 >> C1))
11055 if the new mask might be further optimized. */
11056 if ((TREE_CODE (arg0) == LSHIFT_EXPR
11057 || TREE_CODE (arg0) == RSHIFT_EXPR)
11058 && host_integerp (TREE_OPERAND (arg0, 1), 1)
11059 && host_integerp (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)))
11060 && tree_low_cst (TREE_OPERAND (arg0, 1), 1)
11061 < TYPE_PRECISION (TREE_TYPE (arg0))
11062 && TYPE_PRECISION (TREE_TYPE (arg0)) <= HOST_BITS_PER_WIDE_INT
11063 && tree_low_cst (TREE_OPERAND (arg0, 1), 1) > 0)
11065 unsigned int shiftc = tree_low_cst (TREE_OPERAND (arg0, 1), 1);
11066 unsigned HOST_WIDE_INT mask
11067 = tree_low_cst (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)));
11068 unsigned HOST_WIDE_INT newmask, zerobits = 0;
11069 tree shift_type = TREE_TYPE (arg0);
11071 if (TREE_CODE (arg0) == LSHIFT_EXPR)
11072 zerobits = ((((unsigned HOST_WIDE_INT) 1) << shiftc) - 1);
11073 else if (TREE_CODE (arg0) == RSHIFT_EXPR
11074 && TYPE_PRECISION (TREE_TYPE (arg0))
11075 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg0))))
11077 unsigned int prec = TYPE_PRECISION (TREE_TYPE (arg0));
11078 tree arg00 = TREE_OPERAND (arg0, 0);
11079 /* See if more bits can be proven as zero because of
11081 if (TREE_CODE (arg00) == NOP_EXPR
11082 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg00, 0))))
11084 tree inner_type = TREE_TYPE (TREE_OPERAND (arg00, 0));
11085 if (TYPE_PRECISION (inner_type)
11086 == GET_MODE_BITSIZE (TYPE_MODE (inner_type))
11087 && TYPE_PRECISION (inner_type) < prec)
11089 prec = TYPE_PRECISION (inner_type);
11090 /* See if we can shorten the right shift. */
11092 shift_type = inner_type;
11095 zerobits = ~(unsigned HOST_WIDE_INT) 0;
11096 zerobits >>= HOST_BITS_PER_WIDE_INT - shiftc;
11097 zerobits <<= prec - shiftc;
11098 /* For arithmetic shift if sign bit could be set, zerobits
11099 can contain actually sign bits, so no transformation is
11100 possible, unless MASK masks them all away. In that
11101 case the shift needs to be converted into logical shift. */
11102 if (!TYPE_UNSIGNED (TREE_TYPE (arg0))
11103 && prec == TYPE_PRECISION (TREE_TYPE (arg0)))
11105 if ((mask & zerobits) == 0)
11106 shift_type = unsigned_type_for (TREE_TYPE (arg0));
11112 /* ((X << 16) & 0xff00) is (X, 0). */
11113 if ((mask & zerobits) == mask)
11114 return omit_one_operand (type, build_int_cst (type, 0), arg0);
11116 newmask = mask | zerobits;
11117 if (newmask != mask && (newmask & (newmask + 1)) == 0)
11121 /* Only do the transformation if NEWMASK is some integer
11123 for (prec = BITS_PER_UNIT;
11124 prec < HOST_BITS_PER_WIDE_INT; prec <<= 1)
11125 if (newmask == (((unsigned HOST_WIDE_INT) 1) << prec) - 1)
11127 if (prec < HOST_BITS_PER_WIDE_INT
11128 || newmask == ~(unsigned HOST_WIDE_INT) 0)
11130 if (shift_type != TREE_TYPE (arg0))
11132 tem = fold_build2 (TREE_CODE (arg0), shift_type,
11133 fold_convert (shift_type,
11134 TREE_OPERAND (arg0, 0)),
11135 TREE_OPERAND (arg0, 1));
11136 tem = fold_convert (type, tem);
11140 return fold_build2 (BIT_AND_EXPR, type, tem,
11141 build_int_cst_type (TREE_TYPE (op1),
11150 /* Don't touch a floating-point divide by zero unless the mode
11151 of the constant can represent infinity. */
11152 if (TREE_CODE (arg1) == REAL_CST
11153 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
11154 && real_zerop (arg1))
11157 /* Optimize A / A to 1.0 if we don't care about
11158 NaNs or Infinities. Skip the transformation
11159 for non-real operands. */
11160 if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
11161 && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
11162 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
11163 && operand_equal_p (arg0, arg1, 0))
11165 tree r = build_real (TREE_TYPE (arg0), dconst1);
11167 return omit_two_operands (type, r, arg0, arg1);
11170 /* The complex version of the above A / A optimization. */
11171 if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
11172 && operand_equal_p (arg0, arg1, 0))
11174 tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
11175 if (! HONOR_NANS (TYPE_MODE (elem_type))
11176 && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
11178 tree r = build_real (elem_type, dconst1);
11179 /* omit_two_operands will call fold_convert for us. */
11180 return omit_two_operands (type, r, arg0, arg1);
11184 /* (-A) / (-B) -> A / B */
11185 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
11186 return fold_build2 (RDIV_EXPR, type,
11187 TREE_OPERAND (arg0, 0),
11188 negate_expr (arg1));
11189 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
11190 return fold_build2 (RDIV_EXPR, type,
11191 negate_expr (arg0),
11192 TREE_OPERAND (arg1, 0));
11194 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
11195 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11196 && real_onep (arg1))
11197 return non_lvalue (fold_convert (type, arg0));
11199 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
11200 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11201 && real_minus_onep (arg1))
11202 return non_lvalue (fold_convert (type, negate_expr (arg0)));
11204 /* If ARG1 is a constant, we can convert this to a multiply by the
11205 reciprocal. This does not have the same rounding properties,
11206 so only do this if -freciprocal-math. We can actually
11207 always safely do it if ARG1 is a power of two, but it's hard to
11208 tell if it is or not in a portable manner. */
11209 if (TREE_CODE (arg1) == REAL_CST)
11211 if (flag_reciprocal_math
11212 && 0 != (tem = const_binop (code, build_real (type, dconst1),
11214 return fold_build2 (MULT_EXPR, type, arg0, tem);
11215 /* Find the reciprocal if optimizing and the result is exact. */
11219 r = TREE_REAL_CST (arg1);
11220 if (exact_real_inverse (TYPE_MODE(TREE_TYPE(arg0)), &r))
11222 tem = build_real (type, r);
11223 return fold_build2 (MULT_EXPR, type,
11224 fold_convert (type, arg0), tem);
11228 /* Convert A/B/C to A/(B*C). */
11229 if (flag_reciprocal_math
11230 && TREE_CODE (arg0) == RDIV_EXPR)
11231 return fold_build2 (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
11232 fold_build2 (MULT_EXPR, type,
11233 TREE_OPERAND (arg0, 1), arg1));
11235 /* Convert A/(B/C) to (A/B)*C. */
11236 if (flag_reciprocal_math
11237 && TREE_CODE (arg1) == RDIV_EXPR)
11238 return fold_build2 (MULT_EXPR, type,
11239 fold_build2 (RDIV_EXPR, type, arg0,
11240 TREE_OPERAND (arg1, 0)),
11241 TREE_OPERAND (arg1, 1));
11243 /* Convert C1/(X*C2) into (C1/C2)/X. */
11244 if (flag_reciprocal_math
11245 && TREE_CODE (arg1) == MULT_EXPR
11246 && TREE_CODE (arg0) == REAL_CST
11247 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
11249 tree tem = const_binop (RDIV_EXPR, arg0,
11250 TREE_OPERAND (arg1, 1), 0);
11252 return fold_build2 (RDIV_EXPR, type, tem,
11253 TREE_OPERAND (arg1, 0));
11256 if (flag_unsafe_math_optimizations)
11258 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
11259 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
11261 /* Optimize sin(x)/cos(x) as tan(x). */
11262 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
11263 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
11264 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
11265 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11266 CALL_EXPR_ARG (arg1, 0), 0))
11268 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11270 if (tanfn != NULL_TREE)
11271 return build_call_expr (tanfn, 1, CALL_EXPR_ARG (arg0, 0));
11274 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
11275 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
11276 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
11277 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
11278 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11279 CALL_EXPR_ARG (arg1, 0), 0))
11281 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11283 if (tanfn != NULL_TREE)
11285 tree tmp = build_call_expr (tanfn, 1, CALL_EXPR_ARG (arg0, 0));
11286 return fold_build2 (RDIV_EXPR, type,
11287 build_real (type, dconst1), tmp);
11291 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
11292 NaNs or Infinities. */
11293 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
11294 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
11295 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
11297 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11298 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11300 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
11301 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
11302 && operand_equal_p (arg00, arg01, 0))
11304 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11306 if (cosfn != NULL_TREE)
11307 return build_call_expr (cosfn, 1, arg00);
11311 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
11312 NaNs or Infinities. */
11313 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
11314 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
11315 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
11317 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11318 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11320 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
11321 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
11322 && operand_equal_p (arg00, arg01, 0))
11324 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11326 if (cosfn != NULL_TREE)
11328 tree tmp = build_call_expr (cosfn, 1, arg00);
11329 return fold_build2 (RDIV_EXPR, type,
11330 build_real (type, dconst1),
11336 /* Optimize pow(x,c)/x as pow(x,c-1). */
11337 if (fcode0 == BUILT_IN_POW
11338 || fcode0 == BUILT_IN_POWF
11339 || fcode0 == BUILT_IN_POWL)
11341 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11342 tree arg01 = CALL_EXPR_ARG (arg0, 1);
11343 if (TREE_CODE (arg01) == REAL_CST
11344 && !TREE_OVERFLOW (arg01)
11345 && operand_equal_p (arg1, arg00, 0))
11347 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11351 c = TREE_REAL_CST (arg01);
11352 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
11353 arg = build_real (type, c);
11354 return build_call_expr (powfn, 2, arg1, arg);
11358 /* Optimize a/root(b/c) into a*root(c/b). */
11359 if (BUILTIN_ROOT_P (fcode1))
11361 tree rootarg = CALL_EXPR_ARG (arg1, 0);
11363 if (TREE_CODE (rootarg) == RDIV_EXPR)
11365 tree rootfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11366 tree b = TREE_OPERAND (rootarg, 0);
11367 tree c = TREE_OPERAND (rootarg, 1);
11369 tree tmp = fold_build2 (RDIV_EXPR, type, c, b);
11371 tmp = build_call_expr (rootfn, 1, tmp);
11372 return fold_build2 (MULT_EXPR, type, arg0, tmp);
11376 /* Optimize x/expN(y) into x*expN(-y). */
11377 if (BUILTIN_EXPONENT_P (fcode1))
11379 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11380 tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
11381 arg1 = build_call_expr (expfn, 1, fold_convert (type, arg));
11382 return fold_build2 (MULT_EXPR, type, arg0, arg1);
11385 /* Optimize x/pow(y,z) into x*pow(y,-z). */
11386 if (fcode1 == BUILT_IN_POW
11387 || fcode1 == BUILT_IN_POWF
11388 || fcode1 == BUILT_IN_POWL)
11390 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11391 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11392 tree arg11 = CALL_EXPR_ARG (arg1, 1);
11393 tree neg11 = fold_convert (type, negate_expr (arg11));
11394 arg1 = build_call_expr (powfn, 2, arg10, neg11);
11395 return fold_build2 (MULT_EXPR, type, arg0, arg1);
11400 case TRUNC_DIV_EXPR:
11401 case FLOOR_DIV_EXPR:
11402 /* Simplify A / (B << N) where A and B are positive and B is
11403 a power of 2, to A >> (N + log2(B)). */
11404 strict_overflow_p = false;
11405 if (TREE_CODE (arg1) == LSHIFT_EXPR
11406 && (TYPE_UNSIGNED (type)
11407 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
11409 tree sval = TREE_OPERAND (arg1, 0);
11410 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
11412 tree sh_cnt = TREE_OPERAND (arg1, 1);
11413 unsigned long pow2 = exact_log2 (TREE_INT_CST_LOW (sval));
11415 if (strict_overflow_p)
11416 fold_overflow_warning (("assuming signed overflow does not "
11417 "occur when simplifying A / (B << N)"),
11418 WARN_STRICT_OVERFLOW_MISC);
11420 sh_cnt = fold_build2 (PLUS_EXPR, TREE_TYPE (sh_cnt),
11421 sh_cnt, build_int_cst (NULL_TREE, pow2));
11422 return fold_build2 (RSHIFT_EXPR, type,
11423 fold_convert (type, arg0), sh_cnt);
11427 /* For unsigned integral types, FLOOR_DIV_EXPR is the same as
11428 TRUNC_DIV_EXPR. Rewrite into the latter in this case. */
11429 if (INTEGRAL_TYPE_P (type)
11430 && TYPE_UNSIGNED (type)
11431 && code == FLOOR_DIV_EXPR)
11432 return fold_build2 (TRUNC_DIV_EXPR, type, op0, op1);
11436 case ROUND_DIV_EXPR:
11437 case CEIL_DIV_EXPR:
11438 case EXACT_DIV_EXPR:
11439 if (integer_onep (arg1))
11440 return non_lvalue (fold_convert (type, arg0));
11441 if (integer_zerop (arg1))
11443 /* X / -1 is -X. */
11444 if (!TYPE_UNSIGNED (type)
11445 && TREE_CODE (arg1) == INTEGER_CST
11446 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
11447 && TREE_INT_CST_HIGH (arg1) == -1)
11448 return fold_convert (type, negate_expr (arg0));
11450 /* Convert -A / -B to A / B when the type is signed and overflow is
11452 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11453 && TREE_CODE (arg0) == NEGATE_EXPR
11454 && negate_expr_p (arg1))
11456 if (INTEGRAL_TYPE_P (type))
11457 fold_overflow_warning (("assuming signed overflow does not occur "
11458 "when distributing negation across "
11460 WARN_STRICT_OVERFLOW_MISC);
11461 return fold_build2 (code, type,
11462 fold_convert (type, TREE_OPERAND (arg0, 0)),
11463 negate_expr (arg1));
11465 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11466 && TREE_CODE (arg1) == NEGATE_EXPR
11467 && negate_expr_p (arg0))
11469 if (INTEGRAL_TYPE_P (type))
11470 fold_overflow_warning (("assuming signed overflow does not occur "
11471 "when distributing negation across "
11473 WARN_STRICT_OVERFLOW_MISC);
11474 return fold_build2 (code, type, negate_expr (arg0),
11475 TREE_OPERAND (arg1, 0));
11478 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
11479 operation, EXACT_DIV_EXPR.
11481 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
11482 At one time others generated faster code, it's not clear if they do
 11483 after the last round of changes to the DIV code in expmed.c. */
11484 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
11485 && multiple_of_p (type, arg0, arg1))
11486 return fold_build2 (EXACT_DIV_EXPR, type, arg0, arg1);
11488 strict_overflow_p = false;
11489 if (TREE_CODE (arg1) == INTEGER_CST
11490 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11491 &strict_overflow_p)))
11493 if (strict_overflow_p)
11494 fold_overflow_warning (("assuming signed overflow does not occur "
11495 "when simplifying division"),
11496 WARN_STRICT_OVERFLOW_MISC);
11497 return fold_convert (type, tem);
11502 case CEIL_MOD_EXPR:
11503 case FLOOR_MOD_EXPR:
11504 case ROUND_MOD_EXPR:
11505 case TRUNC_MOD_EXPR:
11506 /* X % 1 is always zero, but be sure to preserve any side
11508 if (integer_onep (arg1))
11509 return omit_one_operand (type, integer_zero_node, arg0);
11511 /* X % 0, return X % 0 unchanged so that we can get the
11512 proper warnings and errors. */
11513 if (integer_zerop (arg1))
11516 /* 0 % X is always zero, but be sure to preserve any side
11517 effects in X. Place this after checking for X == 0. */
11518 if (integer_zerop (arg0))
11519 return omit_one_operand (type, integer_zero_node, arg1);
11521 /* X % -1 is zero. */
11522 if (!TYPE_UNSIGNED (type)
11523 && TREE_CODE (arg1) == INTEGER_CST
11524 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
11525 && TREE_INT_CST_HIGH (arg1) == -1)
11526 return omit_one_operand (type, integer_zero_node, arg0);
11528 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
11529 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
11530 strict_overflow_p = false;
11531 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
11532 && (TYPE_UNSIGNED (type)
11533 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
11536 /* Also optimize A % (C << N) where C is a power of 2,
11537 to A & ((C << N) - 1). */
11538 if (TREE_CODE (arg1) == LSHIFT_EXPR)
11539 c = TREE_OPERAND (arg1, 0);
11541 if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
11543 tree mask = fold_build2 (MINUS_EXPR, TREE_TYPE (arg1), arg1,
11544 build_int_cst (TREE_TYPE (arg1), 1));
11545 if (strict_overflow_p)
11546 fold_overflow_warning (("assuming signed overflow does not "
11547 "occur when simplifying "
11548 "X % (power of two)"),
11549 WARN_STRICT_OVERFLOW_MISC);
11550 return fold_build2 (BIT_AND_EXPR, type,
11551 fold_convert (type, arg0),
11552 fold_convert (type, mask));
11556 /* X % -C is the same as X % C. */
11557 if (code == TRUNC_MOD_EXPR
11558 && !TYPE_UNSIGNED (type)
11559 && TREE_CODE (arg1) == INTEGER_CST
11560 && !TREE_OVERFLOW (arg1)
11561 && TREE_INT_CST_HIGH (arg1) < 0
11562 && !TYPE_OVERFLOW_TRAPS (type)
11563 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
11564 && !sign_bit_p (arg1, arg1))
11565 return fold_build2 (code, type, fold_convert (type, arg0),
11566 fold_convert (type, negate_expr (arg1)));
11568 /* X % -Y is the same as X % Y. */
11569 if (code == TRUNC_MOD_EXPR
11570 && !TYPE_UNSIGNED (type)
11571 && TREE_CODE (arg1) == NEGATE_EXPR
11572 && !TYPE_OVERFLOW_TRAPS (type))
11573 return fold_build2 (code, type, fold_convert (type, arg0),
11574 fold_convert (type, TREE_OPERAND (arg1, 0)));
11576 if (TREE_CODE (arg1) == INTEGER_CST
11577 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11578 &strict_overflow_p)))
11580 if (strict_overflow_p)
11581 fold_overflow_warning (("assuming signed overflow does not occur "
11582 "when simplifying modulos"),
11583 WARN_STRICT_OVERFLOW_MISC);
11584 return fold_convert (type, tem);
11591 if (integer_all_onesp (arg0))
11592 return omit_one_operand (type, arg0, arg1);
11596 /* Optimize -1 >> x for arithmetic right shifts. */
11597 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type))
11598 return omit_one_operand (type, arg0, arg1);
11599 /* ... fall through ... */
11603 if (integer_zerop (arg1))
11604 return non_lvalue (fold_convert (type, arg0));
11605 if (integer_zerop (arg0))
11606 return omit_one_operand (type, arg0, arg1);
11608 /* Since negative shift count is not well-defined,
11609 don't try to compute it in the compiler. */
11610 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
11613 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
11614 if (TREE_CODE (op0) == code && host_integerp (arg1, false)
11615 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
11616 && host_integerp (TREE_OPERAND (arg0, 1), false)
11617 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
11619 HOST_WIDE_INT low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
11620 + TREE_INT_CST_LOW (arg1));
11622 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
11623 being well defined. */
11624 if (low >= TYPE_PRECISION (type))
11626 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
11627 low = low % TYPE_PRECISION (type);
11628 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
11629 return build_int_cst (type, 0);
11631 low = TYPE_PRECISION (type) - 1;
11634 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
11635 build_int_cst (type, low));
11638 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
11639 into x & ((unsigned)-1 >> c) for unsigned types. */
11640 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
11641 || (TYPE_UNSIGNED (type)
11642 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
11643 && host_integerp (arg1, false)
11644 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
11645 && host_integerp (TREE_OPERAND (arg0, 1), false)
11646 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
11648 HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
11649 HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
11655 arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));
11657 lshift = build_int_cst (type, -1);
11658 lshift = int_const_binop (code, lshift, arg1, 0);
11660 return fold_build2 (BIT_AND_EXPR, type, arg00, lshift);
11664 /* Rewrite an LROTATE_EXPR by a constant into an
11665 RROTATE_EXPR by a new constant. */
11666 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
11668 tree tem = build_int_cst (TREE_TYPE (arg1),
11669 TYPE_PRECISION (type));
11670 tem = const_binop (MINUS_EXPR, tem, arg1, 0);
11671 return fold_build2 (RROTATE_EXPR, type, op0, tem);
11674 /* If we have a rotate of a bit operation with the rotate count and
11675 the second operand of the bit operation both constant,
11676 permute the two operations. */
11677 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
11678 && (TREE_CODE (arg0) == BIT_AND_EXPR
11679 || TREE_CODE (arg0) == BIT_IOR_EXPR
11680 || TREE_CODE (arg0) == BIT_XOR_EXPR)
11681 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11682 return fold_build2 (TREE_CODE (arg0), type,
11683 fold_build2 (code, type,
11684 TREE_OPERAND (arg0, 0), arg1),
11685 fold_build2 (code, type,
11686 TREE_OPERAND (arg0, 1), arg1));
11688 /* Two consecutive rotates adding up to the precision of the
11689 type can be ignored. */
11690 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
11691 && TREE_CODE (arg0) == RROTATE_EXPR
11692 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11693 && TREE_INT_CST_HIGH (arg1) == 0
11694 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
11695 && ((TREE_INT_CST_LOW (arg1)
11696 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
11697 == (unsigned int) TYPE_PRECISION (type)))
11698 return TREE_OPERAND (arg0, 0);
11700 /* Fold (X & C2) << C1 into (X << C1) & (C2 << C1)
11701 (X & C2) >> C1 into (X >> C1) & (C2 >> C1)
11702 if the latter can be further optimized. */
11703 if ((code == LSHIFT_EXPR || code == RSHIFT_EXPR)
11704 && TREE_CODE (arg0) == BIT_AND_EXPR
11705 && TREE_CODE (arg1) == INTEGER_CST
11706 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11708 tree mask = fold_build2 (code, type,
11709 fold_convert (type, TREE_OPERAND (arg0, 1)),
11711 tree shift = fold_build2 (code, type,
11712 fold_convert (type, TREE_OPERAND (arg0, 0)),
11714 tem = fold_binary (BIT_AND_EXPR, type, shift, mask);
11722 if (operand_equal_p (arg0, arg1, 0))
11723 return omit_one_operand (type, arg0, arg1);
11724 if (INTEGRAL_TYPE_P (type)
11725 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
11726 return omit_one_operand (type, arg1, arg0);
11727 tem = fold_minmax (MIN_EXPR, type, arg0, arg1);
11733 if (operand_equal_p (arg0, arg1, 0))
11734 return omit_one_operand (type, arg0, arg1);
11735 if (INTEGRAL_TYPE_P (type)
11736 && TYPE_MAX_VALUE (type)
11737 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
11738 return omit_one_operand (type, arg1, arg0);
11739 tem = fold_minmax (MAX_EXPR, type, arg0, arg1);
11744 case TRUTH_ANDIF_EXPR:
11745 /* Note that the operands of this must be ints
11746 and their values must be 0 or 1.
11747 ("true" is a fixed value perhaps depending on the language.) */
11748 /* If first arg is constant zero, return it. */
11749 if (integer_zerop (arg0))
11750 return fold_convert (type, arg0);
11751 case TRUTH_AND_EXPR:
11752 /* If either arg is constant true, drop it. */
11753 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
11754 return non_lvalue (fold_convert (type, arg1));
11755 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
11756 /* Preserve sequence points. */
11757 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
11758 return non_lvalue (fold_convert (type, arg0));
11759 /* If second arg is constant zero, result is zero, but first arg
11760 must be evaluated. */
11761 if (integer_zerop (arg1))
11762 return omit_one_operand (type, arg1, arg0);
11763 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
11764 case will be handled here. */
11765 if (integer_zerop (arg0))
11766 return omit_one_operand (type, arg0, arg1);
11768 /* !X && X is always false. */
11769 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11770 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11771 return omit_one_operand (type, integer_zero_node, arg1);
11772 /* X && !X is always false. */
11773 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
11774 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11775 return omit_one_operand (type, integer_zero_node, arg0);
11777 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
11778 means A >= Y && A != MAX, but in this case we know that
11781 if (!TREE_SIDE_EFFECTS (arg0)
11782 && !TREE_SIDE_EFFECTS (arg1))
11784 tem = fold_to_nonsharp_ineq_using_bound (arg0, arg1);
11785 if (tem && !operand_equal_p (tem, arg0, 0))
11786 return fold_build2 (code, type, tem, arg1);
11788 tem = fold_to_nonsharp_ineq_using_bound (arg1, arg0);
11789 if (tem && !operand_equal_p (tem, arg1, 0))
11790 return fold_build2 (code, type, arg0, tem);
11794 /* We only do these simplifications if we are optimizing. */
11798 /* Check for things like (A || B) && (A || C). We can convert this
11799 to A || (B && C). Note that either operator can be any of the four
11800 truth and/or operations and the transformation will still be
11801 valid. Also note that we only care about order for the
11802 ANDIF and ORIF operators. If B contains side effects, this
11803 might change the truth-value of A. */
11804 if (TREE_CODE (arg0) == TREE_CODE (arg1)
11805 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
11806 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
11807 || TREE_CODE (arg0) == TRUTH_AND_EXPR
11808 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
11809 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
11811 tree a00 = TREE_OPERAND (arg0, 0);
11812 tree a01 = TREE_OPERAND (arg0, 1);
11813 tree a10 = TREE_OPERAND (arg1, 0);
11814 tree a11 = TREE_OPERAND (arg1, 1);
11815 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
11816 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
11817 && (code == TRUTH_AND_EXPR
11818 || code == TRUTH_OR_EXPR));
11820 if (operand_equal_p (a00, a10, 0))
11821 return fold_build2 (TREE_CODE (arg0), type, a00,
11822 fold_build2 (code, type, a01, a11));
11823 else if (commutative && operand_equal_p (a00, a11, 0))
11824 return fold_build2 (TREE_CODE (arg0), type, a00,
11825 fold_build2 (code, type, a01, a10));
11826 else if (commutative && operand_equal_p (a01, a10, 0))
11827 return fold_build2 (TREE_CODE (arg0), type, a01,
11828 fold_build2 (code, type, a00, a11));
 11830 /* This case is tricky because we must either have commutative
11831 operators or else A10 must not have side-effects. */
11833 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
11834 && operand_equal_p (a01, a11, 0))
11835 return fold_build2 (TREE_CODE (arg0), type,
11836 fold_build2 (code, type, a00, a10),
11840 /* See if we can build a range comparison. */
11841 if (0 != (tem = fold_range_test (code, type, op0, op1)))
11844 /* Check for the possibility of merging component references. If our
11845 lhs is another similar operation, try to merge its rhs with our
11846 rhs. Then try to merge our lhs and rhs. */
11847 if (TREE_CODE (arg0) == code
11848 && 0 != (tem = fold_truthop (code, type,
11849 TREE_OPERAND (arg0, 1), arg1)))
11850 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
11852 if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
11857 case TRUTH_ORIF_EXPR:
11858 /* Note that the operands of this must be ints
11859 and their values must be 0 or true.
11860 ("true" is a fixed value perhaps depending on the language.) */
11861 /* If first arg is constant true, return it. */
11862 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
11863 return fold_convert (type, arg0);
11864 case TRUTH_OR_EXPR:
11865 /* If either arg is constant zero, drop it. */
11866 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
11867 return non_lvalue (fold_convert (type, arg1));
11868 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
11869 /* Preserve sequence points. */
11870 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
11871 return non_lvalue (fold_convert (type, arg0));
11872 /* If second arg is constant true, result is true, but we must
11873 evaluate first arg. */
11874 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
11875 return omit_one_operand (type, arg1, arg0);
11876 /* Likewise for first arg, but note this only occurs here for
11878 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
11879 return omit_one_operand (type, arg0, arg1);
11881 /* !X || X is always true. */
11882 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11883 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11884 return omit_one_operand (type, integer_one_node, arg1);
11885 /* X || !X is always true. */
11886 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
11887 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11888 return omit_one_operand (type, integer_one_node, arg0);
11892 case TRUTH_XOR_EXPR:
11893 /* If the second arg is constant zero, drop it. */
11894 if (integer_zerop (arg1))
11895 return non_lvalue (fold_convert (type, arg0));
11896 /* If the second arg is constant true, this is a logical inversion. */
11897 if (integer_onep (arg1))
11899 /* Only call invert_truthvalue if operand is a truth value. */
11900 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
11901 tem = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg0), arg0);
11903 tem = invert_truthvalue (arg0);
11904 return non_lvalue (fold_convert (type, tem));
11906 /* Identical arguments cancel to zero. */
11907 if (operand_equal_p (arg0, arg1, 0))
11908 return omit_one_operand (type, integer_zero_node, arg0);
11910 /* !X ^ X is always true. */
11911 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11912 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11913 return omit_one_operand (type, integer_one_node, arg1);
11915 /* X ^ !X is always true. */
11916 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
11917 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11918 return omit_one_operand (type, integer_one_node, arg0);
11924 tem = fold_comparison (code, type, op0, op1);
11925 if (tem != NULL_TREE)
11928 /* bool_var != 0 becomes bool_var. */
11929 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
11930 && code == NE_EXPR)
11931 return non_lvalue (fold_convert (type, arg0));
11933 /* bool_var == 1 becomes bool_var. */
11934 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
11935 && code == EQ_EXPR)
11936 return non_lvalue (fold_convert (type, arg0));
11938 /* bool_var != 1 becomes !bool_var. */
11939 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
11940 && code == NE_EXPR)
11941 return fold_build1 (TRUTH_NOT_EXPR, type, fold_convert (type, arg0));
11943 /* bool_var == 0 becomes !bool_var. */
11944 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
11945 && code == EQ_EXPR)
11946 return fold_build1 (TRUTH_NOT_EXPR, type, fold_convert (type, arg0));
11948 /* If this is an equality comparison of the address of two non-weak,
11949 unaliased symbols neither of which are extern (since we do not
11950 have access to attributes for externs), then we know the result. */
11951 if (TREE_CODE (arg0) == ADDR_EXPR
11952 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
11953 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
11954 && ! lookup_attribute ("alias",
11955 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
11956 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
11957 && TREE_CODE (arg1) == ADDR_EXPR
11958 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
11959 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
11960 && ! lookup_attribute ("alias",
11961 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
11962 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
11964 /* We know that we're looking at the address of two
11965 non-weak, unaliased, static _DECL nodes.
11967 It is both wasteful and incorrect to call operand_equal_p
11968 to compare the two ADDR_EXPR nodes. It is wasteful in that
11969 all we need to do is test pointer equality for the arguments
11970 to the two ADDR_EXPR nodes. It is incorrect to use
11971 operand_equal_p as that function is NOT equivalent to a
11972 C equality test. It can in fact return false for two
11973 objects which would test as equal using the C equality
11975 bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
11976 return constant_boolean_node (equal
11977 ? code == EQ_EXPR : code != EQ_EXPR,
11981 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
11982 a MINUS_EXPR of a constant, we can convert it into a comparison with
11983 a revised constant as long as no overflow occurs. */
11984 if (TREE_CODE (arg1) == INTEGER_CST
11985 && (TREE_CODE (arg0) == PLUS_EXPR
11986 || TREE_CODE (arg0) == MINUS_EXPR)
11987 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11988 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
11989 ? MINUS_EXPR : PLUS_EXPR,
11990 fold_convert (TREE_TYPE (arg0), arg1),
11991 TREE_OPERAND (arg0, 1), 0))
11992 && !TREE_OVERFLOW (tem))
11993 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
11995 /* Similarly for a NEGATE_EXPR. */
11996 if (TREE_CODE (arg0) == NEGATE_EXPR
11997 && TREE_CODE (arg1) == INTEGER_CST
11998 && 0 != (tem = negate_expr (arg1))
11999 && TREE_CODE (tem) == INTEGER_CST
12000 && !TREE_OVERFLOW (tem))
12001 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
12003 /* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2). */
12004 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12005 && TREE_CODE (arg1) == INTEGER_CST
12006 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12007 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
12008 fold_build2 (BIT_XOR_EXPR, TREE_TYPE (arg0),
12009 fold_convert (TREE_TYPE (arg0), arg1),
12010 TREE_OPERAND (arg0, 1)));
12012 /* Transform comparisons of the form X +- C CMP X. */
12013 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
12014 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12015 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12016 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
12017 || POINTER_TYPE_P (TREE_TYPE (arg0))))
12019 tree cst = TREE_OPERAND (arg0, 1);
12021 if (code == EQ_EXPR
12022 && !integer_zerop (cst))
12023 return omit_two_operands (type, boolean_false_node,
12024 TREE_OPERAND (arg0, 0), arg1);
12026 return omit_two_operands (type, boolean_true_node,
12027 TREE_OPERAND (arg0, 0), arg1);
12030 /* If we have X - Y == 0, we can convert that to X == Y and similarly
12031 for !=. Don't do this for ordered comparisons due to overflow. */
12032 if (TREE_CODE (arg0) == MINUS_EXPR
12033 && integer_zerop (arg1))
12034 return fold_build2 (code, type,
12035 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
12037 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
12038 if (TREE_CODE (arg0) == ABS_EXPR
12039 && (integer_zerop (arg1) || real_zerop (arg1)))
12040 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), arg1);
12042 /* If this is an EQ or NE comparison with zero and ARG0 is
12043 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
12044 two operations, but the latter can be done in one less insn
12045 on machines that have only two-operand insns or on which a
12046 constant cannot be the first operand. */
12047 if (TREE_CODE (arg0) == BIT_AND_EXPR
12048 && integer_zerop (arg1))
12050 tree arg00 = TREE_OPERAND (arg0, 0);
12051 tree arg01 = TREE_OPERAND (arg0, 1);
12052 if (TREE_CODE (arg00) == LSHIFT_EXPR
12053 && integer_onep (TREE_OPERAND (arg00, 0)))
12055 tree tem = fold_build2 (RSHIFT_EXPR, TREE_TYPE (arg00),
12056 arg01, TREE_OPERAND (arg00, 1));
12057 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12058 build_int_cst (TREE_TYPE (arg0), 1));
12059 return fold_build2 (code, type,
12060 fold_convert (TREE_TYPE (arg1), tem), arg1);
12062 else if (TREE_CODE (arg01) == LSHIFT_EXPR
12063 && integer_onep (TREE_OPERAND (arg01, 0)))
12065 tree tem = fold_build2 (RSHIFT_EXPR, TREE_TYPE (arg01),
12066 arg00, TREE_OPERAND (arg01, 1));
12067 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12068 build_int_cst (TREE_TYPE (arg0), 1));
12069 return fold_build2 (code, type,
12070 fold_convert (TREE_TYPE (arg1), tem), arg1);
12074 /* If this is an NE or EQ comparison of zero against the result of a
12075 signed MOD operation whose second operand is a power of 2, make
12076 the MOD operation unsigned since it is simpler and equivalent. */
12077 if (integer_zerop (arg1)
12078 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
12079 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
12080 || TREE_CODE (arg0) == CEIL_MOD_EXPR
12081 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
12082 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
12083 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12085 tree newtype = unsigned_type_for (TREE_TYPE (arg0));
12086 tree newmod = fold_build2 (TREE_CODE (arg0), newtype,
12087 fold_convert (newtype,
12088 TREE_OPERAND (arg0, 0)),
12089 fold_convert (newtype,
12090 TREE_OPERAND (arg0, 1)));
12092 return fold_build2 (code, type, newmod,
12093 fold_convert (newtype, arg1));
12096 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
12097 C1 is a valid shift constant, and C2 is a power of two, i.e.
12099 if (TREE_CODE (arg0) == BIT_AND_EXPR
12100 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
12101 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
12103 && integer_pow2p (TREE_OPERAND (arg0, 1))
12104 && integer_zerop (arg1))
12106 tree itype = TREE_TYPE (arg0);
12107 unsigned HOST_WIDE_INT prec = TYPE_PRECISION (itype);
12108 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
12110 /* Check for a valid shift count. */
12111 if (TREE_INT_CST_HIGH (arg001) == 0
12112 && TREE_INT_CST_LOW (arg001) < prec)
12114 tree arg01 = TREE_OPERAND (arg0, 1);
12115 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12116 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
12117 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
12118 can be rewritten as (X & (C2 << C1)) != 0. */
12119 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
12121 tem = fold_build2 (LSHIFT_EXPR, itype, arg01, arg001);
12122 tem = fold_build2 (BIT_AND_EXPR, itype, arg000, tem);
12123 return fold_build2 (code, type, tem, arg1);
12125 /* Otherwise, for signed (arithmetic) shifts,
12126 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
12127 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
12128 else if (!TYPE_UNSIGNED (itype))
12129 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
12130 arg000, build_int_cst (itype, 0));
 12131 /* Otherwise, for unsigned (logical) shifts,
12132 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
12133 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
12135 return omit_one_operand (type,
12136 code == EQ_EXPR ? integer_one_node
12137 : integer_zero_node,
12142 /* If this is an NE comparison of zero with an AND of one, remove the
12143 comparison since the AND will give the correct value. */
12144 if (code == NE_EXPR
12145 && integer_zerop (arg1)
12146 && TREE_CODE (arg0) == BIT_AND_EXPR
12147 && integer_onep (TREE_OPERAND (arg0, 1)))
12148 return fold_convert (type, arg0);
12150 /* If we have (A & C) == C where C is a power of 2, convert this into
12151 (A & C) != 0. Similarly for NE_EXPR. */
12152 if (TREE_CODE (arg0) == BIT_AND_EXPR
12153 && integer_pow2p (TREE_OPERAND (arg0, 1))
12154 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12155 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12156 arg0, fold_convert (TREE_TYPE (arg0),
12157 integer_zero_node));
12159 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
12160 bit, then fold the expression into A < 0 or A >= 0. */
12161 tem = fold_single_bit_test_into_sign_test (code, arg0, arg1, type);
12165 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
12166 Similarly for NE_EXPR. */
12167 if (TREE_CODE (arg0) == BIT_AND_EXPR
12168 && TREE_CODE (arg1) == INTEGER_CST
12169 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12171 tree notc = fold_build1 (BIT_NOT_EXPR,
12172 TREE_TYPE (TREE_OPERAND (arg0, 1)),
12173 TREE_OPERAND (arg0, 1));
12174 tree dandnotc = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
12176 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12177 if (integer_nonzerop (dandnotc))
12178 return omit_one_operand (type, rslt, arg0);
12181 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
12182 Similarly for NE_EXPR. */
12183 if (TREE_CODE (arg0) == BIT_IOR_EXPR
12184 && TREE_CODE (arg1) == INTEGER_CST
12185 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12187 tree notd = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
12188 tree candnotd = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
12189 TREE_OPERAND (arg0, 1), notd);
12190 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12191 if (integer_nonzerop (candnotd))
12192 return omit_one_operand (type, rslt, arg0);
12195 /* If this is a comparison of a field, we may be able to simplify it. */
12196 if ((TREE_CODE (arg0) == COMPONENT_REF
12197 || TREE_CODE (arg0) == BIT_FIELD_REF)
12198 /* Handle the constant case even without -O
12199 to make sure the warnings are given. */
12200 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
12202 t1 = optimize_bit_field_compare (code, type, arg0, arg1);
12207 /* Optimize comparisons of strlen vs zero to a compare of the
12208 first character of the string vs zero. To wit,
12209 strlen(ptr) == 0 => *ptr == 0
12210 strlen(ptr) != 0 => *ptr != 0
12211 Other cases should reduce to one of these two (or a constant)
12212 due to the return value of strlen being unsigned. */
12213 if (TREE_CODE (arg0) == CALL_EXPR
12214 && integer_zerop (arg1))
12216 tree fndecl = get_callee_fndecl (arg0);
12219 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
12220 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
12221 && call_expr_nargs (arg0) == 1
12222 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
12224 tree iref = build_fold_indirect_ref (CALL_EXPR_ARG (arg0, 0));
12225 return fold_build2 (code, type, iref,
12226 build_int_cst (TREE_TYPE (iref), 0));
12230 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
12231 of X. Similarly fold (X >> C) == 0 into X >= 0. */
12232 if (TREE_CODE (arg0) == RSHIFT_EXPR
12233 && integer_zerop (arg1)
12234 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12236 tree arg00 = TREE_OPERAND (arg0, 0);
12237 tree arg01 = TREE_OPERAND (arg0, 1);
12238 tree itype = TREE_TYPE (arg00);
12239 if (TREE_INT_CST_HIGH (arg01) == 0
12240 && TREE_INT_CST_LOW (arg01)
12241 == (unsigned HOST_WIDE_INT) (TYPE_PRECISION (itype) - 1))
12243 if (TYPE_UNSIGNED (itype))
12245 itype = signed_type_for (itype);
12246 arg00 = fold_convert (itype, arg00);
12248 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
12249 type, arg00, build_int_cst (itype, 0));
12253 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
12254 if (integer_zerop (arg1)
12255 && TREE_CODE (arg0) == BIT_XOR_EXPR)
12256 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
12257 TREE_OPERAND (arg0, 1));
12259 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
12260 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12261 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12262 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
12263 build_int_cst (TREE_TYPE (arg1), 0));
12264 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
12265 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12266 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12267 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
12268 return fold_build2 (code, type, TREE_OPERAND (arg0, 1),
12269 build_int_cst (TREE_TYPE (arg1), 0));
12271 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
12272 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12273 && TREE_CODE (arg1) == INTEGER_CST
12274 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12275 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
12276 fold_build2 (BIT_XOR_EXPR, TREE_TYPE (arg1),
12277 TREE_OPERAND (arg0, 1), arg1));
12279 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
12280 (X & C) == 0 when C is a single bit. */
12281 if (TREE_CODE (arg0) == BIT_AND_EXPR
12282 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
12283 && integer_zerop (arg1)
12284 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12286 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
12287 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
12288 TREE_OPERAND (arg0, 1));
12289 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
12293 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
12294 constant C is a power of two, i.e. a single bit. */
12295 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12296 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
12297 && integer_zerop (arg1)
12298 && integer_pow2p (TREE_OPERAND (arg0, 1))
12299 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12300 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12302 tree arg00 = TREE_OPERAND (arg0, 0);
12303 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12304 arg00, build_int_cst (TREE_TYPE (arg00), 0));
12307 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
12308 when C is a power of two, i.e. a single bit. */
12309 if (TREE_CODE (arg0) == BIT_AND_EXPR
12310 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
12311 && integer_zerop (arg1)
12312 && integer_pow2p (TREE_OPERAND (arg0, 1))
12313 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12314 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12316 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12317 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg000),
12318 arg000, TREE_OPERAND (arg0, 1));
12319 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12320 tem, build_int_cst (TREE_TYPE (tem), 0));
12323 if (integer_zerop (arg1)
12324 && tree_expr_nonzero_p (arg0))
12326 tree res = constant_boolean_node (code==NE_EXPR, type);
12327 return omit_one_operand (type, res, arg0);
12330 /* Fold -X op -Y as X op Y, where op is eq/ne. */
12331 if (TREE_CODE (arg0) == NEGATE_EXPR
12332 && TREE_CODE (arg1) == NEGATE_EXPR)
12333 return fold_build2 (code, type,
12334 TREE_OPERAND (arg0, 0),
12335 TREE_OPERAND (arg1, 0));
12337 /* Fold (X & C) op (Y & C) as ((X ^ Y) & C) op 0, and symmetries. */
12338 if (TREE_CODE (arg0) == BIT_AND_EXPR
12339 && TREE_CODE (arg1) == BIT_AND_EXPR)
12341 tree arg00 = TREE_OPERAND (arg0, 0);
12342 tree arg01 = TREE_OPERAND (arg0, 1);
12343 tree arg10 = TREE_OPERAND (arg1, 0);
12344 tree arg11 = TREE_OPERAND (arg1, 1);
12345 tree itype = TREE_TYPE (arg0);
12347 if (operand_equal_p (arg01, arg11, 0))
12348 return fold_build2 (code, type,
12349 fold_build2 (BIT_AND_EXPR, itype,
12350 fold_build2 (BIT_XOR_EXPR, itype,
12353 build_int_cst (itype, 0));
12355 if (operand_equal_p (arg01, arg10, 0))
12356 return fold_build2 (code, type,
12357 fold_build2 (BIT_AND_EXPR, itype,
12358 fold_build2 (BIT_XOR_EXPR, itype,
12361 build_int_cst (itype, 0));
12363 if (operand_equal_p (arg00, arg11, 0))
12364 return fold_build2 (code, type,
12365 fold_build2 (BIT_AND_EXPR, itype,
12366 fold_build2 (BIT_XOR_EXPR, itype,
12369 build_int_cst (itype, 0));
12371 if (operand_equal_p (arg00, arg10, 0))
12372 return fold_build2 (code, type,
12373 fold_build2 (BIT_AND_EXPR, itype,
12374 fold_build2 (BIT_XOR_EXPR, itype,
12377 build_int_cst (itype, 0));
12380 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12381 && TREE_CODE (arg1) == BIT_XOR_EXPR)
12383 tree arg00 = TREE_OPERAND (arg0, 0);
12384 tree arg01 = TREE_OPERAND (arg0, 1);
12385 tree arg10 = TREE_OPERAND (arg1, 0);
12386 tree arg11 = TREE_OPERAND (arg1, 1);
12387 tree itype = TREE_TYPE (arg0);
12389 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
12390 operand_equal_p guarantees no side-effects so we don't need
12391 to use omit_one_operand on Z. */
12392 if (operand_equal_p (arg01, arg11, 0))
12393 return fold_build2 (code, type, arg00, arg10);
12394 if (operand_equal_p (arg01, arg10, 0))
12395 return fold_build2 (code, type, arg00, arg11);
12396 if (operand_equal_p (arg00, arg11, 0))
12397 return fold_build2 (code, type, arg01, arg10);
12398 if (operand_equal_p (arg00, arg10, 0))
12399 return fold_build2 (code, type, arg01, arg11);
12401 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
12402 if (TREE_CODE (arg01) == INTEGER_CST
12403 && TREE_CODE (arg11) == INTEGER_CST)
12404 return fold_build2 (code, type,
12405 fold_build2 (BIT_XOR_EXPR, itype, arg00,
12406 fold_build2 (BIT_XOR_EXPR, itype,
12411 /* Attempt to simplify equality/inequality comparisons of complex
12412 values. Only lower the comparison if the result is known or
12413 can be simplified to a single scalar comparison. */
12414 if ((TREE_CODE (arg0) == COMPLEX_EXPR
12415 || TREE_CODE (arg0) == COMPLEX_CST)
12416 && (TREE_CODE (arg1) == COMPLEX_EXPR
12417 || TREE_CODE (arg1) == COMPLEX_CST))
12419 tree real0, imag0, real1, imag1;
12422 if (TREE_CODE (arg0) == COMPLEX_EXPR)
12424 real0 = TREE_OPERAND (arg0, 0);
12425 imag0 = TREE_OPERAND (arg0, 1);
12429 real0 = TREE_REALPART (arg0);
12430 imag0 = TREE_IMAGPART (arg0);
12433 if (TREE_CODE (arg1) == COMPLEX_EXPR)
12435 real1 = TREE_OPERAND (arg1, 0);
12436 imag1 = TREE_OPERAND (arg1, 1);
12440 real1 = TREE_REALPART (arg1);
12441 imag1 = TREE_IMAGPART (arg1);
12444 rcond = fold_binary (code, type, real0, real1);
12445 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
12447 if (integer_zerop (rcond))
12449 if (code == EQ_EXPR)
12450 return omit_two_operands (type, boolean_false_node,
12452 return fold_build2 (NE_EXPR, type, imag0, imag1);
12456 if (code == NE_EXPR)
12457 return omit_two_operands (type, boolean_true_node,
12459 return fold_build2 (EQ_EXPR, type, imag0, imag1);
12463 icond = fold_binary (code, type, imag0, imag1);
12464 if (icond && TREE_CODE (icond) == INTEGER_CST)
12466 if (integer_zerop (icond))
12468 if (code == EQ_EXPR)
12469 return omit_two_operands (type, boolean_false_node,
12471 return fold_build2 (NE_EXPR, type, real0, real1);
12475 if (code == NE_EXPR)
12476 return omit_two_operands (type, boolean_true_node,
12478 return fold_build2 (EQ_EXPR, type, real0, real1);
12489 tem = fold_comparison (code, type, op0, op1);
12490 if (tem != NULL_TREE)
12493 /* Transform comparisons of the form X +- C CMP X. */
12494 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
12495 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12496 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
12497 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
12498 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12499 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
12501 tree arg01 = TREE_OPERAND (arg0, 1);
12502 enum tree_code code0 = TREE_CODE (arg0);
12505 if (TREE_CODE (arg01) == REAL_CST)
12506 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
12508 is_positive = tree_int_cst_sgn (arg01);
12510 /* (X - c) > X becomes false. */
12511 if (code == GT_EXPR
12512 && ((code0 == MINUS_EXPR && is_positive >= 0)
12513 || (code0 == PLUS_EXPR && is_positive <= 0)))
12515 if (TREE_CODE (arg01) == INTEGER_CST
12516 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12517 fold_overflow_warning (("assuming signed overflow does not "
12518 "occur when assuming that (X - c) > X "
12519 "is always false"),
12520 WARN_STRICT_OVERFLOW_ALL);
12521 return constant_boolean_node (0, type);
12524 /* Likewise (X + c) < X becomes false. */
12525 if (code == LT_EXPR
12526 && ((code0 == PLUS_EXPR && is_positive >= 0)
12527 || (code0 == MINUS_EXPR && is_positive <= 0)))
12529 if (TREE_CODE (arg01) == INTEGER_CST
12530 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12531 fold_overflow_warning (("assuming signed overflow does not "
12532 "occur when assuming that "
12533 "(X + c) < X is always false"),
12534 WARN_STRICT_OVERFLOW_ALL);
12535 return constant_boolean_node (0, type);
12538 /* Convert (X - c) <= X to true. */
12539 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
12541 && ((code0 == MINUS_EXPR && is_positive >= 0)
12542 || (code0 == PLUS_EXPR && is_positive <= 0)))
12544 if (TREE_CODE (arg01) == INTEGER_CST
12545 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12546 fold_overflow_warning (("assuming signed overflow does not "
12547 "occur when assuming that "
12548 "(X - c) <= X is always true"),
12549 WARN_STRICT_OVERFLOW_ALL);
12550 return constant_boolean_node (1, type);
12553 /* Convert (X + c) >= X to true. */
12554 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
12556 && ((code0 == PLUS_EXPR && is_positive >= 0)
12557 || (code0 == MINUS_EXPR && is_positive <= 0)))
12559 if (TREE_CODE (arg01) == INTEGER_CST
12560 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12561 fold_overflow_warning (("assuming signed overflow does not "
12562 "occur when assuming that "
12563 "(X + c) >= X is always true"),
12564 WARN_STRICT_OVERFLOW_ALL);
12565 return constant_boolean_node (1, type);
12568 if (TREE_CODE (arg01) == INTEGER_CST)
12570 /* Convert X + c > X and X - c < X to true for integers. */
12571 if (code == GT_EXPR
12572 && ((code0 == PLUS_EXPR && is_positive > 0)
12573 || (code0 == MINUS_EXPR && is_positive < 0)))
12575 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12576 fold_overflow_warning (("assuming signed overflow does "
12577 "not occur when assuming that "
12578 "(X + c) > X is always true"),
12579 WARN_STRICT_OVERFLOW_ALL);
12580 return constant_boolean_node (1, type);
12583 if (code == LT_EXPR
12584 && ((code0 == MINUS_EXPR && is_positive > 0)
12585 || (code0 == PLUS_EXPR && is_positive < 0)))
12587 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12588 fold_overflow_warning (("assuming signed overflow does "
12589 "not occur when assuming that "
12590 "(X - c) < X is always true"),
12591 WARN_STRICT_OVERFLOW_ALL);
12592 return constant_boolean_node (1, type);
12595 /* Convert X + c <= X and X - c >= X to false for integers. */
12596 if (code == LE_EXPR
12597 && ((code0 == PLUS_EXPR && is_positive > 0)
12598 || (code0 == MINUS_EXPR && is_positive < 0)))
12600 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12601 fold_overflow_warning (("assuming signed overflow does "
12602 "not occur when assuming that "
12603 "(X + c) <= X is always false"),
12604 WARN_STRICT_OVERFLOW_ALL);
12605 return constant_boolean_node (0, type);
12608 if (code == GE_EXPR
12609 && ((code0 == MINUS_EXPR && is_positive > 0)
12610 || (code0 == PLUS_EXPR && is_positive < 0)))
12612 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12613 fold_overflow_warning (("assuming signed overflow does "
12614 "not occur when assuming that "
12615 "(X - c) >= X is always false"),
12616 WARN_STRICT_OVERFLOW_ALL);
12617 return constant_boolean_node (0, type);
12622 /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
12623 This transformation affects the cases which are handled in later
12624 optimizations involving comparisons with non-negative constants. */
12625 if (TREE_CODE (arg1) == INTEGER_CST
12626 && TREE_CODE (arg0) != INTEGER_CST
12627 && tree_int_cst_sgn (arg1) > 0)
12629 if (code == GE_EXPR)
12631 arg1 = const_binop (MINUS_EXPR, arg1,
12632 build_int_cst (TREE_TYPE (arg1), 1), 0);
12633 return fold_build2 (GT_EXPR, type, arg0,
12634 fold_convert (TREE_TYPE (arg0), arg1));
12636 if (code == LT_EXPR)
12638 arg1 = const_binop (MINUS_EXPR, arg1,
12639 build_int_cst (TREE_TYPE (arg1), 1), 0);
12640 return fold_build2 (LE_EXPR, type, arg0,
12641 fold_convert (TREE_TYPE (arg0), arg1));
12645 /* Comparisons with the highest or lowest possible integer of
12646 the specified precision will have known values. */
12648 tree arg1_type = TREE_TYPE (arg1);
12649 unsigned int width = TYPE_PRECISION (arg1_type);
12651 if (TREE_CODE (arg1) == INTEGER_CST
12652 && !TREE_OVERFLOW (arg1)
12653 && width <= 2 * HOST_BITS_PER_WIDE_INT
12654 && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
12656 HOST_WIDE_INT signed_max_hi;
12657 unsigned HOST_WIDE_INT signed_max_lo;
12658 unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;
12660 if (width <= HOST_BITS_PER_WIDE_INT)
12662 signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
12667 if (TYPE_UNSIGNED (arg1_type))
12669 max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
12675 max_lo = signed_max_lo;
12676 min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
12682 width -= HOST_BITS_PER_WIDE_INT;
12683 signed_max_lo = -1;
12684 signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
12689 if (TYPE_UNSIGNED (arg1_type))
12691 max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
12696 max_hi = signed_max_hi;
12697 min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
12701 if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
12702 && TREE_INT_CST_LOW (arg1) == max_lo)
12706 return omit_one_operand (type, integer_zero_node, arg0);
12709 return fold_build2 (EQ_EXPR, type, op0, op1);
12712 return omit_one_operand (type, integer_one_node, arg0);
12715 return fold_build2 (NE_EXPR, type, op0, op1);
12717 /* The GE_EXPR and LT_EXPR cases above are not normally
12718 reached because of previous transformations. */
12723 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
12725 && TREE_INT_CST_LOW (arg1) == max_lo - 1)
12729 arg1 = const_binop (PLUS_EXPR, arg1,
12730 build_int_cst (TREE_TYPE (arg1), 1), 0);
12731 return fold_build2 (EQ_EXPR, type,
12732 fold_convert (TREE_TYPE (arg1), arg0),
12735 arg1 = const_binop (PLUS_EXPR, arg1,
12736 build_int_cst (TREE_TYPE (arg1), 1), 0);
12737 return fold_build2 (NE_EXPR, type,
12738 fold_convert (TREE_TYPE (arg1), arg0),
12743 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
12745 && TREE_INT_CST_LOW (arg1) == min_lo)
12749 return omit_one_operand (type, integer_zero_node, arg0);
12752 return fold_build2 (EQ_EXPR, type, op0, op1);
12755 return omit_one_operand (type, integer_one_node, arg0);
12758 return fold_build2 (NE_EXPR, type, op0, op1);
12763 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
12765 && TREE_INT_CST_LOW (arg1) == min_lo + 1)
12769 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
12770 return fold_build2 (NE_EXPR, type,
12771 fold_convert (TREE_TYPE (arg1), arg0),
12774 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
12775 return fold_build2 (EQ_EXPR, type,
12776 fold_convert (TREE_TYPE (arg1), arg0),
12782 else if (TREE_INT_CST_HIGH (arg1) == signed_max_hi
12783 && TREE_INT_CST_LOW (arg1) == signed_max_lo
12784 && TYPE_UNSIGNED (arg1_type)
12785 /* We will flip the signedness of the comparison operator
12786 associated with the mode of arg1, so the sign bit is
12787 specified by this mode. Check that arg1 is the signed
12788 max associated with this sign bit. */
12789 && width == GET_MODE_BITSIZE (TYPE_MODE (arg1_type))
12790 /* signed_type does not work on pointer types. */
12791 && INTEGRAL_TYPE_P (arg1_type))
12793 /* The following case also applies to X < signed_max+1
12794 and X >= signed_max+1 because of previous transformations. */
12795 if (code == LE_EXPR || code == GT_EXPR)
12798 st = signed_type_for (TREE_TYPE (arg1));
12799 return fold_build2 (code == LE_EXPR ? GE_EXPR : LT_EXPR,
12800 type, fold_convert (st, arg0),
12801 build_int_cst (st, 0));
12807 /* If we are comparing an ABS_EXPR with a constant, we can
12808 convert all the cases into explicit comparisons, but they may
12809 well not be faster than doing the ABS and one comparison.
12810 But ABS (X) <= C is a range comparison, which becomes a subtraction
12811 and a comparison, and is probably faster. */
12812 if (code == LE_EXPR
12813 && TREE_CODE (arg1) == INTEGER_CST
12814 && TREE_CODE (arg0) == ABS_EXPR
12815 && ! TREE_SIDE_EFFECTS (arg0)
12816 && (0 != (tem = negate_expr (arg1)))
12817 && TREE_CODE (tem) == INTEGER_CST
12818 && !TREE_OVERFLOW (tem))
12819 return fold_build2 (TRUTH_ANDIF_EXPR, type,
12820 build2 (GE_EXPR, type,
12821 TREE_OPERAND (arg0, 0), tem),
12822 build2 (LE_EXPR, type,
12823 TREE_OPERAND (arg0, 0), arg1));
12825 /* Convert ABS_EXPR<x> >= 0 to true. */
12826 strict_overflow_p = false;
12827 if (code == GE_EXPR
12828 && (integer_zerop (arg1)
12829 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
12830 && real_zerop (arg1)))
12831 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
12833 if (strict_overflow_p)
12834 fold_overflow_warning (("assuming signed overflow does not occur "
12835 "when simplifying comparison of "
12836 "absolute value and zero"),
12837 WARN_STRICT_OVERFLOW_CONDITIONAL);
12838 return omit_one_operand (type, integer_one_node, arg0);
12841 /* Convert ABS_EXPR<x> < 0 to false. */
12842 strict_overflow_p = false;
12843 if (code == LT_EXPR
12844 && (integer_zerop (arg1) || real_zerop (arg1))
12845 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
12847 if (strict_overflow_p)
12848 fold_overflow_warning (("assuming signed overflow does not occur "
12849 "when simplifying comparison of "
12850 "absolute value and zero"),
12851 WARN_STRICT_OVERFLOW_CONDITIONAL);
12852 return omit_one_operand (type, integer_zero_node, arg0);
12855 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
12856 and similarly for >= into !=. */
12857 if ((code == LT_EXPR || code == GE_EXPR)
12858 && TYPE_UNSIGNED (TREE_TYPE (arg0))
12859 && TREE_CODE (arg1) == LSHIFT_EXPR
12860 && integer_onep (TREE_OPERAND (arg1, 0)))
12861 return build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
12862 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
12863 TREE_OPERAND (arg1, 1)),
12864 build_int_cst (TREE_TYPE (arg0), 0));
12866 if ((code == LT_EXPR || code == GE_EXPR)
12867 && TYPE_UNSIGNED (TREE_TYPE (arg0))
12868 && (TREE_CODE (arg1) == NOP_EXPR
12869 || TREE_CODE (arg1) == CONVERT_EXPR)
12870 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
12871 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
12873 build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
12874 fold_convert (TREE_TYPE (arg0),
12875 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
12876 TREE_OPERAND (TREE_OPERAND (arg1, 0),
12878 build_int_cst (TREE_TYPE (arg0), 0));
12882 case UNORDERED_EXPR:
12890 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
12892 t1 = fold_relational_const (code, type, arg0, arg1);
12893 if (t1 != NULL_TREE)
12897 /* If the first operand is NaN, the result is constant. */
12898 if (TREE_CODE (arg0) == REAL_CST
12899 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
12900 && (code != LTGT_EXPR || ! flag_trapping_math))
12902 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
12903 ? integer_zero_node
12904 : integer_one_node;
12905 return omit_one_operand (type, t1, arg1);
12908 /* If the second operand is NaN, the result is constant. */
12909 if (TREE_CODE (arg1) == REAL_CST
12910 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
12911 && (code != LTGT_EXPR || ! flag_trapping_math))
12913 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
12914 ? integer_zero_node
12915 : integer_one_node;
12916 return omit_one_operand (type, t1, arg0);
12919 /* Simplify unordered comparison of something with itself. */
12920 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
12921 && operand_equal_p (arg0, arg1, 0))
12922 return constant_boolean_node (1, type);
12924 if (code == LTGT_EXPR
12925 && !flag_trapping_math
12926 && operand_equal_p (arg0, arg1, 0))
12927 return constant_boolean_node (0, type);
12929 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
12931 tree targ0 = strip_float_extensions (arg0);
12932 tree targ1 = strip_float_extensions (arg1);
12933 tree newtype = TREE_TYPE (targ0);
12935 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
12936 newtype = TREE_TYPE (targ1);
12938 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
12939 return fold_build2 (code, type, fold_convert (newtype, targ0),
12940 fold_convert (newtype, targ1));
12945 case COMPOUND_EXPR:
12946 /* When pedantic, a compound expression can be neither an lvalue
12947 nor an integer constant expression. */
12948 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
12950 /* Don't let (0, 0) be null pointer constant. */
12951 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
12952 : fold_convert (type, arg1);
12953 return pedantic_non_lvalue (tem);
12956 if ((TREE_CODE (arg0) == REAL_CST
12957 && TREE_CODE (arg1) == REAL_CST)
12958 || (TREE_CODE (arg0) == INTEGER_CST
12959 && TREE_CODE (arg1) == INTEGER_CST))
12960 return build_complex (type, arg0, arg1);
12964 /* An ASSERT_EXPR should never be passed to fold_binary. */
12965 gcc_unreachable ();
12969 } /* switch (code) */
12972 /* Callback for walk_tree, looking for LABEL_EXPR.
12973 Returns *TP if it is a LABEL_EXPR. Otherwise it returns NULL_TREE.
12974 Do not check the sub-tree of GOTO_EXPR. */
12977 contains_label_1 (tree *tp,
12978 int *walk_subtrees,
12979 void *data ATTRIBUTE_UNUSED)
/* NOTE(review): the function body is truncated in this extraction -- the
   opening brace, the switch's case labels and the return statements are
   not visible here, only the dispatch and the pruning store below.  */
12981 switch (TREE_CODE (*tp))
/* Prune the walk: per the header comment above, the sub-tree of a
   GOTO_EXPR is deliberately not examined.  TODO confirm -- the case
   label guarding this store is not visible in this extraction.  */
12986 *walk_subtrees = 0;
12993 /* Checks whether the sub-tree ST contains a label LABEL_EXPR which is
12994 accessible from outside the sub-tree. Returns false if no
12995 addressable label is found, true otherwise. */
12998 contains_label_p (tree st)
/* contains_label_1 returns the LABEL_EXPR node when it sees one (see its
   header comment), and walk_tree presumably propagates that first
   non-null callback result -- so a non-NULL_TREE result here means an
   accessible label exists somewhere in ST.  */
13000 return (walk_tree (&st, contains_label_1 , NULL, NULL) != NULL_TREE);
13003 /* Fold a ternary expression of code CODE and type TYPE with operands
13004 OP0, OP1, and OP2. Return the folded expression if folding is
13005 successful. Otherwise, return NULL_TREE. */
13008 fold_ternary (enum tree_code code, tree type, tree op0, tree op1, tree op2)
13011 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
13012 enum tree_code_class kind = TREE_CODE_CLASS (code);
13014 gcc_assert (IS_EXPR_CODE_CLASS (kind)
13015 && TREE_CODE_LENGTH (code) == 3);
13017 /* Strip any conversions that don't change the mode. This is safe
13018 for every expression, except for a comparison expression because
13019 its signedness is derived from its operands. So, in the latter
13020 case, only strip conversions that don't change the signedness.
13022 Note that this is done as an internal manipulation within the
13023 constant folder, in order to find the simplest representation of
13024 the arguments so that their form can be studied. In any cases,
13025 the appropriate type conversions should be put back in the tree
13026 that will get out of the constant folder. */
13041 case COMPONENT_REF:
13042 if (TREE_CODE (arg0) == CONSTRUCTOR
13043 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
13045 unsigned HOST_WIDE_INT idx;
13047 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
13054 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
13055 so all simple results must be passed through pedantic_non_lvalue. */
13056 if (TREE_CODE (arg0) == INTEGER_CST)
13058 tree unused_op = integer_zerop (arg0) ? op1 : op2;
13059 tem = integer_zerop (arg0) ? op2 : op1;
13060 /* Only optimize constant conditions when the selected branch
13061 has the same type as the COND_EXPR. This avoids optimizing
13062 away "c ? x : throw", where the throw has a void type.
13063 Avoid throwing away that operand which contains label. */
13064 if ((!TREE_SIDE_EFFECTS (unused_op)
13065 || !contains_label_p (unused_op))
13066 && (! VOID_TYPE_P (TREE_TYPE (tem))
13067 || VOID_TYPE_P (type)))
13068 return pedantic_non_lvalue (tem);
13071 if (operand_equal_p (arg1, op2, 0))
13072 return pedantic_omit_one_operand (type, arg1, arg0);
13074 /* If we have A op B ? A : C, we may be able to convert this to a
13075 simpler expression, depending on the operation and the values
13076 of B and C. Signed zeros prevent all of these transformations,
13077 for reasons given above each one.
13079 Also try swapping the arguments and inverting the conditional. */
13080 if (COMPARISON_CLASS_P (arg0)
13081 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13082 arg1, TREE_OPERAND (arg0, 1))
13083 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
13085 tem = fold_cond_expr_with_comparison (type, arg0, op1, op2);
13090 if (COMPARISON_CLASS_P (arg0)
13091 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13093 TREE_OPERAND (arg0, 1))
13094 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
13096 tem = fold_truth_not_expr (arg0);
13097 if (tem && COMPARISON_CLASS_P (tem))
13099 tem = fold_cond_expr_with_comparison (type, tem, op2, op1);
13105 /* If the second operand is simpler than the third, swap them
13106 since that produces better jump optimization results. */
13107 if (truth_value_p (TREE_CODE (arg0))
13108 && tree_swap_operands_p (op1, op2, false))
13110 /* See if this can be inverted. If it can't, possibly because
13111 it was a floating-point inequality comparison, don't do
13113 tem = fold_truth_not_expr (arg0);
13115 return fold_build3 (code, type, tem, op2, op1);
13118 /* Convert A ? 1 : 0 to simply A. */
13119 if (integer_onep (op1)
13120 && integer_zerop (op2)
13121 /* If we try to convert OP0 to our type, the
13122 call to fold will try to move the conversion inside
13123 a COND, which will recurse. In that case, the COND_EXPR
13124 is probably the best choice, so leave it alone. */
13125 && type == TREE_TYPE (arg0))
13126 return pedantic_non_lvalue (arg0);
13128 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
13129 over COND_EXPR in cases such as floating point comparisons. */
13130 if (integer_zerop (op1)
13131 && integer_onep (op2)
13132 && truth_value_p (TREE_CODE (arg0)))
13133 return pedantic_non_lvalue (fold_convert (type,
13134 invert_truthvalue (arg0)));
13136 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
13137 if (TREE_CODE (arg0) == LT_EXPR
13138 && integer_zerop (TREE_OPERAND (arg0, 1))
13139 && integer_zerop (op2)
13140 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
13142 /* sign_bit_p only checks ARG1 bits within A's precision.
13143 If <sign bit of A> has wider type than A, bits outside
13144 of A's precision in <sign bit of A> need to be checked.
13145 If they are all 0, this optimization needs to be done
13146 in unsigned A's type, if they are all 1 in signed A's type,
13147 otherwise this can't be done. */
13148 if (TYPE_PRECISION (TREE_TYPE (tem))
13149 < TYPE_PRECISION (TREE_TYPE (arg1))
13150 && TYPE_PRECISION (TREE_TYPE (tem))
13151 < TYPE_PRECISION (type))
13153 unsigned HOST_WIDE_INT mask_lo;
13154 HOST_WIDE_INT mask_hi;
13155 int inner_width, outer_width;
13158 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
13159 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
13160 if (outer_width > TYPE_PRECISION (type))
13161 outer_width = TYPE_PRECISION (type);
13163 if (outer_width > HOST_BITS_PER_WIDE_INT)
13165 mask_hi = ((unsigned HOST_WIDE_INT) -1
13166 >> (2 * HOST_BITS_PER_WIDE_INT - outer_width));
13172 mask_lo = ((unsigned HOST_WIDE_INT) -1
13173 >> (HOST_BITS_PER_WIDE_INT - outer_width));
13175 if (inner_width > HOST_BITS_PER_WIDE_INT)
13177 mask_hi &= ~((unsigned HOST_WIDE_INT) -1
13178 >> (HOST_BITS_PER_WIDE_INT - inner_width));
13182 mask_lo &= ~((unsigned HOST_WIDE_INT) -1
13183 >> (HOST_BITS_PER_WIDE_INT - inner_width));
13185 if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == mask_hi
13186 && (TREE_INT_CST_LOW (arg1) & mask_lo) == mask_lo)
13188 tem_type = signed_type_for (TREE_TYPE (tem));
13189 tem = fold_convert (tem_type, tem);
13191 else if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == 0
13192 && (TREE_INT_CST_LOW (arg1) & mask_lo) == 0)
13194 tem_type = unsigned_type_for (TREE_TYPE (tem));
13195 tem = fold_convert (tem_type, tem);
13202 return fold_convert (type,
13203 fold_build2 (BIT_AND_EXPR,
13204 TREE_TYPE (tem), tem,
13205 fold_convert (TREE_TYPE (tem),
13209 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
13210 already handled above. */
13211 if (TREE_CODE (arg0) == BIT_AND_EXPR
13212 && integer_onep (TREE_OPERAND (arg0, 1))
13213 && integer_zerop (op2)
13214 && integer_pow2p (arg1))
13216 tree tem = TREE_OPERAND (arg0, 0);
13218 if (TREE_CODE (tem) == RSHIFT_EXPR
13219 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
13220 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
13221 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
13222 return fold_build2 (BIT_AND_EXPR, type,
13223 TREE_OPERAND (tem, 0), arg1);
13226 /* A & N ? N : 0 is simply A & N if N is a power of two. This
13227 is probably obsolete because the first operand should be a
13228 truth value (that's why we have the two cases above), but let's
13229 leave it in until we can confirm this for all front-ends. */
13230 if (integer_zerop (op2)
13231 && TREE_CODE (arg0) == NE_EXPR
13232 && integer_zerop (TREE_OPERAND (arg0, 1))
13233 && integer_pow2p (arg1)
13234 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
13235 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13236 arg1, OEP_ONLY_CONST))
13237 return pedantic_non_lvalue (fold_convert (type,
13238 TREE_OPERAND (arg0, 0)));
13240 /* Convert A ? B : 0 into A && B if A and B are truth values. */
13241 if (integer_zerop (op2)
13242 && truth_value_p (TREE_CODE (arg0))
13243 && truth_value_p (TREE_CODE (arg1)))
13244 return fold_build2 (TRUTH_ANDIF_EXPR, type,
13245 fold_convert (type, arg0),
13248 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
13249 if (integer_onep (op2)
13250 && truth_value_p (TREE_CODE (arg0))
13251 && truth_value_p (TREE_CODE (arg1)))
13253 /* Only perform transformation if ARG0 is easily inverted. */
13254 tem = fold_truth_not_expr (arg0);
13256 return fold_build2 (TRUTH_ORIF_EXPR, type,
13257 fold_convert (type, tem),
13261 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
13262 if (integer_zerop (arg1)
13263 && truth_value_p (TREE_CODE (arg0))
13264 && truth_value_p (TREE_CODE (op2)))
13266 /* Only perform transformation if ARG0 is easily inverted. */
13267 tem = fold_truth_not_expr (arg0);
13269 return fold_build2 (TRUTH_ANDIF_EXPR, type,
13270 fold_convert (type, tem),
13274 /* Convert A ? 1 : B into A || B if A and B are truth values. */
13275 if (integer_onep (arg1)
13276 && truth_value_p (TREE_CODE (arg0))
13277 && truth_value_p (TREE_CODE (op2)))
13278 return fold_build2 (TRUTH_ORIF_EXPR, type,
13279 fold_convert (type, arg0),
13285 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
13286 of fold_ternary on them. */
13287 gcc_unreachable ();
13289 case BIT_FIELD_REF:
13290 if ((TREE_CODE (arg0) == VECTOR_CST
13291 || (TREE_CODE (arg0) == CONSTRUCTOR && TREE_CONSTANT (arg0)))
13292 && type == TREE_TYPE (TREE_TYPE (arg0))
13293 && host_integerp (arg1, 1)
13294 && host_integerp (op2, 1))
13296 unsigned HOST_WIDE_INT width = tree_low_cst (arg1, 1);
13297 unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);
13300 && simple_cst_equal (arg1, TYPE_SIZE (type)) == 1
13301 && (idx % width) == 0
13302 && (idx = idx / width)
13303 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
13305 tree elements = NULL_TREE;
13307 if (TREE_CODE (arg0) == VECTOR_CST)
13308 elements = TREE_VECTOR_CST_ELTS (arg0);
13311 unsigned HOST_WIDE_INT idx;
13314 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (arg0), idx, value)
13315 elements = tree_cons (NULL_TREE, value, elements);
13317 while (idx-- > 0 && elements)
13318 elements = TREE_CHAIN (elements);
13320 return TREE_VALUE (elements);
13322 return fold_convert (type, integer_zero_node);
13329 } /* switch (code) */
13332 /* Perform constant folding and related simplification of EXPR.
13333 The related simplifications include x*1 => x, x*0 => 0, etc.,
13334 and application of the associative law.
13335 NOP_EXPR conversions may be removed freely (as long as we
13336 are careful not to change the type of the overall expression).
13337 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
13338 but we can constant-fold them if they have constant operands. */
13340 #ifdef ENABLE_FOLD_CHECKING
13341 # define fold(x) fold_1 (x)
13342 static tree fold_1 (tree);
13348 const tree t = expr;
13349 enum tree_code code = TREE_CODE (t);
13350 enum tree_code_class kind = TREE_CODE_CLASS (code);
13353 /* Return right away if a constant. */
13354 if (kind == tcc_constant)
13357 /* CALL_EXPR-like objects with variable numbers of operands are
13358 treated specially. */
13359 if (kind == tcc_vl_exp)
13361 if (code == CALL_EXPR)
13363 tem = fold_call_expr (expr, false);
13364 return tem ? tem : expr;
13369 if (IS_EXPR_CODE_CLASS (kind)
13370 || IS_GIMPLE_STMT_CODE_CLASS (kind))
13372 tree type = TREE_TYPE (t);
13373 tree op0, op1, op2;
13375 switch (TREE_CODE_LENGTH (code))
13378 op0 = TREE_OPERAND (t, 0);
13379 tem = fold_unary (code, type, op0);
13380 return tem ? tem : expr;
13382 op0 = TREE_OPERAND (t, 0);
13383 op1 = TREE_OPERAND (t, 1);
13384 tem = fold_binary (code, type, op0, op1);
13385 return tem ? tem : expr;
13387 op0 = TREE_OPERAND (t, 0);
13388 op1 = TREE_OPERAND (t, 1);
13389 op2 = TREE_OPERAND (t, 2);
13390 tem = fold_ternary (code, type, op0, op1, op2);
13391 return tem ? tem : expr;
13400 return fold (DECL_INITIAL (t));
13404 } /* switch (code) */
13407 #ifdef ENABLE_FOLD_CHECKING
13410 static void fold_checksum_tree (const_tree, struct md5_ctx *, htab_t);
13411 static void fold_check_failed (const_tree, const_tree);
13412 void print_fold_checksum (const_tree);
13414 /* When --enable-checking=fold, compute a digest of expr before
13415 and after actual fold call to see if fold did not accidentally
13416 change original expr. */
13422 struct md5_ctx ctx;
13423 unsigned char checksum_before[16], checksum_after[16];
13426 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13427 md5_init_ctx (&ctx);
13428 fold_checksum_tree (expr, &ctx, ht);
13429 md5_finish_ctx (&ctx, checksum_before);
13432 ret = fold_1 (expr);
13434 md5_init_ctx (&ctx);
13435 fold_checksum_tree (expr, &ctx, ht);
13436 md5_finish_ctx (&ctx, checksum_after);
13439 if (memcmp (checksum_before, checksum_after, 16))
13440 fold_check_failed (expr, ret);
13446 print_fold_checksum (const_tree expr)
13448 struct md5_ctx ctx;
13449 unsigned char checksum[16], cnt;
13452 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13453 md5_init_ctx (&ctx);
13454 fold_checksum_tree (expr, &ctx, ht);
13455 md5_finish_ctx (&ctx, checksum);
13457 for (cnt = 0; cnt < 16; ++cnt)
13458 fprintf (stderr, "%02x", checksum[cnt]);
13459 putc ('\n', stderr);
13463 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
13465 internal_error ("fold check: original tree changed by fold");
13469 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx, htab_t ht)
13472 enum tree_code code;
13473 struct tree_function_decl buf;
13478 gcc_assert ((sizeof (struct tree_exp) + 5 * sizeof (tree)
13479 <= sizeof (struct tree_function_decl))
13480 && sizeof (struct tree_type) <= sizeof (struct tree_function_decl));
13483 slot = (const void **) htab_find_slot (ht, expr, INSERT);
13487 code = TREE_CODE (expr);
13488 if (TREE_CODE_CLASS (code) == tcc_declaration
13489 && DECL_ASSEMBLER_NAME_SET_P (expr))
13491 /* Allow DECL_ASSEMBLER_NAME to be modified. */
13492 memcpy ((char *) &buf, expr, tree_size (expr));
13493 SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
13494 expr = (tree) &buf;
13496 else if (TREE_CODE_CLASS (code) == tcc_type
13497 && (TYPE_POINTER_TO (expr) || TYPE_REFERENCE_TO (expr)
13498 || TYPE_CACHED_VALUES_P (expr)
13499 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)))
13501 /* Allow these fields to be modified. */
13503 memcpy ((char *) &buf, expr, tree_size (expr));
13504 expr = tmp = (tree) &buf;
13505 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
13506 TYPE_POINTER_TO (tmp) = NULL;
13507 TYPE_REFERENCE_TO (tmp) = NULL;
13508 if (TYPE_CACHED_VALUES_P (tmp))
13510 TYPE_CACHED_VALUES_P (tmp) = 0;
13511 TYPE_CACHED_VALUES (tmp) = NULL;
13514 md5_process_bytes (expr, tree_size (expr), ctx);
13515 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
13516 if (TREE_CODE_CLASS (code) != tcc_type
13517 && TREE_CODE_CLASS (code) != tcc_declaration
13518 && code != TREE_LIST
13519 && code != SSA_NAME)
13520 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
13521 switch (TREE_CODE_CLASS (code))
13527 md5_process_bytes (TREE_STRING_POINTER (expr),
13528 TREE_STRING_LENGTH (expr), ctx);
13531 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
13532 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
13535 fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
13541 case tcc_exceptional:
13545 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
13546 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
13547 expr = TREE_CHAIN (expr);
13548 goto recursive_label;
13551 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
13552 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
13558 case tcc_expression:
13559 case tcc_reference:
13560 case tcc_comparison:
13563 case tcc_statement:
13565 len = TREE_OPERAND_LENGTH (expr);
13566 for (i = 0; i < len; ++i)
13567 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
13569 case tcc_declaration:
13570 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
13571 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
13572 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
13574 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
13575 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
13576 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
13577 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
13578 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
13580 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
13581 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
13583 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
13585 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
13586 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
13587 fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
13591 if (TREE_CODE (expr) == ENUMERAL_TYPE)
13592 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
13593 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
13594 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
13595 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
13596 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
13597 if (INTEGRAL_TYPE_P (expr)
13598 || SCALAR_FLOAT_TYPE_P (expr))
13600 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
13601 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
13603 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
13604 if (TREE_CODE (expr) == RECORD_TYPE
13605 || TREE_CODE (expr) == UNION_TYPE
13606 || TREE_CODE (expr) == QUAL_UNION_TYPE)
13607 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
13608 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
13615 /* Helper function for outputting the checksum of a tree T. When
13616 debugging with gdb, you can "define mynext" to be "next" followed
13617 by "call debug_fold_checksum (op0)", then just trace down till the
13621 debug_fold_checksum (const_tree t)
13624 unsigned char checksum[16];
13625 struct md5_ctx ctx;
13626 htab_t ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13628 md5_init_ctx (&ctx);
13629 fold_checksum_tree (t, &ctx, ht);
13630 md5_finish_ctx (&ctx, checksum);
13633 for (i = 0; i < 16; i++)
13634 fprintf (stderr, "%d ", checksum[i]);
13636 fprintf (stderr, "\n");
13641 /* Fold a unary tree expression with code CODE of type TYPE with an
13642 operand OP0. Return a folded expression if successful. Otherwise,
13643 return a tree expression with code CODE of type TYPE with an
13647 fold_build1_stat (enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
13650 #ifdef ENABLE_FOLD_CHECKING
13651 unsigned char checksum_before[16], checksum_after[16];
13652 struct md5_ctx ctx;
13655 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13656 md5_init_ctx (&ctx);
13657 fold_checksum_tree (op0, &ctx, ht);
13658 md5_finish_ctx (&ctx, checksum_before);
13662 tem = fold_unary (code, type, op0);
13664 tem = build1_stat (code, type, op0 PASS_MEM_STAT);
13666 #ifdef ENABLE_FOLD_CHECKING
13667 md5_init_ctx (&ctx);
13668 fold_checksum_tree (op0, &ctx, ht);
13669 md5_finish_ctx (&ctx, checksum_after);
13672 if (memcmp (checksum_before, checksum_after, 16))
13673 fold_check_failed (op0, tem);
13678 /* Fold a binary tree expression with code CODE of type TYPE with
13679 operands OP0 and OP1. Return a folded expression if successful.
13680 Otherwise, return a tree expression with code CODE of type TYPE
13681 with operands OP0 and OP1. */
13684 fold_build2_stat (enum tree_code code, tree type, tree op0, tree op1
13688 #ifdef ENABLE_FOLD_CHECKING
13689 unsigned char checksum_before_op0[16],
13690 checksum_before_op1[16],
13691 checksum_after_op0[16],
13692 checksum_after_op1[16];
13693 struct md5_ctx ctx;
13696 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13697 md5_init_ctx (&ctx);
13698 fold_checksum_tree (op0, &ctx, ht);
13699 md5_finish_ctx (&ctx, checksum_before_op0);
13702 md5_init_ctx (&ctx);
13703 fold_checksum_tree (op1, &ctx, ht);
13704 md5_finish_ctx (&ctx, checksum_before_op1);
13708 tem = fold_binary (code, type, op0, op1);
13710 tem = build2_stat (code, type, op0, op1 PASS_MEM_STAT);
13712 #ifdef ENABLE_FOLD_CHECKING
13713 md5_init_ctx (&ctx);
13714 fold_checksum_tree (op0, &ctx, ht);
13715 md5_finish_ctx (&ctx, checksum_after_op0);
13718 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
13719 fold_check_failed (op0, tem);
13721 md5_init_ctx (&ctx);
13722 fold_checksum_tree (op1, &ctx, ht);
13723 md5_finish_ctx (&ctx, checksum_after_op1);
13726 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
13727 fold_check_failed (op1, tem);
13732 /* Fold a ternary tree expression with code CODE of type TYPE with
13733 operands OP0, OP1, and OP2. Return a folded expression if
13734 successful. Otherwise, return a tree expression with code CODE of
13735 type TYPE with operands OP0, OP1, and OP2. */
13738 fold_build3_stat (enum tree_code code, tree type, tree op0, tree op1, tree op2
13742 #ifdef ENABLE_FOLD_CHECKING
13743 unsigned char checksum_before_op0[16],
13744 checksum_before_op1[16],
13745 checksum_before_op2[16],
13746 checksum_after_op0[16],
13747 checksum_after_op1[16],
13748 checksum_after_op2[16];
13749 struct md5_ctx ctx;
13752 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13753 md5_init_ctx (&ctx);
13754 fold_checksum_tree (op0, &ctx, ht);
13755 md5_finish_ctx (&ctx, checksum_before_op0);
13758 md5_init_ctx (&ctx);
13759 fold_checksum_tree (op1, &ctx, ht);
13760 md5_finish_ctx (&ctx, checksum_before_op1);
13763 md5_init_ctx (&ctx);
13764 fold_checksum_tree (op2, &ctx, ht);
13765 md5_finish_ctx (&ctx, checksum_before_op2);
13769 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
13770 tem = fold_ternary (code, type, op0, op1, op2);
13772 tem = build3_stat (code, type, op0, op1, op2 PASS_MEM_STAT);
13774 #ifdef ENABLE_FOLD_CHECKING
13775 md5_init_ctx (&ctx);
13776 fold_checksum_tree (op0, &ctx, ht);
13777 md5_finish_ctx (&ctx, checksum_after_op0);
13780 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
13781 fold_check_failed (op0, tem);
13783 md5_init_ctx (&ctx);
13784 fold_checksum_tree (op1, &ctx, ht);
13785 md5_finish_ctx (&ctx, checksum_after_op1);
13788 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
13789 fold_check_failed (op1, tem);
13791 md5_init_ctx (&ctx);
13792 fold_checksum_tree (op2, &ctx, ht);
13793 md5_finish_ctx (&ctx, checksum_after_op2);
13796 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
13797 fold_check_failed (op2, tem);
13802 /* Fold a CALL_EXPR expression of type TYPE with operands FN and NARGS
13803 arguments in ARGARRAY, and a null static chain.
13804 Return a folded expression if successful. Otherwise, return a CALL_EXPR
13805 of type TYPE from the given operands as constructed by build_call_array. */
13808 fold_build_call_array (tree type, tree fn, int nargs, tree *argarray)
13811 #ifdef ENABLE_FOLD_CHECKING
13812 unsigned char checksum_before_fn[16],
13813 checksum_before_arglist[16],
13814 checksum_after_fn[16],
13815 checksum_after_arglist[16];
13816 struct md5_ctx ctx;
13820 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13821 md5_init_ctx (&ctx);
13822 fold_checksum_tree (fn, &ctx, ht);
13823 md5_finish_ctx (&ctx, checksum_before_fn);
13826 md5_init_ctx (&ctx);
13827 for (i = 0; i < nargs; i++)
13828 fold_checksum_tree (argarray[i], &ctx, ht);
13829 md5_finish_ctx (&ctx, checksum_before_arglist);
13833 tem = fold_builtin_call_array (type, fn, nargs, argarray);
13835 #ifdef ENABLE_FOLD_CHECKING
13836 md5_init_ctx (&ctx);
13837 fold_checksum_tree (fn, &ctx, ht);
13838 md5_finish_ctx (&ctx, checksum_after_fn);
13841 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
13842 fold_check_failed (fn, tem);
13844 md5_init_ctx (&ctx);
13845 for (i = 0; i < nargs; i++)
13846 fold_checksum_tree (argarray[i], &ctx, ht);
13847 md5_finish_ctx (&ctx, checksum_after_arglist);
13850 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
13851 fold_check_failed (NULL_TREE, tem);
13856 /* Perform constant folding and related simplification of initializer
13857 expression EXPR. These behave identically to "fold_buildN" but ignore
13858 potential run-time traps and exceptions that fold must preserve. */
13860 #define START_FOLD_INIT \
13861 int saved_signaling_nans = flag_signaling_nans;\
13862 int saved_trapping_math = flag_trapping_math;\
13863 int saved_rounding_math = flag_rounding_math;\
13864 int saved_trapv = flag_trapv;\
13865 int saved_folding_initializer = folding_initializer;\
13866 flag_signaling_nans = 0;\
13867 flag_trapping_math = 0;\
13868 flag_rounding_math = 0;\
13870 folding_initializer = 1;
13872 #define END_FOLD_INIT \
13873 flag_signaling_nans = saved_signaling_nans;\
13874 flag_trapping_math = saved_trapping_math;\
13875 flag_rounding_math = saved_rounding_math;\
13876 flag_trapv = saved_trapv;\
13877 folding_initializer = saved_folding_initializer;
13880 fold_build1_initializer (enum tree_code code, tree type, tree op)
13885 result = fold_build1 (code, type, op);
13892 fold_build2_initializer (enum tree_code code, tree type, tree op0, tree op1)
13897 result = fold_build2 (code, type, op0, op1);
13904 fold_build3_initializer (enum tree_code code, tree type, tree op0, tree op1,
13910 result = fold_build3 (code, type, op0, op1, op2);
13917 fold_build_call_array_initializer (tree type, tree fn,
13918 int nargs, tree *argarray)
13923 result = fold_build_call_array (type, fn, nargs, argarray);
13929 #undef START_FOLD_INIT
13930 #undef END_FOLD_INIT
13932 /* Determine if first argument is a multiple of second argument. Return 0 if
13933 it is not, or we cannot easily determined it to be.
13935 An example of the sort of thing we care about (at this point; this routine
13936 could surely be made more general, and expanded to do what the *_DIV_EXPR's
13937 fold cases do now) is discovering that
13939 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
13945 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
13947 This code also handles discovering that
13949 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
13951 is a multiple of 8 so we don't have to worry about dealing with a
13952 possible remainder.
13954 Note that we *look* inside a SAVE_EXPR only to determine how it was
13955 calculated; it is not safe for fold to do much of anything else with the
13956 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
13957 at run time. For example, the latter example above *cannot* be implemented
13958 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
13959 evaluation time of the original SAVE_EXPR is not necessarily the same at
13960 the time the new expression is evaluated. The only optimization of this
13961 sort that would be valid is changing
13963 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
13967 SAVE_EXPR (I) * SAVE_EXPR (J)
13969 (where the same SAVE_EXPR (J) is used in the original and the
13970 transformed version). */
13973 multiple_of_p (tree type, const_tree top, const_tree bottom)
13975 if (operand_equal_p (top, bottom, 0))
13978 if (TREE_CODE (type) != INTEGER_TYPE)
13981 switch (TREE_CODE (top))
13984 /* Bitwise and provides a power of two multiple. If the mask is
13985 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
13986 if (!integer_pow2p (bottom))
13991 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
13992 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
13996 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
13997 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
14000 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
14004 op1 = TREE_OPERAND (top, 1);
14005 /* const_binop may not detect overflow correctly,
14006 so check for it explicitly here. */
14007 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
14008 > TREE_INT_CST_LOW (op1)
14009 && TREE_INT_CST_HIGH (op1) == 0
14010 && 0 != (t1 = fold_convert (type,
14011 const_binop (LSHIFT_EXPR,
14014 && !TREE_OVERFLOW (t1))
14015 return multiple_of_p (type, t1, bottom);
14020 /* Can't handle conversions from non-integral or wider integral type. */
14021 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
14022 || (TYPE_PRECISION (type)
14023 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
14026 /* .. fall through ... */
14029 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
14032 if (TREE_CODE (bottom) != INTEGER_CST
14033 || integer_zerop (bottom)
14034 || (TYPE_UNSIGNED (type)
14035 && (tree_int_cst_sgn (top) < 0
14036 || tree_int_cst_sgn (bottom) < 0)))
14038 return integer_zerop (int_const_binop (TRUNC_MOD_EXPR,
14046 /* Return true if `t' is known to be non-negative. If the return
14047 value is based on the assumption that signed overflow is undefined,
14048 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14049 *STRICT_OVERFLOW_P. */
14052 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
14054 if (t == error_mark_node)
14057 if (TYPE_UNSIGNED (TREE_TYPE (t)))
14060 switch (TREE_CODE (t))
14063 /* Query VRP to see if it has recorded any information about
14064 the range of this object. */
14065 return ssa_name_nonnegative_p (t);
14068 /* We can't return 1 if flag_wrapv is set because
14069 ABS_EXPR<INT_MIN> = INT_MIN. */
14070 if (!INTEGRAL_TYPE_P (TREE_TYPE (t)))
14072 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
14074 *strict_overflow_p = true;
14080 return tree_int_cst_sgn (t) >= 0;
14083 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
14086 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
14088 case POINTER_PLUS_EXPR:
14090 if (FLOAT_TYPE_P (TREE_TYPE (t)))
14091 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
14093 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
14094 strict_overflow_p));
14096 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
14097 both unsigned and at least 2 bits shorter than the result. */
14098 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
14099 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
14100 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
14102 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
14103 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
14104 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
14105 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
14107 unsigned int prec = MAX (TYPE_PRECISION (inner1),
14108 TYPE_PRECISION (inner2)) + 1;
14109 return prec < TYPE_PRECISION (TREE_TYPE (t));
14115 if (FLOAT_TYPE_P (TREE_TYPE (t)))
14117 /* x * x for floating point x is always non-negative. */
14118 if (operand_equal_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1), 0))
14120 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
14122 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
14123 strict_overflow_p));
14126 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
14127 both unsigned and their total bits is shorter than the result. */
14128 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
14129 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
14130 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
14132 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
14133 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
14134 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
14135 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
14136 return TYPE_PRECISION (inner1) + TYPE_PRECISION (inner2)
14137 < TYPE_PRECISION (TREE_TYPE (t));
14143 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
14145 || tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
14146 strict_overflow_p));
14152 case TRUNC_DIV_EXPR:
14153 case CEIL_DIV_EXPR:
14154 case FLOOR_DIV_EXPR:
14155 case ROUND_DIV_EXPR:
14156 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
14158 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
14159 strict_overflow_p));
14161 case TRUNC_MOD_EXPR:
14162 case CEIL_MOD_EXPR:
14163 case FLOOR_MOD_EXPR:
14164 case ROUND_MOD_EXPR:
14166 case NON_LVALUE_EXPR:
14168 case FIX_TRUNC_EXPR:
14169 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
14170 strict_overflow_p);
14172 case COMPOUND_EXPR:
14174 case GIMPLE_MODIFY_STMT:
14175 return tree_expr_nonnegative_warnv_p (GENERIC_TREE_OPERAND (t, 1),
14176 strict_overflow_p);
14179 return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
14180 strict_overflow_p);
14183 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
14185 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
14186 strict_overflow_p));
14190 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
14191 tree outer_type = TREE_TYPE (t);
14193 if (TREE_CODE (outer_type) == REAL_TYPE)
14195 if (TREE_CODE (inner_type) == REAL_TYPE)
14196 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
14197 strict_overflow_p);
14198 if (TREE_CODE (inner_type) == INTEGER_TYPE)
14200 if (TYPE_UNSIGNED (inner_type))
14202 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
14203 strict_overflow_p);
14206 else if (TREE_CODE (outer_type) == INTEGER_TYPE)
14208 if (TREE_CODE (inner_type) == REAL_TYPE)
14209 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t,0),
14210 strict_overflow_p);
14211 if (TREE_CODE (inner_type) == INTEGER_TYPE)
14212 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
14213 && TYPE_UNSIGNED (inner_type);
14220 tree temp = TARGET_EXPR_SLOT (t);
14221 t = TARGET_EXPR_INITIAL (t);
14223 /* If the initializer is non-void, then it's a normal expression
14224 that will be assigned to the slot. */
14225 if (!VOID_TYPE_P (t))
14226 return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);
14228 /* Otherwise, the initializer sets the slot in some way. One common
14229 way is an assignment statement at the end of the initializer. */
14232 if (TREE_CODE (t) == BIND_EXPR)
14233 t = expr_last (BIND_EXPR_BODY (t));
14234 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
14235 || TREE_CODE (t) == TRY_CATCH_EXPR)
14236 t = expr_last (TREE_OPERAND (t, 0));
14237 else if (TREE_CODE (t) == STATEMENT_LIST)
14242 if ((TREE_CODE (t) == MODIFY_EXPR
14243 || TREE_CODE (t) == GIMPLE_MODIFY_STMT)
14244 && GENERIC_TREE_OPERAND (t, 0) == temp)
14245 return tree_expr_nonnegative_warnv_p (GENERIC_TREE_OPERAND (t, 1),
14246 strict_overflow_p);
14253 tree fndecl = get_callee_fndecl (t);
14254 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
14255 switch (DECL_FUNCTION_CODE (fndecl))
14257 CASE_FLT_FN (BUILT_IN_ACOS):
14258 CASE_FLT_FN (BUILT_IN_ACOSH):
14259 CASE_FLT_FN (BUILT_IN_CABS):
14260 CASE_FLT_FN (BUILT_IN_COSH):
14261 CASE_FLT_FN (BUILT_IN_ERFC):
14262 CASE_FLT_FN (BUILT_IN_EXP):
14263 CASE_FLT_FN (BUILT_IN_EXP10):
14264 CASE_FLT_FN (BUILT_IN_EXP2):
14265 CASE_FLT_FN (BUILT_IN_FABS):
14266 CASE_FLT_FN (BUILT_IN_FDIM):
14267 CASE_FLT_FN (BUILT_IN_HYPOT):
14268 CASE_FLT_FN (BUILT_IN_POW10):
14269 CASE_INT_FN (BUILT_IN_FFS):
14270 CASE_INT_FN (BUILT_IN_PARITY):
14271 CASE_INT_FN (BUILT_IN_POPCOUNT):
14272 case BUILT_IN_BSWAP32:
14273 case BUILT_IN_BSWAP64:
14277 CASE_FLT_FN (BUILT_IN_SQRT):
14278 /* sqrt(-0.0) is -0.0. */
14279 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (t))))
14281 return tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 0),
14282 strict_overflow_p);
14284 CASE_FLT_FN (BUILT_IN_ASINH):
14285 CASE_FLT_FN (BUILT_IN_ATAN):
14286 CASE_FLT_FN (BUILT_IN_ATANH):
14287 CASE_FLT_FN (BUILT_IN_CBRT):
14288 CASE_FLT_FN (BUILT_IN_CEIL):
14289 CASE_FLT_FN (BUILT_IN_ERF):
14290 CASE_FLT_FN (BUILT_IN_EXPM1):
14291 CASE_FLT_FN (BUILT_IN_FLOOR):
14292 CASE_FLT_FN (BUILT_IN_FMOD):
14293 CASE_FLT_FN (BUILT_IN_FREXP):
14294 CASE_FLT_FN (BUILT_IN_LCEIL):
14295 CASE_FLT_FN (BUILT_IN_LDEXP):
14296 CASE_FLT_FN (BUILT_IN_LFLOOR):
14297 CASE_FLT_FN (BUILT_IN_LLCEIL):
14298 CASE_FLT_FN (BUILT_IN_LLFLOOR):
14299 CASE_FLT_FN (BUILT_IN_LLRINT):
14300 CASE_FLT_FN (BUILT_IN_LLROUND):
14301 CASE_FLT_FN (BUILT_IN_LRINT):
14302 CASE_FLT_FN (BUILT_IN_LROUND):
14303 CASE_FLT_FN (BUILT_IN_MODF):
14304 CASE_FLT_FN (BUILT_IN_NEARBYINT):
14305 CASE_FLT_FN (BUILT_IN_RINT):
14306 CASE_FLT_FN (BUILT_IN_ROUND):
14307 CASE_FLT_FN (BUILT_IN_SCALB):
14308 CASE_FLT_FN (BUILT_IN_SCALBLN):
14309 CASE_FLT_FN (BUILT_IN_SCALBN):
14310 CASE_FLT_FN (BUILT_IN_SIGNBIT):
14311 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
14312 CASE_FLT_FN (BUILT_IN_SINH):
14313 CASE_FLT_FN (BUILT_IN_TANH):
14314 CASE_FLT_FN (BUILT_IN_TRUNC):
14315 /* True if the 1st argument is nonnegative. */
14316 return tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 0),
14317 strict_overflow_p);
14319 CASE_FLT_FN (BUILT_IN_FMAX):
14320 /* True if the 1st OR 2nd arguments are nonnegative. */
14321 return (tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 0),
14323 || (tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 1),
14324 strict_overflow_p)));
14326 CASE_FLT_FN (BUILT_IN_FMIN):
14327 /* True if the 1st AND 2nd arguments are nonnegative. */
14328 return (tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 0),
14330 && (tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 1),
14331 strict_overflow_p)));
14333 CASE_FLT_FN (BUILT_IN_COPYSIGN):
14334 /* True if the 2nd argument is nonnegative. */
14335 return tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 1),
14336 strict_overflow_p);
14338 CASE_FLT_FN (BUILT_IN_POWI):
14339 /* True if the 1st argument is nonnegative or the second
14340 argument is an even integer. */
14341 if (TREE_CODE (CALL_EXPR_ARG (t, 1)) == INTEGER_CST)
14343 tree arg1 = CALL_EXPR_ARG (t, 1);
14344 if ((TREE_INT_CST_LOW (arg1) & 1) == 0)
14347 return tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 0),
14348 strict_overflow_p);
14350 CASE_FLT_FN (BUILT_IN_POW):
14351 /* True if the 1st argument is nonnegative or the second
14352 argument is an even integer valued real. */
14353 if (TREE_CODE (CALL_EXPR_ARG (t, 1)) == REAL_CST)
14358 c = TREE_REAL_CST (CALL_EXPR_ARG (t, 1));
14359 n = real_to_integer (&c);
14362 REAL_VALUE_TYPE cint;
14363 real_from_integer (&cint, VOIDmode, n,
14364 n < 0 ? -1 : 0, 0);
14365 if (real_identical (&c, &cint))
14369 return tree_expr_nonnegative_warnv_p (CALL_EXPR_ARG (t, 0),
14370 strict_overflow_p);
14377 /* ... fall through ... */
14381 tree type = TREE_TYPE (t);
14382 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
14383 && truth_value_p (TREE_CODE (t)))
14384 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
14385 have a signed:1 type (where the value is -1 and 0). */
14390 /* We don't know sign of `t', so be conservative and return false. */
14394 /* Return true if `t' is known to be non-negative. Handle warnings
14395 about undefined signed overflow. */
14398 tree_expr_nonnegative_p (tree t)
14400 bool ret, strict_overflow_p;
14402 strict_overflow_p = false;
14403 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
14404 if (strict_overflow_p)
14405 fold_overflow_warning (("assuming signed overflow does not occur when "
14406 "determining that expression is always "
14408 WARN_STRICT_OVERFLOW_MISC);
14412 /* Return true when T is an address and is known to be nonzero.
14413 For floating point we further ensure that T is not denormal.
14414 Similar logic is present in nonzero_address in rtlanal.h.
14416 If the return value is based on the assumption that signed overflow
14417 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
14418 change *STRICT_OVERFLOW_P. */
/* NOTE(review): this listing has elided lines (several case labels,
   braces and returns are missing) -- verify against the complete
   fold-const.c before relying on the control flow shown here.  */
14421 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
14423 tree type = TREE_TYPE (t);
14424 bool sub_strict_overflow_p;
14426 /* Doing something useful for floating point would need more work. */
14427 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
14430 switch (TREE_CODE (t))
/* Presumably the SSA_NAME case -- the label is elided here.  */
14433 /* Query VRP to see if it has recorded any information about
14434 the range of this object. */
14435 return ssa_name_nonzero_p (t);
/* Presumably ABS_EXPR: |x| is nonzero iff x is -- TODO confirm.  */
14438 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
14439 strict_overflow_p);
/* Presumably INTEGER_CST.  */
14442 return !integer_zerop (t);
14444 case POINTER_PLUS_EXPR:
14446 if (TYPE_OVERFLOW_UNDEFINED (type))
14448 /* With the presence of negative values it is hard
14449 to say something. */
14450 sub_strict_overflow_p = false;
14451 if (!tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
14452 &sub_strict_overflow_p)
14453 || !tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
14454 &sub_strict_overflow_p))
14456 /* One of operands must be positive and the other non-negative. */
14457 /* We don't set *STRICT_OVERFLOW_P here: even if this value
14458 overflows, on a twos-complement machine the sum of two
14459 nonnegative numbers can never be zero. */
14460 return (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
14462 || tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
14463 strict_overflow_p));
/* Presumably MULT_EXPR: a product of nonzero factors is nonzero only
   when signed overflow is undefined -- hence the flag set below.  */
14468 if (TYPE_OVERFLOW_UNDEFINED (type))
14470 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
14472 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
14473 strict_overflow_p))
14475 *strict_overflow_p = true;
/* Presumably NOP_EXPR/CONVERT_EXPR: a non-narrowing conversion
   preserves nonzero-ness.  */
14483 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
14484 tree outer_type = TREE_TYPE (t);
14486 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
14487 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
14488 strict_overflow_p));
/* Presumably ADDR_EXPR: the address of a declared object is nonzero
   unless the declaration is weak and may resolve to NULL.  */
14494 tree base = get_base_address (TREE_OPERAND (t, 0));
14499 /* Weak declarations may link to NULL. */
14500 if (VAR_OR_FUNCTION_DECL_P (base))
14501 return !DECL_WEAK (base);
14503 /* Constants are never weak. */
14504 if (CONSTANT_CLASS_P (base))
/* Presumably COND_EXPR: nonzero when both arms are nonzero.  */
14511 sub_strict_overflow_p = false;
14512 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
14513 &sub_strict_overflow_p)
14514 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
14515 &sub_strict_overflow_p))
14517 if (sub_strict_overflow_p)
14518 *strict_overflow_p = true;
/* Presumably MIN_EXPR: the minimum is nonzero when both operands are.  */
14524 sub_strict_overflow_p = false;
14525 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
14526 &sub_strict_overflow_p)
14527 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
14528 &sub_strict_overflow_p))
14530 if (sub_strict_overflow_p)
14531 *strict_overflow_p = true;
/* Presumably MAX_EXPR.  */
14536 sub_strict_overflow_p = false;
14537 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
14538 &sub_strict_overflow_p))
14540 if (sub_strict_overflow_p)
14541 *strict_overflow_p = true;
14543 /* When both operands are nonzero, then MAX must be too. */
14544 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
14545 strict_overflow_p))
14548 /* MAX where operand 0 is positive is positive. */
14549 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
14550 strict_overflow_p);
14552 /* MAX where operand 1 is positive is positive. */
14553 else if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
14554 &sub_strict_overflow_p)
14555 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
14556 &sub_strict_overflow_p))
14558 if (sub_strict_overflow_p)
14559 *strict_overflow_p = true;
14564 case COMPOUND_EXPR:
14566 case GIMPLE_MODIFY_STMT:
/* A compound/modify expression has the value of its second operand.  */
14568 return tree_expr_nonzero_warnv_p (GENERIC_TREE_OPERAND (t, 1),
14569 strict_overflow_p);
14572 case NON_LVALUE_EXPR:
14573 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
14574 strict_overflow_p);
/* Presumably BIT_IOR_EXPR: an OR is nonzero when either operand
   is -- the label is elided in this listing.  */
14577 return (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
14579 || tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
14580 strict_overflow_p));
/* Presumably CALL_EXPR: alloca never returns a null pointer.  */
14583 return alloca_call_p (t);
14591 /* Return true when T is an address and is known to be nonzero.
14592 Handle warnings about undefined signed overflow. */
14595 tree_expr_nonzero_p (tree t)
14597 bool ret, strict_overflow_p;
14599 strict_overflow_p = false;
14600 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
14601 if (strict_overflow_p)
14602 fold_overflow_warning (("assuming signed overflow does not occur when "
14603 "determining that expression is always "
14605 WARN_STRICT_OVERFLOW_MISC);
14609 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
14610 attempt to fold the expression to a constant without modifying TYPE,
14613 If the expression could be simplified to a constant, then return
14614 the constant. If the expression would not be simplified to a
14615 constant, then return NULL_TREE. */
14618 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
14620 tree tem = fold_binary (code, type, op0, op1);
14621 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
14624 /* Given the components of a unary expression CODE, TYPE and OP0,
14625 attempt to fold the expression to a constant without modifying
14628 If the expression could be simplified to a constant, then return
14629 the constant. If the expression would not be simplified to a
14630 constant, then return NULL_TREE. */
14633 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
14635 tree tem = fold_unary (code, type, op0);
14636 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
14639 /* If EXP represents referencing an element in a constant string
14640 (either via pointer arithmetic or array indexing), return the
14641 tree representing the value accessed, otherwise return NULL. */
/* NOTE(review): this listing has elided lines (the declarations of
   `string' and `index', the else branch, and some closing braces are
   missing); verify against the complete fold-const.c.  */
14644 fold_read_from_constant_string (tree exp)
14646 if ((TREE_CODE (exp) == INDIRECT_REF
14647 || TREE_CODE (exp) == ARRAY_REF)
14648 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
14650 tree exp1 = TREE_OPERAND (exp, 0);
14654 if (TREE_CODE (exp) == INDIRECT_REF)
14655 string = string_constant (exp1, &index);
/* Presumably the ARRAY_REF branch: the string is the array itself and
   the index is operand 1, adjusted by the array's lower bound.  */
14658 tree low_bound = array_ref_low_bound (exp);
14659 index = fold_convert (sizetype, TREE_OPERAND (exp, 1));
14661 /* Optimize the special-case of a zero lower bound.
14663 We convert the low_bound to sizetype to avoid some problems
14664 with constant folding. (E.g. suppose the lower bound is 1,
14665 and its mode is QI. Without the conversion, (ARRAY
14666 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
14667 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
14668 if (! integer_zerop (low_bound))
14669 index = size_diffop (index, fold_convert (sizetype, low_bound));
/* The read is only folded when the index is a compile-time constant
   that lies within the string, and the element is a single byte whose
   mode matches the access type.  */
14675 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
14676 && TREE_CODE (string) == STRING_CST
14677 && TREE_CODE (index) == INTEGER_CST
14678 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
14679 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
14681 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
14682 return build_int_cst_type (TREE_TYPE (exp),
14683 (TREE_STRING_POINTER (string)
14684 [TREE_INT_CST_LOW (index)]));
14689 /* Return the tree for neg (ARG0) when ARG0 is known to be either
14690 an integer constant, real, or fixed-point constant.
14692 TYPE is the type of the result. */
/* NOTE(review): case labels, neg_double's output arguments and the
   final `return t;' are elided in this listing; verify against the
   complete fold-const.c.  */
14695 fold_negate_const (tree arg0, tree type)
14697 tree t = NULL_TREE;
14699 switch (TREE_CODE (arg0))
/* Presumably INTEGER_CST: negate the double-word value and refit it
   to TYPE, recording overflow for signed types.  */
14703 unsigned HOST_WIDE_INT low;
14704 HOST_WIDE_INT high;
14705 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
14706 TREE_INT_CST_HIGH (arg0),
14708 t = force_fit_type_double (type, low, high, 1,
14709 (overflow | TREE_OVERFLOW (arg0))
14710 && !TYPE_UNSIGNED (type));
/* Presumably REAL_CST.  */
14715 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
/* Presumably FIXED_CST: negation may saturate or overflow.  */
14720 FIXED_VALUE_TYPE f;
14721 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
14722 &(TREE_FIXED_CST (arg0)), NULL,
14723 TYPE_SATURATING (type));
14724 t = build_fixed (type, f);
14725 /* Propagate overflow flags. */
14726 if (overflow_p | TREE_OVERFLOW (arg0))
14728 TREE_OVERFLOW (t) = 1;
14729 TREE_CONSTANT_OVERFLOW (t) = 1;
14731 else if (TREE_CONSTANT_OVERFLOW (arg0))
14732 TREE_CONSTANT_OVERFLOW (t) = 1;
/* Any other constant kind is a caller error.  */
14737 gcc_unreachable ();
14743 /* Return the tree for abs (ARG0) when ARG0 is known to be either
14744 an integer constant or real constant.
14746 TYPE is the type of the result. */
/* NOTE(review): case labels, neg_double's output arguments, the
   non-negative fallthrough assignments and the final return are
   elided in this listing; verify against the complete fold-const.c.  */
14749 fold_abs_const (tree arg0, tree type)
14751 tree t = NULL_TREE;
14753 switch (TREE_CODE (arg0))
/* Presumably INTEGER_CST.  */
14756 /* If the value is unsigned, then the absolute value is
14757 the same as the ordinary value. */
14758 if (TYPE_UNSIGNED (type))
14760 /* Similarly, if the value is non-negative. */
14761 else if (INT_CST_LT (integer_minus_one_node, arg0))
14763 /* If the value is negative, then the absolute value is
/* ... its negation; the comment continuation is elided here.  */
14767 unsigned HOST_WIDE_INT low;
14768 HOST_WIDE_INT high;
14769 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
14770 TREE_INT_CST_HIGH (arg0),
14772 t = force_fit_type_double (type, low, high, -1,
14773 overflow | TREE_OVERFLOW (arg0));
/* Presumably REAL_CST: flip the sign only when negative.  */
14778 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
14779 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
/* Any other constant kind is a caller error.  */
14785 gcc_unreachable ();
14791 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
14792 constant. TYPE is the type of the result. */
14795 fold_not_const (tree arg0, tree type)
14797 tree t = NULL_TREE;
14799 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
14801 t = force_fit_type_double (type, ~TREE_INT_CST_LOW (arg0),
14802 ~TREE_INT_CST_HIGH (arg0), 0,
14803 TREE_OVERFLOW (arg0));
14808 /* Given CODE, a relational operator, the target type, TYPE and two
14809 constant operands OP0 and OP1, return the result of the
14810 relational operation. If the result is not a compile time
14811 constant, then return NULL_TREE. */
/* NOTE(review): the NaN case labels, the swap/invert bookkeeping on
   `invert', and the fallback `return NULL_TREE;' are elided in this
   listing; verify against the complete fold-const.c.  */
14814 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
14816 int result, invert;
14818 /* From here on, the only cases we handle are when the result is
14819 known to be a constant. */
14821 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
14823 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
14824 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
14826 /* Handle the cases where either operand is a NaN. */
14827 if (real_isnan (c0) || real_isnan (c1))
14837 case UNORDERED_EXPR:
/* With a NaN operand, ordered comparisons are false and unordered
   ones true; the individual case bodies are elided here.  */
14851 if (flag_trapping_math)
14857 gcc_unreachable ();
14860 return constant_boolean_node (result, type);
14863 return constant_boolean_node (real_compare (code, c0, c1), type);
14866 if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
14868 const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
14869 const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
14870 return constant_boolean_node (fixed_compare (code, c0, c1), type);
14873 /* Handle equality/inequality of complex constants. */
14874 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
14876 tree rcond = fold_relational_const (code, type,
14877 TREE_REALPART (op0),
14878 TREE_REALPART (op1));
14879 tree icond = fold_relational_const (code, type,
14880 TREE_IMAGPART (op0),
14881 TREE_IMAGPART (op1));
14882 if (code == EQ_EXPR)
14883 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
14884 else if (code == NE_EXPR)
14885 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
14890 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
14892 To compute GT, swap the arguments and do LT.
14893 To compute GE, do LT and invert the result.
14894 To compute LE, swap the arguments, do LT and invert the result.
14895 To compute NE, do EQ and invert the result.
14897 Therefore, the code below must handle only EQ and LT. */
14899 if (code == LE_EXPR || code == GT_EXPR)
/* The swap of op0/op1 is elided here.  */
14904 code = swap_tree_comparison (code);
14907 /* Note that it is safe to invert for real values here because we
14908 have already handled the one case that it matters. */
14911 if (code == NE_EXPR || code == GE_EXPR)
/* Presumably `invert = 1;' precedes this -- elided in the listing.  */
14914 code = invert_tree_comparison (code, false);
14917 /* Compute a result for LT or EQ if args permit;
14918 Otherwise return T. */
14919 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
14921 if (code == EQ_EXPR)
14922 result = tree_int_cst_equal (op0, op1);
14923 else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
14924 result = INT_CST_LT_UNSIGNED (op0, op1);
14926 result = INT_CST_LT (op0, op1);
/* Presumably `if (invert) result ^= 1;' precedes the return --
   elided in the listing.  */
14933 return constant_boolean_node (result, type);
14936 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
14937 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
14941 fold_build_cleanup_point_expr (tree type, tree expr)
14943 /* If the expression does not have side effects then we don't have to wrap
14944 it with a cleanup point expression. */
14945 if (!TREE_SIDE_EFFECTS (expr))
14948 /* If the expression is a return, check to see if the expression inside the
14949 return has no side effects or the right hand side of the modify expression
14950 inside the return. If either don't have side effects set we don't need to
14951 wrap the expression in a cleanup point expression. Note we don't check the
14952 left hand side of the modify because it should always be a return decl. */
14953 if (TREE_CODE (expr) == RETURN_EXPR)
14955 tree op = TREE_OPERAND (expr, 0);
14956 if (!op || !TREE_SIDE_EFFECTS (op))
14958 op = TREE_OPERAND (op, 1);
14959 if (!TREE_SIDE_EFFECTS (op))
14963 return build1 (CLEANUP_POINT_EXPR, type, expr);
14966 /* Given a pointer value OP0 and a type TYPE, return a simplified version
14967 of an indirection through OP0, or NULL_TREE if no simplification is
/* ... possible; the comment continuation, the declarations of `sub',
   `subtype', `op00type' and `type_domain', the STRIP_NOPS of OP0, and
   the final `return NULL_TREE;' are elided in this listing.  */
14971 fold_indirect_ref_1 (tree type, tree op0)
14977 subtype = TREE_TYPE (sub);
14978 if (!POINTER_TYPE_P (subtype))
14981 if (TREE_CODE (sub) == ADDR_EXPR)
14983 tree op = TREE_OPERAND (sub, 0);
14984 tree optype = TREE_TYPE (op);
14985 /* *&CONST_DECL -> to the value of the const decl. */
14986 if (TREE_CODE (op) == CONST_DECL)
14987 return DECL_INITIAL (op);
14988 /* *&p => p; make sure to handle *&"str"[cst] here. */
14989 if (type == optype)
14991 tree fop = fold_read_from_constant_string (op);
14997 /* *(foo *)&fooarray => fooarray[0] */
14998 else if (TREE_CODE (optype) == ARRAY_TYPE
14999 && type == TREE_TYPE (optype))
15001 tree type_domain = TYPE_DOMAIN (optype);
15002 tree min_val = size_zero_node;
15003 if (type_domain && TYPE_MIN_VALUE (type_domain))
15004 min_val = TYPE_MIN_VALUE (type_domain);
15005 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
15007 /* *(foo *)&complexfoo => __real__ complexfoo */
15008 else if (TREE_CODE (optype) == COMPLEX_TYPE
15009 && type == TREE_TYPE (optype))
15010 return fold_build1 (REALPART_EXPR, type, op);
15011 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
15012 else if (TREE_CODE (optype) == VECTOR_TYPE
15013 && type == TREE_TYPE (optype))
15015 tree part_width = TYPE_SIZE (type);
15016 tree index = bitsize_int (0);
15017 return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
15021 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
15022 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
15023 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
15025 tree op00 = TREE_OPERAND (sub, 0);
15026 tree op01 = TREE_OPERAND (sub, 1);
15030 op00type = TREE_TYPE (op00);
15031 if (TREE_CODE (op00) == ADDR_EXPR
15032 && TREE_CODE (TREE_TYPE (op00type)) == COMPLEX_TYPE
15033 && type == TREE_TYPE (TREE_TYPE (op00type)))
/* The offset must equal the size of one part, i.e. point at the
   imaginary half of the complex value.  */
15035 tree size = TYPE_SIZE_UNIT (type);
15036 if (tree_int_cst_equal (size, op01))
15037 return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (op00, 0));
15041 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
15042 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
15043 && type == TREE_TYPE (TREE_TYPE (subtype)))
15046 tree min_val = size_zero_node;
15047 sub = build_fold_indirect_ref (sub);
15048 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
15049 if (type_domain && TYPE_MIN_VALUE (type_domain))
15050 min_val = TYPE_MIN_VALUE (type_domain);
15051 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
15057 /* Builds an expression for an indirection through T, simplifying some
15061 build_fold_indirect_ref (tree t)
15063 tree type = TREE_TYPE (TREE_TYPE (t));
15064 tree sub = fold_indirect_ref_1 (type, t);
15069 return build1 (INDIRECT_REF, type, t);
15072 /* Given an INDIRECT_REF T, return either T or a simplified version. */
15075 fold_indirect_ref (tree t)
15077 tree sub = fold_indirect_ref_1 (TREE_TYPE (t), TREE_OPERAND (t, 0));
15085 /* Strip non-trapping, non-side-effecting tree nodes from an expression
15086 whose result is ignored. The type of the returned tree need not be
15087 the same as the original expression. */
/* NOTE(review): the enclosing loop, several case labels, the returns
   and the default branches are elided in this listing; verify against
   the complete fold-const.c.  */
15090 fold_ignored_result (tree t)
15092 if (!TREE_SIDE_EFFECTS (t))
15093 return integer_zero_node;
/* Presumably an infinite loop peeling one wrapper per iteration.  */
15096 switch (TREE_CODE_CLASS (TREE_CODE (t)))
/* Presumably tcc_unary: the operand carries the side effects.  */
15099 t = TREE_OPERAND (t, 0);
/* Presumably tcc_binary precedes tcc_comparison -- labels elided.  */
15103 case tcc_comparison:
15104 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
15105 t = TREE_OPERAND (t, 0);
15106 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
15107 t = TREE_OPERAND (t, 1);
15112 case tcc_expression:
15113 switch (TREE_CODE (t))
15115 case COMPOUND_EXPR:
15116 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
/* Presumably `return t;' when the second operand has side effects.  */
15118 t = TREE_OPERAND (t, 0);
/* Presumably COND_EXPR: only strippable when both arms are pure.  */
15122 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
15123 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
15125 t = TREE_OPERAND (t, 0);
15138 /* Return the value of VALUE, rounded up to a multiple of DIVISOR.
15139 This can only be applied to objects of a sizetype. */
/* NOTE(review): the `divisor == 1' early return, the overflow handling
   for the constant case, and the final `return value;' are elided in
   this listing; verify against the complete fold-const.c.  */
15142 round_up (tree value, int divisor)
15144 tree div = NULL_TREE;
15146 gcc_assert (divisor > 0);
15150 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
15151 have to do anything. Only do this when we are not given a const,
15152 because in that case, this check is more expensive than just
/* ... doing the arithmetic below -- comment continuation elided.  */
15154 if (TREE_CODE (value) != INTEGER_CST)
15156 div = build_int_cst (TREE_TYPE (value), divisor);
15158 if (multiple_of_p (TREE_TYPE (value), value, div))
15162 /* If divisor is a power of two, simplify this to bit manipulation. */
15163 if (divisor == (divisor & -divisor))
15165 if (TREE_CODE (value) == INTEGER_CST)
15167 unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (value);
15168 unsigned HOST_WIDE_INT high;
15171 if ((low & (divisor - 1)) == 0)
/* Already aligned: presumably returns VALUE unchanged.  */
15174 overflow_p = TREE_OVERFLOW (value);
15175 high = TREE_INT_CST_HIGH (value);
15176 low &= ~(divisor - 1);
/* The addition of DIVISOR and its carry into HIGH are elided here.  */
15185 return force_fit_type_double (TREE_TYPE (value), low, high,
/* Non-constant power-of-two case: (value + divisor - 1) & -divisor.  */
15192 t = build_int_cst (TREE_TYPE (value), divisor - 1);
15193 value = size_binop (PLUS_EXPR, value, t);
15194 t = build_int_cst (TREE_TYPE (value), -divisor);
15195 value = size_binop (BIT_AND_EXPR, value, t);
/* General case: ceil-divide then multiply back.  */
15201 div = build_int_cst (TREE_TYPE (value), divisor);
15202 value = size_binop (CEIL_DIV_EXPR, value, div);
15203 value = size_binop (MULT_EXPR, value, div);
15209 /* Likewise, but round down. */
/* NOTE(review): the `divisor == 1' early return, some braces and the
   final `return value;' are elided in this listing; verify against
   the complete fold-const.c.  */
15212 round_down (tree value, int divisor)
15214 tree div = NULL_TREE;
15216 gcc_assert (divisor > 0);
15220 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
15221 have to do anything. Only do this when we are not given a const,
15222 because in that case, this check is more expensive than just
/* ... doing the arithmetic below -- comment continuation elided.  */
15224 if (TREE_CODE (value) != INTEGER_CST)
15226 div = build_int_cst (TREE_TYPE (value), divisor);
15228 if (multiple_of_p (TREE_TYPE (value), value, div))
15232 /* If divisor is a power of two, simplify this to bit manipulation. */
15233 if (divisor == (divisor & -divisor))
/* Power-of-two case: clear the low bits with value & -divisor.  */
15237 t = build_int_cst (TREE_TYPE (value), -divisor);
15238 value = size_binop (BIT_AND_EXPR, value, t);
/* General case: floor-divide then multiply back.  */
15243 div = build_int_cst (TREE_TYPE (value), divisor);
15244 value = size_binop (FLOOR_DIV_EXPR, value, div);
15245 value = size_binop (MULT_EXPR, value, div);
15251 /* Returns the pointer to the base of the object addressed by EXP and
15252 extracts the information about the offset of the access, storing it
15253 to PBITPOS and POFFSET. */
/* NOTE(review): the `tree core;' declaration, the non-ADDR_EXPR branch
   (which presumably sets *pbitpos to 0) and the `return core;' are
   elided in this listing; verify against the complete fold-const.c.  */
15256 split_address_to_core_and_offset (tree exp,
15257 HOST_WIDE_INT *pbitpos, tree *poffset)
15260 enum machine_mode mode;
15261 int unsignedp, volatilep;
15262 HOST_WIDE_INT bitsize;
15264 if (TREE_CODE (exp) == ADDR_EXPR)
15266 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
15267 poffset, &mode, &unsignedp, &volatilep,
15269 core = fold_addr_expr (core);
/* Non-address case: the expression itself is the core and there is
   no variable offset.  */
15275 *poffset = NULL_TREE;
15281 /* Returns true if addresses of E1 and E2 differ by a constant, false
15282 otherwise. If they do, E1 - E2 is stored in *DIFF. */
/* NOTE(review): the `tree core1, core2;' declarations, several
   `return false;'/`return true;' lines and the `*diff = 0;' branch
   are elided in this listing; verify against the complete
   fold-const.c.  */
15285 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
15288 HOST_WIDE_INT bitpos1, bitpos2;
15289 tree toffset1, toffset2, tdiff, type;
15291 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
15292 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
/* The cores must be the same object and both offsets must be whole
   bytes for the difference to be a constant.  */
15294 if (bitpos1 % BITS_PER_UNIT != 0
15295 || bitpos2 % BITS_PER_UNIT != 0
15296 || !operand_equal_p (core1, core2, 0))
15299 if (toffset1 && toffset2)
15301 type = TREE_TYPE (toffset1);
15302 if (type != TREE_TYPE (toffset2))
15303 toffset2 = fold_convert (type, toffset2);
15305 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
15306 if (!cst_and_fits_in_hwi (tdiff))
15309 *diff = int_cst_value (tdiff);
15311 else if (toffset1 || toffset2)
15313 /* If only one of the offsets is non-constant, the difference cannot
/* ... be a constant -- comment continuation and `return false;'
   elided here.  */
15320 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
15324 /* Simplify the floating point expression EXP when the sign of the
15325 result is not significant. Return NULL_TREE if no simplification
15329 fold_strip_sign_ops (tree exp)
15333 switch (TREE_CODE (exp))
15337 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
15338 return arg0 ? arg0 : TREE_OPERAND (exp, 0);
15342 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
15344 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
15345 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
15346 if (arg0 != NULL_TREE || arg1 != NULL_TREE)
15347 return fold_build2 (TREE_CODE (exp), TREE_TYPE (exp),
15348 arg0 ? arg0 : TREE_OPERAND (exp, 0),
15349 arg1 ? arg1 : TREE_OPERAND (exp, 1));
15352 case COMPOUND_EXPR:
15353 arg0 = TREE_OPERAND (exp, 0);
15354 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
15356 return fold_build2 (COMPOUND_EXPR, TREE_TYPE (exp), arg0, arg1);
15360 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
15361 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
15363 return fold_build3 (COND_EXPR, TREE_TYPE (exp), TREE_OPERAND (exp, 0),
15364 arg0 ? arg0 : TREE_OPERAND (exp, 1),
15365 arg1 ? arg1 : TREE_OPERAND (exp, 2));
15370 const enum built_in_function fcode = builtin_mathfn_code (exp);
15373 CASE_FLT_FN (BUILT_IN_COPYSIGN):
15374 /* Strip copysign function call, return the 1st argument. */
15375 arg0 = CALL_EXPR_ARG (exp, 0);
15376 arg1 = CALL_EXPR_ARG (exp, 1);
15377 return omit_one_operand (TREE_TYPE (exp), arg0, arg1);
15380 /* Strip sign ops from the argument of "odd" math functions. */
15381 if (negate_mathfn_p (fcode))
15383 arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
15385 return build_call_expr (get_callee_fndecl (exp), 1, arg0);