1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 /*@@ This file should be rewritten to use an arbitrary precision
23 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
24 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
25 @@ The routines that translate from the ap rep should
26 @@ warn if precision et. al. is lost.
27 @@ This would also make life easier when this technology is used
28 @@ for cross-compilers. */
30 /* The entry points in this file are fold, size_int_wide, size_binop
31 and force_fit_type_double.
33 fold takes a tree as argument and returns a simplified tree.
35 size_binop takes a tree code for an arithmetic operation
36 and two operands that are trees, and produces a tree for the
37 result, assuming the type comes from `sizetype'.
39 size_int takes an integer value, and creates a tree constant
40 with type from `sizetype'.
42 force_fit_type_double takes a constant, an overflowable flag and a
43 prior overflow indicator. It forces the value to fit the type and
46 Note: Since the folders get called on non-gimple code as well as
47 gimple code, we need to handle GIMPLE tuples as well as their
48 corresponding tree equivalents. */
52 #include "coretypes.h"
57 #include "fixed-value.h"
66 #include "langhooks.h"
70 /* Nonzero if we are folding constants inside an initializer; zero
72 int folding_initializer = 0;
74 /* The following constants represent a bit based encoding of GCC's
75 comparison operators. This encoding simplifies transformations
76 on relational comparison operators, such as AND and OR. */
77 enum comparison_code {
96 static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
97 static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *);
98 static bool negate_mathfn_p (enum built_in_function);
99 static bool negate_expr_p (tree);
100 static tree negate_expr (tree);
101 static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
102 static tree associate_trees (tree, tree, enum tree_code, tree);
103 static tree const_binop (enum tree_code, tree, tree, int);
104 static enum comparison_code comparison_to_compcode (enum tree_code);
105 static enum tree_code compcode_to_comparison (enum comparison_code);
106 static int operand_equal_for_comparison_p (tree, tree, tree);
107 static int twoval_comparison_p (tree, tree *, tree *, int *);
108 static tree eval_subst (tree, tree, tree, tree, tree);
109 static tree pedantic_omit_one_operand (tree, tree, tree);
110 static tree distribute_bit_expr (enum tree_code, tree, tree, tree);
111 static tree make_bit_field_ref (tree, tree, HOST_WIDE_INT, HOST_WIDE_INT, int);
112 static tree optimize_bit_field_compare (enum tree_code, tree, tree, tree);
113 static tree decode_field_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
114 enum machine_mode *, int *, int *,
116 static int all_ones_mask_p (const_tree, int);
117 static tree sign_bit_p (tree, const_tree);
118 static int simple_operand_p (const_tree);
119 static tree range_binop (enum tree_code, tree, tree, int, tree, int);
120 static tree range_predecessor (tree);
121 static tree range_successor (tree);
122 static tree make_range (tree, int *, tree *, tree *, bool *);
123 static tree build_range_check (tree, tree, int, tree, tree);
124 static int merge_ranges (int *, tree *, tree *, int, tree, tree, int, tree,
126 static tree fold_range_test (enum tree_code, tree, tree, tree);
127 static tree fold_cond_expr_with_comparison (tree, tree, tree, tree);
128 static tree unextend (tree, int, int, tree);
129 static tree fold_truthop (enum tree_code, tree, tree, tree);
130 static tree optimize_minmax_comparison (enum tree_code, tree, tree, tree);
131 static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
132 static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
133 static tree fold_binary_op_with_conditional_arg (enum tree_code, tree,
136 static tree fold_mathfn_compare (enum built_in_function, enum tree_code,
138 static tree fold_inf_compare (enum tree_code, tree, tree, tree);
139 static tree fold_div_compare (enum tree_code, tree, tree, tree);
140 static bool reorder_operands_p (const_tree, const_tree);
141 static tree fold_negate_const (tree, tree);
142 static tree fold_not_const (tree, tree);
143 static tree fold_relational_const (enum tree_code, tree, tree, tree);
144 static tree fold_convert_const (enum tree_code, tree, tree);
147 /* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
148 overflow. Suppose A, B and SUM have the same respective signs as A1, B1,
149 and SUM1. Then this yields nonzero if overflow occurred during the
152 Overflow occurs if A and B have the same sign, but A and SUM differ in
153 sign. Use `^' to test whether signs differ, and `< 0' to isolate the
155 #define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
157 /* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
158 We do that by representing the two-word integer in 4 words, with only
159 HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
160 number. The value of the word is LOWPART + HIGHPART * BASE. */
163 ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
164 #define HIGHPART(x) \
165 ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
166 #define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)
168 /* Unpack a two-word integer into 4 words.
169 LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
170 WORDS points to the array of HOST_WIDE_INTs. */
173 encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
175 words[0] = LOWPART (low);
176 words[1] = HIGHPART (low);
177 words[2] = LOWPART (hi);
178 words[3] = HIGHPART (hi);
181 /* Pack an array of 4 words into a two-word integer.
182 WORDS points to the array of words.
183 The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces. */
186 decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
189 *low = words[0] + words[1] * BASE;
190 *hi = words[2] + words[3] * BASE;
193 /* Force the double-word integer L1, H1 to be within the range of the
194 integer type TYPE. Stores the properly truncated and sign-extended
195 double-word integer in *LV, *HV. Returns true if the operation
196 overflows, that is, argument and result are different. */
/* NOTE(review): this numbered listing omits interior lines (the function
   header, braces and several statements); the numbered lines below are
   kept verbatim from the extract.  */
199 fit_double_type (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
200 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, const_tree type)
/* Save the originals so overflow can be detected by comparison below.  */
202 unsigned HOST_WIDE_INT low0 = l1;
203 HOST_WIDE_INT high0 = h1;
205 int sign_extended_type;
/* Pointer and offset types are handled like integers of their precision.  */
207 if (POINTER_TYPE_P (type)
208 || TREE_CODE (type) == OFFSET_TYPE)
211 prec = TYPE_PRECISION (type);
213 /* Size types *are* sign extended. */
214 sign_extended_type = (!TYPE_UNSIGNED (type)
215 || (TREE_CODE (type) == INTEGER_TYPE
216 && TYPE_IS_SIZETYPE (type)));
218 /* First clear all bits that are beyond the type's precision. */
219 if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
221 else if (prec > HOST_BITS_PER_WIDE_INT)
222 h1 &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
226 if (prec < HOST_BITS_PER_WIDE_INT)
227 l1 &= ~((HOST_WIDE_INT) (-1) << prec);
230 /* Then do sign extension if necessary. */
231 if (!sign_extended_type)
232 /* No sign extension */;
233 else if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
234 /* Correct width already. */;
235 else if (prec > HOST_BITS_PER_WIDE_INT)
237 /* Sign extend top half? */
238 if (h1 & ((unsigned HOST_WIDE_INT)1
239 << (prec - HOST_BITS_PER_WIDE_INT - 1)))
240 h1 |= (HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT);
242 else if (prec == HOST_BITS_PER_WIDE_INT)
244 if ((HOST_WIDE_INT)l1 < 0)
249 /* Sign extend bottom half? */
250 if (l1 & ((unsigned HOST_WIDE_INT)1 << (prec - 1)))
253 l1 |= (HOST_WIDE_INT)(-1) << prec;
260 /* If the value didn't fit, signal overflow. */
261 return l1 != low0 || h1 != high0;
264 /* We force the double-int HIGH:LOW to the range of the type TYPE by
265 sign or zero extending it.
266 OVERFLOWABLE indicates if we are interested
267 in overflow of the value, when >0 we are only interested in signed
268 overflow, for <0 we are interested in any overflow. OVERFLOWED
269 indicates whether overflow has already occurred. CONST_OVERFLOWED
270 indicates whether constant overflow has already occurred. We force
271 T's value to be within range of T's type (by setting to 0 or 1 all
272 the bits outside the type's range). We set TREE_OVERFLOWED if,
273 OVERFLOWED is nonzero,
274 or OVERFLOWABLE is >0 and signed overflow occurs
275 or OVERFLOWABLE is <0 and any overflow occurs
276 We return a new tree node for the extended double-int. The node
277 is shared if no overflow flags are set. */
/* NOTE(review): interior lines (return type, braces, the full overflow
   condition) are omitted in this listing; numbered lines are verbatim.  */
280 force_fit_type_double (tree type, unsigned HOST_WIDE_INT low,
281 HOST_WIDE_INT high, int overflowable,
284 int sign_extended_type;
287 /* Size types *are* sign extended. */
288 sign_extended_type = (!TYPE_UNSIGNED (type)
289 || (TREE_CODE (type) == INTEGER_TYPE
290 && TYPE_IS_SIZETYPE (type)));
/* Truncate/extend into TYPE's range; OVERFLOW records whether the value
   changed in the process.  */
292 overflow = fit_double_type (low, high, &low, &high, type);
294 /* If we need to set overflow flags, return a new unshared node. */
295 if (overflowed || overflow)
299 || (overflowable > 0 && sign_extended_type))
/* Build an unshared node so setting TREE_OVERFLOW cannot corrupt the
   shared constant cache.  */
301 tree t = make_node (INTEGER_CST);
302 TREE_INT_CST_LOW (t) = low;
303 TREE_INT_CST_HIGH (t) = high;
304 TREE_TYPE (t) = type;
305 TREE_OVERFLOW (t) = 1;
310 /* Else build a shared node. */
311 return build_int_cst_wide (type, low, high);
314 /* Add two doubleword integers with doubleword result.
315 Return nonzero if the operation overflows according to UNSIGNED_P.
316 Each argument is given as two `HOST_WIDE_INT' pieces.
317 One argument is L1 and H1; the other, L2 and H2.
318 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
321 add_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
322 unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
323 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
326 unsigned HOST_WIDE_INT l;
330 h = h1 + h2 + (l < l1);
336 return (unsigned HOST_WIDE_INT) h < (unsigned HOST_WIDE_INT) h1;
338 return OVERFLOW_SUM_SIGN (h1, h2, h);
341 /* Negate a doubleword integer with doubleword result.
342 Return nonzero if the operation overflows, assuming it's signed.
343 The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
344 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
347 neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
348 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
354 return (*hv & h1) < 0;
364 /* Multiply two doubleword integers with doubleword result.
365 Return nonzero if the operation overflows according to UNSIGNED_P.
366 Each argument is given as two `HOST_WIDE_INT' pieces.
367 One argument is L1 and H1; the other, L2 and H2.
368 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
/* NOTE(review): this listing omits interior lines (the bool unsigned_p
   parameter line, braces, loop indices); numbered lines are verbatim.  */
371 mul_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
372 unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
373 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
/* Operands are split into 4 half-words each; the 8-half-word product is
   accumulated schoolbook-style in PROD.  */
376 HOST_WIDE_INT arg1[4];
377 HOST_WIDE_INT arg2[4];
378 HOST_WIDE_INT prod[4 * 2];
379 unsigned HOST_WIDE_INT carry;
381 unsigned HOST_WIDE_INT toplow, neglow;
382 HOST_WIDE_INT tophigh, neghigh;
384 encode (arg1, l1, h1);
385 encode (arg2, l2, h2);
387 memset (prod, 0, sizeof prod);
389 for (i = 0; i < 4; i++)
392 for (j = 0; j < 4; j++)
395 /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000. */
396 carry += arg1[i] * arg2[j];
397 /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF. */
399 prod[k] = LOWPART (carry);
400 carry = HIGHPART (carry);
/* Low half of the product is the result; top half detects overflow.  */
405 decode (prod, lv, hv);
406 decode (prod + 4, &toplow, &tophigh);
408 /* Unsigned overflow is immediate. */
410 return (toplow | tophigh) != 0;
412 /* Check for signed overflow by calculating the signed representation of the
413 top half of the result; it should agree with the low half's sign bit. */
416 neg_double (l2, h2, &neglow, &neghigh);
417 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
421 neg_double (l1, h1, &neglow, &neghigh);
422 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
424 return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
427 /* Shift the doubleword integer in L1, H1 left by COUNT places
428 keeping only PREC bits of result.
429 Shift right if COUNT is negative.
430 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
431 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
/* NOTE(review): interior lines (braces, some assignments) are omitted in
   this listing; numbered lines are verbatim.  */
434 lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
435 HOST_WIDE_INT count, unsigned int prec,
436 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
438 unsigned HOST_WIDE_INT signmask;
/* A negative COUNT means shift right instead.  */
442 rshift_double (l1, h1, -count, prec, lv, hv, arith);
446 if (SHIFT_COUNT_TRUNCATED)
449 if (count >= 2 * HOST_BITS_PER_WIDE_INT)
451 /* Shifting by the host word size is undefined according to the
452 ANSI standard, so we must handle this as a special case. */
456 else if (count >= HOST_BITS_PER_WIDE_INT)
458 *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
/* The two-step right shift below avoids an undefined shift by the full
   word width when COUNT == 0.  */
463 *hv = (((unsigned HOST_WIDE_INT) h1 << count)
464 | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
468 /* Sign extend all bits that are beyond the precision. */
470 signmask = -((prec > HOST_BITS_PER_WIDE_INT
471 ? ((unsigned HOST_WIDE_INT) *hv
472 >> (prec - HOST_BITS_PER_WIDE_INT - 1))
473 : (*lv >> (prec - 1))) & 1);
475 if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
477 else if (prec >= HOST_BITS_PER_WIDE_INT)
479 *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
480 *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
485 *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
486 *lv |= signmask << prec;
490 /* Shift the doubleword integer in L1, H1 right by COUNT places
491 keeping only PREC bits of result. COUNT must be positive.
492 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
493 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
/* NOTE(review): interior lines (braces, the arith parameter line, some
   assignments) are omitted in this listing; numbered lines are verbatim.  */
496 rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
497 HOST_WIDE_INT count, unsigned int prec,
498 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
501 unsigned HOST_WIDE_INT signmask;
/* SIGNMASK is all-ones when arithmetic-shifting a negative value.  */
504 ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
507 if (SHIFT_COUNT_TRUNCATED)
510 if (count >= 2 * HOST_BITS_PER_WIDE_INT)
512 /* Shifting by the host word size is undefined according to the
513 ANSI standard, so we must handle this as a special case. */
517 else if (count >= HOST_BITS_PER_WIDE_INT)
520 *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
524 *hv = (unsigned HOST_WIDE_INT) h1 >> count;
/* Two-step left shift avoids undefined behavior when COUNT == 0.  */
526 | ((unsigned HOST_WIDE_INT) h1 << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
529 /* Zero / sign extend all bits that are beyond the precision. */
531 if (count >= (HOST_WIDE_INT)prec)
536 else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
538 else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
540 *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
541 *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
546 *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
547 *lv |= signmask << (prec - count);
551 /* Rotate the doubleword integer in L1, H1 left by COUNT places
552 keeping only PREC bits of result.
553 Rotate right if COUNT is negative.
554 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
557 lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
558 HOST_WIDE_INT count, unsigned int prec,
559 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
561 unsigned HOST_WIDE_INT s1l, s2l;
562 HOST_WIDE_INT s1h, s2h;
568 lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
569 rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
574 /* Rotate the doubleword integer in L1, H1 left by COUNT places
575 keeping only PREC bits of result. COUNT must be positive.
576 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
579 rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
580 HOST_WIDE_INT count, unsigned int prec,
581 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
583 unsigned HOST_WIDE_INT s1l, s2l;
584 HOST_WIDE_INT s1h, s2h;
590 rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
591 lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
596 /* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
597 for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
598 CODE is a tree code for a kind of division, one of
599 TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
601 It controls how the quotient is rounded to an integer.
602 Return nonzero if the operation overflows.
603 UNS nonzero says do unsigned division. */
/* NOTE(review): this listing omits many interior lines (braces, case
   labels, loop bodies); the numbered lines below are kept verbatim.
   The algorithm is Knuth's classical multiword division (TAOCP vol. 2,
   Algorithm D) on half-word digits, followed by rounding fixups per
   the tree division CODE.  */
606 div_and_round_double (enum tree_code code, int uns,
607 unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
608 HOST_WIDE_INT hnum_orig,
609 unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
610 HOST_WIDE_INT hden_orig,
611 unsigned HOST_WIDE_INT *lquo,
612 HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
616 HOST_WIDE_INT num[4 + 1]; /* extra element for scaling. */
617 HOST_WIDE_INT den[4], quo[4];
619 unsigned HOST_WIDE_INT work;
620 unsigned HOST_WIDE_INT carry = 0;
621 unsigned HOST_WIDE_INT lnum = lnum_orig;
622 HOST_WIDE_INT hnum = hnum_orig;
623 unsigned HOST_WIDE_INT lden = lden_orig;
624 HOST_WIDE_INT hden = hden_orig;
/* Division by zero: flag overflow and substitute 1 to avoid crashing.  */
627 if (hden == 0 && lden == 0)
628 overflow = 1, lden = 1;
630 /* Calculate quotient sign and convert operands to unsigned. */
636 /* (minimum integer) / (-1) is the only overflow case. */
637 if (neg_double (lnum, hnum, &lnum, &hnum)
638 && ((HOST_WIDE_INT) lden & hden) == -1)
644 neg_double (lden, hden, &lden, &hden);
648 if (hnum == 0 && hden == 0)
649 { /* single precision */
651 /* This unsigned division rounds toward zero. */
657 { /* trivial case: dividend < divisor */
658 /* hden != 0 already checked. */
665 memset (quo, 0, sizeof quo);
667 memset (num, 0, sizeof num); /* to zero 9th element */
668 memset (den, 0, sizeof den);
670 encode (num, lnum, hnum);
671 encode (den, lden, hden);
673 /* Special code for when the divisor < BASE. */
674 if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
676 /* hnum != 0 already checked. */
677 for (i = 4 - 1; i >= 0; i--)
679 work = num[i] + carry * BASE;
680 quo[i] = work / lden;
686 /* Full double precision division,
687 with thanks to Don Knuth's "Seminumerical Algorithms". */
688 int num_hi_sig, den_hi_sig;
689 unsigned HOST_WIDE_INT quo_est, scale;
691 /* Find the highest nonzero divisor digit. */
692 for (i = 4 - 1;; i--)
699 /* Insure that the first digit of the divisor is at least BASE/2.
700 This is required by the quotient digit estimation algorithm. */
702 scale = BASE / (den[den_hi_sig] + 1);
704 { /* scale divisor and dividend */
706 for (i = 0; i <= 4 - 1; i++)
708 work = (num[i] * scale) + carry;
709 num[i] = LOWPART (work);
710 carry = HIGHPART (work);
715 for (i = 0; i <= 4 - 1; i++)
717 work = (den[i] * scale) + carry;
718 den[i] = LOWPART (work);
719 carry = HIGHPART (work);
720 if (den[i] != 0) den_hi_sig = i;
/* Main quotient-digit loop, most significant digit first.  */
727 for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
729 /* Guess the next quotient digit, quo_est, by dividing the first
730 two remaining dividend digits by the high order quotient digit.
731 quo_est is never low and is at most 2 high. */
732 unsigned HOST_WIDE_INT tmp;
734 num_hi_sig = i + den_hi_sig + 1;
735 work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
736 if (num[num_hi_sig] != den[den_hi_sig])
737 quo_est = work / den[den_hi_sig];
741 /* Refine quo_est so it's usually correct, and at most one high. */
742 tmp = work - quo_est * den[den_hi_sig];
744 && (den[den_hi_sig - 1] * quo_est
745 > (tmp * BASE + num[num_hi_sig - 2])))
748 /* Try QUO_EST as the quotient digit, by multiplying the
749 divisor by QUO_EST and subtracting from the remaining dividend.
750 Keep in mind that QUO_EST is the I - 1st digit. */
753 for (j = 0; j <= den_hi_sig; j++)
755 work = quo_est * den[j] + carry;
756 carry = HIGHPART (work);
757 work = num[i + j] - LOWPART (work);
758 num[i + j] = LOWPART (work);
759 carry += HIGHPART (work) != 0;
762 /* If quo_est was high by one, then num[i] went negative and
763 we need to correct things. */
764 if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
767 carry = 0; /* add divisor back in */
768 for (j = 0; j <= den_hi_sig; j++)
770 work = num[i + j] + den[j] + carry;
771 carry = HIGHPART (work);
772 num[i + j] = LOWPART (work);
775 num [num_hi_sig] += carry;
778 /* Store the quotient digit. */
783 decode (quo, lquo, hquo);
786 /* If result is negative, make it so. */
788 neg_double (*lquo, *hquo, lquo, hquo);
790 /* Compute trial remainder: rem = num - (quo * den) */
791 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
792 neg_double (*lrem, *hrem, lrem, hrem);
793 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
/* Rounding fixups: adjust the truncated quotient per CODE.  */
798 case TRUNC_MOD_EXPR: /* round toward zero */
799 case EXACT_DIV_EXPR: /* for this one, it shouldn't matter */
803 case FLOOR_MOD_EXPR: /* round toward negative infinity */
804 if (quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio < 0 && rem != 0 */
807 add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
815 case CEIL_MOD_EXPR: /* round toward positive infinity */
816 if (!quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio > 0 && rem != 0 */
818 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
826 case ROUND_MOD_EXPR: /* round to closest integer */
828 unsigned HOST_WIDE_INT labs_rem = *lrem;
829 HOST_WIDE_INT habs_rem = *hrem;
830 unsigned HOST_WIDE_INT labs_den = lden, ltwice;
831 HOST_WIDE_INT habs_den = hden, htwice;
833 /* Get absolute values. */
835 neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
837 neg_double (lden, hden, &labs_den, &habs_den);
839 /* If (2 * abs (lrem) >= abs (lden)), adjust the quotient. */
840 mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
/* NOTE(review): "<wice" on the next line is a corrupted "&ltwice"
   (HTML-entity damage in this extract) — restore when re-syncing.  */
841 labs_rem, habs_rem, <wice, &htwice);
843 if (((unsigned HOST_WIDE_INT) habs_den
844 < (unsigned HOST_WIDE_INT) htwice)
845 || (((unsigned HOST_WIDE_INT) habs_den
846 == (unsigned HOST_WIDE_INT) htwice)
847 && (labs_den <= ltwice)))
851 add_double (*lquo, *hquo,
852 (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
855 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
867 /* Compute true remainder: rem = num - (quo * den) */
868 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
869 neg_double (*lrem, *hrem, lrem, hrem);
870 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
874 /* If ARG2 divides ARG1 with zero remainder, carries out the division
875 of type CODE and returns the quotient.
876 Otherwise returns NULL_TREE. */
/* NOTE(review): interior lines (return type, braces, an early return)
   are omitted in this listing; numbered lines are verbatim.  */
879 div_if_zero_remainder (enum tree_code code, const_tree arg1, const_tree arg2)
881 unsigned HOST_WIDE_INT int1l, int2l;
882 HOST_WIDE_INT int1h, int2h;
883 unsigned HOST_WIDE_INT quol, reml;
884 HOST_WIDE_INT quoh, remh;
885 tree type = TREE_TYPE (arg1);
886 int uns = TYPE_UNSIGNED (type);
888 int1l = TREE_INT_CST_LOW (arg1);
889 int1h = TREE_INT_CST_HIGH (arg1);
890 /* &obj[0] + -128 really should be compiled as &obj[-8] rather than
891 &obj[some_exotic_number]. */
/* Pointers are re-interpreted as signed so negative offsets divide
   sensibly.  */
892 if (POINTER_TYPE_P (type))
895 type = signed_type_for (type);
896 fit_double_type (int1l, int1h, &int1l, &int1h,
900 fit_double_type (int1l, int1h, &int1l, &int1h, type);
901 int2l = TREE_INT_CST_LOW (arg2);
902 int2h = TREE_INT_CST_HIGH (arg2);
904 div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
905 &quol, &quoh, &reml, &remh);
/* A nonzero remainder means ARG2 does not evenly divide ARG1.  */
906 if (remh != 0 || reml != 0)
909 return build_int_cst_wide (type, quol, quoh);
912 /* This is nonzero if we should defer warnings about undefined
913 overflow. This facility exists because these warnings are a
914 special case. The code to estimate loop iterations does not want
915 to issue any warnings, since it works with expressions which do not
916 occur in user code. Various bits of cleanup code call fold(), but
917 only use the result if it has certain characteristics (e.g., is a
918 constant); that code only wants to issue a warning if the result is
921 static int fold_deferring_overflow_warnings;
923 /* If a warning about undefined overflow is deferred, this is the
924 warning. Note that this may cause us to turn two warnings into
925 one, but that is fine since it is sufficient to only give one
926 warning per expression. */
928 static const char* fold_deferred_overflow_warning;
930 /* If a warning about undefined overflow is deferred, this is the
931 level at which the warning should be emitted. */
933 static enum warn_strict_overflow_code fold_deferred_overflow_code;
935 /* Start deferring overflow warnings. We could use a stack here to
936 permit nested calls, but at present it is not necessary. */
939 fold_defer_overflow_warnings (void)
941 ++fold_deferring_overflow_warnings;
944 /* Stop deferring overflow warnings. If there is a pending warning,
945 and ISSUE is true, then issue the warning if appropriate. STMT is
946 the statement with which the warning should be associated (used for
947 location information); STMT may be NULL. CODE is the level of the
948 warning--a warn_strict_overflow_code value. This function will use
949 the smaller of CODE and the deferred code when deciding whether to
950 issue the warning. CODE may be zero to mean to always use the
/* NOTE(review): interior lines (return type, braces, early returns and
   locals) are omitted in this listing; numbered lines are verbatim.  */
954 fold_undefer_overflow_warnings (bool issue, const_gimple stmt, int code)
959 gcc_assert (fold_deferring_overflow_warnings > 0);
960 --fold_deferring_overflow_warnings;
/* Still inside an outer defer: just tighten the deferred level.  */
961 if (fold_deferring_overflow_warnings > 0)
963 if (fold_deferred_overflow_warning != NULL
965 && code < (int) fold_deferred_overflow_code)
966 fold_deferred_overflow_code = code;
/* Take ownership of the pending message and clear the deferred state.  */
970 warnmsg = fold_deferred_overflow_warning;
971 fold_deferred_overflow_warning = NULL;
973 if (!issue || warnmsg == NULL)
976 if (gimple_no_warning_p (stmt))
979 /* Use the smallest code level when deciding to issue the
981 if (code == 0 || code > (int) fold_deferred_overflow_code)
982 code = fold_deferred_overflow_code;
984 if (!issue_strict_overflow_warning (code))
/* Attribute the warning to STMT's location when available.  */
988 locus = input_location;
990 locus = gimple_location (stmt);
991 warning (OPT_Wstrict_overflow, "%H%s", &locus, warnmsg);
994 /* Stop deferring overflow warnings, ignoring any deferred
998 fold_undefer_and_ignore_overflow_warnings (void)
1000 fold_undefer_overflow_warnings (false, NULL, 0);
1003 /* Whether we are deferring overflow warnings. */
1006 fold_deferring_overflow_warnings_p (void)
1008 return fold_deferring_overflow_warnings > 0;
1011 /* This is called when we fold something based on the fact that signed
1012 overflow is undefined. */
1015 fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
1017 if (fold_deferring_overflow_warnings > 0)
1019 if (fold_deferred_overflow_warning == NULL
1020 || wc < fold_deferred_overflow_code)
1022 fold_deferred_overflow_warning = gmsgid;
1023 fold_deferred_overflow_code = wc;
1026 else if (issue_strict_overflow_warning (wc))
1027 warning (OPT_Wstrict_overflow, gmsgid);
1030 /* Return true if the built-in mathematical function specified by CODE
1031 is odd, i.e. -f(x) == f(-x). */
/* NOTE(review): interior lines (return type, braces, the switch keyword,
   some case labels and returns) are omitted in this listing; numbered
   lines are verbatim.  */
1034 negate_mathfn_p (enum built_in_function code)
/* These math builtins are odd functions: -f(x) == f(-x).  */
1038 CASE_FLT_FN (BUILT_IN_ASIN):
1039 CASE_FLT_FN (BUILT_IN_ASINH):
1040 CASE_FLT_FN (BUILT_IN_ATAN):
1041 CASE_FLT_FN (BUILT_IN_ATANH):
1042 CASE_FLT_FN (BUILT_IN_CASIN):
1043 CASE_FLT_FN (BUILT_IN_CASINH):
1044 CASE_FLT_FN (BUILT_IN_CATAN):
1045 CASE_FLT_FN (BUILT_IN_CATANH):
1046 CASE_FLT_FN (BUILT_IN_CBRT):
1047 CASE_FLT_FN (BUILT_IN_CPROJ):
1048 CASE_FLT_FN (BUILT_IN_CSIN):
1049 CASE_FLT_FN (BUILT_IN_CSINH):
1050 CASE_FLT_FN (BUILT_IN_CTAN):
1051 CASE_FLT_FN (BUILT_IN_CTANH):
1052 CASE_FLT_FN (BUILT_IN_ERF):
1053 CASE_FLT_FN (BUILT_IN_LLROUND):
1054 CASE_FLT_FN (BUILT_IN_LROUND):
1055 CASE_FLT_FN (BUILT_IN_ROUND):
1056 CASE_FLT_FN (BUILT_IN_SIN):
1057 CASE_FLT_FN (BUILT_IN_SINH):
1058 CASE_FLT_FN (BUILT_IN_TAN):
1059 CASE_FLT_FN (BUILT_IN_TANH):
1060 CASE_FLT_FN (BUILT_IN_TRUNC):
/* The rint family is only odd when rounding mode games are disabled.  */
1063 CASE_FLT_FN (BUILT_IN_LLRINT):
1064 CASE_FLT_FN (BUILT_IN_LRINT):
1065 CASE_FLT_FN (BUILT_IN_NEARBYINT):
1066 CASE_FLT_FN (BUILT_IN_RINT):
1067 return !flag_rounding_math;
1075 /* Check whether we may negate an integer constant T without causing
/* NOTE(review): interior lines (return type, braces, early returns) are
   omitted in this listing; numbered lines are verbatim.  */
1079 may_negate_without_overflow_p (const_tree t)
1081 unsigned HOST_WIDE_INT val;
1085 gcc_assert (TREE_CODE (t) == INTEGER_CST);
1087 type = TREE_TYPE (t);
1088 if (TYPE_UNSIGNED (type))
1091 prec = TYPE_PRECISION (type);
/* Wide constants: only the high word can carry the minimum value, and
   then only when the low word is all zero.  */
1092 if (prec > HOST_BITS_PER_WIDE_INT)
1094 if (TREE_INT_CST_LOW (t) != 0)
1096 prec -= HOST_BITS_PER_WIDE_INT;
1097 val = TREE_INT_CST_HIGH (t);
1100 val = TREE_INT_CST_LOW (t);
1101 if (prec < HOST_BITS_PER_WIDE_INT)
1102 val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
/* Negation overflows exactly for the most negative value (sign bit
   alone set).  */
1103 return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
1106 /* Determine whether an expression T can be cheaply negated using
1107 the function negate_expr without introducing undefined overflow. */
/* NOTE(review): interior lines (return type, braces, most case labels
   and default returns) are omitted in this listing; numbered lines are
   verbatim.  */
1110 negate_expr_p (tree t)
1117 type = TREE_TYPE (t);
1119 STRIP_SIGN_NOPS (t);
/* Dispatch on the tree code of T.  */
1120 switch (TREE_CODE (t))
1123 if (TYPE_OVERFLOW_WRAPS (type))
1126 /* Check that -CST will not overflow type. */
1127 return may_negate_without_overflow_p (t);
1129 return (INTEGRAL_TYPE_P (type)
1130 && TYPE_OVERFLOW_WRAPS (type));
/* Complex constants: both parts must be negatable.  */
1138 return negate_expr_p (TREE_REALPART (t))
1139 && negate_expr_p (TREE_IMAGPART (t));
1142 return negate_expr_p (TREE_OPERAND (t, 0))
1143 && negate_expr_p (TREE_OPERAND (t, 1));
1146 return negate_expr_p (TREE_OPERAND (t, 0));
/* PLUS_EXPR: unsafe when sign-dependent rounding or signed zeros
   are honored.  */
1149 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
1150 || HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
1152 /* -(A + B) -> (-B) - A. */
1153 if (negate_expr_p (TREE_OPERAND (t, 1))
1154 && reorder_operands_p (TREE_OPERAND (t, 0),
1155 TREE_OPERAND (t, 1)))
1157 /* -(A + B) -> (-A) - B. */
1158 return negate_expr_p (TREE_OPERAND (t, 0));
1161 /* We can't turn -(A-B) into B-A when we honor signed zeros. */
1162 return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
1163 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
1164 && reorder_operands_p (TREE_OPERAND (t, 0),
1165 TREE_OPERAND (t, 1));
1168 if (TYPE_UNSIGNED (TREE_TYPE (t)))
/* For a product, negating either factor suffices.  */
1174 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
1175 return negate_expr_p (TREE_OPERAND (t, 1))
1176 || negate_expr_p (TREE_OPERAND (t, 0));
1179 case TRUNC_DIV_EXPR:
1180 case ROUND_DIV_EXPR:
1181 case FLOOR_DIV_EXPR:
1183 case EXACT_DIV_EXPR:
1184 /* In general we can't negate A / B, because if A is INT_MIN and
1185 B is 1, we may turn this into INT_MIN / -1 which is undefined
1186 and actually traps on some architectures. But if overflow is
1187 undefined, we can negate, because - (INT_MIN / 1) is an
1189 if (INTEGRAL_TYPE_P (TREE_TYPE (t))
1190 && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
1192 return negate_expr_p (TREE_OPERAND (t, 1))
1193 || negate_expr_p (TREE_OPERAND (t, 0));
1196 /* Negate -((double)float) as (double)(-float). */
1197 if (TREE_CODE (type) == REAL_TYPE)
1199 tree tem = strip_float_extensions (t);
1201 return negate_expr_p (tem);
1206 /* Negate -f(x) as f(-x). */
1207 if (negate_mathfn_p (builtin_mathfn_code (t)))
1208 return negate_expr_p (CALL_EXPR_ARG (t, 0));
1212 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
1213 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
1215 tree op1 = TREE_OPERAND (t, 1);
1216 if (TREE_INT_CST_HIGH (op1) == 0
1217 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
1218 == TREE_INT_CST_LOW (op1))
1229 /* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
1230 simplification is possible.
1231 If negate_expr_p would return true for T, NULL_TREE will never be
/* NOTE(review): partial listing -- the switch's case labels, braces and
   default arm are elided here; comments below describe only the visible
   arms of the TREE_CODE (t) switch.  */
1235 fold_negate_expr (tree t)
1237 tree type = TREE_TYPE (t);
1240 switch (TREE_CODE (t))
1242 /* Convert - (~A) to A + 1. */
1244 if (INTEGRAL_TYPE_P (type))
1245 return fold_build2 (PLUS_EXPR, type, TREE_OPERAND (t, 0),
1246 build_int_cst (type, 1));
/* Constant arms: fold the negation at compile time, but only keep the
   result when doing so does not hide a trapping overflow.  */
1250 tem = fold_negate_const (t, type);
1251 if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
1252 || !TYPE_OVERFLOW_TRAPS (type))
1257 tem = fold_negate_const (t, type);
1258 /* Two's complement FP formats, such as c4x, may overflow. */
1259 if (!TREE_OVERFLOW (tem) || !flag_trapping_math)
1264 tem = fold_negate_const (t, type);
/* COMPLEX_CST: negate both parts; only build the result when both parts
   folded to constants again.  */
1269 tree rpart = negate_expr (TREE_REALPART (t));
1270 tree ipart = negate_expr (TREE_IMAGPART (t));
1272 if ((TREE_CODE (rpart) == REAL_CST
1273 && TREE_CODE (ipart) == REAL_CST)
1274 || (TREE_CODE (rpart) == INTEGER_CST
1275 && TREE_CODE (ipart) == INTEGER_CST))
1276 return build_complex (type, rpart, ipart);
1281 if (negate_expr_p (t))
1282 return fold_build2 (COMPLEX_EXPR, type,
1283 fold_negate_expr (TREE_OPERAND (t, 0)),
1284 fold_negate_expr (TREE_OPERAND (t, 1)));
1288 if (negate_expr_p (t))
1289 return fold_build1 (CONJ_EXPR, type,
1290 fold_negate_expr (TREE_OPERAND (t, 0)));
/* - (- X) simplifies to X.  */
1294 return TREE_OPERAND (t, 0);
/* PLUS_EXPR arm: only reassociate when sign-dependent rounding and signed
   zeros need not be honored for this mode.  */
1297 if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
1298 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
1300 /* -(A + B) -> (-B) - A. */
1301 if (negate_expr_p (TREE_OPERAND (t, 1))
1302 && reorder_operands_p (TREE_OPERAND (t, 0),
1303 TREE_OPERAND (t, 1)))
1305 tem = negate_expr (TREE_OPERAND (t, 1));
1306 return fold_build2 (MINUS_EXPR, type,
1307 tem, TREE_OPERAND (t, 0));
1310 /* -(A + B) -> (-A) - B. */
1311 if (negate_expr_p (TREE_OPERAND (t, 0)))
1313 tem = negate_expr (TREE_OPERAND (t, 0));
1314 return fold_build2 (MINUS_EXPR, type,
1315 tem, TREE_OPERAND (t, 1));
1321 /* - (A - B) -> B - A */
1322 if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
1323 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
1324 && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
1325 return fold_build2 (MINUS_EXPR, type,
1326 TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
1330 if (TYPE_UNSIGNED (type))
/* Multiplicative arm: push the negation into whichever operand is itself
   negatable, preferring operand 1.  */
1336 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
1338 tem = TREE_OPERAND (t, 1);
1339 if (negate_expr_p (tem))
1340 return fold_build2 (TREE_CODE (t), type,
1341 TREE_OPERAND (t, 0), negate_expr (tem));
1342 tem = TREE_OPERAND (t, 0);
1343 if (negate_expr_p (tem))
1344 return fold_build2 (TREE_CODE (t), type,
1345 negate_expr (tem), TREE_OPERAND (t, 1));
1349 case TRUNC_DIV_EXPR:
1350 case ROUND_DIV_EXPR:
1351 case FLOOR_DIV_EXPR:
1353 case EXACT_DIV_EXPR:
1354 /* In general we can't negate A / B, because if A is INT_MIN and
1355 B is 1, we may turn this into INT_MIN / -1 which is undefined
1356 and actually traps on some architectures. But if overflow is
1357 undefined, we can negate, because - (INT_MIN / 1) is an
1359 if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
1361 const char * const warnmsg = G_("assuming signed overflow does not "
1362 "occur when negating a division");
1363 tem = TREE_OPERAND (t, 1);
1364 if (negate_expr_p (tem))
/* Warn when the transformation is only valid under the assumption that
   signed overflow is undefined.  */
1366 if (INTEGRAL_TYPE_P (type)
1367 && (TREE_CODE (tem) != INTEGER_CST
1368 || integer_onep (tem)))
1369 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
1370 return fold_build2 (TREE_CODE (t), type,
1371 TREE_OPERAND (t, 0), negate_expr (tem));
1373 tem = TREE_OPERAND (t, 0);
1374 if (negate_expr_p (tem))
1376 if (INTEGRAL_TYPE_P (type)
1377 && (TREE_CODE (tem) != INTEGER_CST
1378 || tree_int_cst_equal (tem, TYPE_MIN_VALUE (type))))
1379 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
1380 return fold_build2 (TREE_CODE (t), type,
1381 negate_expr (tem), TREE_OPERAND (t, 1));
1387 /* Convert -((double)float) into (double)(-float). */
1388 if (TREE_CODE (type) == REAL_TYPE)
1390 tem = strip_float_extensions (t);
1391 if (tem != t && negate_expr_p (tem))
1392 return fold_convert (type, negate_expr (tem));
1397 /* Negate -f(x) as f(-x). */
1398 if (negate_mathfn_p (builtin_mathfn_code (t))
1399 && negate_expr_p (CALL_EXPR_ARG (t, 0)))
1403 fndecl = get_callee_fndecl (t);
1404 arg = negate_expr (CALL_EXPR_ARG (t, 0));
1405 return build_call_expr (fndecl, 1, arg);
1410 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
1411 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
1413 tree op1 = TREE_OPERAND (t, 1);
/* Only when the shift count equals precision - 1, i.e. the value is the
   sign bit smeared across the word.  */
1414 if (TREE_INT_CST_HIGH (op1) == 0
1415 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
1416 == TREE_INT_CST_LOW (op1))
1418 tree ntype = TYPE_UNSIGNED (type)
1419 ? signed_type_for (type)
1420 : unsigned_type_for (type);
1421 tree temp = fold_convert (ntype, TREE_OPERAND (t, 0));
1422 temp = fold_build2 (RSHIFT_EXPR, ntype, temp, op1);
1423 return fold_convert (type, temp);
1435 /* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T can not be
1436 negated in a simpler way. Also allow for T to be NULL_TREE, in which case
1437 return NULL_TREE. */
/* NOTE(review): partial listing -- the NULL_TREE early-return and the
   "if (!tem)" guard before building NEGATE_EXPR are elided.  */
1440 negate_expr (tree t)
1447 type = TREE_TYPE (t);
/* Remember the original type before stripping, so the result can be
   converted back to it at the end.  */
1448 STRIP_SIGN_NOPS (t);
1450 tem = fold_negate_expr (t);
/* Fall back to an explicit NEGATE_EXPR when no simplification applied.  */
1452 tem = build1 (NEGATE_EXPR, TREE_TYPE (t), t);
1453 return fold_convert (type, tem);
1456 /* Split a tree IN into a constant, literal and variable parts that could be
1457 combined with CODE to make IN. "constant" means an expression with
1458 TREE_CONSTANT but that isn't an actual constant. CODE must be a
1459 commutative arithmetic operation. Store the constant part into *CONP,
1460 the literal in *LITP and return the variable part. If a part isn't
1461 present, set it to null. If the tree does not decompose in this way,
1462 return the entire tree as the variable part and the other parts as null.
1464 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR. In that
1465 case, we negate an operand that was subtracted. Except if it is a
1466 literal for which we use *MINUS_LITP instead.
1468 If NEGATE_P is true, we are negating all of IN, again except a literal
1469 for which we use *MINUS_LITP instead.
1471 If IN is itself a literal or constant, return it as appropriate.
1473 Note that we do not guarantee that any of the three values will be the
1474 same type as IN, but they will have the same signedness and mode. */
/* NOTE(review): partial listing -- the declarations of VAR, the
   initializations of *CONP/*LITP/*MINUS_LITP, and several conditional
   guards around the negation lines below are elided.  */
1477 split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
1478 tree *minus_litp, int negate_p)
1486 /* Strip any conversions that don't change the machine mode or signedness. */
1487 STRIP_SIGN_NOPS (in);
1489 if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
1490 || TREE_CODE (in) == FIXED_CST)
/* Decompose a matching binary node (or a +/- pair when integral or
   -fassociative-math permits) into its two operands.  */
1492 else if (TREE_CODE (in) == code
1493 || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
1494 && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
1495 /* We can associate addition and subtraction together (even
1496 though the C standard doesn't say so) for integers because
1497 the value is not affected. For reals, the value might be
1498 affected, so we can't. */
1499 && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
1500 || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
1502 tree op0 = TREE_OPERAND (in, 0);
1503 tree op1 = TREE_OPERAND (in, 1);
/* neg1_p records that operand 1 was subtracted, so whichever part it
   lands in must be negated.  */
1504 int neg1_p = TREE_CODE (in) == MINUS_EXPR;
1505 int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;
1507 /* First see if either of the operands is a literal, then a constant. */
1508 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
1509 || TREE_CODE (op0) == FIXED_CST)
1510 *litp = op0, op0 = 0;
1511 else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
1512 || TREE_CODE (op1) == FIXED_CST)
1513 *litp = op1, neg_litp_p = neg1_p, op1 = 0;
1515 if (op0 != 0 && TREE_CONSTANT (op0))
1516 *conp = op0, op0 = 0;
1517 else if (op1 != 0 && TREE_CONSTANT (op1))
1518 *conp = op1, neg_conp_p = neg1_p, op1 = 0;
1520 /* If we haven't dealt with either operand, this is not a case we can
1521 decompose. Otherwise, VAR is either of the ones remaining, if any. */
1522 if (op0 != 0 && op1 != 0)
1527 var = op1, neg_var_p = neg1_p;
1529 /* Now do any needed negations. */
1531 *minus_litp = *litp, *litp = 0;
1533 *conp = negate_expr (*conp);
1535 var = negate_expr (var);
1537 else if (TREE_CONSTANT (in))
/* Whole-expression negation (NEGATE_P): a literal moves between *LITP and
   *MINUS_LITP rather than being rewritten.  */
1545 *minus_litp = *litp, *litp = 0;
1546 else if (*minus_litp)
1547 *litp = *minus_litp, *minus_litp = 0;
1548 *conp = negate_expr (*conp);
1549 var = negate_expr (var);
1555 /* Re-associate trees split by the above function. T1 and T2 are either
1556 expressions to associate or null. Return the new expression, if any. If
1557 we build an operation, do it in TYPE and with CODE. */
/* NOTE(review): partial listing -- the early returns for T1/T2 being null
   are elided before the recursion-avoidance check.  */
1560 associate_trees (tree t1, tree t2, enum tree_code code, tree type)
1567 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
1568 try to fold this since we will have infinite recursion. But do
1569 deal with any NEGATE_EXPRs. */
1570 if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
1571 || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
1573 if (code == PLUS_EXPR)
/* X + (-Y) and (-X) + Y are rebuilt as subtractions; X + 0 collapses.  */
1575 if (TREE_CODE (t1) == NEGATE_EXPR)
1576 return build2 (MINUS_EXPR, type, fold_convert (type, t2),
1577 fold_convert (type, TREE_OPERAND (t1, 0)));
1578 else if (TREE_CODE (t2) == NEGATE_EXPR)
1579 return build2 (MINUS_EXPR, type, fold_convert (type, t1),
1580 fold_convert (type, TREE_OPERAND (t2, 0)));
1581 else if (integer_zerop (t2))
1582 return fold_convert (type, t1);
1584 else if (code == MINUS_EXPR)
1586 if (integer_zerop (t2))
1587 return fold_convert (type, t1);
/* Unfoldable recursive case: build the node without folding.  */
1590 return build2 (code, type, fold_convert (type, t1),
1591 fold_convert (type, t2));
/* Safe case: let fold_build2 simplify.  */
1594 return fold_build2 (code, type, fold_convert (type, t1),
1595 fold_convert (type, t2));
1598 /* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
1599 for use in int_const_binop, size_binop and size_diffop. */
/* NOTE(review): partial listing -- intermediate "return false" lines for
   the two guard checks are elided.  */
1602 int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
/* Both types must be integer or pointer types ...  */
1604 if (TREE_CODE (type1) != INTEGER_TYPE && !POINTER_TYPE_P (type1))
1606 if (TREE_CODE (type2) != INTEGER_TYPE && !POINTER_TYPE_P (type2))
/* ... and agree in signedness, precision and machine mode.  */
1621 return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
1622 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
1623 && TYPE_MODE (type1) == TYPE_MODE (type2);
1627 /* Combine two integer constants ARG1 and ARG2 under operation CODE
1628 to produce a new constant. Return NULL_TREE if we don't know how
1629 to evaluate CODE at compile-time.
1631 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
/* NOTE(review): partial listing -- the switch's case labels, break
   statements, the default arm and several returns are elided; the
   arithmetic is done on (high, low) HOST_WIDE_INT pairs that represent a
   double-width integer.  */
1634 int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2, int notrunc)
1636 unsigned HOST_WIDE_INT int1l, int2l;
1637 HOST_WIDE_INT int1h, int2h;
1638 unsigned HOST_WIDE_INT low;
/* garbagel/garbageh receive the unused half of div/mod results.  */
1640 unsigned HOST_WIDE_INT garbagel;
1641 HOST_WIDE_INT garbageh;
1643 tree type = TREE_TYPE (arg1);
1644 int uns = TYPE_UNSIGNED (type);
1646 = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
/* Unpack both operands into low/high word pairs.  */
1649 int1l = TREE_INT_CST_LOW (arg1);
1650 int1h = TREE_INT_CST_HIGH (arg1);
1651 int2l = TREE_INT_CST_LOW (arg2);
1652 int2h = TREE_INT_CST_HIGH (arg2);
/* Bitwise operations act independently on each word.  */
1657 low = int1l | int2l, hi = int1h | int2h;
1661 low = int1l ^ int2l, hi = int1h ^ int2h;
1665 low = int1l & int2l, hi = int1h & int2h;
1671 /* It's unclear from the C standard whether shifts can overflow.
1672 The following code ignores overflow; perhaps a C standard
1673 interpretation ruling is needed. */
1674 lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
1681 lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
1686 overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
/* Subtraction is implemented as addition of the negation; the overflow
   test accounts for the sign flip of the second operand.  */
1690 neg_double (int2l, int2h, &low, &hi);
1691 add_double (int1l, int1h, low, hi, &low, &hi);
1692 overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
1696 overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
1699 case TRUNC_DIV_EXPR:
1700 case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
1701 case EXACT_DIV_EXPR:
1702 /* This is a shortcut for a common special case. */
1703 if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1704 && !TREE_OVERFLOW (arg1)
1705 && !TREE_OVERFLOW (arg2)
1706 && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1708 if (code == CEIL_DIV_EXPR)
1711 low = int1l / int2l, hi = 0;
1715 /* ... fall through ... */
1717 case ROUND_DIV_EXPR:
/* Division by zero is not folded; NULL_TREE is presumably returned here
   (elided line) -- verify against the full source.  */
1718 if (int2h == 0 && int2l == 0)
1720 if (int2h == 0 && int2l == 1)
1722 low = int1l, hi = int1h;
/* X / X folds to 1 (for nonzero X).  */
1725 if (int1l == int2l && int1h == int2h
1726 && ! (int1l == 0 && int1h == 0))
1731 overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
1732 &low, &hi, &garbagel, &garbageh);
1735 case TRUNC_MOD_EXPR:
1736 case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
1737 /* This is a shortcut for a common special case. */
1738 if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1739 && !TREE_OVERFLOW (arg1)
1740 && !TREE_OVERFLOW (arg2)
1741 && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1743 if (code == CEIL_MOD_EXPR)
1745 low = int1l % int2l, hi = 0;
1749 /* ... fall through ... */
1751 case ROUND_MOD_EXPR:
1752 if (int2h == 0 && int2l == 0)
1754 overflow = div_and_round_double (code, uns,
1755 int1l, int1h, int2l, int2h,
1756 &garbagel, &garbageh, &low, &hi);
/* MIN/MAX: compare double-width values (unsigned vs. signed compare
   chosen by UNS), then pick whichever operand the comparison selects.  */
1762 low = (((unsigned HOST_WIDE_INT) int1h
1763 < (unsigned HOST_WIDE_INT) int2h)
1764 || (((unsigned HOST_WIDE_INT) int1h
1765 == (unsigned HOST_WIDE_INT) int2h)
1768 low = (int1h < int2h
1769 || (int1h == int2h && int1l < int2l));
1771 if (low == (code == MIN_EXPR))
1772 low = int1l, hi = int1h;
1774 low = int2l, hi = int2h;
/* NOTRUNC path: build the constant as-is and set overflow manually.  */
1783 t = build_int_cst_wide (TREE_TYPE (arg1), low, hi);
1785 /* Propagate overflow flags ourselves. */
1786 if (((!uns || is_sizetype) && overflow)
1787 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
1790 TREE_OVERFLOW (t) = 1;
/* Otherwise force the value to fit the type, which also propagates the
   overflow indication.  */
1794 t = force_fit_type_double (TREE_TYPE (arg1), low, hi, 1,
1795 ((!uns || is_sizetype) && overflow)
1796 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
1801 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1802 constant. We assume ARG1 and ARG2 have the same data type, or at least
1803 are the same kind of constant and the same machine mode. Return zero if
1804 combining the constants is not allowed in the current operating mode.
1806 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
/* NOTE(review): partial listing -- variable declarations, several case
   labels, returns and closing braces are elided.  Dispatches on the kind
   of ARG1: INTEGER_CST, REAL_CST, FIXED_CST, COMPLEX_CST, VECTOR_CST.  */
1809 const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
1811 /* Sanity check for the recursive cases. */
1818 if (TREE_CODE (arg1) == INTEGER_CST)
1819 return int_const_binop (code, arg1, arg2, notrunc);
1821 if (TREE_CODE (arg1) == REAL_CST)
1823 enum machine_mode mode;
1826 REAL_VALUE_TYPE value;
1827 REAL_VALUE_TYPE result;
1831 /* The following codes are handled by real_arithmetic. */
1846 d1 = TREE_REAL_CST (arg1);
1847 d2 = TREE_REAL_CST (arg2);
1849 type = TREE_TYPE (arg1);
1850 mode = TYPE_MODE (type);
1852 /* Don't perform operation if we honor signaling NaNs and
1853 either operand is a NaN. */
1854 if (HONOR_SNANS (mode)
1855 && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
1858 /* Don't perform operation if it would raise a division
1859 by zero exception. */
1860 if (code == RDIV_EXPR
1861 && REAL_VALUES_EQUAL (d2, dconst0)
1862 && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
1865 /* If either operand is a NaN, just return it. Otherwise, set up
1866 for floating-point trap; we return an overflow. */
1867 if (REAL_VALUE_ISNAN (d1))
1869 else if (REAL_VALUE_ISNAN (d2))
/* Compute in infinite precision, then round to the target mode; INEXACT
   records whether rounding lost information.  */
1872 inexact = real_arithmetic (&value, code, &d1, &d2);
1873 real_convert (&result, mode, &value);
1875 /* Don't constant fold this floating point operation if
1876 the result has overflowed and flag_trapping_math. */
1877 if (flag_trapping_math
1878 && MODE_HAS_INFINITIES (mode)
1879 && REAL_VALUE_ISINF (result)
1880 && !REAL_VALUE_ISINF (d1)
1881 && !REAL_VALUE_ISINF (d2))
1884 /* Don't constant fold this floating point operation if the
1885 result may dependent upon the run-time rounding mode and
1886 flag_rounding_math is set, or if GCC's software emulation
1887 is unable to accurately represent the result. */
1888 if ((flag_rounding_math
1889 || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
1890 && (inexact || !real_identical (&result, &value)))
1893 t = build_real (type, result);
1895 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
1899 if (TREE_CODE (arg1) == FIXED_CST)
1901 FIXED_VALUE_TYPE f1;
1902 FIXED_VALUE_TYPE f2;
1903 FIXED_VALUE_TYPE result;
1908 /* The following codes are handled by fixed_arithmetic. */
1914 case TRUNC_DIV_EXPR:
1915 f2 = TREE_FIXED_CST (arg2);
/* Shift-by-integer arms: build a FIXED_VALUE_TYPE whose data words come
   straight from the integer constant ARG2.  */
1920 f2.data.high = TREE_INT_CST_HIGH (arg2);
1921 f2.data.low = TREE_INT_CST_LOW (arg2);
1929 f1 = TREE_FIXED_CST (arg1);
1930 type = TREE_TYPE (arg1);
1931 sat_p = TYPE_SATURATING (type);
1932 overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
1933 t = build_fixed (type, result);
1934 /* Propagate overflow flags. */
1935 if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
1936 TREE_OVERFLOW (t) = 1;
1940 if (TREE_CODE (arg1) == COMPLEX_CST)
1942 tree type = TREE_TYPE (arg1);
1943 tree r1 = TREE_REALPART (arg1);
1944 tree i1 = TREE_IMAGPART (arg1);
1945 tree r2 = TREE_REALPART (arg2);
1946 tree i2 = TREE_IMAGPART (arg2);
/* Addition/subtraction of complex constants folds componentwise.  */
1953 real = const_binop (code, r1, r2, notrunc);
1954 imag = const_binop (code, i1, i2, notrunc);
/* Complex multiplication: (r1*r2 - i1*i2) + (r1*i2 + i1*r2)i.  */
1958 real = const_binop (MINUS_EXPR,
1959 const_binop (MULT_EXPR, r1, r2, notrunc),
1960 const_binop (MULT_EXPR, i1, i2, notrunc),
1962 imag = const_binop (PLUS_EXPR,
1963 const_binop (MULT_EXPR, r1, i2, notrunc),
1964 const_binop (MULT_EXPR, i1, r2, notrunc),
/* Complex division by the textbook formula: divide (t1, t2) by
   |r2 + i2*i|^2 = r2*r2 + i2*i2.  */
1971 = const_binop (PLUS_EXPR,
1972 const_binop (MULT_EXPR, r2, r2, notrunc),
1973 const_binop (MULT_EXPR, i2, i2, notrunc),
1976 = const_binop (PLUS_EXPR,
1977 const_binop (MULT_EXPR, r1, r2, notrunc),
1978 const_binop (MULT_EXPR, i1, i2, notrunc),
1981 = const_binop (MINUS_EXPR,
1982 const_binop (MULT_EXPR, i1, r2, notrunc),
1983 const_binop (MULT_EXPR, r1, i2, notrunc),
/* For integral complex types the component division truncates.  */
1986 if (INTEGRAL_TYPE_P (TREE_TYPE (r1)))
1987 code = TRUNC_DIV_EXPR;
1989 real = const_binop (code, t1, magsquared, notrunc);
1990 imag = const_binop (code, t2, magsquared, notrunc);
1999 return build_complex (type, real, imag);
2002 if (TREE_CODE (arg1) == VECTOR_CST)
2004 tree type = TREE_TYPE(arg1);
2005 int count = TYPE_VECTOR_SUBPARTS (type), i;
2006 tree elements1, elements2, list = NULL_TREE;
2008 if(TREE_CODE(arg2) != VECTOR_CST)
2011 elements1 = TREE_VECTOR_CST_ELTS (arg1);
2012 elements2 = TREE_VECTOR_CST_ELTS (arg2);
2014 for (i = 0; i < count; i++)
2016 tree elem1, elem2, elem;
2018 /* The trailing elements can be empty and should be treated as 0 */
2020 elem1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2023 elem1 = TREE_VALUE(elements1);
2024 elements1 = TREE_CHAIN (elements1);
2028 elem2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2031 elem2 = TREE_VALUE(elements2);
2032 elements2 = TREE_CHAIN (elements2);
2035 elem = const_binop (code, elem1, elem2, notrunc);
2037 /* It is possible that const_binop cannot handle the given
2038 code and return NULL_TREE */
2039 if(elem == NULL_TREE)
/* Elements are consed in reverse; nreverse below restores order.  */
2042 list = tree_cons (NULL_TREE, elem, list);
2044 return build_vector(type, nreverse(list));
2049 /* Create a size type INT_CST node with NUMBER sign extended. KIND
2050 indicates which particular sizetype to create. */
/* KIND indexes sizetype_tab to select e.g. sizetype vs. bitsizetype.  */
2053 size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
2055 return build_int_cst (sizetype_tab[(int) kind], number);
2058 /* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE
2059 is a tree code. The type of the result is taken from the operands.
2060 Both must be equivalent integer types, ala int_binop_types_match_p.
2061 If the operands are constant, so is the result. */
/* NOTE(review): partial listing -- the "return arg0/arg1" lines for the
   identity shortcuts below are elided.  */
2064 size_binop (enum tree_code code, tree arg0, tree arg1)
2066 tree type = TREE_TYPE (arg0);
2068 if (arg0 == error_mark_node || arg1 == error_mark_node)
2069 return error_mark_node;
/* Both operand types must agree per int_binop_types_match_p.  */
2071 gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
2074 /* Handle the special case of two integer constants faster. */
2075 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2077 /* And some specific cases even faster than that. */
2078 if (code == PLUS_EXPR)
2080 if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
2082 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
2085 else if (code == MINUS_EXPR)
2087 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
2090 else if (code == MULT_EXPR)
2092 if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
2096 /* Handle general case of two integer constants. */
2097 return int_const_binop (code, arg0, arg1, 0);
/* Non-constant operands: defer to the generic folder.  */
2100 return fold_build2 (code, type, arg0, arg1);
2103 /* Given two values, either both of sizetype or both of bitsizetype,
2104 compute the difference between the two values. Return the value
2105 in signed type corresponding to the type of the operands. */
/* NOTE(review): partial listing -- the ctype declaration and the
   "ctype = ssizetype" assignment line are elided.  */
2108 size_diffop (tree arg0, tree arg1)
2110 tree type = TREE_TYPE (arg0);
2113 gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
2116 /* If the type is already signed, just do the simple thing. */
2117 if (!TYPE_UNSIGNED (type))
2118 return size_binop (MINUS_EXPR, arg0, arg1);
/* Pick the signed counterpart of the (unsigned) size type.  */
2120 if (type == sizetype)
2122 else if (type == bitsizetype)
2123 ctype = sbitsizetype;
2125 ctype = signed_type_for (type);
2127 /* If either operand is not a constant, do the conversions to the signed
2128 type and subtract. The hardware will do the right thing with any
2129 overflow in the subtraction. */
2130 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
2131 return size_binop (MINUS_EXPR, fold_convert (ctype, arg0),
2132 fold_convert (ctype, arg1));
2134 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
2135 Otherwise, subtract the other way, convert to CTYPE (we know that can't
2136 overflow) and negate (which can't either). Special-case a result
2137 of zero while we're here. */
2138 if (tree_int_cst_equal (arg0, arg1))
2139 return build_int_cst (ctype, 0);
2140 else if (tree_int_cst_lt (arg1, arg0))
2141 return fold_convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
2143 return size_binop (MINUS_EXPR, build_int_cst (ctype, 0),
2144 fold_convert (ctype, size_binop (MINUS_EXPR,
2148 /* A subroutine of fold_convert_const handling conversions of an
2149 INTEGER_CST to another integer type. */
/* NOTE(review): partial listing -- the tree T declaration and final
   "return t;" are elided.  */
2152 fold_convert_const_int_from_int (tree type, const_tree arg1)
2156 /* Given an integer constant, make new constant with new type,
2157 appropriately sign-extended or truncated. */
/* The third force_fit_type_double argument is the "overflowable"
   predicate; the long expression below suppresses the overflow flag for
   pointer sources and same-precision sizetype conversions.  */
2158 t = force_fit_type_double (type, TREE_INT_CST_LOW (arg1),
2159 TREE_INT_CST_HIGH (arg1),
2160 /* Don't set the overflow when
2161 converting from a pointer, */
2162 !POINTER_TYPE_P (TREE_TYPE (arg1))
2163 /* or to a sizetype with same signedness
2164 and the precision is unchanged.
2165 ??? sizetype is always sign-extended,
2166 but its signedness depends on the
2167 frontend. Thus we see spurious overflows
2168 here if we do not check this. */
2169 && !((TYPE_PRECISION (TREE_TYPE (arg1))
2170 == TYPE_PRECISION (type))
2171 && (TYPE_UNSIGNED (TREE_TYPE (arg1))
2172 == TYPE_UNSIGNED (type))
2173 && ((TREE_CODE (TREE_TYPE (arg1)) == INTEGER_TYPE
2174 && TYPE_IS_SIZETYPE (TREE_TYPE (arg1)))
2175 || (TREE_CODE (type) == INTEGER_TYPE
2176 && TYPE_IS_SIZETYPE (type)))),
/* Mark overflow when a negative signed value is converted to an unsigned
   type, or when the source already overflowed.  */
2177 (TREE_INT_CST_HIGH (arg1) < 0
2178 && (TYPE_UNSIGNED (type)
2179 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
2180 | TREE_OVERFLOW (arg1));
2185 /* A subroutine of fold_convert_const handling conversions a REAL_CST
2186 to an integer type. */
/* NOTE(review): partial listing -- the local declarations (overflow, r,
   t), the remaining rounding cases of the switch (ceil/floor/round) and
   the overflow-setting statements are elided.  */
2189 fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
2194 /* The following code implements the floating point to integer
2195 conversion rules required by the Java Language Specification,
2196 that IEEE NaNs are mapped to zero and values that overflow
2197 the target precision saturate, i.e. values greater than
2198 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
2199 are mapped to INT_MIN. These semantics are allowed by the
2200 C and C++ standards that simply state that the behavior of
2201 FP-to-integer conversion is unspecified upon overflow. */
2203 HOST_WIDE_INT high, low;
2205 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
/* CODE selects the rounding direction applied before conversion.  */
2209 case FIX_TRUNC_EXPR:
2210 real_trunc (&r, VOIDmode, &x);
2217 /* If R is NaN, return zero and show we have an overflow. */
2218 if (REAL_VALUE_ISNAN (r))
2225 /* See if R is less than the lower bound or greater than the
/* Saturate at TYPE_MIN_VALUE when below range...  */
2230 tree lt = TYPE_MIN_VALUE (type);
2231 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
2232 if (REAL_VALUES_LESS (r, l))
2235 high = TREE_INT_CST_HIGH (lt);
2236 low = TREE_INT_CST_LOW (lt);
/* ... and at TYPE_MAX_VALUE when above range.  */
2242 tree ut = TYPE_MAX_VALUE (type);
2245 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
2246 if (REAL_VALUES_LESS (u, r))
2249 high = TREE_INT_CST_HIGH (ut);
2250 low = TREE_INT_CST_LOW (ut);
/* In-range value: convert the rounded real to a double-word integer.  */
2256 REAL_VALUE_TO_INT (&low, &high, r);
2258 t = force_fit_type_double (type, low, high, -1,
2259 overflow | TREE_OVERFLOW (arg1));
2263 /* A subroutine of fold_convert_const handling conversions of a
2264 FIXED_CST to an integer type. */
/* NOTE(review): partial listing -- the tree T declaration, the mode
   declaration, the else-branch zeroing temp, and the declaration of ONE
   used below are elided.  */
2267 fold_convert_const_int_from_fixed (tree type, const_tree arg1)
2270 double_int temp, temp_trunc;
2273 /* Right shift FIXED_CST to temp by fbit. */
2274 temp = TREE_FIXED_CST (arg1).data;
2275 mode = TREE_FIXED_CST (arg1).mode;
2276 if (GET_MODE_FBIT (mode) < 2 * HOST_BITS_PER_WIDE_INT)
/* A negative shift count in lshift_double performs a right shift.  */
2278 lshift_double (temp.low, temp.high,
2279 - GET_MODE_FBIT (mode), 2 * HOST_BITS_PER_WIDE_INT,
2280 &temp.low, &temp.high, SIGNED_FIXED_POINT_MODE_P (mode));
2282 /* Left shift temp to temp_trunc by fbit. */
2283 lshift_double (temp.low, temp.high,
2284 GET_MODE_FBIT (mode), 2 * HOST_BITS_PER_WIDE_INT,
2285 &temp_trunc.low, &temp_trunc.high,
2286 SIGNED_FIXED_POINT_MODE_P (mode));
2293 temp_trunc.high = 0;
2296 /* If FIXED_CST is negative, we need to round the value toward 0.
2297 By checking if the fractional bits are not zero to add 1 to temp. */
2298 if (SIGNED_FIXED_POINT_MODE_P (mode) && temp_trunc.high < 0
2299 && !double_int_equal_p (TREE_FIXED_CST (arg1).data, temp_trunc))
2304 temp = double_int_add (temp, one);
2307 /* Given a fixed-point constant, make new constant with new type,
2308 appropriately sign-extended or truncated. */
/* Overflow when converting a negative value to a less-signed type, or
   when the source already overflowed.  */
2309 t = force_fit_type_double (type, temp.low, temp.high, -1,
2311 && (TYPE_UNSIGNED (type)
2312 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
2313 | TREE_OVERFLOW (arg1));
2318 /* A subroutine of fold_convert_const handling conversions a REAL_CST
2319 to another floating point type. */
/* NOTE(review): partial listing -- the tree T declaration and final
   "return t;" are elided.  */
2322 fold_convert_const_real_from_real (tree type, const_tree arg1)
2324 REAL_VALUE_TYPE value;
/* Round the source value to the target mode, then propagate overflow.  */
2327 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
2328 t = build_real (type, value);
2330 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2334 /* A subroutine of fold_convert_const handling conversions a FIXED_CST
2335 to a floating point type. */
/* NOTE(review): partial listing -- the tree T declaration and final
   "return t;" are elided.  */
2338 fold_convert_const_real_from_fixed (tree type, const_tree arg1)
2340 REAL_VALUE_TYPE value;
/* Convert the fixed-point value to a real in the target mode.  */
2343 real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
2344 t = build_real (type, value);
2346 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2350 /* A subroutine of fold_convert_const handling conversions a FIXED_CST
2351 to another fixed-point type. */
/* NOTE(review): partial listing -- the declarations of T and OVERFLOW_P
   and the final "return t;" are elided.  */
2354 fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
2356 FIXED_VALUE_TYPE value;
/* fixed_convert reports whether the (possibly saturating) conversion
   overflowed.  */
2360 overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
2361 TYPE_SATURATING (type));
2362 t = build_fixed (type, value);
2364 /* Propagate overflow flags. */
2365 if (overflow_p | TREE_OVERFLOW (arg1))
2366 TREE_OVERFLOW (t) = 1;
2370 /* A subroutine of fold_convert_const handling conversions an INTEGER_CST
2371 to a fixed-point type. */
/* NOTE(review): partial listing -- the declarations of T and OVERFLOW_P
   and the final "return t;" are elided.  */
2374 fold_convert_const_fixed_from_int (tree type, const_tree arg1)
2376 FIXED_VALUE_TYPE value;
/* The source's signedness and the target's saturating property both
   affect the conversion.  */
2380 overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type),
2381 TREE_INT_CST (arg1),
2382 TYPE_UNSIGNED (TREE_TYPE (arg1)),
2383 TYPE_SATURATING (type));
2384 t = build_fixed (type, value);
2386 /* Propagate overflow flags. */
2387 if (overflow_p | TREE_OVERFLOW (arg1))
2388 TREE_OVERFLOW (t) = 1;
2392 /* A subroutine of fold_convert_const handling conversions a REAL_CST
2393 to a fixed-point type. */
/* NOTE(review): partial listing -- the declarations of T and OVERFLOW_P
   and the final "return t;" are elided.  */
2396 fold_convert_const_fixed_from_real (tree type, const_tree arg1)
2398 FIXED_VALUE_TYPE value;
/* Convert honoring the target type's saturating property.  */
2402 overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
2403 &TREE_REAL_CST (arg1),
2404 TYPE_SATURATING (type));
2405 t = build_fixed (type, value);
2407 /* Propagate overflow flags. */
2408 if (overflow_p | TREE_OVERFLOW (arg1))
2409 TREE_OVERFLOW (t) = 1;
2413 /* Attempt to fold type conversion operation CODE of expression ARG1 to
2414 type TYPE. If no simplification can be done return NULL_TREE. */
/* NOTE(review): partial listing -- the "return arg1" body of the identity
   case and the final "return NULL_TREE" are elided.  Dispatches on the
   target TYPE class, then on the kind of constant in ARG1.  */
2417 fold_convert_const (enum tree_code code, tree type, tree arg1)
2419 if (TREE_TYPE (arg1) == type)
/* Integer-like targets (pointers, integers, offsets).  */
2422 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
2423 || TREE_CODE (type) == OFFSET_TYPE)
2425 if (TREE_CODE (arg1) == INTEGER_CST)
2426 return fold_convert_const_int_from_int (type, arg1);
2427 else if (TREE_CODE (arg1) == REAL_CST)
2428 return fold_convert_const_int_from_real (code, type, arg1);
2429 else if (TREE_CODE (arg1) == FIXED_CST)
2430 return fold_convert_const_int_from_fixed (type, arg1);
/* Floating-point targets.  */
2432 else if (TREE_CODE (type) == REAL_TYPE)
2434 if (TREE_CODE (arg1) == INTEGER_CST)
2435 return build_real_from_int_cst (type, arg1);
2436 else if (TREE_CODE (arg1) == REAL_CST)
2437 return fold_convert_const_real_from_real (type, arg1);
2438 else if (TREE_CODE (arg1) == FIXED_CST)
2439 return fold_convert_const_real_from_fixed (type, arg1);
/* Fixed-point targets.  */
2441 else if (TREE_CODE (type) == FIXED_POINT_TYPE)
2443 if (TREE_CODE (arg1) == FIXED_CST)
2444 return fold_convert_const_fixed_from_fixed (type, arg1);
2445 else if (TREE_CODE (arg1) == INTEGER_CST)
2446 return fold_convert_const_fixed_from_int (type, arg1);
2447 else if (TREE_CODE (arg1) == REAL_CST)
2448 return fold_convert_const_fixed_from_real (type, arg1);
2453 /* Construct a vector of zero elements of vector type TYPE. */
/* NOTE(review): partial listing -- the declarations of ELEM, LIST, UNITS
   and I are elided.  */
2456 build_zero_vector (tree type)
/* One zero constant of the element type, shared by every position.  */
2461 elem = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2462 units = TYPE_VECTOR_SUBPARTS (type);
2465 for (i = 0; i < units; i++)
2466 list = tree_cons (NULL_TREE, elem, list);
2467 return build_vector (type, list);
2470 /* Returns true, if ARG is convertible to TYPE using a NOP_EXPR. */
/* NOTE(review): partial listing -- the switch's remaining case labels
   (e.g. REAL_TYPE, VECTOR_TYPE) and several return statements are
   elided.  */
2473 fold_convertible_p (const_tree type, const_tree arg)
2475 tree orig = TREE_TYPE (arg);
/* Bail out on error marks in either type or the argument.  */
2480 if (TREE_CODE (arg) == ERROR_MARK
2481 || TREE_CODE (type) == ERROR_MARK
2482 || TREE_CODE (orig) == ERROR_MARK)
2485 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2488 switch (TREE_CODE (type))
2490 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2491 case POINTER_TYPE: case REFERENCE_TYPE:
2493 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2494 || TREE_CODE (orig) == OFFSET_TYPE)
/* A vector source is convertible only when the total sizes match.  */
2496 return (TREE_CODE (orig) == VECTOR_TYPE
2497 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2500 case FIXED_POINT_TYPE:
2504 return TREE_CODE (type) == TREE_CODE (orig);
2511 /* Convert expression ARG to type TYPE. Used by the middle-end for
2512 simple conversions in preference to calling the front-end's convert. */
/* Dispatches on the TARGET type's tree code, then on the source type ORIG.
   Constant operands are folded eagerly via fold_convert_const; non-constant
   operands get the appropriate conversion node (NOP_EXPR, FLOAT_EXPR,
   FIXED_CONVERT_EXPR, COMPLEX_EXPR, VIEW_CONVERT_EXPR).
   NOTE(review): intervening lines are elided in this listing.  */
2515 fold_convert (tree type, tree arg)
2517 tree orig = TREE_TYPE (arg);
2523 if (TREE_CODE (arg) == ERROR_MARK
2524 || TREE_CODE (type) == ERROR_MARK
2525 || TREE_CODE (orig) == ERROR_MARK)
2526 return error_mark_node;
2528 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2529 return fold_build1 (NOP_EXPR, type, arg);
2531 switch (TREE_CODE (type))
/* Target is an integral-like or pointer type.  */
2533 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2534 case POINTER_TYPE: case REFERENCE_TYPE:
2536 if (TREE_CODE (arg) == INTEGER_CST)
2538 tem = fold_convert_const (NOP_EXPR, type, arg);
2539 if (tem != NULL_TREE)
2542 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2543 || TREE_CODE (orig) == OFFSET_TYPE)
2544 return fold_build1 (NOP_EXPR, type, arg);
/* Complex source: drop the imaginary part, then convert the real part.  */
2545 if (TREE_CODE (orig) == COMPLEX_TYPE)
2547 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2548 return fold_convert (type, tem);
2550 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
2551 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2552 return fold_build1 (NOP_EXPR, type, arg);
/* Target is (presumably, from the conversions used) a floating type.  */
2555 if (TREE_CODE (arg) == INTEGER_CST)
2557 tem = fold_convert_const (FLOAT_EXPR, type, arg);
2558 if (tem != NULL_TREE)
2561 else if (TREE_CODE (arg) == REAL_CST)
2563 tem = fold_convert_const (NOP_EXPR, type, arg);
2564 if (tem != NULL_TREE)
2567 else if (TREE_CODE (arg) == FIXED_CST)
2569 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2570 if (tem != NULL_TREE)
2574 switch (TREE_CODE (orig))
2577 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2578 case POINTER_TYPE: case REFERENCE_TYPE:
2579 return fold_build1 (FLOAT_EXPR, type, arg);
2582 return fold_build1 (NOP_EXPR, type, arg);
2584 case FIXED_POINT_TYPE:
2585 return fold_build1 (FIXED_CONVERT_EXPR, type, arg);
2588 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2589 return fold_convert (type, tem);
/* Target is a fixed-point type.  */
2595 case FIXED_POINT_TYPE:
2596 if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
2597 || TREE_CODE (arg) == REAL_CST)
2599 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2600 if (tem != NULL_TREE)
2604 switch (TREE_CODE (orig))
2606 case FIXED_POINT_TYPE:
2611 return fold_build1 (FIXED_CONVERT_EXPR, type, arg);
2614 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2615 return fold_convert (type, tem);
/* Target is (from the COMPLEX_EXPR construction below) a complex type:
   scalar sources get a zero imaginary part.  */
2622 switch (TREE_CODE (orig))
2625 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2626 case POINTER_TYPE: case REFERENCE_TYPE:
2628 case FIXED_POINT_TYPE:
2629 return build2 (COMPLEX_EXPR, type,
2630 fold_convert (TREE_TYPE (type), arg),
2631 fold_convert (TREE_TYPE (type), integer_zero_node));
/* Complex-to-complex: convert each part separately.  */
2636 if (TREE_CODE (arg) == COMPLEX_EXPR)
2638 rpart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 0));
2639 ipart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 1));
2640 return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
/* ARG is referenced twice (real and imaginary part), so protect it with
   save_expr to avoid duplicating side effects.  */
2643 arg = save_expr (arg);
2644 rpart = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2645 ipart = fold_build1 (IMAGPART_EXPR, TREE_TYPE (orig), arg);
2646 rpart = fold_convert (TREE_TYPE (type), rpart);
2647 ipart = fold_convert (TREE_TYPE (type), ipart);
2648 return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
/* Target is a vector type: zero becomes a zero vector, otherwise a
   same-sized bit reinterpretation via VIEW_CONVERT_EXPR.  */
2656 if (integer_zerop (arg))
2657 return build_zero_vector (type);
2658 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2659 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2660 || TREE_CODE (orig) == VECTOR_TYPE);
2661 return fold_build1 (VIEW_CONVERT_EXPR, type, arg);
/* Conversion to void: keep only whatever must still be evaluated.  */
2664 tem = fold_ignored_result (arg);
2665 if (TREE_CODE (tem) == MODIFY_EXPR)
2667 return fold_build1 (NOP_EXPR, type, tem);
2674 /* Return false if expr can be assumed not to be an lvalue, true
/* Conservative predicate: anything not provably a non-lvalue answers true.
   NOTE(review): the case list is elided in this listing; only a subset of
   the lvalue-producing tree codes is visible.  */
2678 maybe_lvalue_p (const_tree x)
2680 /* We only need to wrap lvalue tree codes. */
2681 switch (TREE_CODE (x))
2692 case ALIGN_INDIRECT_REF:
2693 case MISALIGNED_INDIRECT_REF:
2695 case ARRAY_RANGE_REF:
2701 case PREINCREMENT_EXPR:
2702 case PREDECREMENT_EXPR:
2704 case TRY_CATCH_EXPR:
2705 case WITH_CLEANUP_EXPR:
2716 /* Assume the worst for front-end tree codes. */
2717 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2725 /* Return an expr equal to X but certainly not valid as an lvalue. */
2730 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
/* Only wrap X when maybe_lvalue_p says it could be an lvalue;
   otherwise X is already safe to return unchanged.  */
2735 if (! maybe_lvalue_p (x))
2737 return build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
2740 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2741 Zero means allow extended lvalues. */
/* Global flag set elsewhere (front end); consulted only by
   pedantic_non_lvalue below.  */
2743 int pedantic_lvalues;
2745 /* When pedantic, return an expr equal to X but certainly not valid as a
2746 pedantic lvalue. Otherwise, return X. */
2749 pedantic_non_lvalue (tree x)
2751 if (pedantic_lvalues)
2752 return non_lvalue (x);
2757 /* Given a tree comparison code, return the code that is the logical inverse
2758 of the given code. It is not safe to do this for floating-point
2759 comparisons, except for NE_EXPR and EQ_EXPR, so we receive a machine mode
2760 as well: if reversing the comparison is unsafe, return ERROR_MARK. */
2763 invert_tree_comparison (enum tree_code code, bool honor_nans)
/* With trapping math and NaNs possible, inverting would change which
   operands trap, so the transformation is refused up front.  */
2765 if (honor_nans && flag_trapping_math)
/* When NaNs are honored, the inverse of an ordered comparison is the
   corresponding unordered one (e.g. !(a > b) is a UNLE b).  */
2775 return honor_nans ? UNLE_EXPR : LE_EXPR;
2777 return honor_nans ? UNLT_EXPR : LT_EXPR;
2779 return honor_nans ? UNGE_EXPR : GE_EXPR;
2781 return honor_nans ? UNGT_EXPR : GT_EXPR;
2795 return UNORDERED_EXPR;
2796 case UNORDERED_EXPR:
2797 return ORDERED_EXPR;
2803 /* Similar, but return the comparison that results if the operands are
2804 swapped. This is safe for floating-point. */
/* NOTE(review): the body's case list is elided in this listing.  */
2807 swap_tree_comparison (enum tree_code code)
2814 case UNORDERED_EXPR:
2840 /* Convert a comparison tree code from an enum tree_code representation
2841 into a compcode bit-based encoding. This function is the inverse of
2842 compcode_to_comparison. */
/* The bit encoding lets combine_comparisons compute AND/OR of two
   comparisons with plain bitwise & and |.  */
2844 static enum comparison_code
2845 comparison_to_compcode (enum tree_code code)
2862 return COMPCODE_ORD;
2863 case UNORDERED_EXPR:
2864 return COMPCODE_UNORD;
2866 return COMPCODE_UNLT;
2868 return COMPCODE_UNEQ;
2870 return COMPCODE_UNLE;
2872 return COMPCODE_UNGT;
2874 return COMPCODE_LTGT;
2876 return COMPCODE_UNGE;
2882 /* Convert a compcode bit-based encoding of a comparison operator back
2883 to GCC's enum tree_code representation. This function is the
2884 inverse of comparison_to_compcode. */
/* NOTE(review): most case labels are elided in this listing.  */
2886 static enum tree_code
2887 compcode_to_comparison (enum comparison_code code)
2904 return ORDERED_EXPR;
2905 case COMPCODE_UNORD:
2906 return UNORDERED_EXPR;
2924 /* Return a tree for the comparison which is the combination of
2925 doing the AND or OR (depending on CODE) of the two operations LCODE
2926 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2927 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2928 if this makes the transformation invalid. */
2931 combine_comparisons (enum tree_code code, enum tree_code lcode,
2932 enum tree_code rcode, tree truth_type,
2933 tree ll_arg, tree lr_arg)
2935 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
/* Translate both comparisons into the bit encoding so AND/OR of the
   predicates becomes bitwise & / | of the codes.  */
2936 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2937 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2938 enum comparison_code compcode;
2942 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2943 compcode = lcompcode & rcompcode;
2946 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2947 compcode = lcompcode | rcompcode;
2956 /* Eliminate unordered comparisons, as well as LTGT and ORD
2957 which are not used unless the mode has NaNs. */
2958 compcode &= ~COMPCODE_UNORD;
2959 if (compcode == COMPCODE_LTGT)
2960 compcode = COMPCODE_NE;
2961 else if (compcode == COMPCODE_ORD)
2962 compcode = COMPCODE_TRUE;
2964 else if (flag_trapping_math)
2966 /* Check that the original operation and the optimized ones will trap
2967 under the same condition. */
/* A comparison traps on NaN operands unless it is EQ, ORD, or one of
   the unordered forms (COMPCODE_UNORD bit set).  */
2968 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2969 && (lcompcode != COMPCODE_EQ)
2970 && (lcompcode != COMPCODE_ORD);
2971 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2972 && (rcompcode != COMPCODE_EQ)
2973 && (rcompcode != COMPCODE_ORD);
2974 bool trap = (compcode & COMPCODE_UNORD) == 0
2975 && (compcode != COMPCODE_EQ)
2976 && (compcode != COMPCODE_ORD);
2978 /* In a short-circuited boolean expression the LHS might be
2979 such that the RHS, if evaluated, will never trap. For
2980 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2981 if neither x nor y is NaN. (This is a mixed blessing: for
2982 example, the expression above will never trap, hence
2983 optimizing it to x < y would be invalid). */
2984 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2985 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2988 /* If the comparison was short-circuited, and only the RHS
2989 trapped, we may now generate a spurious trap. */
2991 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2994 /* If we changed the conditions that cause a trap, we lose. */
2995 if ((ltrap || rtrap) != trap)
/* Combined predicate degenerated to a constant: fold to 1 or 0.  */
2999 if (compcode == COMPCODE_TRUE)
3000 return constant_boolean_node (true, truth_type);
3001 else if (compcode == COMPCODE_FALSE)
3002 return constant_boolean_node (false, truth_type);
3004 return fold_build2 (compcode_to_comparison (compcode),
3005 truth_type, ll_arg, lr_arg);
3008 /* Return nonzero if two operands (typically of the same tree node)
3009 are necessarily equal. If either argument has side-effects this
3010 function returns zero. FLAGS modifies behavior as follows:
3012 If OEP_ONLY_CONST is set, only return nonzero for constants.
3013 This function tests whether the operands are indistinguishable;
3014 it does not test whether they are equal using C's == operation.
3015 The distinction is important for IEEE floating point, because
3016 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
3017 (2) two NaNs may be indistinguishable, but NaN!=NaN.
3019 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
3020 even though it may hold multiple values during a function.
3021 This is because a GCC tree node guarantees that nothing else is
3022 executed between the evaluation of its "operands" (which may often
3023 be evaluated in arbitrary order). Hence if the operands themselves
3024 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
3025 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
3026 unset means assuming isochronic (or instantaneous) tree equivalence.
3027 Unless comparing arbitrary expression trees, such as from different
3028 statements, this flag can usually be left unset.
3030 If OEP_PURE_SAME is set, then pure functions with identical arguments
3031 are considered the same. It is used when the caller has other ways
3032 to ensure that global memory is unchanged in between. */
3035 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
3037 /* If either is ERROR_MARK, they aren't equal. */
3038 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK)
3041 /* Check equality of integer constants before bailing out due to
3042 precision differences. */
3043 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
3044 return tree_int_cst_equal (arg0, arg1);
3046 /* If both types don't have the same signedness, then we can't consider
3047 them equal. We must check this before the STRIP_NOPS calls
3048 because they may change the signedness of the arguments. As pointers
3049 strictly don't have a signedness, require either two pointers or
3050 two non-pointers as well. */
3051 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
3052 || POINTER_TYPE_P (TREE_TYPE (arg0)) != POINTER_TYPE_P (TREE_TYPE (arg1)))
3055 /* If both types don't have the same precision, then it is not safe
3057 if (TYPE_PRECISION (TREE_TYPE (arg0)) != TYPE_PRECISION (TREE_TYPE (arg1)))
3063 /* In case both args are comparisons but with different comparison
3064 code, try to swap the comparison operands of one arg to produce
3065 a match and compare that variant. */
3066 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3067 && COMPARISON_CLASS_P (arg0)
3068 && COMPARISON_CLASS_P (arg1))
3070 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
3072 if (TREE_CODE (arg0) == swap_code)
3073 return operand_equal_p (TREE_OPERAND (arg0, 0),
3074 TREE_OPERAND (arg1, 1), flags)
3075 && operand_equal_p (TREE_OPERAND (arg0, 1),
3076 TREE_OPERAND (arg1, 0), flags);
3079 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3080 /* This is needed for conversions and for COMPONENT_REF.
3081 Might as well play it safe and always test this. */
3082 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
3083 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
3084 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
3087 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
3088 We don't care about side effects in that case because the SAVE_EXPR
3089 takes care of that for us. In all other cases, two expressions are
3090 equal if they have no side effects. If we have two identical
3091 expressions with side effects that should be treated the same due
3092 to the only side effects being identical SAVE_EXPR's, that will
3093 be detected in the recursive calls below. */
3094 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
3095 && (TREE_CODE (arg0) == SAVE_EXPR
3096 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
3099 /* Next handle constant cases, those for which we can return 1 even
3100 if ONLY_CONST is set. */
3101 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
3102 switch (TREE_CODE (arg0))
3105 return tree_int_cst_equal (arg0, arg1);
3108 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
3109 TREE_FIXED_CST (arg1));
/* Reals compare by identical bit pattern, not by ==, so that -0.0
   and 0.0, or distinct NaNs, are kept apart...  */
3112 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
3113 TREE_REAL_CST (arg1)))
3117 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
3119 /* If we do not distinguish between signed and unsigned zero,
3120 consider them equal. */
3121 if (real_zerop (arg0) && real_zerop (arg1))
/* Vector constants: walk the two element lists in lockstep.  */
3130 v1 = TREE_VECTOR_CST_ELTS (arg0);
3131 v2 = TREE_VECTOR_CST_ELTS (arg1);
3134 if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
3137 v1 = TREE_CHAIN (v1);
3138 v2 = TREE_CHAIN (v2);
3145 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
3147 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
3151 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
3152 && ! memcmp (TREE_STRING_POINTER (arg0),
3153 TREE_STRING_POINTER (arg1),
3154 TREE_STRING_LENGTH (arg0)));
3157 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
3163 if (flags & OEP_ONLY_CONST)
3166 /* Define macros to test an operand from arg0 and arg1 for equality and a
3167 variant that allows null and views null as being different from any
3168 non-null value. In the latter case, if either is null, they both
3169 must be; otherwise, do the normal comparison. */
3170 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
3171 TREE_OPERAND (arg1, N), flags)
3173 #define OP_SAME_WITH_NULL(N) \
3174 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
3175 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
3177 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
3180 /* Two conversions are equal only if signedness and modes match. */
3181 switch (TREE_CODE (arg0))
3184 case FIX_TRUNC_EXPR:
3185 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
3186 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
3196 case tcc_comparison:
3198 if (OP_SAME (0) && OP_SAME (1))
3201 /* For commutative ops, allow the other order. */
3202 return (commutative_tree_code (TREE_CODE (arg0))
3203 && operand_equal_p (TREE_OPERAND (arg0, 0),
3204 TREE_OPERAND (arg1, 1), flags)
3205 && operand_equal_p (TREE_OPERAND (arg0, 1),
3206 TREE_OPERAND (arg1, 0), flags));
3209 /* If either of the pointer (or reference) expressions we are
3210 dereferencing contain a side effect, these cannot be equal. */
3211 if (TREE_SIDE_EFFECTS (arg0)
3212 || TREE_SIDE_EFFECTS (arg1))
3215 switch (TREE_CODE (arg0))
3218 case ALIGN_INDIRECT_REF:
3219 case MISALIGNED_INDIRECT_REF:
3225 case ARRAY_RANGE_REF:
3226 /* Operands 2 and 3 may be null.
3227 Compare the array index by value if it is constant first as we
3228 may have different types but same value here. */
3230 && (tree_int_cst_equal (TREE_OPERAND (arg0, 1),
3231 TREE_OPERAND (arg1, 1))
3233 && OP_SAME_WITH_NULL (2)
3234 && OP_SAME_WITH_NULL (3));
3237 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
3238 may be NULL when we're called to compare MEM_EXPRs. */
3239 return OP_SAME_WITH_NULL (0)
3241 && OP_SAME_WITH_NULL (2);
3244 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
3250 case tcc_expression:
3251 switch (TREE_CODE (arg0))
3254 case TRUTH_NOT_EXPR:
3257 case TRUTH_ANDIF_EXPR:
3258 case TRUTH_ORIF_EXPR:
3259 return OP_SAME (0) && OP_SAME (1);
3261 case TRUTH_AND_EXPR:
3263 case TRUTH_XOR_EXPR:
3264 if (OP_SAME (0) && OP_SAME (1))
3267 /* Otherwise take into account this is a commutative operation. */
3268 return (operand_equal_p (TREE_OPERAND (arg0, 0),
3269 TREE_OPERAND (arg1, 1), flags)
3270 && operand_equal_p (TREE_OPERAND (arg0, 1),
3271 TREE_OPERAND (arg1, 0), flags));
3274 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
3281 switch (TREE_CODE (arg0))
3284 /* If the CALL_EXPRs call different functions, then they
3285 clearly can not be equal. */
3286 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
/* Two calls are only comparable when the callee is const (or pure,
   if OEP_PURE_SAME permits it).  */
3291 unsigned int cef = call_expr_flags (arg0);
3292 if (flags & OEP_PURE_SAME)
3293 cef &= ECF_CONST | ECF_PURE;
3300 /* Now see if all the arguments are the same. */
3302 const_call_expr_arg_iterator iter0, iter1;
3304 for (a0 = first_const_call_expr_arg (arg0, &iter0),
3305 a1 = first_const_call_expr_arg (arg1, &iter1);
3307 a0 = next_const_call_expr_arg (&iter0),
3308 a1 = next_const_call_expr_arg (&iter1))
3309 if (! operand_equal_p (a0, a1, flags))
3312 /* If we get here and both argument lists are exhausted
3313 then the CALL_EXPRs are equal. */
3314 return ! (a0 || a1);
3320 case tcc_declaration:
3321 /* Consider __builtin_sqrt equal to sqrt. */
3322 return (TREE_CODE (arg0) == FUNCTION_DECL
3323 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
3324 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
3325 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
3332 #undef OP_SAME_WITH_NULL
3335 /* Similar to operand_equal_p, but see if ARG0 might have been made by
3336 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
3338 When in doubt, return 0. */
3341 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
3343 int unsignedp1, unsignedpo;
3344 tree primarg0, primarg1, primother;
3345 unsigned int correct_width;
/* Trivially equal operands need no shorten_compare reasoning.  */
3347 if (operand_equal_p (arg0, arg1, 0))
/* shorten_compare only applies to integral operands.  */
3350 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
3351 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
3354 /* Discard any conversions that don't change the modes of ARG0 and ARG1
3355 and see if the inner values are the same. This removes any
3356 signedness comparison, which doesn't matter here. */
3357 primarg0 = arg0, primarg1 = arg1;
3358 STRIP_NOPS (primarg0);
3359 STRIP_NOPS (primarg1);
3360 if (operand_equal_p (primarg0, primarg1, 0))
3363 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
3364 actual comparison operand, ARG0.
3366 First throw away any conversions to wider types
3367 already present in the operands. */
3369 primarg1 = get_narrower (arg1, &unsignedp1);
3370 primother = get_narrower (other, &unsignedpo);
3372 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
3373 if (unsignedp1 == unsignedpo
3374 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
3375 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
3377 tree type = TREE_TYPE (arg0);
3379 /* Make sure shorter operand is extended the right way
3380 to match the longer operand. */
3381 primarg1 = fold_convert (signed_or_unsigned_type_for
3382 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
3384 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
3391 /* See if ARG is an expression that is either a comparison or is performing
3392 arithmetic on comparisons. The comparisons must only be comparing
3393 two different values, which will be stored in *CVAL1 and *CVAL2; if
3394 they are nonzero it means that some operands have already been found.
3395 No variables may be used anywhere else in the expression except in the
3396 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
3397 the expression and save_expr needs to be called with CVAL1 and CVAL2.
3399 If this is true, return 1. Otherwise, return zero. */
3402 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
3404 enum tree_code code = TREE_CODE (arg);
3405 enum tree_code_class tclass = TREE_CODE_CLASS (code);
3407 /* We can handle some of the tcc_expression cases here. */
/* Reclassify a few expression codes as unary/binary so the generic
   class dispatch below recurses into their operands.  */
3408 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
3410 else if (tclass == tcc_expression
3411 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
3412 || code == COMPOUND_EXPR))
3413 tclass = tcc_binary;
3415 else if (tclass == tcc_expression && code == SAVE_EXPR
3416 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
3418 /* If we've already found a CVAL1 or CVAL2, this expression is
3419 too complex to handle. */
3420 if (*cval1 || *cval2)
3430 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
3433 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
3434 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3435 cval1, cval2, save_p));
3440 case tcc_expression:
3441 if (code == COND_EXPR)
3442 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
3443 cval1, cval2, save_p)
3444 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3445 cval1, cval2, save_p)
3446 && twoval_comparison_p (TREE_OPERAND (arg, 2),
3447 cval1, cval2, save_p));
3450 case tcc_comparison:
3451 /* First see if we can handle the first operand, then the second. For
3452 the second operand, we know *CVAL1 can't be zero. It must be that
3453 one side of the comparison is each of the values; test for the
3454 case where this isn't true by failing if the two operands
3457 if (operand_equal_p (TREE_OPERAND (arg, 0),
3458 TREE_OPERAND (arg, 1), 0))
/* Record each comparison operand in the first free slot, or accept
   it if it matches a value already recorded.  */
3462 *cval1 = TREE_OPERAND (arg, 0);
3463 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
3465 else if (*cval2 == 0)
3466 *cval2 = TREE_OPERAND (arg, 0);
3467 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
3472 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
3474 else if (*cval2 == 0)
3475 *cval2 = TREE_OPERAND (arg, 1);
3476 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
3488 /* ARG is a tree that is known to contain just arithmetic operations and
3489 comparisons. Evaluate the operations in the tree substituting NEW0 for
3490 any occurrence of OLD0 as an operand of a comparison and likewise for
/* Counterpart of twoval_comparison_p above: rebuilds the expression with
   the recorded comparison operands replaced.  */
3494 eval_subst (tree arg, tree old0, tree new0, tree old1, tree new1)
3496 tree type = TREE_TYPE (arg);
3497 enum tree_code code = TREE_CODE (arg);
3498 enum tree_code_class tclass = TREE_CODE_CLASS (code);
3500 /* We can handle some of the tcc_expression cases here. */
3501 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
3503 else if (tclass == tcc_expression
3504 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
3505 tclass = tcc_binary;
/* Unary: substitute in the single operand and rebuild.  */
3510 return fold_build1 (code, type,
3511 eval_subst (TREE_OPERAND (arg, 0),
3512 old0, new0, old1, new1));
/* Binary: substitute in both operands and rebuild.  */
3515 return fold_build2 (code, type,
3516 eval_subst (TREE_OPERAND (arg, 0),
3517 old0, new0, old1, new1),
3518 eval_subst (TREE_OPERAND (arg, 1),
3519 old0, new0, old1, new1));
3521 case tcc_expression:
3525 return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);
3528 return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);
3531 return fold_build3 (code, type,
3532 eval_subst (TREE_OPERAND (arg, 0),
3533 old0, new0, old1, new1),
3534 eval_subst (TREE_OPERAND (arg, 1),
3535 old0, new0, old1, new1),
3536 eval_subst (TREE_OPERAND (arg, 2),
3537 old0, new0, old1, new1));
3541 /* Fall through - ??? */
3543 case tcc_comparison:
3545 tree arg0 = TREE_OPERAND (arg, 0);
3546 tree arg1 = TREE_OPERAND (arg, 1);
3548 /* We need to check both for exact equality and tree equality. The
3549 former will be true if the operand has a side-effect. In that
3550 case, we know the operand occurred exactly once. */
3552 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
3554 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
3557 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
3559 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
3562 return fold_build2 (code, type, arg0, arg1);
3570 /* Return a tree for the case when the result of an expression is RESULT
3571 converted to TYPE and OMITTED was previously an operand of the expression
3572 but is now not needed (e.g., we folded OMITTED * 0).
3574 If OMITTED has side effects, we must evaluate it. Otherwise, just do
3575 the conversion of RESULT to TYPE. */
3578 omit_one_operand (tree type, tree result, tree omitted)
3580 tree t = fold_convert (type, result);
3582 /* If the resulting operand is an empty statement, just return the omitted
3583 statement cast to void. */
3584 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3585 return build1 (NOP_EXPR, void_type_node, fold_ignored_result (omitted));
/* Keep OMITTED's side effects by sequencing it before the result.  */
3587 if (TREE_SIDE_EFFECTS (omitted))
3588 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
3590 return non_lvalue (t);
3593 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
3596 pedantic_omit_one_operand (tree type, tree result, tree omitted)
3598 tree t = fold_convert (type, result);
3600 /* If the resulting operand is an empty statement, just return the omitted
3601 statement cast to void. */
3602 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3603 return build1 (NOP_EXPR, void_type_node, fold_ignored_result (omitted));
/* Same sequencing as omit_one_operand; only the final wrapper differs.  */
3605 if (TREE_SIDE_EFFECTS (omitted))
3606 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
3608 return pedantic_non_lvalue (t);
3611 /* Return a tree for the case when the result of an expression is RESULT
3612 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3613 of the expression but are now not needed.
3615 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3616 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3617 evaluated before OMITTED2. Otherwise, if neither has side effects,
3618 just do the conversion of RESULT to TYPE. */
3621 omit_two_operands (tree type, tree result, tree omitted1, tree omitted2)
3623 tree t = fold_convert (type, result);
/* Wrap innermost-last: OMITTED2 is added first so that prepending
   OMITTED1 afterwards makes it evaluate before OMITTED2.  */
3625 if (TREE_SIDE_EFFECTS (omitted2))
3626 t = build2 (COMPOUND_EXPR, type, omitted2, t);
3627 if (TREE_SIDE_EFFECTS (omitted1))
3628 t = build2 (COMPOUND_EXPR, type, omitted1, t);
/* Only a bare result (no COMPOUND_EXPR wrapper) is marked non-lvalue.  */
3630 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue (t) : t;
3634 /* Return a simplified tree node for the truth-negation of ARG. This
3635 never alters ARG itself. We assume that ARG is an operation that
3636 returns a truth value (0 or 1).
3638 FIXME: one would think we would fold the result, but it causes
3639 problems with the dominator optimizer. */
3642 fold_truth_not_expr (tree arg)
3644 tree type = TREE_TYPE (arg);
3645 enum tree_code code = TREE_CODE (arg);
3647 /* If this is a comparison, we can simply invert it, except for
3648 floating-point non-equality comparisons, in which case we just
3649 enclose a TRUTH_NOT_EXPR around what we have. */
3651 if (TREE_CODE_CLASS (code) == tcc_comparison)
3653 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
/* With trapping FP math, inverting an ordering comparison could change
   which inputs trap, so refuse except for (UN)ORDERED/EQ/NE.  */
3654 if (FLOAT_TYPE_P (op_type)
3655 && flag_trapping_math
3656 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3657 && code != NE_EXPR && code != EQ_EXPR)
3661 code = invert_tree_comparison (code,
3662 HONOR_NANS (TYPE_MODE (op_type)));
3663 if (code == ERROR_MARK)
3666 return build2 (code, type,
3667 TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
/* A constant truth value inverts to the opposite constant.  */
3674 return constant_boolean_node (integer_zerop (arg), type);
/* De Morgan: !(a & b) == !a | !b, and the dual below.  */
3676 case TRUTH_AND_EXPR:
3677 return build2 (TRUTH_OR_EXPR, type,
3678 invert_truthvalue (TREE_OPERAND (arg, 0)),
3679 invert_truthvalue (TREE_OPERAND (arg, 1)));
3682 return build2 (TRUTH_AND_EXPR, type,
3683 invert_truthvalue (TREE_OPERAND (arg, 0)),
3684 invert_truthvalue (TREE_OPERAND (arg, 1)));
3686 case TRUTH_XOR_EXPR:
3687 /* Here we can invert either operand. We invert the first operand
3688 unless the second operand is a TRUTH_NOT_EXPR in which case our
3689 result is the XOR of the first operand with the inside of the
3690 negation of the second operand. */
3692 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3693 return build2 (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3694 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3696 return build2 (TRUTH_XOR_EXPR, type,
3697 invert_truthvalue (TREE_OPERAND (arg, 0)),
3698 TREE_OPERAND (arg, 1));
/* De Morgan again for the short-circuit forms.  */
3700 case TRUTH_ANDIF_EXPR:
3701 return build2 (TRUTH_ORIF_EXPR, type,
3702 invert_truthvalue (TREE_OPERAND (arg, 0)),
3703 invert_truthvalue (TREE_OPERAND (arg, 1)));
3705 case TRUTH_ORIF_EXPR:
3706 return build2 (TRUTH_ANDIF_EXPR, type,
3707 invert_truthvalue (TREE_OPERAND (arg, 0)),
3708 invert_truthvalue (TREE_OPERAND (arg, 1)));
/* Double negation cancels.  */
3710 case TRUTH_NOT_EXPR:
3711 return TREE_OPERAND (arg, 0);
3715 tree arg1 = TREE_OPERAND (arg, 1);
3716 tree arg2 = TREE_OPERAND (arg, 2);
3717 /* A COND_EXPR may have a throw as one operand, which
3718 then has void type. Just leave void operands
/* ... untouched; invert only the non-void arms.  */
3720 return build3 (COND_EXPR, type, TREE_OPERAND (arg, 0),
3721 VOID_TYPE_P (TREE_TYPE (arg1))
3722 ? arg1 : invert_truthvalue (arg1),
3723 VOID_TYPE_P (TREE_TYPE (arg2))
3724 ? arg2 : invert_truthvalue (arg2));
/* (a, b) negates to (a, !b): only the value operand is inverted.  */
3728 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
3729 invert_truthvalue (TREE_OPERAND (arg, 1)));
3731 case NON_LVALUE_EXPR:
3732 return invert_truthvalue (TREE_OPERAND (arg, 0));
3735 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3736 return build1 (TRUTH_NOT_EXPR, type, arg);
/* Push the negation through a wrapper node of the same code.  */
3740 return build1 (TREE_CODE (arg), type,
3741 invert_truthvalue (TREE_OPERAND (arg, 0)));
3744 if (!integer_onep (TREE_OPERAND (arg, 1)))
3746 return build2 (EQ_EXPR, type, arg,
3747 build_int_cst (type, 0));
3750 return build1 (TRUTH_NOT_EXPR, type, arg);
3752 case CLEANUP_POINT_EXPR:
3753 return build1 (CLEANUP_POINT_EXPR, type,
3754 invert_truthvalue (TREE_OPERAND (arg, 0)));
3763 /* Return a simplified tree node for the truth-negation of ARG. This
3764 never alters ARG itself. We assume that ARG is an operation that
3765 returns a truth value (0 or 1).
3767 FIXME: one would think we would fold the result, but it causes
3768 problems with the dominator optimizer. */
/* Thin wrapper around fold_truth_not_expr: fall back to an explicit
   TRUTH_NOT_EXPR when no simplification applies.  */
3771 invert_truthvalue (tree arg)
3775 if (TREE_CODE (arg) == ERROR_MARK)
3778 tem = fold_truth_not_expr (arg);
3780 tem = build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg), arg);
3785 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3786 operands are another bit-wise operation with a common input. If so,
3787 distribute the bit operations to save an operation and possibly two if
3788 constants are involved. For example, convert
3789 (A | B) & (A | C) into A | (B & C)
3790 Further simplification will occur if B and C are constants.
3792 If this optimization cannot be done, 0 will be returned. */
3795 distribute_bit_expr (enum tree_code code, tree type, tree arg0, tree arg1)
/* Both inner operations must be the same code, different from the outer
   CODE, and one of BIT_AND/BIT_IOR for distribution to be valid.  */
3800 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3801 || TREE_CODE (arg0) == code
3802 || (TREE_CODE (arg0) != BIT_AND_EXPR
3803 && TREE_CODE (arg0) != BIT_IOR_EXPR))
/* Find which operand is shared; the four cases cover every position
   the common subexpression can occupy in the two inner operations.  */
3806 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3808 common = TREE_OPERAND (arg0, 0);
3809 left = TREE_OPERAND (arg0, 1);
3810 right = TREE_OPERAND (arg1, 1);
3812 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3814 common = TREE_OPERAND (arg0, 0);
3815 left = TREE_OPERAND (arg0, 1);
3816 right = TREE_OPERAND (arg1, 0);
3818 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3820 common = TREE_OPERAND (arg0, 1);
3821 left = TREE_OPERAND (arg0, 0);
3822 right = TREE_OPERAND (arg1, 1);
3824 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3826 common = TREE_OPERAND (arg0, 1);
3827 left = TREE_OPERAND (arg0, 0);
3828 right = TREE_OPERAND (arg1, 0);
3833 common = fold_convert (type, common);
3834 left = fold_convert (type, left);
3835 right = fold_convert (type, right);
/* Build COMMON <inner-op> (LEFT <outer-op> RIGHT).  */
3836 return fold_build2 (TREE_CODE (arg0), type, common,
3837 fold_build2 (code, type, left, right));
3840 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3841 with code CODE. This optimization is unsafe. */
/* "Unsafe" because FP division is not exactly distributive; callers are
   expected to gate this on the appropriate fast-math semantics.  */
3843 distribute_real_division (enum tree_code code, tree type, tree arg0, tree arg1)
3845 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3846 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3848 /* (A / C) +- (B / C) -> (A +- B) / C. */
3850 && operand_equal_p (TREE_OPERAND (arg0, 1),
3851 TREE_OPERAND (arg1, 1), 0))
3852 return fold_build2 (mul0 ? MULT_EXPR : RDIV_EXPR, type,
3853 fold_build2 (code, type,
3854 TREE_OPERAND (arg0, 0),
3855 TREE_OPERAND (arg1, 0)),
3856 TREE_OPERAND (arg0, 1));
3858 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3859 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3860 TREE_OPERAND (arg1, 0), 0)
3861 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3862 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3864 REAL_VALUE_TYPE r0, r1;
3865 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3866 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
/* Compute 1/C1 and 1/C2 at compile time, then combine with CODE.  */
3868 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3870 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3871 real_arithmetic (&r0, code, &r0, &r1);
3872 return fold_build2 (MULT_EXPR, type,
3873 TREE_OPERAND (arg0, 0),
3874 build_real (type, r0));
3880 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3881 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3884 make_bit_field_ref (tree inner, tree type, HOST_WIDE_INT bitsize,
3885 HOST_WIDE_INT bitpos, int unsignedp)
3887 tree result, bftype;
/* If the reference covers the whole of an integral or pointer INNER,
   no bit-field extraction is needed -- a plain conversion suffices.  */
3891 tree size = TYPE_SIZE (TREE_TYPE (inner));
3892 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3893 || POINTER_TYPE_P (TREE_TYPE (inner)))
3894 && host_integerp (size, 0)
3895 && tree_low_cst (size, 0) == bitsize)
3896 return fold_convert (type, inner);
/* If TYPE's precision or signedness doesn't match the requested field,
   build a bespoke integer type of exactly BITSIZE bits.  */
3900 if (TYPE_PRECISION (bftype) != bitsize
3901 || TYPE_UNSIGNED (bftype) == !unsignedp)
3902 bftype = build_nonstandard_integer_type (bitsize, 0);
3904 result = build3 (BIT_FIELD_REF, bftype, inner,
3905 size_int (bitsize), bitsize_int (bitpos));
/* Convert back to the caller-requested TYPE if we had to deviate.  */
3908 result = fold_convert (type, result);
3913 /* Optimize a bit-field compare.
3915 There are two cases: First is a compare against a constant and the
3916 second is a comparison of two items where the fields are at the same
3917 bit position relative to the start of a chunk (byte, halfword, word)
3918 large enough to contain it. In these cases we can avoid the shift
3919 implicit in bitfield extractions.
3921 For constants, we emit a compare of the shifted constant with the
3922 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3923 compared. For two fields at the same position, we do the ANDs with the
3924 similar mask and compare the result of the ANDs.
3926 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3927 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3928 are the left and right operands of the comparison, respectively.
3930 If the optimization described above can be done, we return the resulting
3931 tree. Otherwise we return zero. */
3934 optimize_bit_field_compare (enum tree_code code, tree compare_type,
3937 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3938 tree type = TREE_TYPE (lhs);
3939 tree signed_type, unsigned_type;
3940 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3941 enum machine_mode lmode, rmode, nmode;
3942 int lunsignedp, runsignedp;
3943 int lvolatilep = 0, rvolatilep = 0;
3944 tree linner, rinner = NULL_TREE;
3948 /* Get all the information about the extractions being done. If the bit size
3949 is the same as the size of the underlying object, we aren't doing an
3950 extraction at all and so can do nothing. We also don't want to
3951 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3952 then will no longer be able to replace it. */
3953 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3954 &lunsignedp, &lvolatilep, false);
3955 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3956 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
3961 /* If this is not a constant, we can only do something if bit positions,
3962 sizes, and signedness are the same. */
3963 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3964 &runsignedp, &rvolatilep, false);
3966 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3967 || lunsignedp != runsignedp || offset != 0
3968 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
3972 /* See if we can find a mode to refer to this field. We should be able to,
3973 but fail if we can't. */
3974 nmode = get_best_mode (lbitsize, lbitpos,
3975 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3976 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3977 TYPE_ALIGN (TREE_TYPE (rinner))),
3978 word_mode, lvolatilep || rvolatilep)
3979 if (nmode == VOIDmode)
3982 /* Set signed and unsigned types of the precision of this mode for the
3984 signed_type = lang_hooks.types.type_for_mode (nmode, 0);
3985 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3987 /* Compute the bit position and size for the new reference and our offset
3988 within it. If the new reference is the same size as the original, we
3989 won't optimize anything, so return zero. */
3990 nbitsize = GET_MODE_BITSIZE (nmode);
3991 nbitpos = lbitpos & ~ (nbitsize - 1);
3993 if (nbitsize == lbitsize)
/* On big-endian targets bit 0 is at the other end of the word, so flip
   the position of the field within the new reference.  */
3996 if (BYTES_BIG_ENDIAN)
3997 lbitpos = nbitsize - lbitsize - lbitpos;
3999 /* Make the mask to be used against the extracted field. */
/* Start from all-ones, then shift left and back right so exactly the
   LBITSIZE bits at LBITPOS remain set.  */
4000 mask = build_int_cst_type (unsigned_type, -1);
4001 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
4002 mask = const_binop (RSHIFT_EXPR, mask,
4003 size_int (nbitsize - lbitsize - lbitpos), 0);
4006 /* If not comparing with constant, just rework the comparison
as (L & mask) CODE (R & mask) on the containing words.  */
4008 return fold_build2 (code, compare_type,
4009 fold_build2 (BIT_AND_EXPR, unsigned_type,
4010 make_bit_field_ref (linner,
4015 fold_build2 (BIT_AND_EXPR, unsigned_type,
4016 make_bit_field_ref (rinner,
4022 /* Otherwise, we are handling the constant case. See if the constant is too
4023 big for the field. Warn and return a tree for 0 (false) if so. We do
4024 this not only for its own sake, but to avoid having to test for this
4025 error case below. If we didn't, we might generate wrong code.
4027 For unsigned fields, the constant shifted right by the field length should
4028 be all zero. For signed fields, the high-order bits should agree with
4033 if (! integer_zerop (const_binop (RSHIFT_EXPR,
4034 fold_convert (unsigned_type, rhs),
4035 size_int (lbitsize), 0)))
4037 warning (0, "comparison is always %d due to width of bit-field",
4039 return constant_boolean_node (code == NE_EXPR, compare_type);
/* Signed case: all bits above the sign bit must be copies of it, i.e.
   RHS >> (lbitsize - 1) must be all zeros or all ones.  */
4044 tree tem = const_binop (RSHIFT_EXPR, fold_convert (signed_type, rhs),
4045 size_int (lbitsize - 1), 0);
4046 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
4048 warning (0, "comparison is always %d due to width of bit-field",
4050 return constant_boolean_node (code == NE_EXPR, compare_type);
4054 /* Single-bit compares should always be against zero. */
4055 if (lbitsize == 1 && ! integer_zerop (rhs))
4057 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
4058 rhs = build_int_cst (type, 0);
4061 /* Make a new bitfield reference, shift the constant over the
4062 appropriate number of bits and mask it with the computed mask
4063 (in case this was a signed field). If we changed it, make a new one. */
4064 lhs = make_bit_field_ref (linner, unsigned_type, nbitsize, nbitpos, 1);
/* Preserve side-effect and volatility flags on the replacement ref.  */
4067 TREE_SIDE_EFFECTS (lhs) = 1;
4068 TREE_THIS_VOLATILE (lhs) = 1;
4071 rhs = const_binop (BIT_AND_EXPR,
4072 const_binop (LSHIFT_EXPR,
4073 fold_convert (unsigned_type, rhs),
4074 size_int (lbitpos), 0),
4077 return build2 (code, compare_type,
4078 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask),
4082 /* Subroutine for fold_truthop: decode a field reference.
4084 If EXP is a comparison reference, we return the innermost reference.
4086 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
4087 set to the starting bit number.
4089 If the innermost field can be completely contained in a mode-sized
4090 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
4092 *PVOLATILEP is set to 1 if any expression encountered is volatile;
4093 otherwise it is not changed.
4095 *PUNSIGNEDP is set to the signedness of the field.
4097 *PMASK is set to the mask used. This is either contained in a
4098 BIT_AND_EXPR or derived from the width of the field.
4100 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
4102 Return 0 if this is not a component reference or is one that we can't
4103 do anything with. */
4106 decode_field_reference (tree exp, HOST_WIDE_INT *pbitsize,
4107 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
4108 int *punsignedp, int *pvolatilep,
4109 tree *pmask, tree *pand_mask)
4111 tree outer_type = 0;
4113 tree mask, inner, offset;
4115 unsigned int precision;
4117 /* All the optimizations using this function assume integer fields.
4118 There are problems with FP fields since the type_for_size call
4119 below can fail for, e.g., XFmode. */
4120 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
4123 /* We are interested in the bare arrangement of bits, so strip everything
4124 that doesn't affect the machine mode. However, record the type of the
4125 outermost expression if it may matter below. */
4126 if (CONVERT_EXPR_P (exp)
4127 || TREE_CODE (exp) == NON_LVALUE_EXPR)
4128 outer_type = TREE_TYPE (exp);
/* Peel off an explicit AND mask, remembering it; it must be a constant
   for the later mask merging to work.  */
4131 if (TREE_CODE (exp) == BIT_AND_EXPR)
4133 and_mask = TREE_OPERAND (exp, 1);
4134 exp = TREE_OPERAND (exp, 0);
4135 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
4136 if (TREE_CODE (and_mask) != INTEGER_CST)
4140 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
4141 punsignedp, pvolatilep, false);
/* Fail when there was nothing to decode (no inner reference and no AND
   mask), or the reference is variable-offset or a PLACEHOLDER_EXPR.  */
4142 if ((inner == exp && and_mask == 0)
4143 || *pbitsize < 0 || offset != 0
4144 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
4147 /* If the number of bits in the reference is the same as the bitsize of
4148 the outer type, then the outer type gives the signedness. Otherwise
4149 (in case of a small bitfield) the signedness is unchanged. */
4150 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
4151 *punsignedp = TYPE_UNSIGNED (outer_type);
4153 /* Compute the mask to access the bitfield. */
4154 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
4155 precision = TYPE_PRECISION (unsigned_type);
/* All-ones value shifted up and back down leaves the low *PBITSIZE bits
   set -- the field mask within UNSIGNED_TYPE.  */
4157 mask = build_int_cst_type (unsigned_type, -1);
4159 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
4160 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
4162 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
4164 mask = fold_build2 (BIT_AND_EXPR, unsigned_type,
4165 fold_convert (unsigned_type, and_mask), mask);
4168 *pand_mask = and_mask;
4172 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
bits of MASK's type (i.e. MASK == (1 << SIZE) - 1). */
4176 all_ones_mask_p (const_tree mask, int size)
4178 tree type = TREE_TYPE (mask);
4179 unsigned int precision = TYPE_PRECISION (type);
/* Build an all-ones constant of the signed variant of TYPE, then shift
   it up and back down so only the low SIZE bits remain set; compare
   that canonical mask against MASK.  */
4182 tmask = build_int_cst_type (signed_type_for (type), -1);
4185 tree_int_cst_equal (mask,
4186 const_binop (RSHIFT_EXPR,
4187 const_binop (LSHIFT_EXPR, tmask,
4188 size_int (precision - size),
4190 size_int (precision - size), 0));
4193 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
4194 represents the sign bit of EXP's type. If EXP represents a sign
4195 or zero extension, also test VAL against the unextended type.
4196 The return value is the (sub)expression whose sign bit is VAL,
4197 or NULL_TREE otherwise. */
4200 sign_bit_p (tree exp, const_tree val)
4202 unsigned HOST_WIDE_INT mask_lo, lo;
4203 HOST_WIDE_INT mask_hi, hi;
4207 /* Tree EXP must have an integral type. */
4208 t = TREE_TYPE (exp);
4209 if (! INTEGRAL_TYPE_P (t))
4212 /* Tree VAL must be an integer constant. */
4213 if (TREE_CODE (val) != INTEGER_CST
4214 || TREE_OVERFLOW (val))
4217 width = TYPE_PRECISION (t);
/* Wide case: the sign bit lives in the high word of the two-word
   constant representation; the low word of the expected value is 0.  */
4218 if (width > HOST_BITS_PER_WIDE_INT)
4220 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
4223 mask_hi = ((unsigned HOST_WIDE_INT) -1
4224 >> (2 * HOST_BITS_PER_WIDE_INT - width));
/* Narrow case: the sign bit fits in the low word; the high word of the
   expected value is 0.  */
4230 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
4233 mask_lo = ((unsigned HOST_WIDE_INT) -1
4234 >> (HOST_BITS_PER_WIDE_INT - width));
4237 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
4238 treat VAL as if it were unsigned. */
4239 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
4240 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
4243 /* Handle extension from a narrower type. */
4244 if (TREE_CODE (exp) == NOP_EXPR
4245 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
4246 return sign_bit_p (TREE_OPERAND (exp, 0), val);
4251 /* Subroutine for fold_truthop: determine if an operand is simple enough
4252 to be evaluated unconditionally (i.e. cheap and free of traps,
side effects, and aliasing surprises). */
4255 simple_operand_p (const_tree exp)
4257 /* Strip any conversions that don't change the machine mode. */
/* Constants and SSA names are always safe; otherwise the operand must
   be a non-volatile, non-addressable local decl.  */
4260 return (CONSTANT_CLASS_P (exp)
4261 || TREE_CODE (exp) == SSA_NAME
4263 && ! TREE_ADDRESSABLE (exp)
4264 && ! TREE_THIS_VOLATILE (exp)
4265 && ! DECL_NONLOCAL (exp)
4266 /* Don't regard global variables as simple. They may be
4267 allocated in ways unknown to the compiler (shared memory,
4268 #pragma weak, etc). */
4269 && ! TREE_PUBLIC (exp)
4270 && ! DECL_EXTERNAL (exp)
4271 /* Loading a static variable is unduly expensive, but global
4272 registers aren't expensive. */
4273 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
4276 /* The following functions are subroutines to fold_range_test and allow it to
4277 try to change a logical combination of comparisons into a range test.
4280 X == 2 || X == 3 || X == 4 || X == 5
4284 (unsigned) (X - 2) <= 3
4286 We describe each set of comparisons as being either inside or outside
4287 a range, using a variable named like IN_P, and then describe the
4288 range with a lower and upper bound. If one of the bounds is omitted,
4289 it represents either the highest or lowest value of the type.
4291 In the comments below, we represent a range by two numbers in brackets
4292 preceded by a "+" to designate being inside that range, or a "-" to
4293 designate being outside that range, so the condition can be inverted by
4294 flipping the prefix. An omitted bound is represented by a "-". For
4295 example, "- [-, 10]" means being outside the range starting at the lowest
4296 possible value and ending at 10, in other words, being greater than 10.
4297 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
4300 We set up things so that the missing bounds are handled in a consistent
4301 manner so neither a missing bound nor "true" and "false" need to be
4302 handled using a special case. */
4304 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
4305 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
4306 and UPPER1_P are nonzero if the respective argument is an upper bound
4307 and zero for a lower. TYPE, if nonzero, is the type of the result; it
4308 must be specified for a comparison. ARG1 will be converted to ARG0's
4309 type if both are specified. */
4312 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
4313 tree arg1, int upper1_p)
4319 /* If neither arg represents infinity, do the normal operation.
4320 Else, if not a comparison, return infinity. Else handle the special
4321 comparison rules. Note that most of the cases below won't occur, but
4322 are handled for consistency. */
4324 if (arg0 != 0 && arg1 != 0)
4326 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
4327 arg0, fold_convert (TREE_TYPE (arg0), arg1));
/* Only a fully-folded constant result is useful to the caller.  */
4329 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
4332 if (TREE_CODE_CLASS (code) != tcc_comparison)
4335 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
4336 for neither. In real maths, we cannot assume open ended ranges are
4337 the same. But, this is computer arithmetic, where numbers are finite.
4338 We can therefore make the transformation of any unbounded range with
4339 the value Z, Z being greater than any representable number. This permits
4340 us to treat unbounded ranges as equal. */
4341 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
4342 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
/* With the bounds abstracted to -1/0/+1, the comparison reduces to a
   comparison of the SGN values themselves.  */
4346 result = sgn0 == sgn1;
4349 result = sgn0 != sgn1;
4352 result = sgn0 < sgn1;
4355 result = sgn0 <= sgn1;
4358 result = sgn0 > sgn1;
4361 result = sgn0 >= sgn1;
4367 return constant_boolean_node (result, type);
4370 /* Given EXP, a logical expression, set the range it is testing into
4371 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
4372 actually being tested. *PLOW and *PHIGH will be made of the same
4373 type as the returned expression. If EXP is not a comparison, we
4374 will most likely not be returning a useful value and range. Set
4375 *STRICT_OVERFLOW_P to true if the return value is only valid
4376 because signed overflow is undefined; otherwise, do not change
4377 *STRICT_OVERFLOW_P. */
4380 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
4381 bool *strict_overflow_p)
4383 enum tree_code code;
4384 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
4385 tree exp_type = NULL_TREE, arg0_type = NULL_TREE;
4387 tree low, high, n_low, n_high;
4389 /* Start with simply saying "EXP != 0" and then look at the code of EXP
4390 and see if we can refine the range. Some of the cases below may not
4391 happen, but it doesn't seem worth worrying about this. We "continue"
4392 the outer loop when we've changed something; otherwise we "break"
4393 the switch, which will "break" the while. */
4396 low = high = build_int_cst (TREE_TYPE (exp), 0);
4400 code = TREE_CODE (exp);
4401 exp_type = TREE_TYPE (exp);
/* Pick up the operands of EXP relevant to the tree-code class, so the
   switch below can refer to ARG0/ARG1 uniformly.  */
4403 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
4405 if (TREE_OPERAND_LENGTH (exp) > 0)
4406 arg0 = TREE_OPERAND (exp, 0);
4407 if (TREE_CODE_CLASS (code) == tcc_comparison
4408 || TREE_CODE_CLASS (code) == tcc_unary
4409 || TREE_CODE_CLASS (code) == tcc_binary)
4410 arg0_type = TREE_TYPE (arg0);
4411 if (TREE_CODE_CLASS (code) == tcc_binary
4412 || TREE_CODE_CLASS (code) == tcc_comparison
4413 || (TREE_CODE_CLASS (code) == tcc_expression
4414 && TREE_OPERAND_LENGTH (exp) > 1))
4415 arg1 = TREE_OPERAND (exp, 1);
4420 case TRUTH_NOT_EXPR:
/* Logical negation: invert the in/out sense and descend.  */
4421 in_p = ! in_p, exp = arg0;
4424 case EQ_EXPR: case NE_EXPR:
4425 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
4426 /* We can only do something if the range is testing for zero
4427 and if the second operand is an integer constant. Note that
4428 saying something is "in" the range we make is done by
4429 complementing IN_P since it will set in the initial case of
4430 being not equal to zero; "out" is leaving it alone. */
4431 if (low == 0 || high == 0
4432 || ! integer_zerop (low) || ! integer_zerop (high)
4433 || TREE_CODE (arg1) != INTEGER_CST)
4438 case NE_EXPR: /* - [c, c] */
4441 case EQ_EXPR: /* + [c, c] */
4442 in_p = ! in_p, low = high = arg1;
4444 case GT_EXPR: /* - [-, c] */
4445 low = 0, high = arg1;
4447 case GE_EXPR: /* + [c, -] */
4448 in_p = ! in_p, low = arg1, high = 0;
4450 case LT_EXPR: /* - [c, -] */
4451 low = arg1, high = 0;
4453 case LE_EXPR: /* + [-, c] */
4454 in_p = ! in_p, low = 0, high = arg1;
4460 /* If this is an unsigned comparison, we also know that EXP is
4461 greater than or equal to zero. We base the range tests we make
4462 on that fact, so we record it here so we can parse existing
4463 range tests. We test arg0_type since often the return type
4464 of, e.g. EQ_EXPR, is boolean. */
4465 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
4467 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4469 build_int_cst (arg0_type, 0),
4473 in_p = n_in_p, low = n_low, high = n_high;
4475 /* If the high bound is missing, but we have a nonzero low
4476 bound, reverse the range so it goes from zero to the low bound
minus one (an unsigned wrap-around formulation). */
4478 if (high == 0 && low && ! integer_zerop (low))
4481 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
4482 integer_one_node, 0);
4483 low = build_int_cst (arg0_type, 0);
4491 /* (-x) IN [a,b] -> x in [-b, -a] */
4492 n_low = range_binop (MINUS_EXPR, exp_type,
4493 build_int_cst (exp_type, 0),
4495 n_high = range_binop (MINUS_EXPR, exp_type,
4496 build_int_cst (exp_type, 0),
4498 low = n_low, high = n_high;
/* Rewrite ~x as -x - 1 so the NEGATE/PLUS logic can handle it.  */
4504 exp = build2 (MINUS_EXPR, exp_type, negate_expr (arg0),
4505 build_int_cst (exp_type, 1));
4508 case PLUS_EXPR: case MINUS_EXPR:
4509 if (TREE_CODE (arg1) != INTEGER_CST)
4512 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
4513 move a constant to the other side. */
4514 if (!TYPE_UNSIGNED (arg0_type)
4515 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4518 /* If EXP is signed, any overflow in the computation is undefined,
4519 so we don't worry about it so long as our computations on
4520 the bounds don't overflow. For unsigned, overflow is defined
4521 and this is exactly the right thing. */
4522 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4523 arg0_type, low, 0, arg1, 0);
4524 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4525 arg0_type, high, 1, arg1, 0);
4526 if ((n_low != 0 && TREE_OVERFLOW (n_low))
4527 || (n_high != 0 && TREE_OVERFLOW (n_high)))
/* Moving the constant relied on undefined signed overflow; tell the
   caller so it can issue -Wstrict-overflow style diagnostics.  */
4530 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
4531 *strict_overflow_p = true;
4533 /* Check for an unsigned range which has wrapped around the maximum
4534 value thus making n_high < n_low, and normalize it. */
4535 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
4537 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
4538 integer_one_node, 0);
4539 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
4540 integer_one_node, 0);
4542 /* If the range is of the form +/- [ x+1, x ], we won't
4543 be able to normalize it. But then, it represents the
4544 whole range or the empty set, so make it
4546 if (tree_int_cst_equal (n_low, low)
4547 && tree_int_cst_equal (n_high, high))
4553 low = n_low, high = n_high;
4558 CASE_CONVERT: case NON_LVALUE_EXPR:
/* Never widen through a conversion, and require integral bounds that
   fit the narrower type; otherwise give up on this conversion.  */
4559 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4562 if (! INTEGRAL_TYPE_P (arg0_type)
4563 || (low != 0 && ! int_fits_type_p (low, arg0_type))
4564 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4567 n_low = low, n_high = high;
4570 n_low = fold_convert (arg0_type, n_low);
4573 n_high = fold_convert (arg0_type, n_high);
4576 /* If we're converting arg0 from an unsigned type, to exp,
4577 a signed type, we will be doing the comparison as unsigned.
4578 The tests above have already verified that LOW and HIGH
4581 So we have to ensure that we will handle large unsigned
4582 values the same way that the current signed bounds treat
4585 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4589 /* For fixed-point modes, we need to pass the saturating flag
4590 as the 2nd parameter. */
4591 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
4592 equiv_type = lang_hooks.types.type_for_mode
4593 (TYPE_MODE (arg0_type),
4594 TYPE_SATURATING (arg0_type));
4596 equiv_type = lang_hooks.types.type_for_mode
4597 (TYPE_MODE (arg0_type), 1);
4599 /* A range without an upper bound is, naturally, unbounded.
4600 Since convert would have cropped a very large value, use
4601 the max value for the destination type. */
4603 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4604 : TYPE_MAX_VALUE (arg0_type);
/* Same precision: the largest value that stays positive when
   reinterpreted as signed is MAX >> 1.  */
4606 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4607 high_positive = fold_build2 (RSHIFT_EXPR, arg0_type,
4608 fold_convert (arg0_type,
4610 build_int_cst (arg0_type, 1));
4612 /* If the low bound is specified, "and" the range with the
4613 range for which the original unsigned value will be
interpreted as positive. */
4617 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4618 1, n_low, n_high, 1,
4619 fold_convert (arg0_type,
4624 in_p = (n_in_p == in_p);
4628 /* Otherwise, "or" the range with the range of the input
4629 that will be interpreted as negative. */
4630 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4631 0, n_low, n_high, 1,
4632 fold_convert (arg0_type,
4637 in_p = (in_p != n_in_p);
4642 low = n_low, high = n_high;
4652 /* If EXP is a constant, we can evaluate whether this is true or false. */
4653 if (TREE_CODE (exp) == INTEGER_CST)
4655 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4657 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4663 *pin_p = in_p, *plow = low, *phigh = high;
4667 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4668 type, TYPE, return an expression to test if EXP is in (or out of, depending
4669 on IN_P) the range. Return 0 if the test couldn't be created. */
4672 build_range_check (tree type, tree exp, int in_p, tree low, tree high)
4674 tree etype = TREE_TYPE (exp), value;
4675 enum tree_code code;
4677 #ifdef HAVE_canonicalize_funcptr_for_compare
4678 /* Disable this optimization for function pointer expressions
4679 on targets that require function pointer canonicalization. */
4680 if (HAVE_canonicalize_funcptr_for_compare
4681 && TREE_CODE (etype) == POINTER_TYPE
4682 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
/* An "out of range" test is built as the negation of the corresponding
   "in range" test.  */
4688 value = build_range_check (type, exp, 1, low, high);
4690 return invert_truthvalue (value);
/* Both bounds missing: the range covers everything, so the test is
   constant true.  */
4695 if (low == 0 && high == 0)
4696 return build_int_cst (type, 1);
/* Only one bound present, or both bounds equal: fold to a single
   comparison.  */
4699 return fold_build2 (LE_EXPR, type, exp,
4700 fold_convert (etype, high));
4703 return fold_build2 (GE_EXPR, type, exp,
4704 fold_convert (etype, low));
4706 if (operand_equal_p (low, high, 0))
4707 return fold_build2 (EQ_EXPR, type, exp,
4708 fold_convert (etype, low));
4710 if (integer_zerop (low))
/* [0, HIGH]: force an unsigned comparison so the single LE test is
   exact, then recurse.  */
4712 if (! TYPE_UNSIGNED (etype))
4714 etype = unsigned_type_for (etype);
4715 high = fold_convert (etype, high);
4716 exp = fold_convert (etype, exp);
4718 return build_range_check (type, exp, 1, 0, high);
4721 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4722 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4724 unsigned HOST_WIDE_INT lo;
/* Compute the two-word representation of signed MAX for ETYPE's
   precision, to recognize HIGH == signed maximum.  */
4728 prec = TYPE_PRECISION (etype);
4729 if (prec <= HOST_BITS_PER_WIDE_INT)
4732 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
4736 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
4737 lo = (unsigned HOST_WIDE_INT) -1;
4740 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
4742 if (TYPE_UNSIGNED (etype))
4744 tree signed_etype = signed_type_for (etype);
4745 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
4747 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
4749 etype = signed_etype;
4750 exp = fold_convert (etype, exp);
4752 return fold_build2 (GT_EXPR, type, exp,
4753 build_int_cst (etype, 0));
4757 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4758 This requires wrap-around arithmetics for the type of the expression. */
4759 code = TREE_CODE (etype);
4765 /* There is no requirement that LOW be within the range of ETYPE
4766 if the latter is a subtype. It must, however, be within the base
4767 type of ETYPE. So be sure we do the subtraction in that type. */
4768 if (code == INTEGER_TYPE && TREE_TYPE (etype))
4770 etype = TREE_TYPE (etype);
4771 /* But not in an enumeral or boolean type though. */
4772 code = TREE_CODE (etype);
4775 if (code != INTEGER_TYPE)
4776 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4777 TYPE_UNSIGNED (etype));
4784 /* If we don't have wrap-around arithmetics upfront, try to force it. */
4785 if (TREE_CODE (etype) == INTEGER_TYPE
4786 && !TYPE_OVERFLOW_WRAPS (etype))
4788 tree utype, minv, maxv;
4790 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4791 for the type in question, as we rely on this here. */
4792 utype = unsigned_type_for (etype);
4793 maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
4794 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4795 integer_one_node, 1);
4796 minv = fold_convert (utype, TYPE_MIN_VALUE (etype));
4798 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4805 high = fold_convert (etype, high);
4806 low = fold_convert (etype, low);
4807 exp = fold_convert (etype, exp);
4809 value = const_binop (MINUS_EXPR, high, low, 0);
/* Pointer case: build the subtraction as EXP + (-LOW) using
   POINTER_PLUS_EXPR with a sizetype offset.  */
4812 if (POINTER_TYPE_P (etype))
4814 if (value != 0 && !TREE_OVERFLOW (value))
4816 low = fold_convert (sizetype, low);
4817 low = fold_build1 (NEGATE_EXPR, sizetype, low);
4818 return build_range_check (type,
4819 fold_build2 (POINTER_PLUS_EXPR, etype, exp, low),
4820 1, build_int_cst (etype, 0), value);
4825 if (value != 0 && !TREE_OVERFLOW (value))
4826 return build_range_check (type,
4827 fold_build2 (MINUS_EXPR, etype, exp, low),
4828 1, build_int_cst (etype, 0), value);
4833 /* Return the predecessor of VAL in its type, handling the infinite case
(i.e. when VAL is already the minimum value of its type). */
4836 range_predecessor (tree val)
4838 tree type = TREE_TYPE (val);
/* The minimum value of an integral type has no predecessor.  */
4840 if (INTEGRAL_TYPE_P (type)
4841 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4844 return range_binop (MINUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4847 /* Return the successor of VAL in its type, handling the infinite case
(i.e. when VAL is already the maximum value of its type). */
4850 range_successor (tree val)
4852 tree type = TREE_TYPE (val);
/* The maximum value of an integral type has no successor.  */
4854 if (INTEGRAL_TYPE_P (type)
4855 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4858 return range_binop (PLUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4861 /* Given two ranges, see if we can merge them into one. Return 1 if we
4862 can, 0 if we can't. Set the output range into the specified parameters. */
4865 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4866 tree high0, int in1_p, tree low1, tree high1)
/* A missing bound (0) stands for the extreme value of the type, so two
   missing bounds compare equal.  */
4874 int lowequal = ((low0 == 0 && low1 == 0)
4875 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4876 low0, 0, low1, 0)));
4877 int highequal = ((high0 == 0 && high1 == 0)
4878 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4879 high0, 1, high1, 1)));
4881 /* Make range 0 be the range that starts first, or ends last if they
4882 start at the same value. Swap them if it isn't. */
4883 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4886 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4887 high1, 1, high0, 1))))
4889 temp = in0_p, in0_p = in1_p, in1_p = temp;
4890 tem = low0, low0 = low1, low1 = tem;
4891 tem = high0, high0 = high1, high1 = tem;
4894 /* Now flag two cases, whether the ranges are disjoint or whether the
4895 second range is totally subsumed in the first. Note that the tests
4896 below are simplified by the ones above. */
4897 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4898 high0, 1, low1, 0));
4899 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4900 high1, 1, high0, 1));
4902 /* We now have four cases, depending on whether we are including or
4903 excluding the two ranges. */
4906 /* If they don't overlap, the result is false. If the second range
4907 is a subset it is the result. Otherwise, the range is from the start
4908 of the second to the end of the first. */
4910 in_p = 0, low = high = 0;
4912 in_p = 1, low = low1, high = high1;
4914 in_p = 1, low = low1, high = high0;
4917 else if (in0_p && ! in1_p)
4919 /* If they don't overlap, the result is the first range. If they are
4920 equal, the result is false. If the second range is a subset of the
4921 first, and the ranges begin at the same place, we go from just after
4922 the end of the second range to the end of the first. If the second
4923 range is not a subset of the first, or if it is a subset and both
4924 ranges end at the same place, the range starts at the start of the
4925 first range and ends just before the second range.
4926 Otherwise, we can't describe this as a single range. */
4928 in_p = 1, low = low0, high = high0;
4929 else if (lowequal && highequal)
4930 in_p = 0, low = high = 0;
4931 else if (subset && lowequal)
4933 low = range_successor (high1);
4938 /* We are in the weird situation where high0 > high1 but
4939 high1 has no successor. Punt. */
4943 else if (! subset || highequal)
4946 high = range_predecessor (low1);
4950 /* low0 < low1 but low1 has no predecessor. Punt. */
4958 else if (! in0_p && in1_p)
4960 /* If they don't overlap, the result is the second range. If the second
4961 is a subset of the first, the result is false. Otherwise,
4962 the range starts just after the first range and ends at the
4963 end of the second. */
4965 in_p = 1, low = low1, high = high1;
4966 else if (subset || highequal)
4967 in_p = 0, low = high = 0;
4970 low = range_successor (high0);
4975 /* high1 > high0 but high0 has no successor. Punt. */
4983 /* The case where we are excluding both ranges. Here the complex case
4984 is if they don't overlap. In that case, the only time we have a
4985 range is if they are adjacent. If the second is a subset of the
4986 first, the result is the first. Otherwise, the range to exclude
4987 starts at the beginning of the first range and ends at the end of the
second. */
4991 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4992 range_successor (high0),
4994 in_p = 0, low = low0, high = high1;
4997 /* Canonicalize - [min, x] into - [-, x]. */
4998 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4999 switch (TREE_CODE (TREE_TYPE (low0)))
/* Only canonicalize when the type's precision fills its machine
   mode; otherwise the nominal MIN/MAX may not be the real extremes.  */
5002 if (TYPE_PRECISION (TREE_TYPE (low0))
5003 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
5007 if (tree_int_cst_equal (low0,
5008 TYPE_MIN_VALUE (TREE_TYPE (low0))))
5012 if (TYPE_UNSIGNED (TREE_TYPE (low0))
5013 && integer_zerop (low0))
5020 /* Canonicalize - [x, max] into - [x, -]. */
5021 if (high1 && TREE_CODE (high1) == INTEGER_CST)
5022 switch (TREE_CODE (TREE_TYPE (high1)))
5025 if (TYPE_PRECISION (TREE_TYPE (high1))
5026 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
5030 if (tree_int_cst_equal (high1,
5031 TYPE_MAX_VALUE (TREE_TYPE (high1))))
/* Unsigned max is recognized as HIGH1 + 1 == 0 (wrap-around).  */
5035 if (TYPE_UNSIGNED (TREE_TYPE (high1))
5036 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
5038 integer_one_node, 1)))
5045 /* The ranges might be also adjacent between the maximum and
5046 minimum values of the given type. For
5047 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
5048 return + [x + 1, y - 1]. */
5049 if (low0 == 0 && high1 == 0)
5051 low = range_successor (high0);
5052 high = range_predecessor (low1);
5053 if (low == 0 || high == 0)
5063 in_p = 0, low = low0, high = high0;
5065 in_p = 0, low = low0, high = high1;
5068 *pin_p = in_p, *plow = low, *phigh = high;
/* NOTE(review): this extraction is incomplete — the embedded original line
   numbers jump (e.g. 5087 -> 5093, 5159 -> 5163), so switch labels, braces
   and some statements are missing.  Do not treat the visible text as the
   whole function; consult the full fold-const.c before editing.  */
5073 /* Subroutine of fold, looking inside expressions of the form
5074 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
5075 of the COND_EXPR. This function is being used also to optimize
5076 A op B ? C : A, by reversing the comparison first.
5078 Return a folded expression whose code is not a COND_EXPR
5079 anymore, or NULL_TREE if no folding opportunity is found. */
5082 fold_cond_expr_with_comparison (tree type, tree arg0, tree arg1, tree arg2)
/* ARG0 is the comparison; its code and two operands are extracted here.  */
5084 enum tree_code comp_code = TREE_CODE (arg0);
5085 tree arg00 = TREE_OPERAND (arg0, 0);
5086 tree arg01 = TREE_OPERAND (arg0, 1);
5087 tree arg1_type = TREE_TYPE (arg1);
5093 /* If we have A op 0 ? A : -A, consider applying the following
5096 A == 0? A : -A same as -A
5097 A != 0? A : -A same as A
5098 A >= 0? A : -A same as abs (A)
5099 A > 0? A : -A same as abs (A)
5100 A <= 0? A : -A same as -abs (A)
5101 A < 0? A : -A same as -abs (A)
5103 None of these transformations work for modes with signed
5104 zeros. If A is +/-0, the first two transformations will
5105 change the sign of the result (from +0 to -0, or vice
5106 versa). The last four will fix the sign of the result,
5107 even though the original expressions could be positive or
5108 negative, depending on the sign of A.
5110 Note that all these transformations are correct if A is
5111 NaN, since the two alternatives (A and -A) are also NaNs. */
5112 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
5113 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
5114 ? real_zerop (arg01)
5115 : integer_zerop (arg01))
5116 && ((TREE_CODE (arg2) == NEGATE_EXPR
5117 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
5118 /* In the case that A is of the form X-Y, '-A' (arg2) may
5119 have already been folded to Y-X, check for that. */
5120 || (TREE_CODE (arg1) == MINUS_EXPR
5121 && TREE_CODE (arg2) == MINUS_EXPR
5122 && operand_equal_p (TREE_OPERAND (arg1, 0),
5123 TREE_OPERAND (arg2, 1), 0)
5124 && operand_equal_p (TREE_OPERAND (arg1, 1),
5125 TREE_OPERAND (arg2, 0), 0))))
/* The switch over COMP_CODE that dispatches the cases below is missing
   from this extraction; the returns that follow are its case bodies.  */
5130 tem = fold_convert (arg1_type, arg1);
5131 return pedantic_non_lvalue (fold_convert (type, negate_expr (tem)));
5134 return pedantic_non_lvalue (fold_convert (type, arg1));
/* -ftrapping-math: presumably the abs transformations are suppressed
   here because they could remove a trap — TODO confirm against the
   missing lines.  */
5137 if (flag_trapping_math)
/* ABS_EXPR wants a signed operand; convert unsigned ARG1 first.  */
5142 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
5143 arg1 = fold_convert (signed_type_for
5144 (TREE_TYPE (arg1)), arg1);
5145 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
5146 return pedantic_non_lvalue (fold_convert (type, tem));
5149 if (flag_trapping_math)
5153 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
5154 arg1 = fold_convert (signed_type_for
5155 (TREE_TYPE (arg1)), arg1);
5156 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
/* This branch builds -abs (A), matching the A <= 0 / A < 0 rows above.  */
5157 return negate_expr (fold_convert (type, tem));
5159 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5163 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
5164 A == 0 ? A : 0 is always 0 unless A is -0. Note that
5165 both transformations are correct when A is NaN: A != 0
5166 is then true, and A == 0 is false. */
5168 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
5169 && integer_zerop (arg01) && integer_zerop (arg2))
5171 if (comp_code == NE_EXPR)
5172 return pedantic_non_lvalue (fold_convert (type, arg1));
5173 else if (comp_code == EQ_EXPR)
5174 return build_int_cst (type, 0);
5177 /* Try some transformations of A op B ? A : B.
5179 A == B? A : B same as B
5180 A != B? A : B same as A
5181 A >= B? A : B same as max (A, B)
5182 A > B? A : B same as max (B, A)
5183 A <= B? A : B same as min (A, B)
5184 A < B? A : B same as min (B, A)
5186 As above, these transformations don't work in the presence
5187 of signed zeros. For example, if A and B are zeros of
5188 opposite sign, the first two transformations will change
5189 the sign of the result. In the last four, the original
5190 expressions give different results for (A=+0, B=-0) and
5191 (A=-0, B=+0), but the transformed expressions do not.
5193 The first two transformations are correct if either A or B
5194 is a NaN. In the first transformation, the condition will
5195 be false, and B will indeed be chosen. In the case of the
5196 second transformation, the condition A != B will be true,
5197 and A will be chosen.
5199 The conversions to max() and min() are not correct if B is
5200 a number and A is not. The conditions in the original
5201 expressions will be false, so all four give B. The min()
5202 and max() versions would give a NaN instead. */
5203 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
5204 && operand_equal_for_comparison_p (arg01, arg2, arg00)
5205 /* Avoid these transformations if the COND_EXPR may be used
5206 as an lvalue in the C++ front-end. PR c++/19199. */
5208 || (strcmp (lang_hooks.name, "GNU C++") != 0
5209 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
5210 || ! maybe_lvalue_p (arg1)
5211 || ! maybe_lvalue_p (arg2)))
5213 tree comp_op0 = arg00;
5214 tree comp_op1 = arg01;
5215 tree comp_type = TREE_TYPE (comp_op0);
5217 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
5218 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
/* Case labels for EQ/NE and the MIN/MAX comparisons are missing from
   this extraction; the returns below belong to those cases.  */
5228 return pedantic_non_lvalue (fold_convert (type, arg2));
5230 return pedantic_non_lvalue (fold_convert (type, arg1));
5235 /* In C++ a ?: expression can be an lvalue, so put the
5236 operand which will be used if they are equal first
5237 so that we can convert this back to the
5238 corresponding COND_EXPR. */
/* MIN/MAX folding is only valid when NaNs need not be honored.  */
5239 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
5241 comp_op0 = fold_convert (comp_type, comp_op0);
5242 comp_op1 = fold_convert (comp_type, comp_op1);
5243 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
5244 ? fold_build2 (MIN_EXPR, comp_type, comp_op0, comp_op1)
5245 : fold_build2 (MIN_EXPR, comp_type, comp_op1, comp_op0);
5246 return pedantic_non_lvalue (fold_convert (type, tem));
5253 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
5255 comp_op0 = fold_convert (comp_type, comp_op0);
5256 comp_op1 = fold_convert (comp_type, comp_op1);
5257 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
5258 ? fold_build2 (MAX_EXPR, comp_type, comp_op0, comp_op1)
5259 : fold_build2 (MAX_EXPR, comp_type, comp_op1, comp_op0);
5260 return pedantic_non_lvalue (fold_convert (type, tem));
5264 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
5265 return pedantic_non_lvalue (fold_convert (type, arg2));
5268 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
5269 return pedantic_non_lvalue (fold_convert (type, arg1));
5272 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5277 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
5278 we might still be able to simplify this. For example,
5279 if C1 is one less or one more than C2, this might have started
5280 out as a MIN or MAX and been transformed by this function.
5281 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
5283 if (INTEGRAL_TYPE_P (type)
5284 && TREE_CODE (arg01) == INTEGER_CST
5285 && TREE_CODE (arg2) == INTEGER_CST)
/* The switch over COMP_CODE for the C1/C2 cases is missing here.  */
5289 /* We can replace A with C1 in this case. */
5290 arg1 = fold_convert (type, arg01);
5291 return fold_build3 (COND_EXPR, type, arg0, arg1, arg2);
5294 /* If C1 is C2 + 1, this is min(A, C2). */
/* The TYPE_MAX_VALUE guard rejects the boundary where C2 + 1 would
   wrap, which would make the MIN rewrite invalid.  */
5295 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
5297 && operand_equal_p (arg01,
5298 const_binop (PLUS_EXPR, arg2,
5299 build_int_cst (type, 1), 0),
5301 return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
5303 fold_convert (type, arg1),
5308 /* If C1 is C2 - 1, this is min(A, C2). */
5309 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
5311 && operand_equal_p (arg01,
5312 const_binop (MINUS_EXPR, arg2,
5313 build_int_cst (type, 1), 0),
5315 return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
5317 fold_convert (type, arg1),
5322 /* If C1 is C2 - 1, this is max(A, C2). */
5323 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
5325 && operand_equal_p (arg01,
5326 const_binop (MINUS_EXPR, arg2,
5327 build_int_cst (type, 1), 0),
5329 return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
5331 fold_convert (type, arg1),
5336 /* If C1 is C2 + 1, this is max(A, C2). */
5337 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
5339 && operand_equal_p (arg01,
5340 const_binop (PLUS_EXPR, arg2,
5341 build_int_cst (type, 1), 0),
5343 return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
5345 fold_convert (type, arg1),
/* NOTE(review): truncated extraction — the macro's continuation line(s)
   after 5361 and several function lines are missing (original line
   numbers jump).  Verify against the complete fold-const.c.  */
5359 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
5360 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
5361 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
5365 /* EXP is some logical combination of boolean tests. See if we can
5366 merge it into some range test. Return the new tree if so. */
5369 fold_range_test (enum tree_code code, tree type, tree op0, tree op1)
5371 int or_op = (code == TRUTH_ORIF_EXPR
5372 || code == TRUTH_OR_EXPR);
5373 int in0_p, in1_p, in_p;
5374 tree low0, low1, low, high0, high1, high;
5375 bool strict_overflow_p = false;
/* Decompose each operand into an (in_p, low, high) range description;
   make_range also records whether the decomposition relied on signed
   overflow being undefined.  */
5376 tree lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
5377 tree rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
5379 const char * const warnmsg = G_("assuming signed overflow does not occur "
5380 "when simplifying range test");
5382 /* If this is an OR operation, invert both sides; we will invert
5383 again at the end. */
5385 in0_p = ! in0_p, in1_p = ! in1_p;
5387 /* If both expressions are the same, if we can merge the ranges, and we
5388 can build the range test, return it or it inverted. If one of the
5389 ranges is always true or always false, consider it to be the same
5390 expression as the other. */
5391 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
5392 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
5394 && 0 != (tem = (build_range_check (type,
5396 : rhs != 0 ? rhs : integer_zero_node,
5399 if (strict_overflow_p)
5400 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
/* Undo the inversion applied above for OR operations.  */
5401 return or_op ? invert_truthvalue (tem) : tem;
5404 /* On machines where the branch cost is expensive, if this is a
5405 short-circuited branch and the underlying object on both sides
5406 is the same, make a non-short-circuit operation. */
5407 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
5408 && lhs != 0 && rhs != 0
5409 && (code == TRUTH_ANDIF_EXPR
5410 || code == TRUTH_ORIF_EXPR)
5411 && operand_equal_p (lhs, rhs, 0))
5413 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
5414 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
5415 which cases we can't do this. */
5416 if (simple_operand_p (lhs))
5417 return build2 (code == TRUTH_ANDIF_EXPR
5418 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5421 else if (lang_hooks.decls.global_bindings_p () == 0
5422 && ! CONTAINS_PLACEHOLDER_P (lhs))
/* SAVE_EXPR ensures the shared operand is evaluated only once when it
   appears in both rebuilt range checks.  */
5424 tree common = save_expr (lhs);
5426 if (0 != (lhs = build_range_check (type, common,
5427 or_op ? ! in0_p : in0_p,
5429 && (0 != (rhs = build_range_check (type, common,
5430 or_op ? ! in1_p : in1_p,
5433 if (strict_overflow_p)
5434 fold_overflow_warning (warnmsg,
5435 WARN_STRICT_OVERFLOW_COMPARISON);
5436 return build2 (code == TRUTH_ANDIF_EXPR
5437 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
/* NOTE(review): truncated extraction — the early `return c;`, TEMP's
   declaration and surrounding braces are among the dropped lines.  */
5446 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
5447 bit value. Arrange things so the extra bits will be set to zero if and
5448 only if C is signed-extended to its full width. If MASK is nonzero,
5449 it is an INTEGER_CST that should be AND'ed with the extra bits. */
5452 unextend (tree c, int p, int unsignedp, tree mask)
5454 tree type = TREE_TYPE (c);
5455 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
/* Full-width or unsigned values need no sign manipulation; the body of
   this early-out (presumably `return c;`) is missing here.  */
5458 if (p == modesize || unsignedp)
5461 /* We work by getting just the sign bit into the low-order bit, then
5462 into the high-order bit, then sign-extend. We then XOR that value
5464 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
5465 temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
5467 /* We must use a signed type in order to get an arithmetic right shift.
5468 However, we must also avoid introducing accidental overflows, so that
5469 a subsequent call to integer_zerop will work. Hence we must
5470 do the type conversion here. At this point, the constant is either
5471 zero or one, and the conversion to a signed type can never overflow.
5472 We could get an overflow if this conversion is done anywhere else. */
5473 if (TYPE_UNSIGNED (type))
5474 temp = fold_convert (signed_type_for (type), temp);
5476 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
5477 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
/* The `if (mask != 0)` guard for this AND appears to have been dropped
   by the extraction — TODO confirm.  */
5479 temp = const_binop (BIT_AND_EXPR, temp,
5480 fold_convert (TREE_TYPE (c), mask), 0);
5481 /* If necessary, convert the type back to match the type of C. */
5482 if (TYPE_UNSIGNED (type))
5483 temp = fold_convert (type, temp);
5485 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
/* NOTE(review): truncated extraction — many lines are missing throughout
   (braces, `goto`/`return 0` failure paths, some condition continuations),
   as shown by the jumps in the embedded original line numbers.  Treat the
   text below as a partial view only.  */
5488 /* Find ways of folding logical expressions of LHS and RHS:
5489 Try to merge two comparisons to the same innermost item.
5490 Look for range tests like "ch >= '0' && ch <= '9'".
5491 Look for combinations of simple terms on machines with expensive branches
5492 and evaluate the RHS unconditionally.
5494 For example, if we have p->a == 2 && p->b == 4 and we can make an
5495 object large enough to span both A and B, we can do this with a comparison
5496 against the object ANDed with the a mask.
5498 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5499 operations to do this with one comparison.
5501 We check for both normal comparisons and the BIT_AND_EXPRs made this by
5502 function and the one above.
5504 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5505 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5507 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5510 We return the simplified tree or 0 if no optimization is possible. */
5513 fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs)
5515 /* If this is the "or" of two comparisons, we can do something if
5516 the comparisons are NE_EXPR. If this is the "and", we can do something
5517 if the comparisons are EQ_EXPR. I.e.,
5518 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5520 WANTED_CODE is this operation code. For single bit fields, we can
5521 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5522 comparison for one-bit fields. */
/* Naming convention used throughout: the first letter is the side of
   the truth operation (l/r = lhs/rhs), the second is the side of that
   comparison (l/r = left/right operand).  E.g. lr_* describes the right
   operand of the left comparison.  */
5524 enum tree_code wanted_code;
5525 enum tree_code lcode, rcode;
5526 tree ll_arg, lr_arg, rl_arg, rr_arg;
5527 tree ll_inner, lr_inner, rl_inner, rr_inner;
5528 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5529 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5530 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5531 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5532 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5533 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5534 enum machine_mode lnmode, rnmode;
5535 tree ll_mask, lr_mask, rl_mask, rr_mask;
5536 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5537 tree l_const, r_const;
5538 tree lntype, rntype, result;
5539 HOST_WIDE_INT first_bit, end_bit;
5541 tree orig_lhs = lhs, orig_rhs = rhs;
5542 enum tree_code orig_code = code;
5544 /* Start by getting the comparison codes. Fail if anything is volatile.
5545 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5546 it were surrounded with a NE_EXPR. */
5548 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5551 lcode = TREE_CODE (lhs);
5552 rcode = TREE_CODE (rhs);
5554 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5556 lhs = build2 (NE_EXPR, truth_type, lhs,
5557 build_int_cst (TREE_TYPE (lhs), 0));
5561 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5563 rhs = build2 (NE_EXPR, truth_type, rhs,
5564 build_int_cst (TREE_TYPE (rhs), 0));
5568 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5569 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5572 ll_arg = TREE_OPERAND (lhs, 0);
5573 lr_arg = TREE_OPERAND (lhs, 1);
5574 rl_arg = TREE_OPERAND (rhs, 0);
5575 rr_arg = TREE_OPERAND (rhs, 1);
5577 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5578 if (simple_operand_p (ll_arg)
5579 && simple_operand_p (lr_arg))
/* Both comparisons are over the same pair of operands, possibly with
   the right-hand comparison's operands swapped.  */
5582 if (operand_equal_p (ll_arg, rl_arg, 0)
5583 && operand_equal_p (lr_arg, rr_arg, 0))
5585 result = combine_comparisons (code, lcode, rcode,
5586 truth_type, ll_arg, lr_arg)
5590 else if (operand_equal_p (ll_arg, rr_arg, 0)
5591 && operand_equal_p (lr_arg, rl_arg, 0))
5593 result = combine_comparisons (code, lcode,
5594 swap_tree_comparison (rcode),
5595 truth_type, ll_arg, lr_arg);
/* From here on, treat the short-circuit codes like their
   non-short-circuit counterparts.  */
5601 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5602 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5604 /* If the RHS can be evaluated unconditionally and its operands are
5605 simple, it wins to evaluate the RHS unconditionally on machines
5606 with expensive branches. In this case, this isn't a comparison
5607 that can be merged. Avoid doing this if the RHS is a floating-point
5608 comparison since those can trap. */
5610 if (BRANCH_COST (optimize_function_for_speed_p (cfun),
5612 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5613 && simple_operand_p (rl_arg)
5614 && simple_operand_p (rr_arg))
5616 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5617 if (code == TRUTH_OR_EXPR
5618 && lcode == NE_EXPR && integer_zerop (lr_arg)
5619 && rcode == NE_EXPR && integer_zerop (rr_arg)
5620 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5621 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5622 return build2 (NE_EXPR, truth_type,
5623 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5625 build_int_cst (TREE_TYPE (ll_arg), 0));
5627 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5628 if (code == TRUTH_AND_EXPR
5629 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5630 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5631 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5632 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5633 return build2 (EQ_EXPR, truth_type,
5634 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5636 build_int_cst (TREE_TYPE (ll_arg), 0));
5638 if (LOGICAL_OP_NON_SHORT_CIRCUIT)
/* Only rebuild the expression if something actually changed, to
   avoid returning a tree identical to the input.  */
5640 if (code != orig_code || lhs != orig_lhs || rhs != orig_rhs)
5641 return build2 (code, truth_type, lhs, rhs);
5646 /* See if the comparisons can be merged. Then get all the parameters for
5649 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5650 || (rcode != EQ_EXPR && rcode != NE_EXPR))
/* Decompose all four comparison operands into bit-field descriptions
   (inner object, bit size/position, mode, signedness, masks).  */
5654 ll_inner = decode_field_reference (ll_arg,
5655 &ll_bitsize, &ll_bitpos, &ll_mode,
5656 &ll_unsignedp, &volatilep, &ll_mask,
5658 lr_inner = decode_field_reference (lr_arg,
5659 &lr_bitsize, &lr_bitpos, &lr_mode,
5660 &lr_unsignedp, &volatilep, &lr_mask,
5662 rl_inner = decode_field_reference (rl_arg,
5663 &rl_bitsize, &rl_bitpos, &rl_mode,
5664 &rl_unsignedp, &volatilep, &rl_mask,
5666 rr_inner = decode_field_reference (rr_arg,
5667 &rr_bitsize, &rr_bitpos, &rr_mode,
5668 &rr_unsignedp, &volatilep, &rr_mask,
5671 /* It must be true that the inner operation on the lhs of each
5672 comparison must be the same if we are to be able to do anything.
5673 Then see if we have constants. If not, the same must be true for
5675 if (volatilep || ll_inner == 0 || rl_inner == 0
5676 || ! operand_equal_p (ll_inner, rl_inner, 0))
5679 if (TREE_CODE (lr_arg) == INTEGER_CST
5680 && TREE_CODE (rr_arg) == INTEGER_CST)
5681 l_const = lr_arg, r_const = rr_arg;
5682 else if (lr_inner == 0 || rr_inner == 0
5683 || ! operand_equal_p (lr_inner, rr_inner, 0))
5686 l_const = r_const = 0;
5688 /* If either comparison code is not correct for our logical operation,
5689 fail. However, we can convert a one-bit comparison against zero into
5690 the opposite comparison against that bit being set in the field. */
5692 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5693 if (lcode != wanted_code)
5695 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5697 /* Make the left operand unsigned, since we are only interested
5698 in the value of one bit. Otherwise we are doing the wrong
/* The lines performing that conversion (and the failure return) are
   missing from this extraction.  */
5707 /* This is analogous to the code for l_const above. */
5708 if (rcode != wanted_code)
5710 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5719 /* See if we can find a mode that contains both fields being compared on
5720 the left. If we can't, fail. Otherwise, update all constants and masks
5721 to be relative to a field of that size. */
5722 first_bit = MIN (ll_bitpos, rl_bitpos);
5723 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5724 lnmode = get_best_mode (end_bit - first_bit, first_bit,
5725 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5727 if (lnmode == VOIDmode)
5730 lnbitsize = GET_MODE_BITSIZE (lnmode);
/* Align the combined field's start position down to a multiple of its
   size (lnbitsize is a power of two, so this mask is valid).  */
5731 lnbitpos = first_bit & ~ (lnbitsize - 1);
5732 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5733 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5735 if (BYTES_BIG_ENDIAN)
5737 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5738 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5741 ll_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, ll_mask),
5742 size_int (xll_bitpos), 0);
5743 rl_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, rl_mask),
5744 size_int (xrl_bitpos), 0);
5748 l_const = fold_convert (lntype, l_const);
5749 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5750 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
/* If the constant has bits set outside the field's mask, the
   comparison can never vary: it is always true for NE, false for EQ.  */
5751 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5752 fold_build1 (BIT_NOT_EXPR,
5756 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5758 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5763 r_const = fold_convert (lntype, r_const);
5764 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5765 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
5766 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5767 fold_build1 (BIT_NOT_EXPR,
5771 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5773 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5777 /* If the right sides are not constant, do the same for it. Also,
5778 disallow this optimization if a size or signedness mismatch occurs
5779 between the left and right sides. */
5782 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5783 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5784 /* Make sure the two fields on the right
5785 correspond to the left without being swapped. */
5786 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5789 first_bit = MIN (lr_bitpos, rr_bitpos);
5790 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5791 rnmode = get_best_mode (end_bit - first_bit, first_bit,
5792 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5794 if (rnmode == VOIDmode)
5797 rnbitsize = GET_MODE_BITSIZE (rnmode);
5798 rnbitpos = first_bit & ~ (rnbitsize - 1);
5799 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5800 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5802 if (BYTES_BIG_ENDIAN)
5804 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5805 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5808 lr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, lr_mask),
5809 size_int (xlr_bitpos), 0);
5810 rr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, rr_mask),
5811 size_int (xrr_bitpos), 0);
5813 /* Make a mask that corresponds to both fields being compared.
5814 Do this for both items being compared. If the operands are the
5815 same size and the bits being compared are in the same position
5816 then we can do this by masking both and comparing the masked
5818 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
5819 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
5820 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5822 lhs = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
5823 ll_unsignedp || rl_unsignedp);
5824 if (! all_ones_mask_p (ll_mask, lnbitsize))
5825 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5827 rhs = make_bit_field_ref (lr_inner, rntype, rnbitsize, rnbitpos,
5828 lr_unsignedp || rr_unsignedp);
5829 if (! all_ones_mask_p (lr_mask, rnbitsize))
5830 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5832 return build2 (wanted_code, truth_type, lhs, rhs);
5835 /* There is still another way we can do something: If both pairs of
5836 fields being compared are adjacent, we may be able to make a wider
5837 field containing them both.
5839 Note that we still must mask the lhs/rhs expressions. Furthermore,
5840 the mask must be shifted to account for the shift done by
5841 make_bit_field_ref. */
5842 if ((ll_bitsize + ll_bitpos == rl_bitpos
5843 && lr_bitsize + lr_bitpos == rr_bitpos)
5844 || (ll_bitpos == rl_bitpos + rl_bitsize
5845 && lr_bitpos == rr_bitpos + rr_bitsize))
5849 lhs = make_bit_field_ref (ll_inner, lntype, ll_bitsize + rl_bitsize,
5850 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5851 rhs = make_bit_field_ref (lr_inner, rntype, lr_bitsize + rr_bitsize,
5852 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5854 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5855 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
5856 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5857 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);
5859 /* Convert to the smaller type before masking out unwanted bits. */
/* NOTE(review): `type` used below is presumably set in one of the
   missing lines around here — confirm against the full source.  */
5861 if (lntype != rntype)
5863 if (lnbitsize > rnbitsize)
5865 lhs = fold_convert (rntype, lhs);
5866 ll_mask = fold_convert (rntype, ll_mask);
5869 else if (lnbitsize < rnbitsize)
5871 rhs = fold_convert (lntype, rhs);
5872 lr_mask = fold_convert (lntype, lr_mask);
5877 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5878 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5880 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5881 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5883 return build2 (wanted_code, truth_type, lhs, rhs);
5889 /* Handle the case of comparisons with constants. If there is something in
5890 common between the masks, those bits of the constants must be the same.
5891 If not, the condition is always false. Test for this to avoid generating
5892 incorrect code below. */
5893 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
5894 if (! integer_zerop (result)
5895 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
5896 const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
5898 if (wanted_code == NE_EXPR)
5900 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5901 return constant_boolean_node (true, truth_type);
5905 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5906 return constant_boolean_node (false, truth_type);
5910 /* Construct the expression we will return. First get the component
5911 reference we will make. Unless the mask is all ones the width of
5912 that field, perform the mask operation. Then compare with the
5914 result = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
5915 ll_unsignedp || rl_unsignedp);
5917 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
5918 if (! all_ones_mask_p (ll_mask, lnbitsize))
5919 result = build2 (BIT_AND_EXPR, lntype, result, ll_mask);
5921 return build2 (wanted_code, truth_type, result,
5922 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
/* NOTE(review): truncated extraction — the comment's continuation, the
   switch's `case EQ_EXPR:`/`case GT_EXPR:`/`default:` labels, and
   several declarations/returns are among the missing lines.  */
5925 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5929 optimize_minmax_comparison (enum tree_code code, tree type, tree op0, tree op1)
5932 enum tree_code op_code;
5935 int consts_equal, consts_lt;
5938 STRIP_SIGN_NOPS (arg0);
5940 op_code = TREE_CODE (arg0);
5941 minmax_const = TREE_OPERAND (arg0, 1);
5942 comp_const = fold_convert (TREE_TYPE (arg0), op1);
/* Precompute the two orderings of the MIN/MAX constant vs. the
   comparison constant; the cases below branch on them.  */
5943 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5944 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5945 inner = TREE_OPERAND (arg0, 0);
5947 /* If something does not permit us to optimize, return the original tree. */
5948 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5949 || TREE_CODE (comp_const) != INTEGER_CST
5950 || TREE_OVERFLOW (comp_const)
5951 || TREE_CODE (minmax_const) != INTEGER_CST
5952 || TREE_OVERFLOW (minmax_const))
5955 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5956 and GT_EXPR, doing the rest with recursive calls using logical
5960 case NE_EXPR: case LT_EXPR: case LE_EXPR:
/* NE/LT/LE are handled by inverting the comparison, recursing, and
   inverting the result back.  */
5962 tree tem = optimize_minmax_comparison (invert_tree_comparison (code, false),
5965 return invert_truthvalue (tem);
/* GE is presumably rewritten as EQ || GT via the recursion below —
   its case label is among the missing lines.  */
5971 fold_build2 (TRUTH_ORIF_EXPR, type,
5972 optimize_minmax_comparison
5973 (EQ_EXPR, type, arg0, comp_const),
5974 optimize_minmax_comparison
5975 (GT_EXPR, type, arg0, comp_const));
/* EQ_EXPR case (label missing).  The examples use 0 as the MIN/MAX
   constant, but the logic is driven by consts_equal/consts_lt.  */
5978 if (op_code == MAX_EXPR && consts_equal)
5979 /* MAX (X, 0) == 0 -> X <= 0 */
5980 return fold_build2 (LE_EXPR, type, inner, comp_const);
5982 else if (op_code == MAX_EXPR && consts_lt)
5983 /* MAX (X, 0) == 5 -> X == 5 */
5984 return fold_build2 (EQ_EXPR, type, inner, comp_const);
5986 else if (op_code == MAX_EXPR)
5987 /* MAX (X, 0) == -1 -> false */
5988 return omit_one_operand (type, integer_zero_node, inner);
5990 else if (consts_equal)
5991 /* MIN (X, 0) == 0 -> X >= 0 */
5992 return fold_build2 (GE_EXPR, type, inner, comp_const);
5995 /* MIN (X, 0) == 5 -> false */
5996 return omit_one_operand (type, integer_zero_node, inner);
5999 /* MIN (X, 0) == -1 -> X == -1 */
6000 return fold_build2 (EQ_EXPR, type, inner, comp_const);
/* GT_EXPR case (label missing).  */
6003 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
6004 /* MAX (X, 0) > 0 -> X > 0
6005 MAX (X, 0) > 5 -> X > 5 */
6006 return fold_build2 (GT_EXPR, type, inner, comp_const);
6008 else if (op_code == MAX_EXPR)
6009 /* MAX (X, 0) > -1 -> true */
6010 return omit_one_operand (type, integer_one_node, inner);
6012 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
6013 /* MIN (X, 0) > 0 -> false
6014 MIN (X, 0) > 5 -> false */
6015 return omit_one_operand (type, integer_zero_node, inner);
6018 /* MIN (X, 0) > -1 -> X > -1 */
6019 return fold_build2 (GT_EXPR, type, inner, comp_const);
6026 /* T is an integer expression that is being multiplied, divided, or taken a
6027 modulus (CODE says which and what kind of divide or modulus) by a
6028 constant C. See if we can eliminate that operation by folding it with
6029 other operations already in T. WIDE_TYPE, if non-null, is a type that
6030 should be used for the computation if wider than our type.
6032 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
6033 (X * 2) + (Y * 4). We must, however, be assured that either the original
6034 expression would not overflow or that overflow is undefined for the type
6035 in the language in question.
6037 If we return a non-null expression, it is an equivalent form of the
6038 original computation, but need not be in the original type.
6040 We set *STRICT_OVERFLOW_P to true if the return value depends on
6041 signed overflow being undefined. Otherwise we do not change
6042 *STRICT_OVERFLOW_P. */
6045 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
6046 bool *strict_overflow_p)
6048 /* To avoid exponential search depth, refuse to allow recursion past
6049 three levels. Beyond that (1) it's highly unlikely that we'll find
6050 something interesting and (2) we've probably processed it before
6051 when we built the inner expression. */
6060 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
6067 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
6068 bool *strict_overflow_p)
6070 tree type = TREE_TYPE (t);
6071 enum tree_code tcode = TREE_CODE (t);
6072 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
6073 > GET_MODE_SIZE (TYPE_MODE (type)))
6074 ? wide_type : type);
6076 int same_p = tcode == code;
6077 tree op0 = NULL_TREE, op1 = NULL_TREE;
6078 bool sub_strict_overflow_p;
6080 /* Don't deal with constants of zero here; they confuse the code below. */
6081 if (integer_zerop (c))
6084 if (TREE_CODE_CLASS (tcode) == tcc_unary)
6085 op0 = TREE_OPERAND (t, 0);
6087 if (TREE_CODE_CLASS (tcode) == tcc_binary)
6088 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
6090 /* Note that we need not handle conditional operations here since fold
6091 already handles those cases. So just do arithmetic here. */
6095 /* For a constant, we can always simplify if we are a multiply
6096 or (for divide and modulus) if it is a multiple of our constant. */
6097 if (code == MULT_EXPR
6098 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
6099 return const_binop (code, fold_convert (ctype, t),
6100 fold_convert (ctype, c), 0);
6103 CASE_CONVERT: case NON_LVALUE_EXPR:
6104 /* If op0 is an expression ... */
6105 if ((COMPARISON_CLASS_P (op0)
6106 || UNARY_CLASS_P (op0)
6107 || BINARY_CLASS_P (op0)
6108 || VL_EXP_CLASS_P (op0)
6109 || EXPRESSION_CLASS_P (op0))
6110 /* ... and has wrapping overflow, and its type is smaller
6111 than ctype, then we cannot pass through as widening. */
6112 && ((TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0))
6113 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
6114 && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
6115 && (TYPE_PRECISION (ctype)
6116 > TYPE_PRECISION (TREE_TYPE (op0))))
6117 /* ... or this is a truncation (t is narrower than op0),
6118 then we cannot pass through this narrowing. */
6119 || (TYPE_PRECISION (type)
6120 < TYPE_PRECISION (TREE_TYPE (op0)))
6121 /* ... or signedness changes for division or modulus,
6122 then we cannot pass through this conversion. */
6123 || (code != MULT_EXPR
6124 && (TYPE_UNSIGNED (ctype)
6125 != TYPE_UNSIGNED (TREE_TYPE (op0))))
6126 /* ... or has undefined overflow while the converted to
6127 type has not, we cannot do the operation in the inner type
6128 as that would introduce undefined overflow. */
6129 || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
6130 && !TYPE_OVERFLOW_UNDEFINED (type))))
6133 /* Pass the constant down and see if we can make a simplification. If
6134 we can, replace this expression with the inner simplification for
6135 possible later conversion to our or some other type. */
6136 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
6137 && TREE_CODE (t2) == INTEGER_CST
6138 && !TREE_OVERFLOW (t2)
6139 && (0 != (t1 = extract_muldiv (op0, t2, code,
6141 ? ctype : NULL_TREE,
6142 strict_overflow_p))))
6147 /* If widening the type changes it from signed to unsigned, then we
6148 must avoid building ABS_EXPR itself as unsigned. */
6149 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
6151 tree cstype = (*signed_type_for) (ctype);
6152 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
6155 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
6156 return fold_convert (ctype, t1);
6160 /* If the constant is negative, we cannot simplify this. */
6161 if (tree_int_cst_sgn (c) == -1)
6165 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
6167 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
6170 case MIN_EXPR: case MAX_EXPR:
6171 /* If widening the type changes the signedness, then we can't perform
6172 this optimization as that changes the result. */
6173 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
6176 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
6177 sub_strict_overflow_p = false;
6178 if ((t1 = extract_muldiv (op0, c, code, wide_type,
6179 &sub_strict_overflow_p)) != 0
6180 && (t2 = extract_muldiv (op1, c, code, wide_type,
6181 &sub_strict_overflow_p)) != 0)
6183 if (tree_int_cst_sgn (c) < 0)
6184 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
6185 if (sub_strict_overflow_p)
6186 *strict_overflow_p = true;
6187 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6188 fold_convert (ctype, t2));
6192 case LSHIFT_EXPR: case RSHIFT_EXPR:
6193 /* If the second operand is constant, this is a multiplication
6194 or floor division, by a power of two, so we can treat it that
6195 way unless the multiplier or divisor overflows. Signed
6196 left-shift overflow is implementation-defined rather than
6197 undefined in C90, so do not convert signed left shift into
6199 if (TREE_CODE (op1) == INTEGER_CST
6200 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
6201 /* const_binop may not detect overflow correctly,
6202 so check for it explicitly here. */
6203 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
6204 && TREE_INT_CST_HIGH (op1) == 0
6205 && 0 != (t1 = fold_convert (ctype,
6206 const_binop (LSHIFT_EXPR,
6209 && !TREE_OVERFLOW (t1))
6210 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
6211 ? MULT_EXPR : FLOOR_DIV_EXPR,
6212 ctype, fold_convert (ctype, op0), t1),
6213 c, code, wide_type, strict_overflow_p);
6216 case PLUS_EXPR: case MINUS_EXPR:
6217 /* See if we can eliminate the operation on both sides. If we can, we
6218 can return a new PLUS or MINUS. If we can't, the only remaining
6219 cases where we can do anything are if the second operand is a
6221 sub_strict_overflow_p = false;
6222 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
6223 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
6224 if (t1 != 0 && t2 != 0
6225 && (code == MULT_EXPR
6226 /* If not multiplication, we can only do this if both operands
6227 are divisible by c. */
6228 || (multiple_of_p (ctype, op0, c)
6229 && multiple_of_p (ctype, op1, c))))
6231 if (sub_strict_overflow_p)
6232 *strict_overflow_p = true;
6233 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6234 fold_convert (ctype, t2));
6237 /* If this was a subtraction, negate OP1 and set it to be an addition.
6238 This simplifies the logic below. */
6239 if (tcode == MINUS_EXPR)
6240 tcode = PLUS_EXPR, op1 = negate_expr (op1);
6242 if (TREE_CODE (op1) != INTEGER_CST)
6245 /* If either OP1 or C are negative, this optimization is not safe for
6246 some of the division and remainder types while for others we need
6247 to change the code. */
6248 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
6250 if (code == CEIL_DIV_EXPR)
6251 code = FLOOR_DIV_EXPR;
6252 else if (code == FLOOR_DIV_EXPR)
6253 code = CEIL_DIV_EXPR;
6254 else if (code != MULT_EXPR
6255 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
6259 /* If it's a multiply or a division/modulus operation of a multiple
6260 of our constant, do the operation and verify it doesn't overflow. */
6261 if (code == MULT_EXPR
6262 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
6264 op1 = const_binop (code, fold_convert (ctype, op1),
6265 fold_convert (ctype, c), 0);
6266 /* We allow the constant to overflow with wrapping semantics. */
6268 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
6274 /* If we have an unsigned type is not a sizetype, we cannot widen
6275 the operation since it will change the result if the original
6276 computation overflowed. */
6277 if (TYPE_UNSIGNED (ctype)
6278 && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
6282 /* If we were able to eliminate our operation from the first side,
6283 apply our operation to the second side and reform the PLUS. */
6284 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
6285 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
6287 /* The last case is if we are a multiply. In that case, we can
6288 apply the distributive law to commute the multiply and addition
6289 if the multiplication of the constants doesn't overflow. */
6290 if (code == MULT_EXPR)
6291 return fold_build2 (tcode, ctype,
6292 fold_build2 (code, ctype,
6293 fold_convert (ctype, op0),
6294 fold_convert (ctype, c)),
6300 /* We have a special case here if we are doing something like
6301 (C * 8) % 4 since we know that's zero. */
6302 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
6303 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
6304 /* If the multiplication can overflow we cannot optimize this.
6305 ??? Until we can properly mark individual operations as
6306 not overflowing we need to treat sizetype special here as
6307 stor-layout relies on this opimization to make
6308 DECL_FIELD_BIT_OFFSET always a constant. */
6309 && (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
6310 || (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
6311 && TYPE_IS_SIZETYPE (TREE_TYPE (t))))
6312 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
6313 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
6315 *strict_overflow_p = true;
6316 return omit_one_operand (type, integer_zero_node, op0);
6319 /* ... fall through ... */
6321 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
6322 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
6323 /* If we can extract our operation from the LHS, do so and return a
6324 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
6325 do something only if the second operand is a constant. */
6327 && (t1 = extract_muldiv (op0, c, code, wide_type,
6328 strict_overflow_p)) != 0)
6329 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6330 fold_convert (ctype, op1));
6331 else if (tcode == MULT_EXPR && code == MULT_EXPR
6332 && (t1 = extract_muldiv (op1, c, code, wide_type,
6333 strict_overflow_p)) != 0)
6334 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6335 fold_convert (ctype, t1));
6336 else if (TREE_CODE (op1) != INTEGER_CST)
6339 /* If these are the same operation types, we can associate them
6340 assuming no overflow. */
6342 && 0 != (t1 = int_const_binop (MULT_EXPR, fold_convert (ctype, op1),
6343 fold_convert (ctype, c), 1))
6344 && 0 != (t1 = force_fit_type_double (ctype, TREE_INT_CST_LOW (t1),
6345 TREE_INT_CST_HIGH (t1),
6346 (TYPE_UNSIGNED (ctype)
6347 && tcode != MULT_EXPR) ? -1 : 1,
6348 TREE_OVERFLOW (t1)))
6349 && !TREE_OVERFLOW (t1))
6350 return fold_build2 (tcode, ctype, fold_convert (ctype, op0), t1);
6352 /* If these operations "cancel" each other, we have the main
6353 optimizations of this pass, which occur when either constant is a
6354 multiple of the other, in which case we replace this with either an
6355 operation or CODE or TCODE.
6357 If we have an unsigned type that is not a sizetype, we cannot do
6358 this since it will change the result if the original computation
6360 if ((TYPE_OVERFLOW_UNDEFINED (ctype)
6361 || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
6362 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
6363 || (tcode == MULT_EXPR
6364 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
6365 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
6366 && code != MULT_EXPR)))
6368 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
6370 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6371 *strict_overflow_p = true;
6372 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6373 fold_convert (ctype,
6374 const_binop (TRUNC_DIV_EXPR,
6377 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
6379 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6380 *strict_overflow_p = true;
6381 return fold_build2 (code, ctype, fold_convert (ctype, op0),
6382 fold_convert (ctype,
6383 const_binop (TRUNC_DIV_EXPR,
6396 /* Return a node which has the indicated constant VALUE (either 0 or
6397 1), and is of the indicated TYPE. */
6400 constant_boolean_node (int value, tree type)
6402 if (type == integer_type_node)
6403 return value ? integer_one_node : integer_zero_node;
6404 else if (type == boolean_type_node)
6405 return value ? boolean_true_node : boolean_false_node;
6407 return build_int_cst (type, value);
6411 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
6412 Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
6413 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
6414 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
6415 COND is the first argument to CODE; otherwise (as in the example
6416 given here), it is the second argument. TYPE is the type of the
6417 original expression. Return NULL_TREE if no simplification is possible.
6421 fold_binary_op_with_conditional_arg (enum tree_code code,
6422 tree type, tree op0, tree op1,
6423 tree cond, tree arg, int cond_first_p)
6425 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
6426 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
6427 tree test, true_value, false_value;
6428 tree lhs = NULL_TREE;
6429 tree rhs = NULL_TREE;
6431 /* This transformation is only worthwhile if we don't have to wrap
6432 arg in a SAVE_EXPR, and the operation can be simplified on at least
6433 one of the branches once its pushed inside the COND_EXPR. */
6434 if (!TREE_CONSTANT (arg))
6437 if (TREE_CODE (cond) == COND_EXPR)
6439 test = TREE_OPERAND (cond, 0);
6440 true_value = TREE_OPERAND (cond, 1);
6441 false_value = TREE_OPERAND (cond, 2);
6442 /* If this operand throws an expression, then it does not make
6443 sense to try to perform a logical or arithmetic operation
6445 if (VOID_TYPE_P (TREE_TYPE (true_value)))
6447 if (VOID_TYPE_P (TREE_TYPE (false_value)))
6452 tree testtype = TREE_TYPE (cond);
6454 true_value = constant_boolean_node (true, testtype);
6455 false_value = constant_boolean_node (false, testtype);
6458 arg = fold_convert (arg_type, arg);
6461 true_value = fold_convert (cond_type, true_value);
6463 lhs = fold_build2 (code, type, true_value, arg);
6465 lhs = fold_build2 (code, type, arg, true_value);
6469 false_value = fold_convert (cond_type, false_value);
6471 rhs = fold_build2 (code, type, false_value, arg);
6473 rhs = fold_build2 (code, type, arg, false_value);
6476 test = fold_build3 (COND_EXPR, type, test, lhs, rhs);
6477 return fold_convert (type, test);
6481 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6483 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6484 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6485 ADDEND is the same as X.
6487 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6488 and finite. The problematic cases are when X is zero, and its mode
6489 has signed zeros. In the case of rounding towards -infinity,
6490 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6491 modes, X + 0 is not the same as X because -0 + 0 is 0. */
6494 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
6496 if (!real_zerop (addend))
6499 /* Don't allow the fold with -fsignaling-nans. */
6500 if (HONOR_SNANS (TYPE_MODE (type)))
6503 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6504 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
6507 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6508 if (TREE_CODE (addend) == REAL_CST
6509 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6512 /* The mode has signed zeros, and we have to honor their sign.
6513 In this situation, there is only one case we can return true for.
6514 X - 0 is the same as X unless rounding towards -infinity is
6516 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
6519 /* Subroutine of fold() that checks comparisons of built-in math
6520 functions against real constants.
6522 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
6523 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
6524 is the type of the result and ARG0 and ARG1 are the operands of the
6525 comparison. ARG1 must be a TREE_REAL_CST.
6527 The function returns the constant folded tree if a simplification
6528 can be made, and NULL_TREE otherwise. */
6531 fold_mathfn_compare (enum built_in_function fcode, enum tree_code code,
6532 tree type, tree arg0, tree arg1)
6536 if (BUILTIN_SQRT_P (fcode))
6538 tree arg = CALL_EXPR_ARG (arg0, 0);
6539 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
6541 c = TREE_REAL_CST (arg1);
6542 if (REAL_VALUE_NEGATIVE (c))
6544 /* sqrt(x) < y is always false, if y is negative. */
6545 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
6546 return omit_one_operand (type, integer_zero_node, arg);
6548 /* sqrt(x) > y is always true, if y is negative and we
6549 don't care about NaNs, i.e. negative values of x. */
6550 if (code == NE_EXPR || !HONOR_NANS (mode))
6551 return omit_one_operand (type, integer_one_node, arg);
6553 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
6554 return fold_build2 (GE_EXPR, type, arg,
6555 build_real (TREE_TYPE (arg), dconst0));
6557 else if (code == GT_EXPR || code == GE_EXPR)
6561 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6562 real_convert (&c2, mode, &c2);
6564 if (REAL_VALUE_ISINF (c2))
6566 /* sqrt(x) > y is x == +Inf, when y is very large. */
6567 if (HONOR_INFINITIES (mode))
6568 return fold_build2 (EQ_EXPR, type, arg,
6569 build_real (TREE_TYPE (arg), c2));
6571 /* sqrt(x) > y is always false, when y is very large
6572 and we don't care about infinities. */
6573 return omit_one_operand (type, integer_zero_node, arg);
6576 /* sqrt(x) > c is the same as x > c*c. */
6577 return fold_build2 (code, type, arg,
6578 build_real (TREE_TYPE (arg), c2));
6580 else if (code == LT_EXPR || code == LE_EXPR)
6584 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6585 real_convert (&c2, mode, &c2);
6587 if (REAL_VALUE_ISINF (c2))
6589 /* sqrt(x) < y is always true, when y is a very large
6590 value and we don't care about NaNs or Infinities. */
6591 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
6592 return omit_one_operand (type, integer_one_node, arg);
6594 /* sqrt(x) < y is x != +Inf when y is very large and we
6595 don't care about NaNs. */
6596 if (! HONOR_NANS (mode))
6597 return fold_build2 (NE_EXPR, type, arg,
6598 build_real (TREE_TYPE (arg), c2));
6600 /* sqrt(x) < y is x >= 0 when y is very large and we
6601 don't care about Infinities. */
6602 if (! HONOR_INFINITIES (mode))
6603 return fold_build2 (GE_EXPR, type, arg,
6604 build_real (TREE_TYPE (arg), dconst0));
6606 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
6607 if (lang_hooks.decls.global_bindings_p () != 0
6608 || CONTAINS_PLACEHOLDER_P (arg))
6611 arg = save_expr (arg);
6612 return fold_build2 (TRUTH_ANDIF_EXPR, type,
6613 fold_build2 (GE_EXPR, type, arg,
6614 build_real (TREE_TYPE (arg),
6616 fold_build2 (NE_EXPR, type, arg,
6617 build_real (TREE_TYPE (arg),
6621 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
6622 if (! HONOR_NANS (mode))
6623 return fold_build2 (code, type, arg,
6624 build_real (TREE_TYPE (arg), c2));
6626 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
6627 if (lang_hooks.decls.global_bindings_p () == 0
6628 && ! CONTAINS_PLACEHOLDER_P (arg))
6630 arg = save_expr (arg);
6631 return fold_build2 (TRUTH_ANDIF_EXPR, type,
6632 fold_build2 (GE_EXPR, type, arg,
6633 build_real (TREE_TYPE (arg),
6635 fold_build2 (code, type, arg,
6636 build_real (TREE_TYPE (arg),
6645 /* Subroutine of fold() that optimizes comparisons against Infinities,
6646 either +Inf or -Inf.
6648 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6649 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6650 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6652 The function returns the constant folded tree if a simplification
6653 can be made, and NULL_TREE otherwise. */
6656 fold_inf_compare (enum tree_code code, tree type, tree arg0, tree arg1)
6658 enum machine_mode mode;
6659 REAL_VALUE_TYPE max;
6663 mode = TYPE_MODE (TREE_TYPE (arg0));
6665 /* For negative infinity swap the sense of the comparison. */
6666 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
6668 code = swap_tree_comparison (code);
6673 /* x > +Inf is always false, if with ignore sNANs. */
6674 if (HONOR_SNANS (mode))
6676 return omit_one_operand (type, integer_zero_node, arg0);
6679 /* x <= +Inf is always true, if we don't case about NaNs. */
6680 if (! HONOR_NANS (mode))
6681 return omit_one_operand (type, integer_one_node, arg0);
6683 /* x <= +Inf is the same as x == x, i.e. isfinite(x). */
6684 if (lang_hooks.decls.global_bindings_p () == 0
6685 && ! CONTAINS_PLACEHOLDER_P (arg0))
6687 arg0 = save_expr (arg0);
6688 return fold_build2 (EQ_EXPR, type, arg0, arg0);
6694 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6695 real_maxval (&max, neg, mode);
6696 return fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
6697 arg0, build_real (TREE_TYPE (arg0), max));
6700 /* x < +Inf is always equal to x <= DBL_MAX. */
6701 real_maxval (&max, neg, mode);
6702 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
6703 arg0, build_real (TREE_TYPE (arg0), max));
6706 /* x != +Inf is always equal to !(x > DBL_MAX). */
6707 real_maxval (&max, neg, mode);
6708 if (! HONOR_NANS (mode))
6709 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
6710 arg0, build_real (TREE_TYPE (arg0), max));
6712 temp = fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
6713 arg0, build_real (TREE_TYPE (arg0), max));
6714 return fold_build1 (TRUTH_NOT_EXPR, type, temp);
6723 /* Subroutine of fold() that optimizes comparisons of a division by
6724 a nonzero integer constant against an integer constant, i.e.
6727 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6728 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6729 are the operands of the comparison. ARG1 must be an INTEGER_CST.
6731 The function returns the constant folded tree if a simplification
6732 can be made, and NULL_TREE otherwise. */
6735 fold_div_compare (enum tree_code code, tree type, tree arg0, tree arg1)
6737 tree prod, tmp, hi, lo;
6738 tree arg00 = TREE_OPERAND (arg0, 0);
6739 tree arg01 = TREE_OPERAND (arg0, 1);
6740 unsigned HOST_WIDE_INT lpart;
6741 HOST_WIDE_INT hpart;
6742 bool unsigned_p = TYPE_UNSIGNED (TREE_TYPE (arg0));
6746 /* We have to do this the hard way to detect unsigned overflow.
6747 prod = int_const_binop (MULT_EXPR, arg01, arg1, 0); */
6748 overflow = mul_double_with_sign (TREE_INT_CST_LOW (arg01),
6749 TREE_INT_CST_HIGH (arg01),
6750 TREE_INT_CST_LOW (arg1),
6751 TREE_INT_CST_HIGH (arg1),
6752 &lpart, &hpart, unsigned_p);
6753 prod = force_fit_type_double (TREE_TYPE (arg00), lpart, hpart,
6755 neg_overflow = false;
6759 tmp = int_const_binop (MINUS_EXPR, arg01,
6760 build_int_cst (TREE_TYPE (arg01), 1), 0);
6763 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0). */
6764 overflow = add_double_with_sign (TREE_INT_CST_LOW (prod),
6765 TREE_INT_CST_HIGH (prod),
6766 TREE_INT_CST_LOW (tmp),
6767 TREE_INT_CST_HIGH (tmp),
6768 &lpart, &hpart, unsigned_p);
6769 hi = force_fit_type_double (TREE_TYPE (arg00), lpart, hpart,
6770 -1, overflow | TREE_OVERFLOW (prod));
6772 else if (tree_int_cst_sgn (arg01) >= 0)
6774 tmp = int_const_binop (MINUS_EXPR, arg01,
6775 build_int_cst (TREE_TYPE (arg01), 1), 0);
6776 switch (tree_int_cst_sgn (arg1))
6779 neg_overflow = true;
6780 lo = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6785 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6790 hi = int_const_binop (PLUS_EXPR, prod, tmp, 0);
6800 /* A negative divisor reverses the relational operators. */
6801 code = swap_tree_comparison (code);
6803 tmp = int_const_binop (PLUS_EXPR, arg01,
6804 build_int_cst (TREE_TYPE (arg01), 1), 0);
6805 switch (tree_int_cst_sgn (arg1))
6808 hi = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6813 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6818 neg_overflow = true;
6819 lo = int_const_binop (PLUS_EXPR, prod, tmp, 0);
6831 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6832 return omit_one_operand (type, integer_zero_node, arg00);
6833 if (TREE_OVERFLOW (hi))
6834 return fold_build2 (GE_EXPR, type, arg00, lo);
6835 if (TREE_OVERFLOW (lo))
6836 return fold_build2 (LE_EXPR, type, arg00, hi);
6837 return build_range_check (type, arg00, 1, lo, hi);
6840 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6841 return omit_one_operand (type, integer_one_node, arg00);
6842 if (TREE_OVERFLOW (hi))
6843 return fold_build2 (LT_EXPR, type, arg00, lo);
6844 if (TREE_OVERFLOW (lo))
6845 return fold_build2 (GT_EXPR, type, arg00, hi);
6846 return build_range_check (type, arg00, 0, lo, hi);
6849 if (TREE_OVERFLOW (lo))
6851 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6852 return omit_one_operand (type, tmp, arg00);
6854 return fold_build2 (LT_EXPR, type, arg00, lo);
6857 if (TREE_OVERFLOW (hi))
6859 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6860 return omit_one_operand (type, tmp, arg00);
6862 return fold_build2 (LE_EXPR, type, arg00, hi);
6865 if (TREE_OVERFLOW (hi))
6867 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6868 return omit_one_operand (type, tmp, arg00);
6870 return fold_build2 (GT_EXPR, type, arg00, hi);
6873 if (TREE_OVERFLOW (lo))
6875 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6876 return omit_one_operand (type, tmp, arg00);
6878 return fold_build2 (GE_EXPR, type, arg00, lo);
6888 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6889 equality/inequality test, then return a simplified form of the test
6890 using a sign testing. Otherwise return NULL. TYPE is the desired
6894 fold_single_bit_test_into_sign_test (enum tree_code code, tree arg0, tree arg1,
6897 /* If this is testing a single bit, we can optimize the test. */
6898 if ((code == NE_EXPR || code == EQ_EXPR)
6899 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6900 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6902 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6903 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6904 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6906 if (arg00 != NULL_TREE
6907 /* This is only a win if casting to a signed type is cheap,
6908 i.e. when arg00's type is not a partial mode. */
6909 && TYPE_PRECISION (TREE_TYPE (arg00))
6910 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
6912 tree stype = signed_type_for (TREE_TYPE (arg00));
6913 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6914 result_type, fold_convert (stype, arg00),
6915 build_int_cst (stype, 0));
6922 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6923 equality/inequality test, then return a simplified form of
6924 the test using shifts and logical operations. Otherwise return
6925 NULL. TYPE is the desired result type. */
6928 fold_single_bit_test (enum tree_code code, tree arg0, tree arg1,
6931 /* If this is testing a single bit, we can optimize the test. */
6932 if ((code == NE_EXPR || code == EQ_EXPR)
6933 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6934 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6936 tree inner = TREE_OPERAND (arg0, 0);
6937 tree type = TREE_TYPE (arg0);
6938 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6939 enum machine_mode operand_mode = TYPE_MODE (type);
6941 tree signed_type, unsigned_type, intermediate_type;
6944 /* First, see if we can fold the single bit test into a sign-bit
6946 tem = fold_single_bit_test_into_sign_test (code, arg0, arg1,
6951 /* Otherwise we have (A & C) != 0 where C is a single bit,
6952 convert that into ((A >> C2) & 1). Where C2 = log2(C).
6953 Similarly for (A & C) == 0. */
6955 /* If INNER is a right shift of a constant and it plus BITNUM does
6956 not overflow, adjust BITNUM and INNER. */
6957 if (TREE_CODE (inner) == RSHIFT_EXPR
6958 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6959 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
6960 && bitnum < TYPE_PRECISION (type)
6961 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
6962 bitnum - TYPE_PRECISION (type)))
6964 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
6965 inner = TREE_OPERAND (inner, 0);
6968 /* If we are going to be able to omit the AND below, we must do our
6969 operations as unsigned. If we must use the AND, we have a choice.
6970 Normally unsigned is faster, but for some machines signed is. */
6971 #ifdef LOAD_EXTEND_OP
6972 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6973 && !flag_syntax_only) ? 0 : 1;
6978 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6979 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6980 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6981 inner = fold_convert (intermediate_type, inner);
6984 inner = build2 (RSHIFT_EXPR, intermediate_type,
6985 inner, size_int (bitnum));
6987 one = build_int_cst (intermediate_type, 1);
6989 if (code == EQ_EXPR)
6990 inner = fold_build2 (BIT_XOR_EXPR, intermediate_type, inner, one);
6992 /* Put the AND last so it can combine with more things. */
6993 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6995 /* Make sure to return the proper type. */
6996 inner = fold_convert (result_type, inner);
7003 /* Check whether we are allowed to reorder operands arg0 and arg1,
7004 such that the evaluation of arg1 occurs before arg0. */
7007 reorder_operands_p (const_tree arg0, const_tree arg1)
7009 if (! flag_evaluation_order)
7011 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
7013 return ! TREE_SIDE_EFFECTS (arg0)
7014 && ! TREE_SIDE_EFFECTS (arg1);
7017 /* Test whether it is preferable two swap two operands, ARG0 and
7018 ARG1, for example because ARG0 is an integer constant and ARG1
7019 isn't. If REORDER is true, only recommend swapping if we can
7020 evaluate the operands in reverse order. */
7023 tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
7025 STRIP_SIGN_NOPS (arg0);
7026 STRIP_SIGN_NOPS (arg1);
7028 if (TREE_CODE (arg1) == INTEGER_CST)
7030 if (TREE_CODE (arg0) == INTEGER_CST)
7033 if (TREE_CODE (arg1) == REAL_CST)
7035 if (TREE_CODE (arg0) == REAL_CST)
7038 if (TREE_CODE (arg1) == FIXED_CST)
7040 if (TREE_CODE (arg0) == FIXED_CST)
7043 if (TREE_CODE (arg1) == COMPLEX_CST)
7045 if (TREE_CODE (arg0) == COMPLEX_CST)
7048 if (TREE_CONSTANT (arg1))
7050 if (TREE_CONSTANT (arg0))
7053 if (optimize_function_for_size_p (cfun))
7056 if (reorder && flag_evaluation_order
7057 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
7060 /* It is preferable to swap two SSA_NAME to ensure a canonical form
7061 for commutative and comparison operators. Ensuring a canonical
7062 form allows the optimizers to find additional redundancies without
7063 having to explicitly check for both orderings. */
7064 if (TREE_CODE (arg0) == SSA_NAME
7065 && TREE_CODE (arg1) == SSA_NAME
7066 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
7069 /* Put SSA_NAMEs last. */
7070 if (TREE_CODE (arg1) == SSA_NAME)
7072 if (TREE_CODE (arg0) == SSA_NAME)
7075 /* Put variables last. */
7084 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
7085 ARG0 is extended to a wider type. */
/* NOTE(review): excerpt is elided -- the return type, several local
   declarations (arg1_unw, min, max, above, below) and the final
   code-dispatch structure around the omit_one_operand calls are not
   fully visible; confirm details against the complete source. */
7088 fold_widened_comparison (enum tree_code code, tree type, tree arg0, tree arg1)
7090 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
7092 tree shorter_type, outer_type;
/* If get_unwidened found nothing to strip, there is no widening to
   undo (body elided, presumably bails out).  */
7096 if (arg0_unw == arg0)
7098 shorter_type = TREE_TYPE (arg0_unw);
7100 #ifdef HAVE_canonicalize_funcptr_for_compare
7101 /* Disable this optimization if we're casting a function pointer
7102 type on targets that require function pointer canonicalization. */
7103 if (HAVE_canonicalize_funcptr_for_compare
7104 && TREE_CODE (shorter_type) == POINTER_TYPE
7105 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
/* Only proceed when the unwidened type really is narrower than the
   comparison type (body elided).  */
7109 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
7112 arg1_unw = get_unwidened (arg1, NULL_TREE);
7114 /* If possible, express the comparison in the shorter mode. */
7115 if ((code == EQ_EXPR || code == NE_EXPR
7116 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
7117 && (TREE_TYPE (arg1_unw) == shorter_type
7118 || ((TYPE_PRECISION (shorter_type)
7119 >= TYPE_PRECISION (TREE_TYPE (arg1_unw)))
7120 && (TYPE_UNSIGNED (shorter_type)
7121 == TYPE_UNSIGNED (TREE_TYPE (arg1_unw))))
7122 || (TREE_CODE (arg1_unw) == INTEGER_CST
7123 && (TREE_CODE (shorter_type) == INTEGER_TYPE
7124 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
7125 && int_fits_type_p (arg1_unw, shorter_type))))
7126 return fold_build2 (code, type, arg0_unw,
7127 fold_convert (shorter_type, arg1_unw));
/* Past this point we only handle an integer constant arg1 that does
   NOT fit the shorter type.  */
7129 if (TREE_CODE (arg1_unw) != INTEGER_CST
7130 || TREE_CODE (shorter_type) != INTEGER_TYPE
7131 || !int_fits_type_p (arg1_unw, shorter_type))
7134 /* If we are comparing with the integer that does not fit into the range
7135 of the shorter type, the result is known. */
7136 outer_type = TREE_TYPE (arg1_unw);
/* Bounds of shorter_type's value range expressed in outer_type.  */
7137 min = lower_bound_in_type (outer_type, shorter_type);
7138 max = upper_bound_in_type (outer_type, shorter_type);
/* above/below: is the constant strictly above max / below min?
   (second operands of the two comparisons are elided).  */
7140 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
7142 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
/* The comparison result is then a constant; keep arg0 for its side
   effects.  The dispatch on `code` around these returns is elided.  */
7149 return omit_one_operand (type, integer_zero_node, arg0);
7154 return omit_one_operand (type, integer_one_node, arg0);
7160 return omit_one_operand (type, integer_one_node, arg0);
7162 return omit_one_operand (type, integer_zero_node, arg0);
7167 return omit_one_operand (type, integer_zero_node, arg0);
7169 return omit_one_operand (type, integer_one_node, arg0);
7178 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
7179 ARG0 just the signedness is changed. */
/* NOTE(review): excerpt is elided -- return type, braces and the
   failure-path `return NULL_TREE`s after the guards are not visible. */
7182 fold_sign_changed_comparison (enum tree_code code, tree type,
7183 tree arg0, tree arg1)
7186 tree inner_type, outer_type;
/* Only applies when arg0 is itself a conversion.  */
7188 if (!CONVERT_EXPR_P (arg0))
7191 outer_type = TREE_TYPE (arg0);
7192 arg0_inner = TREE_OPERAND (arg0, 0);
7193 inner_type = TREE_TYPE (arg0_inner);
7195 #ifdef HAVE_canonicalize_funcptr_for_compare
7196 /* Disable this optimization if we're casting a function pointer
7197 type on targets that require function pointer canonicalization. */
7198 if (HAVE_canonicalize_funcptr_for_compare
7199 && TREE_CODE (inner_type) == POINTER_TYPE
7200 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
/* The transformation only holds when just signedness differs; a
   precision change is a real conversion and must stay.  */
7204 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
7207 /* If the conversion is from an integral subtype to its basetype
7209 if (TREE_TYPE (inner_type) == outer_type)
/* arg1 must be a constant, or a conversion from the same inner type,
   so it can be re-expressed in inner_type.  */
7212 if (TREE_CODE (arg1) != INTEGER_CST
7213 && !(CONVERT_EXPR_P (arg1)
7214 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type)
7217 if ((TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
7218 || POINTER_TYPE_P (inner_type) != POINTER_TYPE_P (outer_type))
/* Re-narrow the constant into inner_type, preserving any recorded
   overflow flag; otherwise just convert arg1.  */
7223 if (TREE_CODE (arg1) == INTEGER_CST)
7224 arg1 = force_fit_type_double (inner_type, TREE_INT_CST_LOW (arg1),
7225 TREE_INT_CST_HIGH (arg1), 0,
7226 TREE_OVERFLOW (arg1));
7228 arg1 = fold_convert (inner_type, arg1);
/* Rebuild the comparison directly on the unconverted operand.  */
7230 return fold_build2 (code, type, arg0_inner, arg1);
7233 /* Tries to replace &a[idx] p+ s * delta with &a[idx + delta], if s is
7234 step of the array. Reconstructs s and delta in the case of s * delta
7235 being an integer constant (and thus already folded).
7236 ADDR is the address. MULT is the multiplicative expression.
7237 If the function succeeds, the new address expression is returned. Otherwise
7238 NULL_TREE is returned. */
/* NOTE(review): excerpt is elided -- return type, several local
   declarations (ret, pos, itype, mdim), loop braces and most failure
   `return NULL_TREE` paths are not visible; confirm structure against
   the complete source. */
7241 try_move_mult_to_index (tree addr, tree op1)
7243 tree s, delta, step;
7244 tree ref = TREE_OPERAND (addr, 0), pref;
7249 /* Strip the nops that might be added when converting op1 to sizetype. */
7252 /* Canonicalize op1 into a possibly non-constant delta
7253 and an INTEGER_CST s. */
7254 if (TREE_CODE (op1) == MULT_EXPR)
7256 tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);
/* Whichever MULT operand is the constant becomes s; the other is
   delta (assignments elided).  */
7261 if (TREE_CODE (arg0) == INTEGER_CST)
7266 else if (TREE_CODE (arg1) == INTEGER_CST)
7274 else if (TREE_CODE (op1) == INTEGER_CST)
7281 /* Simulate we are delta * 1. */
7283 s = integer_one_node;
/* Walk down the reference chain looking for an ARRAY_REF whose
   element size matches s.  */
7286 for (;; ref = TREE_OPERAND (ref, 0))
7288 if (TREE_CODE (ref) == ARRAY_REF)
7290 /* Remember if this was a multi-dimensional array. */
7291 if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
7294 itype = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
7298 step = array_ref_element_size (ref);
7299 if (TREE_CODE (step) != INTEGER_CST)
7304 if (! tree_int_cst_equal (step, s))
7309 /* Try if delta is a multiple of step. */
7310 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, op1, step);
7316 /* Only fold here if we can verify we do not overflow one
7317 dimension of a multi-dimensional array. */
/* Require a constant index and a known constant upper bound for the
   dimension so the overflow check below is decidable.  */
7322 if (TREE_CODE (TREE_OPERAND (ref, 1)) != INTEGER_CST
7323 || !INTEGRAL_TYPE_P (itype)
7324 || !TYPE_MAX_VALUE (itype)
7325 || TREE_CODE (TYPE_MAX_VALUE (itype)) != INTEGER_CST)
/* idx + delta must stay a constant within the dimension's bound.  */
7328 tmp = fold_binary (PLUS_EXPR, itype,
7329 fold_convert (itype,
7330 TREE_OPERAND (ref, 1)),
7331 fold_convert (itype, delta));
7333 || TREE_CODE (tmp) != INTEGER_CST
7334 || tree_int_cst_lt (TYPE_MAX_VALUE (itype), tmp))
/* Give up once we run out of component references without finding a
   suitable ARRAY_REF.  */
7343 if (!handled_component_p (ref))
7347 /* We found the suitable array reference. So copy everything up to it,
7348 and replace the index. */
7350 pref = TREE_OPERAND (addr, 0);
7351 ret = copy_node (pref);
/* Duplicate the chain of references down to the ARRAY_REF (loop
   header elided), so the original tree is left unmodified.  */
7356 pref = TREE_OPERAND (pref, 0);
7357 TREE_OPERAND (pos, 0) = copy_node (pref);
7358 pos = TREE_OPERAND (pos, 0);
/* Replace the index with idx + delta in the copied ARRAY_REF.  */
7361 TREE_OPERAND (pos, 1) = fold_build2 (PLUS_EXPR, itype,
7362 fold_convert (itype,
7363 TREE_OPERAND (pos, 1)),
7364 fold_convert (itype, delta));
7366 return fold_build1 (ADDR_EXPR, TREE_TYPE (addr), ret);
7370 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
7371 means A >= Y && A != MAX, but in this case we know that
7372 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
/* NOTE(review): excerpt is elided -- return type, braces and the
   failure-path `return NULL_TREE`s (including the else arms of the two
   code dispatches) are not visible. */
7375 fold_to_nonsharp_ineq_using_bound (tree ineq, tree bound)
7377 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
/* Extract A from the bound, whichever side of the strict comparison
   it is on.  */
7379 if (TREE_CODE (bound) == LT_EXPR)
7380 a = TREE_OPERAND (bound, 0);
7381 else if (TREE_CODE (bound) == GT_EXPR)
7382 a = TREE_OPERAND (bound, 1);
/* The transformation relies on integer/pointer arithmetic semantics.  */
7386 typea = TREE_TYPE (a);
7387 if (!INTEGRAL_TYPE_P (typea)
7388 && !POINTER_TYPE_P (typea))
/* Extract the candidate A+1 (a1) and Y from the inequality.  */
7391 if (TREE_CODE (ineq) == LT_EXPR)
7393 a1 = TREE_OPERAND (ineq, 1);
7394 y = TREE_OPERAND (ineq, 0);
7396 else if (TREE_CODE (ineq) == GT_EXPR)
7398 a1 = TREE_OPERAND (ineq, 0);
7399 y = TREE_OPERAND (ineq, 1);
7404 if (TREE_TYPE (a1) != typea)
7407 if (POINTER_TYPE_P (typea))
7409 /* Convert the pointer types into integer before taking the difference. */
7410 tree ta = fold_convert (ssizetype, a);
7411 tree ta1 = fold_convert (ssizetype, a1);
7412 diff = fold_binary (MINUS_EXPR, ssizetype, ta1, ta);
7415 diff = fold_binary (MINUS_EXPR, typea, a1, a);
/* Only fire when a1 really is A + 1.  */
7417 if (!diff || !integer_onep (diff))
/* A + 1 > Y becomes the non-sharp A >= Y.  */
7420 return fold_build2 (GE_EXPR, type, a, y);
7423 /* Fold a sum or difference of at least one multiplication.
7424 Returns the folded tree or NULL if no simplification could be made. */
/* NOTE(review): excerpt is elided -- return type, braces, `same = NULL`
   initialization paths, part of the power-of-two factoring arm and the
   final `return NULL_TREE` are not visible; confirm against the
   complete source. */
7427 fold_plusminus_mult_expr (enum tree_code code, tree type, tree arg0, tree arg1)
7429 tree arg00, arg01, arg10, arg11;
7430 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
7432 /* (A * C) +- (B * C) -> (A+-B) * C.
7433 (A * C) +- A -> A * (C+-1).
7434 We are most concerned about the case where C is a constant,
7435 but other combinations show up during loop reduction. Since
7436 it is not difficult, try all four possibilities. */
/* Decompose arg0 into arg00 * arg01; a lone value is treated as
   value * 1.  */
7438 if (TREE_CODE (arg0) == MULT_EXPR)
7440 arg00 = TREE_OPERAND (arg0, 0);
7441 arg01 = TREE_OPERAND (arg0, 1);
7443 else if (TREE_CODE (arg0) == INTEGER_CST)
7445 arg00 = build_one_cst (type);
7450 /* We cannot generate constant 1 for fract. */
7451 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7454 arg01 = build_one_cst (type);
/* Likewise decompose arg1 into arg10 * arg11.  */
7456 if (TREE_CODE (arg1) == MULT_EXPR)
7458 arg10 = TREE_OPERAND (arg1, 0);
7459 arg11 = TREE_OPERAND (arg1, 1);
7461 else if (TREE_CODE (arg1) == INTEGER_CST)
7463 arg10 = build_one_cst (type);
7464 /* As we canonicalize A - 2 to A + -2 get rid of that sign for
7465 the purpose of this canonicalization. */
7466 if (TREE_INT_CST_HIGH (arg1) == -1
7467 && negate_expr_p (arg1)
7468 && code == PLUS_EXPR)
7470 arg11 = negate_expr (arg1);
7478 /* We cannot generate constant 1 for fract. */
7479 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7482 arg11 = build_one_cst (type);
/* Find the shared multiplicand (`same`) among the four pairings.  */
7486 if (operand_equal_p (arg01, arg11, 0))
7487 same = arg01, alt0 = arg00, alt1 = arg10;
7488 else if (operand_equal_p (arg00, arg10, 0))
7489 same = arg00, alt0 = arg01, alt1 = arg11;
7490 else if (operand_equal_p (arg00, arg11, 0))
7491 same = arg00, alt0 = arg01, alt1 = arg10;
7492 else if (operand_equal_p (arg01, arg10, 0))
7493 same = arg01, alt0 = arg00, alt1 = arg11;
7495 /* No identical multiplicands; see if we can find a common
7496 power-of-two factor in non-power-of-two multiplies. This
7497 can help in multi-dimensional array access. */
7498 else if (host_integerp (arg01, 0)
7499 && host_integerp (arg11, 0))
7501 HOST_WIDE_INT int01, int11, tmp;
7504 int01 = TREE_INT_CST_LOW (arg01);
7505 int11 = TREE_INT_CST_LOW (arg11);
7507 /* Move min of absolute values to int11. */
7508 if ((int01 >= 0 ? int01 : -int01)
7509 < (int11 >= 0 ? int11 : -int11))
7511 tmp = int01, int01 = int11, int11 = tmp;
7512 alt0 = arg00, arg00 = arg10, arg10 = alt0;
/* Factor only when the smaller constant is a power of two dividing
   the larger, so we replace a multiply rather than add one.  */
7519 if (exact_log2 (abs (int11)) > 0 && int01 % int11 == 0
7520 /* The remainder should not be a constant, otherwise we
7521 end up folding i * 4 + 2 to (i * 2 + 1) * 2 which has
7522 increased the number of multiplications necessary. */
7523 && TREE_CODE (arg10) != INTEGER_CST)
7525 alt0 = fold_build2 (MULT_EXPR, TREE_TYPE (arg00), arg00,
7526 build_int_cst (TREE_TYPE (arg00),
7531 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
/* Rebuild as (alt0 +- alt1) * same.  */
7536 return fold_build2 (MULT_EXPR, type,
7537 fold_build2 (code, type,
7538 fold_convert (type, alt0),
7539 fold_convert (type, alt1)),
7540 fold_convert (type, same));
7545 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7546 specified by EXPR into the buffer PTR of length LEN bytes.
7547 Return the number of bytes placed in the buffer, or zero
/* NOTE(review): excerpt is elided -- the return type, braces, the
   `return 0` on the length check and the final `return total_bytes`
   are not visible. */
7551 native_encode_int (const_tree expr, unsigned char *ptr, int len)
7553 tree type = TREE_TYPE (expr);
7554 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7555 int byte, offset, word, words;
7556 unsigned char value;
/* Fail (presumably return 0) if the buffer is too small.  */
7558 if (total_bytes > len)
7560 words = total_bytes / UNITS_PER_WORD;
7562 for (byte = 0; byte < total_bytes; byte++)
7564 int bitpos = byte * BITS_PER_UNIT;
/* Pull the byte out of the low or high HOST_WIDE_INT half of the
   double-int constant.  */
7565 if (bitpos < HOST_BITS_PER_WIDE_INT)
7566 value = (unsigned char) (TREE_INT_CST_LOW (expr) >> bitpos);
7568 value = (unsigned char) (TREE_INT_CST_HIGH (expr)
7569 >> (bitpos - HOST_BITS_PER_WIDE_INT));
/* Map the logical byte index to its target-endianness position,
   handling word and byte order independently.  */
7571 if (total_bytes > UNITS_PER_WORD)
7573 word = byte / UNITS_PER_WORD;
7574 if (WORDS_BIG_ENDIAN)
7575 word = (words - 1) - word;
7576 offset = word * UNITS_PER_WORD;
7577 if (BYTES_BIG_ENDIAN)
7578 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7580 offset += byte % UNITS_PER_WORD;
7583 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7584 ptr[offset] = value;
7590 /* Subroutine of native_encode_expr. Encode the REAL_CST
7591 specified by EXPR into the buffer PTR of length LEN bytes.
7592 Return the number of bytes placed in the buffer, or zero
/* NOTE(review): excerpt is elided -- the return type, the `long tmp[]`
   declaration, braces, the early `return 0` and the final
   `return total_bytes` are not visible. */
7596 native_encode_real (const_tree expr, unsigned char *ptr, int len)
7598 tree type = TREE_TYPE (expr);
7599 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7600 int byte, offset, word, words, bitpos;
7601 unsigned char value;
7603 /* There are always 32 bits in each long, no matter the size of
7604 the hosts long. We handle floating point representations with
/* Fail (presumably return 0) if the buffer is too small.  */
7608 if (total_bytes > len)
7610 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
/* Let the real.c machinery produce the target bit pattern into tmp
   as an array of 32-bit groups.  */
7612 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7614 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7615 bitpos += BITS_PER_UNIT)
/* Byte index within the current 32-bit group.  */
7617 byte = (bitpos / BITS_PER_UNIT) & 3;
7618 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
/* Place the byte per target word/byte endianness, within each
   32-bit group.  */
7620 if (UNITS_PER_WORD < 4)
7622 word = byte / UNITS_PER_WORD;
7623 if (WORDS_BIG_ENDIAN)
7624 word = (words - 1) - word;
7625 offset = word * UNITS_PER_WORD;
7626 if (BYTES_BIG_ENDIAN)
7627 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7629 offset += byte % UNITS_PER_WORD;
7632 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7633 ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)] = value;
7638 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7639 specified by EXPR into the buffer PTR of length LEN bytes.
7640 Return the number of bytes placed in the buffer, or zero
/* NOTE(review): excerpt is elided -- return type, local declarations
   (part, rsize, isize) and the zero-checks on rsize/isize are not
   visible. */
7644 native_encode_complex (const_tree expr, unsigned char *ptr, int len)
/* Encode the real part at the start of the buffer...  */
7649 part = TREE_REALPART (expr);
7650 rsize = native_encode_expr (part, ptr, len);
/* ...then the imaginary part immediately after it.  */
7653 part = TREE_IMAGPART (expr);
7654 isize = native_encode_expr (part, ptr+rsize, len-rsize);
7657 return rsize + isize;
7661 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7662 specified by EXPR into the buffer PTR of length LEN bytes.
7663 Return the number of bytes placed in the buffer, or zero
/* NOTE(review): excerpt is elided -- return type, braces, the offset
   initialization/advance and the final return are not visible. */
7667 native_encode_vector (const_tree expr, unsigned char *ptr, int len)
7669 int i, size, offset, count;
7670 tree itype, elem, elements;
7673 elements = TREE_VECTOR_CST_ELTS (expr);
/* Number of lanes comes from the vector type, not from the (possibly
   shorter) element list.  */
7674 count = TYPE_VECTOR_SUBPARTS (TREE_TYPE (expr));
7675 itype = TREE_TYPE (TREE_TYPE (expr));
7676 size = GET_MODE_SIZE (TYPE_MODE (itype));
7677 for (i = 0; i < count; i++)
/* Walk the element TREE_LIST in lane order.  */
7681 elem = TREE_VALUE (elements);
7682 elements = TREE_CHAIN (elements);
/* Each element must encode to exactly `size` bytes.  */
7689 if (native_encode_expr (elem, ptr+offset, len-offset) != size)
/* Missing trailing elements are encoded as zero bytes.  */
7694 if (offset + size > len)
7696 memset (ptr+offset, 0, size);
7704 /* Subroutine of native_encode_expr. Encode the STRING_CST
7705 specified by EXPR into the buffer PTR of length LEN bytes.
7706 Return the number of bytes placed in the buffer, or zero
/* NOTE(review): excerpt is elided -- return type, braces, the `else`
   around the full-length memcpy and the final `return total_bytes`
   are not visible. */
7710 native_encode_string (const_tree expr, unsigned char *ptr, int len)
7712 tree type = TREE_TYPE (expr);
7713 HOST_WIDE_INT total_bytes;
/* Only handle plain byte arrays of known constant size.  */
7715 if (TREE_CODE (type) != ARRAY_TYPE
7716 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7717 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT
7718 || !host_integerp (TYPE_SIZE_UNIT (type), 0))
7720 total_bytes = tree_low_cst (TYPE_SIZE_UNIT (type), 0);
7721 if (total_bytes > len)
/* A string shorter than its array type is zero-padded to the full
   array size.  */
7723 if (TREE_STRING_LENGTH (expr) < total_bytes)
7725 memcpy (ptr, TREE_STRING_POINTER (expr), TREE_STRING_LENGTH (expr));
7726 memset (ptr + TREE_STRING_LENGTH (expr), 0,
7727 total_bytes - TREE_STRING_LENGTH (expr));
7730 memcpy (ptr, TREE_STRING_POINTER (expr), total_bytes);
7735 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7736 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7737 buffer PTR of length LEN bytes. Return the number of bytes
7738 placed in the buffer, or zero upon failure. */
/* NOTE(review): excerpt is elided -- return type, braces, the `case`
   labels and the `default: return 0` are not visible; each visible
   return sits under the corresponding tree-code case. */
7741 native_encode_expr (const_tree expr, unsigned char *ptr, int len)
/* Dispatch on the constant's tree code to the per-kind encoder.  */
7743 switch (TREE_CODE (expr))
7746 return native_encode_int (expr, ptr, len);
7749 return native_encode_real (expr, ptr, len);
7752 return native_encode_complex (expr, ptr, len);
7755 return native_encode_vector (expr, ptr, len);
7758 return native_encode_string (expr, ptr, len);
7766 /* Subroutine of native_interpret_expr. Interpret the contents of
7767 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7768 If the buffer cannot be interpreted, return NULL_TREE. */
/* NOTE(review): excerpt is elided -- return type, braces and the
   `return NULL_TREE` failure paths are not visible. */
7771 native_interpret_int (tree type, const unsigned char *ptr, int len)
7773 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7774 int byte, offset, word, words;
7775 unsigned char value;
/* Accumulate into the low/high halves of a double-int.  */
7776 unsigned int HOST_WIDE_INT lo = 0;
7777 HOST_WIDE_INT hi = 0;
7779 if (total_bytes > len)
/* Wider than the host double-int representation cannot be built.  */
7781 if (total_bytes * BITS_PER_UNIT > 2 * HOST_BITS_PER_WIDE_INT)
7783 words = total_bytes / UNITS_PER_WORD;
7785 for (byte = 0; byte < total_bytes; byte++)
7787 int bitpos = byte * BITS_PER_UNIT;
/* Inverse of native_encode_int's offset mapping: locate this byte
   per target word/byte endianness.  */
7788 if (total_bytes > UNITS_PER_WORD)
7790 word = byte / UNITS_PER_WORD;
7791 if (WORDS_BIG_ENDIAN)
7792 word = (words - 1) - word;
7793 offset = word * UNITS_PER_WORD;
7794 if (BYTES_BIG_ENDIAN)
7795 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7797 offset += byte % UNITS_PER_WORD;
7800 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7801 value = ptr[offset];
7803 if (bitpos < HOST_BITS_PER_WIDE_INT)
7804 lo |= (unsigned HOST_WIDE_INT) value << bitpos;
7806 hi |= (unsigned HOST_WIDE_INT) value
7807 << (bitpos - HOST_BITS_PER_WIDE_INT);
/* Build the constant; this also truncates/extends to TYPE's
   precision.  */
7810 return build_int_cst_wide_type (type, lo, hi);
7814 /* Subroutine of native_interpret_expr. Interpret the contents of
7815 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7816 If the buffer cannot be interpreted, return NULL_TREE. */
/* NOTE(review): excerpt is elided -- return type, the `long tmp[]` and
   `REAL_VALUE_TYPE r` declarations, braces and the `return NULL_TREE`
   failure path are not visible. */
7819 native_interpret_real (tree type, const unsigned char *ptr, int len)
7821 enum machine_mode mode = TYPE_MODE (type);
7822 int total_bytes = GET_MODE_SIZE (mode);
7823 int byte, offset, word, words, bitpos;
7824 unsigned char value;
7825 /* There are always 32 bits in each long, no matter the size of
7826 the hosts long. We handle floating point representations with
7831 total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
/* 24 bytes is the largest supported FP representation here.  */
7832 if (total_bytes > len || total_bytes > 24)
7834 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7836 memset (tmp, 0, sizeof (tmp));
7837 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7838 bitpos += BITS_PER_UNIT)
/* Inverse of native_encode_real: gather bytes back into 32-bit
   groups, honoring target word/byte endianness.  */
7840 byte = (bitpos / BITS_PER_UNIT) & 3;
7841 if (UNITS_PER_WORD < 4)
7843 word = byte / UNITS_PER_WORD;
7844 if (WORDS_BIG_ENDIAN)
7845 word = (words - 1) - word;
7846 offset = word * UNITS_PER_WORD;
7847 if (BYTES_BIG_ENDIAN)
7848 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7850 offset += byte % UNITS_PER_WORD;
7853 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7854 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7856 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
/* Convert the target bit pattern back into a REAL_VALUE_TYPE.  */
7859 real_from_target (&r, tmp, mode);
7860 return build_real (type, r);
7864 /* Subroutine of native_interpret_expr. Interpret the contents of
7865 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7866 If the buffer cannot be interpreted, return NULL_TREE. */
/* NOTE(review): excerpt is elided -- return type, the `int size`
   declaration, the len check and the NULL checks on rpart/ipart are
   not visible. */
7869 native_interpret_complex (tree type, const unsigned char *ptr, int len)
7871 tree etype, rpart, ipart;
7874 etype = TREE_TYPE (type);
7875 size = GET_MODE_SIZE (TYPE_MODE (etype));
/* Real part first, imaginary part immediately after -- mirroring
   native_encode_complex's layout.  */
7878 rpart = native_interpret_expr (etype, ptr, size);
7881 ipart = native_interpret_expr (etype, ptr+size, size);
7884 return build_complex (type, rpart, ipart);
7888 /* Subroutine of native_interpret_expr. Interpret the contents of
7889 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7890 If the buffer cannot be interpreted, return NULL_TREE. */
/* NOTE(review): excerpt is elided -- return type, the int declarations
   (i, size, count), braces and the NULL check on elem are not
   visible. */
7893 native_interpret_vector (tree type, const unsigned char *ptr, int len)
7895 tree etype, elem, elements;
7898 etype = TREE_TYPE (type);
7899 size = GET_MODE_SIZE (TYPE_MODE (etype));
7900 count = TYPE_VECTOR_SUBPARTS (type);
7901 if (size * count > len)
7904 elements = NULL_TREE;
/* Build the element list back-to-front so lane 0 ends up first.  */
7905 for (i = count - 1; i >= 0; i--)
7907 elem = native_interpret_expr (etype, ptr+(i*size), size);
7910 elements = tree_cons (NULL_TREE, elem, elements);
7912 return build_vector (type, elements);
7916 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7917 the buffer PTR of length LEN as a constant of type TYPE. For
7918 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7919 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7920 return NULL_TREE. */
/* NOTE(review): excerpt is elided -- return type, braces, the `case`
   labels and the `default: return NULL_TREE` are not visible; each
   visible return sits under the corresponding type-code case(s). */
7923 native_interpret_expr (tree type, const unsigned char *ptr, int len)
/* Dispatch on the requested result type's tree code.  */
7925 switch (TREE_CODE (type))
7930 return native_interpret_int (type, ptr, len);
7933 return native_interpret_real (type, ptr, len);
7936 return native_interpret_complex (type, ptr, len);
7939 return native_interpret_vector (type, ptr, len);
7947 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7948 TYPE at compile-time. If we're unable to perform the conversion
7949 return NULL_TREE. */
/* NOTE(review): excerpt is elided -- return type, the `int len`
   declaration, braces and the `return NULL_TREE` paths after the two
   checks are not visible. */
7952 fold_view_convert_expr (tree type, tree expr)
7954 /* We support up to 512-bit values (for V8DFmode). */
7955 unsigned char buffer[64];
7958 /* Check that the host and target are sane. */
/* The encode/interpret byte machinery assumes 8-bit bytes on both
   host and target.  */
7959 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
/* Round-trip: serialize EXPR to target bytes, then reinterpret those
   bytes as a constant of TYPE.  */
7962 len = native_encode_expr (expr, buffer, sizeof (buffer));
7966 return native_interpret_expr (type, buffer, len);
7969 /* Build an expression for the address of T. Folds away INDIRECT_REF
7970 to avoid confusing the gimplify process. */
/* NOTE(review): excerpt is elided -- return type, braces, the `else`
   introducing the ADDR_EXPR build and the final `return t` are not
   visible. */
7973 build_fold_addr_expr_with_type (tree t, tree ptrtype)
7975 /* The size of the object is not relevant when talking about its address. */
7976 if (TREE_CODE (t) == WITH_SIZE_EXPR)
7977 t = TREE_OPERAND (t, 0);
7979 /* Note: doesn't apply to ALIGN_INDIRECT_REF */
/* &*p folds to p (with a cast if the pointer type differs), instead
   of building ADDR_EXPR of an INDIRECT_REF.  */
7980 if (TREE_CODE (t) == INDIRECT_REF
7981 || TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
7983 t = TREE_OPERAND (t, 0);
7985 if (TREE_TYPE (t) != ptrtype)
7986 t = build1 (NOP_EXPR, ptrtype, t);
/* General case: take the address directly.  */
7989 t = build1 (ADDR_EXPR, ptrtype, t);
7994 /* Build an expression for the address of T. */
/* Convenience wrapper: derive the pointer type from T's own type and
   delegate to build_fold_addr_expr_with_type.  (Return type and braces
   are elided in this excerpt.)  */
7997 build_fold_addr_expr (tree t)
7999 tree ptrtype = build_pointer_type (TREE_TYPE (t));
8001 return build_fold_addr_expr_with_type (t, ptrtype);
8004 /* Fold a unary expression of code CODE and type TYPE with operand
8005 OP0. Return the folded expression if folding is successful.
8006 Otherwise, return NULL_TREE. */
8009 fold_unary (enum tree_code code, tree type, tree op0)
8013 enum tree_code_class kind = TREE_CODE_CLASS (code);
8015 gcc_assert (IS_EXPR_CODE_CLASS (kind)
8016 && TREE_CODE_LENGTH (code) == 1);
8021 if (CONVERT_EXPR_CODE_P (code)
8022 || code == FLOAT_EXPR || code == ABS_EXPR)
8024 /* Don't use STRIP_NOPS, because signedness of argument type
8026 STRIP_SIGN_NOPS (arg0);
8030 /* Strip any conversions that don't change the mode. This
8031 is safe for every expression, except for a comparison
8032 expression because its signedness is derived from its
8035 Note that this is done as an internal manipulation within
8036 the constant folder, in order to find the simplest
8037 representation of the arguments so that their form can be
8038 studied. In any cases, the appropriate type conversions
8039 should be put back in the tree that will get out of the
8045 if (TREE_CODE_CLASS (code) == tcc_unary)
8047 if (TREE_CODE (arg0) == COMPOUND_EXPR)
8048 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
8049 fold_build1 (code, type,
8050 fold_convert (TREE_TYPE (op0),
8051 TREE_OPERAND (arg0, 1))));
8052 else if (TREE_CODE (arg0) == COND_EXPR)
8054 tree arg01 = TREE_OPERAND (arg0, 1);
8055 tree arg02 = TREE_OPERAND (arg0, 2);
8056 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
8057 arg01 = fold_build1 (code, type,
8058 fold_convert (TREE_TYPE (op0), arg01));
8059 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
8060 arg02 = fold_build1 (code, type,
8061 fold_convert (TREE_TYPE (op0), arg02));
8062 tem = fold_build3 (COND_EXPR, type, TREE_OPERAND (arg0, 0),
8065 /* If this was a conversion, and all we did was to move into
8066 inside the COND_EXPR, bring it back out. But leave it if
8067 it is a conversion from integer to integer and the
8068 result precision is no wider than a word since such a
8069 conversion is cheap and may be optimized away by combine,
8070 while it couldn't if it were outside the COND_EXPR. Then return
8071 so we don't get into an infinite recursion loop taking the
8072 conversion out and then back in. */
8074 if ((CONVERT_EXPR_CODE_P (code)
8075 || code == NON_LVALUE_EXPR)
8076 && TREE_CODE (tem) == COND_EXPR
8077 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
8078 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
8079 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
8080 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
8081 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
8082 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
8083 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
8085 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
8086 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
8087 || flag_syntax_only))
8088 tem = build1 (code, type,
8090 TREE_TYPE (TREE_OPERAND
8091 (TREE_OPERAND (tem, 1), 0)),
8092 TREE_OPERAND (tem, 0),
8093 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
8094 TREE_OPERAND (TREE_OPERAND (tem, 2), 0)));
8097 else if (COMPARISON_CLASS_P (arg0))
8099 if (TREE_CODE (type) == BOOLEAN_TYPE)
8101 arg0 = copy_node (arg0);
8102 TREE_TYPE (arg0) = type;
8105 else if (TREE_CODE (type) != INTEGER_TYPE)
8106 return fold_build3 (COND_EXPR, type, arg0,
8107 fold_build1 (code, type,
8109 fold_build1 (code, type,
8110 integer_zero_node));
8117 /* Re-association barriers around constants and other re-association
8118 barriers can be removed. */
8119 if (CONSTANT_CLASS_P (op0)
8120 || TREE_CODE (op0) == PAREN_EXPR)
8121 return fold_convert (type, op0);
8126 case FIX_TRUNC_EXPR:
8127 if (TREE_TYPE (op0) == type)
8130 /* If we have (type) (a CMP b) and type is an integral type, return
8131 new expression involving the new type. */
8132 if (COMPARISON_CLASS_P (op0) && INTEGRAL_TYPE_P (type))
8133 return fold_build2 (TREE_CODE (op0), type, TREE_OPERAND (op0, 0),
8134 TREE_OPERAND (op0, 1));
8136 /* Handle cases of two conversions in a row. */
8137 if (CONVERT_EXPR_P (op0))
8139 tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
8140 tree inter_type = TREE_TYPE (op0);
8141 int inside_int = INTEGRAL_TYPE_P (inside_type);
8142 int inside_ptr = POINTER_TYPE_P (inside_type);
8143 int inside_float = FLOAT_TYPE_P (inside_type);
8144 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
8145 unsigned int inside_prec = TYPE_PRECISION (inside_type);
8146 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
8147 int inter_int = INTEGRAL_TYPE_P (inter_type);
8148 int inter_ptr = POINTER_TYPE_P (inter_type);
8149 int inter_float = FLOAT_TYPE_P (inter_type);
8150 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
8151 unsigned int inter_prec = TYPE_PRECISION (inter_type);
8152 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
8153 int final_int = INTEGRAL_TYPE_P (type);
8154 int final_ptr = POINTER_TYPE_P (type);
8155 int final_float = FLOAT_TYPE_P (type);
8156 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
8157 unsigned int final_prec = TYPE_PRECISION (type);
8158 int final_unsignedp = TYPE_UNSIGNED (type);
8160 /* In addition to the cases of two conversions in a row
8161 handled below, if we are converting something to its own
8162 type via an object of identical or wider precision, neither
8163 conversion is needed. */
8164 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
8165 && (((inter_int || inter_ptr) && final_int)
8166 || (inter_float && final_float))
8167 && inter_prec >= final_prec)
8168 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
8170 /* Likewise, if the intermediate and initial types are either both
8171 float or both integer, we don't need the middle conversion if the
8172 former is wider than the latter and doesn't change the signedness
8173 (for integers). Avoid this if the final type is a pointer since
8174 then we sometimes need the middle conversion. Likewise if the
8175 final type has a precision not equal to the size of its mode. */
8176 if (((inter_int && inside_int)
8177 || (inter_float && inside_float)
8178 || (inter_vec && inside_vec))
8179 && inter_prec >= inside_prec
8180 && (inter_float || inter_vec
8181 || inter_unsignedp == inside_unsignedp)
8182 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
8183 && TYPE_MODE (type) == TYPE_MODE (inter_type))
8185 && (! final_vec || inter_prec == inside_prec))
8186 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
8188 /* If we have a sign-extension of a zero-extended value, we can
8189 replace that by a single zero-extension. */
8190 if (inside_int && inter_int && final_int
8191 && inside_prec < inter_prec && inter_prec < final_prec
8192 && inside_unsignedp && !inter_unsignedp)
8193 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
8195 /* Two conversions in a row are not needed unless:
8196 - some conversion is floating-point (overstrict for now), or
8197 - some conversion is a vector (overstrict for now), or
8198 - the intermediate type is narrower than both initial and
8200 - the intermediate type and innermost type differ in signedness,
8201 and the outermost type is wider than the intermediate, or
8202 - the initial type is a pointer type and the precisions of the
8203 intermediate and final types differ, or
8204 - the final type is a pointer type and the precisions of the
8205 initial and intermediate types differ. */
8206 if (! inside_float && ! inter_float && ! final_float
8207 && ! inside_vec && ! inter_vec && ! final_vec
8208 && (inter_prec >= inside_prec || inter_prec >= final_prec)
8209 && ! (inside_int && inter_int
8210 && inter_unsignedp != inside_unsignedp
8211 && inter_prec < final_prec)
8212 && ((inter_unsignedp && inter_prec > inside_prec)
8213 == (final_unsignedp && final_prec > inter_prec))
8214 && ! (inside_ptr && inter_prec != final_prec)
8215 && ! (final_ptr && inside_prec != inter_prec)
8216 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
8217 && TYPE_MODE (type) == TYPE_MODE (inter_type)))
8218 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
8221 /* Handle (T *)&A.B.C for A being of type T and B and C
8222 living at offset zero. This occurs frequently in
8223 C++ upcasting and then accessing the base. */
8224 if (TREE_CODE (op0) == ADDR_EXPR
8225 && POINTER_TYPE_P (type)
8226 && handled_component_p (TREE_OPERAND (op0, 0)))
8228 HOST_WIDE_INT bitsize, bitpos;
8230 enum machine_mode mode;
8231 int unsignedp, volatilep;
8232 tree base = TREE_OPERAND (op0, 0);
8233 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
8234 &mode, &unsignedp, &volatilep, false);
8235 /* If the reference was to a (constant) zero offset, we can use
8236 the address of the base if it has the same base type
8237 as the result type. */
8238 if (! offset && bitpos == 0
8239 && TYPE_MAIN_VARIANT (TREE_TYPE (type))
8240 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
8241 return fold_convert (type, build_fold_addr_expr (base));
8244 if (TREE_CODE (op0) == MODIFY_EXPR
8245 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
8246 /* Detect assigning a bitfield. */
8247 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
8249 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
8251 /* Don't leave an assignment inside a conversion
8252 unless assigning a bitfield. */
8253 tem = fold_build1 (code, type, TREE_OPERAND (op0, 1));
8254 /* First do the assignment, then return converted constant. */
8255 tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
8256 TREE_NO_WARNING (tem) = 1;
8257 TREE_USED (tem) = 1;
8261 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
8262 constants (if x has signed type, the sign bit cannot be set
8263 in c). This folds extension into the BIT_AND_EXPR.
8264 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
8265 very likely don't have maximal range for their precision and this
8266 transformation effectively doesn't preserve non-maximal ranges. */
8267 if (TREE_CODE (type) == INTEGER_TYPE
8268 && TREE_CODE (op0) == BIT_AND_EXPR
8269 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST
8270 /* Not if the conversion is to the sub-type. */
8271 && TREE_TYPE (type) != TREE_TYPE (op0))
8274 tree and0 = TREE_OPERAND (and, 0), and1 = TREE_OPERAND (and, 1);
8277 if (TYPE_UNSIGNED (TREE_TYPE (and))
8278 || (TYPE_PRECISION (type)
8279 <= TYPE_PRECISION (TREE_TYPE (and))))
8281 else if (TYPE_PRECISION (TREE_TYPE (and1))
8282 <= HOST_BITS_PER_WIDE_INT
8283 && host_integerp (and1, 1))
8285 unsigned HOST_WIDE_INT cst;
8287 cst = tree_low_cst (and1, 1);
8288 cst &= (HOST_WIDE_INT) -1
8289 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
8290 change = (cst == 0);
8291 #ifdef LOAD_EXTEND_OP
8293 && !flag_syntax_only
8294 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
8297 tree uns = unsigned_type_for (TREE_TYPE (and0));
8298 and0 = fold_convert (uns, and0);
8299 and1 = fold_convert (uns, and1);
8305 tem = force_fit_type_double (type, TREE_INT_CST_LOW (and1),
8306 TREE_INT_CST_HIGH (and1), 0,
8307 TREE_OVERFLOW (and1));
8308 return fold_build2 (BIT_AND_EXPR, type,
8309 fold_convert (type, and0), tem);
8313 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
8314 when one of the new casts will fold away. Conservatively we assume
8315 that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST. */
8316 if (POINTER_TYPE_P (type)
8317 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
8318 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8319 || TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
8320 || TREE_CODE (TREE_OPERAND (arg0, 1)) == NOP_EXPR))
8322 tree arg00 = TREE_OPERAND (arg0, 0);
8323 tree arg01 = TREE_OPERAND (arg0, 1);
8325 return fold_build2 (TREE_CODE (arg0), type, fold_convert (type, arg00),
8326 fold_convert (sizetype, arg01));
8329 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
8330 of the same precision, and X is an integer type not narrower than
8331 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
8332 if (INTEGRAL_TYPE_P (type)
8333 && TREE_CODE (op0) == BIT_NOT_EXPR
8334 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
8335 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
8336 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
8338 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
8339 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
8340 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
8341 return fold_build1 (BIT_NOT_EXPR, type, fold_convert (type, tem));
8344 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
8345 type of X and Y (integer types only). */
8346 if (INTEGRAL_TYPE_P (type)
8347 && TREE_CODE (op0) == MULT_EXPR
8348 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
8349 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
8351 /* Be careful not to introduce new overflows. */
8353 if (TYPE_OVERFLOW_WRAPS (type))
8356 mult_type = unsigned_type_for (type);
8358 if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
8360 tem = fold_build2 (MULT_EXPR, mult_type,
8361 fold_convert (mult_type,
8362 TREE_OPERAND (op0, 0)),
8363 fold_convert (mult_type,
8364 TREE_OPERAND (op0, 1)));
8365 return fold_convert (type, tem);
8369 tem = fold_convert_const (code, type, op0);
8370 return tem ? tem : NULL_TREE;
8372 case FIXED_CONVERT_EXPR:
8373 tem = fold_convert_const (code, type, arg0);
8374 return tem ? tem : NULL_TREE;
8376 case VIEW_CONVERT_EXPR:
8377 if (TREE_TYPE (op0) == type)
8379 if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
8380 return fold_build1 (VIEW_CONVERT_EXPR, type, TREE_OPERAND (op0, 0));
8382 /* For integral conversions with the same precision or pointer
8383 conversions use a NOP_EXPR instead. */
8384 if ((INTEGRAL_TYPE_P (type)
8385 || POINTER_TYPE_P (type))
8386 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
8387 || POINTER_TYPE_P (TREE_TYPE (op0)))
8388 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0))
8389 /* Do not muck with VIEW_CONVERT_EXPRs that convert from
8390 a sub-type to its base type as generated by the Ada FE. */
8391 && !(INTEGRAL_TYPE_P (TREE_TYPE (op0))
8392 && TREE_TYPE (TREE_TYPE (op0))))
8393 return fold_convert (type, op0);
8395 /* Strip inner integral conversions that do not change the precision. */
8396 if (CONVERT_EXPR_P (op0)
8397 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
8398 || POINTER_TYPE_P (TREE_TYPE (op0)))
8399 && (INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0)))
8400 || POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0))))
8401 && (TYPE_PRECISION (TREE_TYPE (op0))
8402 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op0, 0)))))
8403 return fold_build1 (VIEW_CONVERT_EXPR, type, TREE_OPERAND (op0, 0));
8405 return fold_view_convert_expr (type, op0);
8408 tem = fold_negate_expr (arg0);
8410 return fold_convert (type, tem);
8414 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
8415 return fold_abs_const (arg0, type);
8416 else if (TREE_CODE (arg0) == NEGATE_EXPR)
8417 return fold_build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0));
8418 /* Convert fabs((double)float) into (double)fabsf(float). */
8419 else if (TREE_CODE (arg0) == NOP_EXPR
8420 && TREE_CODE (type) == REAL_TYPE)
8422 tree targ0 = strip_float_extensions (arg0);
8424 return fold_convert (type, fold_build1 (ABS_EXPR,
8428 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
8429 else if (TREE_CODE (arg0) == ABS_EXPR)
8431 else if (tree_expr_nonnegative_p (arg0))
8434 /* Strip sign ops from argument. */
8435 if (TREE_CODE (type) == REAL_TYPE)
8437 tem = fold_strip_sign_ops (arg0);
8439 return fold_build1 (ABS_EXPR, type, fold_convert (type, tem));
8444 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8445 return fold_convert (type, arg0);
8446 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8448 tree itype = TREE_TYPE (type);
8449 tree rpart = fold_convert (itype, TREE_OPERAND (arg0, 0));
8450 tree ipart = fold_convert (itype, TREE_OPERAND (arg0, 1));
8451 return fold_build2 (COMPLEX_EXPR, type, rpart, negate_expr (ipart));
8453 if (TREE_CODE (arg0) == COMPLEX_CST)
8455 tree itype = TREE_TYPE (type);
8456 tree rpart = fold_convert (itype, TREE_REALPART (arg0));
8457 tree ipart = fold_convert (itype, TREE_IMAGPART (arg0));
8458 return build_complex (type, rpart, negate_expr (ipart));
8460 if (TREE_CODE (arg0) == CONJ_EXPR)
8461 return fold_convert (type, TREE_OPERAND (arg0, 0));
8465 if (TREE_CODE (arg0) == INTEGER_CST)
8466 return fold_not_const (arg0, type);
8467 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
8468 return fold_convert (type, TREE_OPERAND (arg0, 0));
8469 /* Convert ~ (-A) to A - 1. */
8470 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
8471 return fold_build2 (MINUS_EXPR, type,
8472 fold_convert (type, TREE_OPERAND (arg0, 0)),
8473 build_int_cst (type, 1));
8474 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
8475 else if (INTEGRAL_TYPE_P (type)
8476 && ((TREE_CODE (arg0) == MINUS_EXPR
8477 && integer_onep (TREE_OPERAND (arg0, 1)))
8478 || (TREE_CODE (arg0) == PLUS_EXPR
8479 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
8480 return fold_build1 (NEGATE_EXPR, type,
8481 fold_convert (type, TREE_OPERAND (arg0, 0)));
8482 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
8483 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8484 && (tem = fold_unary (BIT_NOT_EXPR, type,
8486 TREE_OPERAND (arg0, 0)))))
8487 return fold_build2 (BIT_XOR_EXPR, type, tem,
8488 fold_convert (type, TREE_OPERAND (arg0, 1)));
8489 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8490 && (tem = fold_unary (BIT_NOT_EXPR, type,
8492 TREE_OPERAND (arg0, 1)))))
8493 return fold_build2 (BIT_XOR_EXPR, type,
8494 fold_convert (type, TREE_OPERAND (arg0, 0)), tem);
8495 /* Perform BIT_NOT_EXPR on each element individually. */
8496 else if (TREE_CODE (arg0) == VECTOR_CST)
8498 tree elements = TREE_VECTOR_CST_ELTS (arg0), elem, list = NULL_TREE;
8499 int count = TYPE_VECTOR_SUBPARTS (type), i;
8501 for (i = 0; i < count; i++)
8505 elem = TREE_VALUE (elements);
8506 elem = fold_unary (BIT_NOT_EXPR, TREE_TYPE (type), elem);
8507 if (elem == NULL_TREE)
8509 elements = TREE_CHAIN (elements);
8512 elem = build_int_cst (TREE_TYPE (type), -1);
8513 list = tree_cons (NULL_TREE, elem, list);
8516 return build_vector (type, nreverse (list));
8521 case TRUTH_NOT_EXPR:
8522 /* The argument to invert_truthvalue must have Boolean type. */
8523 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
8524 arg0 = fold_convert (boolean_type_node, arg0);
8526 /* Note that the operand of this must be an int
8527 and its values must be 0 or 1.
8528 ("true" is a fixed value perhaps depending on the language,
8529 but we don't handle values other than 1 correctly yet.) */
8530 tem = fold_truth_not_expr (arg0);
8533 return fold_convert (type, tem);
8536 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8537 return fold_convert (type, arg0);
8538 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8539 return omit_one_operand (type, TREE_OPERAND (arg0, 0),
8540 TREE_OPERAND (arg0, 1));
8541 if (TREE_CODE (arg0) == COMPLEX_CST)
8542 return fold_convert (type, TREE_REALPART (arg0));
8543 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8545 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8546 tem = fold_build2 (TREE_CODE (arg0), itype,
8547 fold_build1 (REALPART_EXPR, itype,
8548 TREE_OPERAND (arg0, 0)),
8549 fold_build1 (REALPART_EXPR, itype,
8550 TREE_OPERAND (arg0, 1)));
8551 return fold_convert (type, tem);
8553 if (TREE_CODE (arg0) == CONJ_EXPR)
8555 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8556 tem = fold_build1 (REALPART_EXPR, itype, TREE_OPERAND (arg0, 0));
8557 return fold_convert (type, tem);
8559 if (TREE_CODE (arg0) == CALL_EXPR)
8561 tree fn = get_callee_fndecl (arg0);
8562 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8563 switch (DECL_FUNCTION_CODE (fn))
8565 CASE_FLT_FN (BUILT_IN_CEXPI):
8566 fn = mathfn_built_in (type, BUILT_IN_COS);
8568 return build_call_expr (fn, 1, CALL_EXPR_ARG (arg0, 0));
8578 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8579 return fold_convert (type, integer_zero_node);
8580 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8581 return omit_one_operand (type, TREE_OPERAND (arg0, 1),
8582 TREE_OPERAND (arg0, 0));
8583 if (TREE_CODE (arg0) == COMPLEX_CST)
8584 return fold_convert (type, TREE_IMAGPART (arg0));
8585 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8587 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8588 tem = fold_build2 (TREE_CODE (arg0), itype,
8589 fold_build1 (IMAGPART_EXPR, itype,
8590 TREE_OPERAND (arg0, 0)),
8591 fold_build1 (IMAGPART_EXPR, itype,
8592 TREE_OPERAND (arg0, 1)));
8593 return fold_convert (type, tem);
8595 if (TREE_CODE (arg0) == CONJ_EXPR)
8597 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8598 tem = fold_build1 (IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
8599 return fold_convert (type, negate_expr (tem));
8601 if (TREE_CODE (arg0) == CALL_EXPR)
8603 tree fn = get_callee_fndecl (arg0);
8604 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8605 switch (DECL_FUNCTION_CODE (fn))
8607 CASE_FLT_FN (BUILT_IN_CEXPI):
8608 fn = mathfn_built_in (type, BUILT_IN_SIN);
8610 return build_call_expr (fn, 1, CALL_EXPR_ARG (arg0, 0));
8621 } /* switch (code) */
8625 /* If the operation was a conversion do _not_ mark a resulting constant
8626 with TREE_OVERFLOW if the original constant was not. These conversions
8627 have implementation defined behavior and retaining the TREE_OVERFLOW
8628 flag here would confuse later passes such as VRP. */
/* NOTE(review): this excerpt elides several original lines (the result-type
   line, braces, the guard on RES being non-null, and the final return).
   Comments below describe only what the visible lines establish.  */
8630 fold_unary_ignore_overflow (enum tree_code code, tree type, tree op0)
/* First let fold_unary do the actual folding of (CODE TYPE OP0).  */
8632 tree res = fold_unary (code, type, op0);
/* When a conversion (CONVERT_EXPR_CODE_P) of an INTEGER_CST produced an
   INTEGER_CST, copy the operand's overflow flag onto the result rather
   than keeping whatever flag folding computed — see the head comment.  */
8634 && TREE_CODE (res) == INTEGER_CST
8635 && TREE_CODE (op0) == INTEGER_CST
8636 && CONVERT_EXPR_CODE_P (code))
8637 TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
8642 /* Fold a binary expression of code CODE and type TYPE with operands
8643 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
8644 Return the folded expression if folding is successful. Otherwise,
8645 return NULL_TREE. */
/* NOTE(review): excerpt elides the opening brace, the "neither MIN nor
   MAX" early-return branch, and the trailing return — presumably
   NULL_TREE per the head comment; confirm against the full source.  */
8648 fold_minmax (enum tree_code code, tree type, tree op0, tree op1)
/* COMPL_CODE is the complementary extremum code: for an outer MIN we
   look for an inner MAX operand, and vice versa.  */
8650 enum tree_code compl_code;
8652 if (code == MIN_EXPR)
8653 compl_code = MAX_EXPR;
8654 else if (code == MAX_EXPR)
8655 compl_code = MIN_EXPR;
/* The four patterns below differ only in which side the repeated
   operand appears on; omit_one_operand keeps the surviving operand
   while preserving the side effects of the dropped one.  */
8659 /* MIN (MAX (a, b), b) == b. */
8660 if (TREE_CODE (op0) == compl_code
8661 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
8662 return omit_one_operand (type, op1, TREE_OPERAND (op0, 0));
8664 /* MIN (MAX (b, a), b) == b. */
/* reorder_operands_p guards the variants whose simplification reorders
   the evaluation of the two subexpressions.  */
8665 if (TREE_CODE (op0) == compl_code
8666 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
8667 && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
8668 return omit_one_operand (type, op1, TREE_OPERAND (op0, 1));
8670 /* MIN (a, MAX (a, b)) == a. */
8671 if (TREE_CODE (op1) == compl_code
8672 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
8673 && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
8674 return omit_one_operand (type, op0, TREE_OPERAND (op1, 1));
8676 /* MIN (a, MAX (b, a)) == a. */
8677 if (TREE_CODE (op1) == compl_code
8678 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
8679 && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
8680 return omit_one_operand (type, op0, TREE_OPERAND (op1, 0));
8685 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8686 by changing CODE to reduce the magnitude of constants involved in
8687 ARG0 of the comparison.
8688 Returns a canonicalized comparison tree if a simplification was
8689 possible, otherwise returns NULL_TREE.
8690 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8691 valid if signed overflow is undefined. */
/* NOTE(review): excerpt elides many interior lines (braces, the
   statements executed by most if-arms such as the new comparison codes
   assigned, several early NULL_TREE returns, and the sgn0/swap locals'
   declarations).  Comments describe only what the visible lines show.  */
8694 maybe_canonicalize_comparison_1 (enum tree_code code, tree type,
8695 tree arg0, tree arg1,
8696 bool *strict_overflow_p)
8698 enum tree_code code0 = TREE_CODE (arg0);
8699 tree t, cst0 = NULL_TREE;
8703 /* Match A +- CST code arg1 and CST code arg1. We can change the
8704 first form only if overflow is undefined. */
8705 if (!((TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8706 /* In principle pointers also have undefined overflow behavior,
8707 but that causes problems elsewhere. */
8708 && !POINTER_TYPE_P (TREE_TYPE (arg0))
8709 && (code0 == MINUS_EXPR
8710 || code0 == PLUS_EXPR)
8711 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8712 || code0 == INTEGER_CST))
8715 /* Identify the constant in arg0 and its sign. */
/* For the A +- CST form the constant is operand 1; for a bare
   INTEGER_CST arg0, the elided branch presumably uses arg0 itself.  */
8716 if (code0 == INTEGER_CST)
8719 cst0 = TREE_OPERAND (arg0, 1);
8720 sgn0 = tree_int_cst_sgn (cst0);
8722 /* Overflowed constants and zero will cause problems. */
8723 if (integer_zerop (cst0)
8724 || TREE_OVERFLOW (cst0))
8727 /* See if we can reduce the magnitude of the constant in
8728 arg0 by changing the comparison code. */
/* Bare-constant case: trade LE<->LT and GT<->GE so the constant's
   magnitude shrinks by one (the replacement codes are elided here).  */
8729 if (code0 == INTEGER_CST)
8731 /* CST <= arg1 -> CST-1 < arg1. */
8732 if (code == LE_EXPR && sgn0 == 1)
8734 /* -CST < arg1 -> -CST-1 <= arg1. */
8735 else if (code == LT_EXPR && sgn0 == -1)
8737 /* CST > arg1 -> CST-1 >= arg1. */
8738 else if (code == GT_EXPR && sgn0 == 1)
8740 /* -CST >= arg1 -> -CST-1 > arg1. */
8741 else if (code == GE_EXPR && sgn0 == -1)
8745 /* arg1 code' CST' might be more canonical. */
/* A +- CST case: same magnitude-reduction idea, with the matching
   PLUS/MINUS orientation selected by the constant's sign.  */
8750 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8752 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8754 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8755 else if (code == GT_EXPR
8756 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8758 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8759 else if (code == LE_EXPR
8760 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8762 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8763 else if (code == GE_EXPR
8764 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
/* The A +- CST rewrites are only valid when signed overflow is
   undefined; record that so the caller can warn.  */
8768 *strict_overflow_p = true;
8771 /* Now build the constant reduced in magnitude. But not if that
8772 would produce one outside of its types range. */
/* Refuse to step past TYPE_MIN_VALUE / TYPE_MAX_VALUE (the sign-based
   selection between the two checks is partly elided here).  */
8773 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
8775 && TYPE_MIN_VALUE (TREE_TYPE (cst0))
8776 && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
8778 && TYPE_MAX_VALUE (TREE_TYPE (cst0))
8779 && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
8780 /* We cannot swap the comparison here as that would cause us to
8781 endlessly recurse. */
/* Move the constant one step toward zero: add 1 to a negative CST,
   subtract 1 from a positive one.  */
8784 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8785 cst0, build_int_cst (TREE_TYPE (cst0), 1), 0);
/* Rebuild the A +- CST' subtree when arg0 was not a bare constant.  */
8786 if (code0 != INTEGER_CST)
8787 t = fold_build2 (code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
8789 /* If swapping might yield to a more canonical form, do so. */
8791 return fold_build2 (swap_tree_comparison (code), type, arg1, t);
8793 return fold_build2 (code, type, t, arg1);
8796 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8797 overflow further. Try to decrease the magnitude of constants involved
8798 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8799 and put sole constants at the second argument position.
8800 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
/* NOTE(review): excerpt elides the result-type line, braces, the
   declaration of T, the returns after each attempt, and part of the
   "swapped comparison" comment.  */
8803 maybe_canonicalize_comparison (enum tree_code code, tree type,
8804 tree arg0, tree arg1)
8807 bool strict_overflow_p;
/* Shared -Wstrict-overflow message for both canonicalization attempts.  */
8808 const char * const warnmsg = G_("assuming signed overflow does not occur "
8809 "when reducing constant in comparison");
8811 /* Try canonicalization by simplifying arg0. */
8812 strict_overflow_p = false;
8813 t = maybe_canonicalize_comparison_1 (code, type, arg0, arg1,
8814 &strict_overflow_p);
/* Warn only when the helper reported relying on undefined signed
   overflow (the success test on T here is partly elided).  */
8817 if (strict_overflow_p)
8818 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8822 /* Try canonicalization by simplifying arg1 using the swapped
/* Second attempt: swap the comparison so the helper can reduce a
   constant sitting in what was ARG1.  */
8824 code = swap_tree_comparison (code);
8825 strict_overflow_p = false;
8826 t = maybe_canonicalize_comparison_1 (code, type, arg1, arg0,
8827 &strict_overflow_p);
8828 if (t && strict_overflow_p)
8829 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8833 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
8834 space. This is used to avoid issuing overflow warnings for
8835 expressions like &p->x which can not wrap. */
/* NOTE(review): excerpt elides the result-type line, braces and the
   early-return statements after each guard below — presumably boolean
   results per the head comment; confirm against the full source.  */
8838 pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
/* Double-word accumulators: (offset_high:offset_low) holds OFFSET,
   (total_high:total_low) holds OFFSET plus the byte form of BITPOS.  */
8840 unsigned HOST_WIDE_INT offset_low, total_low;
8841 HOST_WIDE_INT size, offset_high, total_high;
/* Only pointer-typed bases are meaningful here.  */
8843 if (!POINTER_TYPE_P (TREE_TYPE (base)))
8849 if (offset == NULL_TREE)
/* Non-constant or overflowed offsets cannot be reasoned about.  */
8854 else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
8858 offset_low = TREE_INT_CST_LOW (offset);
8859 offset_high = TREE_INT_CST_HIGH (offset);
/* Add BITPOS (converted to bytes) to OFFSET in double-word arithmetic;
   the overflow-result argument of the call is elided in this excerpt.  */
8862 if (add_double_with_sign (offset_low, offset_high,
8863 bitpos / BITS_PER_UNIT, 0,
8864 &total_low, &total_high,
/* A nonzero high word means the total exceeds a single word.  */
8868 if (total_high != 0)
/* Size of the pointed-to object's type, in bytes.  */
8871 size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
8875 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
/* ... (comment continuation elided): for &object we can use the size of
   the underlying object when it is larger than the pointed-to type.  */
8877 if (TREE_CODE (base) == ADDR_EXPR)
8879 HOST_WIDE_INT base_size;
8881 base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
8882 if (base_size > 0 && size < base_size)
/* Wraps if the combined offset runs past the object size.  */
8886 return total_low > (unsigned HOST_WIDE_INT) size;
8889 /* Subroutine of fold_binary. This routine performs all of the
8890 transformations that are common to the equality/inequality
8891 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8892 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
8893 fold_binary should call fold_binary. Fold a comparison with
8894 tree code CODE and type TYPE with operands OP0 and OP1. Return
8895 the folded comparison or NULL_TREE. */
8898 fold_comparison (enum tree_code code, tree type, tree op0, tree op1)
8900 tree arg0, arg1, tem;
8905 STRIP_SIGN_NOPS (arg0);
8906 STRIP_SIGN_NOPS (arg1);
8908 tem = fold_relational_const (code, type, arg0, arg1);
8909 if (tem != NULL_TREE)
8912 /* If one arg is a real or integer constant, put it last. */
8913 if (tree_swap_operands_p (arg0, arg1, true))
8914 return fold_build2 (swap_tree_comparison (code), type, op1, op0);
8916 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1. */
8917 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8918 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8919 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8920 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
8921 && (TREE_CODE (arg1) == INTEGER_CST
8922 && !TREE_OVERFLOW (arg1)))
8924 tree const1 = TREE_OPERAND (arg0, 1);
8926 tree variable = TREE_OPERAND (arg0, 0);
8929 lhs_add = TREE_CODE (arg0) != PLUS_EXPR;
8931 lhs = fold_build2 (lhs_add ? PLUS_EXPR : MINUS_EXPR,
8932 TREE_TYPE (arg1), const2, const1);
8934 /* If the constant operation overflowed this can be
8935 simplified as a comparison against INT_MAX/INT_MIN. */
8936 if (TREE_CODE (lhs) == INTEGER_CST
8937 && TREE_OVERFLOW (lhs))
8939 int const1_sgn = tree_int_cst_sgn (const1);
8940 enum tree_code code2 = code;
8942 /* Get the sign of the constant on the lhs if the
8943 operation were VARIABLE + CONST1. */
8944 if (TREE_CODE (arg0) == MINUS_EXPR)
8945 const1_sgn = -const1_sgn;
8947 /* The sign of the constant determines if we overflowed
8948 INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
8949 Canonicalize to the INT_MIN overflow by swapping the comparison
8951 if (const1_sgn == -1)
8952 code2 = swap_tree_comparison (code);
8954 /* We now can look at the canonicalized case
8955 VARIABLE + 1 CODE2 INT_MIN
8956 and decide on the result. */
8957 if (code2 == LT_EXPR
8959 || code2 == EQ_EXPR)
8960 return omit_one_operand (type, boolean_false_node, variable);
8961 else if (code2 == NE_EXPR
8963 || code2 == GT_EXPR)
8964 return omit_one_operand (type, boolean_true_node, variable);
8967 if (TREE_CODE (lhs) == TREE_CODE (arg1)
8968 && (TREE_CODE (lhs) != INTEGER_CST
8969 || !TREE_OVERFLOW (lhs)))
8971 fold_overflow_warning (("assuming signed overflow does not occur "
8972 "when changing X +- C1 cmp C2 to "
8974 WARN_STRICT_OVERFLOW_COMPARISON);
8975 return fold_build2 (code, type, variable, lhs);
8979 /* For comparisons of pointers we can decompose it to a compile time
8980 comparison of the base objects and the offsets into the object.
8981 This requires at least one operand being an ADDR_EXPR or a
8982 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
8983 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8984 && (TREE_CODE (arg0) == ADDR_EXPR
8985 || TREE_CODE (arg1) == ADDR_EXPR
8986 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
8987 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
8989 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
8990 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
8991 enum machine_mode mode;
8992 int volatilep, unsignedp;
8993 bool indirect_base0 = false, indirect_base1 = false;
8995 /* Get base and offset for the access. Strip ADDR_EXPR for
8996 get_inner_reference, but put it back by stripping INDIRECT_REF
8997 off the base object if possible. indirect_baseN will be true
8998 if baseN is not an address but refers to the object itself. */
9000 if (TREE_CODE (arg0) == ADDR_EXPR)
9002 base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
9003 &bitsize, &bitpos0, &offset0, &mode,
9004 &unsignedp, &volatilep, false);
9005 if (TREE_CODE (base0) == INDIRECT_REF)
9006 base0 = TREE_OPERAND (base0, 0);
9008 indirect_base0 = true;
9010 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
9012 base0 = TREE_OPERAND (arg0, 0);
9013 offset0 = TREE_OPERAND (arg0, 1);
9017 if (TREE_CODE (arg1) == ADDR_EXPR)
9019 base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
9020 &bitsize, &bitpos1, &offset1, &mode,
9021 &unsignedp, &volatilep, false);
9022 if (TREE_CODE (base1) == INDIRECT_REF)
9023 base1 = TREE_OPERAND (base1, 0);
9025 indirect_base1 = true;
9027 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
9029 base1 = TREE_OPERAND (arg1, 0);
9030 offset1 = TREE_OPERAND (arg1, 1);
9033 /* If we have equivalent bases we might be able to simplify. */
9034 if (indirect_base0 == indirect_base1
9035 && operand_equal_p (base0, base1, 0))
9037 /* We can fold this expression to a constant if the non-constant
9038 offset parts are equal. */
9039 if ((offset0 == offset1
9040 || (offset0 && offset1
9041 && operand_equal_p (offset0, offset1, 0)))
9044 || POINTER_TYPE_OVERFLOW_UNDEFINED))
9049 && bitpos0 != bitpos1
9050 && (pointer_may_wrap_p (base0, offset0, bitpos0)
9051 || pointer_may_wrap_p (base1, offset1, bitpos1)))
9052 fold_overflow_warning (("assuming pointer wraparound does not "
9053 "occur when comparing P +- C1 with "
9055 WARN_STRICT_OVERFLOW_CONDITIONAL);
9060 return constant_boolean_node (bitpos0 == bitpos1, type);
9062 return constant_boolean_node (bitpos0 != bitpos1, type);
9064 return constant_boolean_node (bitpos0 < bitpos1, type);
9066 return constant_boolean_node (bitpos0 <= bitpos1, type);
9068 return constant_boolean_node (bitpos0 >= bitpos1, type);
9070 return constant_boolean_node (bitpos0 > bitpos1, type);
9074 /* We can simplify the comparison to a comparison of the variable
9075 offset parts if the constant offset parts are equal.
9076 Be careful to use signed size type here because otherwise we
9077 mess with array offsets in the wrong way. This is possible
9078 because pointer arithmetic is restricted to retain within an
9079 object and overflow on pointer differences is undefined as of
9080 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
9081 else if (bitpos0 == bitpos1
9082 && ((code == EQ_EXPR || code == NE_EXPR)
9083 || POINTER_TYPE_OVERFLOW_UNDEFINED))
9085 tree signed_size_type_node;
9086 signed_size_type_node = signed_type_for (size_type_node);
9088 /* By converting to signed size type we cover middle-end pointer
9089 arithmetic which operates on unsigned pointer types of size
9090 type size and ARRAY_REF offsets which are properly sign or
9091 zero extended from their type in case it is narrower than
9093 if (offset0 == NULL_TREE)
9094 offset0 = build_int_cst (signed_size_type_node, 0);
9096 offset0 = fold_convert (signed_size_type_node, offset0);
9097 if (offset1 == NULL_TREE)
9098 offset1 = build_int_cst (signed_size_type_node, 0);
9100 offset1 = fold_convert (signed_size_type_node, offset1);
9104 && (pointer_may_wrap_p (base0, offset0, bitpos0)
9105 || pointer_may_wrap_p (base1, offset1, bitpos1)))
9106 fold_overflow_warning (("assuming pointer wraparound does not "
9107 "occur when comparing P +- C1 with "
9109 WARN_STRICT_OVERFLOW_COMPARISON);
9111 return fold_build2 (code, type, offset0, offset1);
9114 /* For non-equal bases we can simplify if they are addresses
9115 of local binding decls or constants. */
9116 else if (indirect_base0 && indirect_base1
9117 /* We know that !operand_equal_p (base0, base1, 0)
9118 because the if condition was false. But make
9119 sure two decls are not the same. */
9121 && TREE_CODE (arg0) == ADDR_EXPR
9122 && TREE_CODE (arg1) == ADDR_EXPR
9123 && (((TREE_CODE (base0) == VAR_DECL
9124 || TREE_CODE (base0) == PARM_DECL)
9125 && (targetm.binds_local_p (base0)
9126 || CONSTANT_CLASS_P (base1)))
9127 || CONSTANT_CLASS_P (base0))
9128 && (((TREE_CODE (base1) == VAR_DECL
9129 || TREE_CODE (base1) == PARM_DECL)
9130 && (targetm.binds_local_p (base1)
9131 || CONSTANT_CLASS_P (base0)))
9132 || CONSTANT_CLASS_P (base1)))
9134 if (code == EQ_EXPR)
9135 return omit_two_operands (type, boolean_false_node, arg0, arg1);
9136 else if (code == NE_EXPR)
9137 return omit_two_operands (type, boolean_true_node, arg0, arg1);
9139 /* For equal offsets we can simplify to a comparison of the
9141 else if (bitpos0 == bitpos1
9143 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
9145 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
9146 && ((offset0 == offset1)
9147 || (offset0 && offset1
9148 && operand_equal_p (offset0, offset1, 0))))
9151 base0 = build_fold_addr_expr (base0);
9153 base1 = build_fold_addr_expr (base1);
9154 return fold_build2 (code, type, base0, base1);
9158 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
9159 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
9160 the resulting offset is smaller in absolute value than the
9162 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9163 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
9164 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9165 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9166 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
9167 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9168 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
9170 tree const1 = TREE_OPERAND (arg0, 1);
9171 tree const2 = TREE_OPERAND (arg1, 1);
9172 tree variable1 = TREE_OPERAND (arg0, 0);
9173 tree variable2 = TREE_OPERAND (arg1, 0);
9175 const char * const warnmsg = G_("assuming signed overflow does not "
9176 "occur when combining constants around "
9179 /* Put the constant on the side where it doesn't overflow and is
9180 of lower absolute value than before. */
9181 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9182 ? MINUS_EXPR : PLUS_EXPR,
9184 if (!TREE_OVERFLOW (cst)
9185 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2))
9187 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9188 return fold_build2 (code, type,
9190 fold_build2 (TREE_CODE (arg1), TREE_TYPE (arg1),
9194 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9195 ? MINUS_EXPR : PLUS_EXPR,
9197 if (!TREE_OVERFLOW (cst)
9198 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1))
9200 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9201 return fold_build2 (code, type,
9202 fold_build2 (TREE_CODE (arg0), TREE_TYPE (arg0),
9208 /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
9209 signed arithmetic case. That form is created by the compiler
9210 often enough for folding it to be of value. One example is in
9211 computing loop trip counts after Operator Strength Reduction. */
9212 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9213 && TREE_CODE (arg0) == MULT_EXPR
9214 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9215 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9216 && integer_zerop (arg1))
9218 tree const1 = TREE_OPERAND (arg0, 1);
9219 tree const2 = arg1; /* zero */
9220 tree variable1 = TREE_OPERAND (arg0, 0);
9221 enum tree_code cmp_code = code;
9223 gcc_assert (!integer_zerop (const1));
9225 fold_overflow_warning (("assuming signed overflow does not occur when "
9226 "eliminating multiplication in comparison "
9228 WARN_STRICT_OVERFLOW_COMPARISON);
9230 /* If const1 is negative we swap the sense of the comparison. */
9231 if (tree_int_cst_sgn (const1) < 0)
9232 cmp_code = swap_tree_comparison (cmp_code);
9234 return fold_build2 (cmp_code, type, variable1, const2);
9237 tem = maybe_canonicalize_comparison (code, type, op0, op1);
9241 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
9243 tree targ0 = strip_float_extensions (arg0);
9244 tree targ1 = strip_float_extensions (arg1);
9245 tree newtype = TREE_TYPE (targ0);
9247 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
9248 newtype = TREE_TYPE (targ1);
9250 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
9251 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
9252 return fold_build2 (code, type, fold_convert (newtype, targ0),
9253 fold_convert (newtype, targ1));
9255 /* (-a) CMP (-b) -> b CMP a */
9256 if (TREE_CODE (arg0) == NEGATE_EXPR
9257 && TREE_CODE (arg1) == NEGATE_EXPR)
9258 return fold_build2 (code, type, TREE_OPERAND (arg1, 0),
9259 TREE_OPERAND (arg0, 0));
9261 if (TREE_CODE (arg1) == REAL_CST)
9263 REAL_VALUE_TYPE cst;
9264 cst = TREE_REAL_CST (arg1);
9266 /* (-a) CMP CST -> a swap(CMP) (-CST) */
9267 if (TREE_CODE (arg0) == NEGATE_EXPR)
9268 return fold_build2 (swap_tree_comparison (code), type,
9269 TREE_OPERAND (arg0, 0),
9270 build_real (TREE_TYPE (arg1),
9271 REAL_VALUE_NEGATE (cst)));
9273 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
9274 /* a CMP (-0) -> a CMP 0 */
9275 if (REAL_VALUE_MINUS_ZERO (cst))
9276 return fold_build2 (code, type, arg0,
9277 build_real (TREE_TYPE (arg1), dconst0));
9279 /* x != NaN is always true, other ops are always false. */
9280 if (REAL_VALUE_ISNAN (cst)
9281 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
9283 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
9284 return omit_one_operand (type, tem, arg0);
9287 /* Fold comparisons against infinity. */
9288 if (REAL_VALUE_ISINF (cst)
9289 && MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1))))
9291 tem = fold_inf_compare (code, type, arg0, arg1);
9292 if (tem != NULL_TREE)
9297 /* If this is a comparison of a real constant with a PLUS_EXPR
9298 or a MINUS_EXPR of a real constant, we can convert it into a
9299 comparison with a revised real constant as long as no overflow
9300 occurs when unsafe_math_optimizations are enabled. */
9301 if (flag_unsafe_math_optimizations
9302 && TREE_CODE (arg1) == REAL_CST
9303 && (TREE_CODE (arg0) == PLUS_EXPR
9304 || TREE_CODE (arg0) == MINUS_EXPR)
9305 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
9306 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
9307 ? MINUS_EXPR : PLUS_EXPR,
9308 arg1, TREE_OPERAND (arg0, 1), 0))
9309 && !TREE_OVERFLOW (tem))
9310 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
9312 /* Likewise, we can simplify a comparison of a real constant with
9313 a MINUS_EXPR whose first operand is also a real constant, i.e.
9314 (c1 - x) < c2 becomes x > c1-c2. Reordering is allowed on
9315 floating-point types only if -fassociative-math is set. */
9316 if (flag_associative_math
9317 && TREE_CODE (arg1) == REAL_CST
9318 && TREE_CODE (arg0) == MINUS_EXPR
9319 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
9320 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
9322 && !TREE_OVERFLOW (tem))
9323 return fold_build2 (swap_tree_comparison (code), type,
9324 TREE_OPERAND (arg0, 1), tem);
9326 /* Fold comparisons against built-in math functions. */
9327 if (TREE_CODE (arg1) == REAL_CST
9328 && flag_unsafe_math_optimizations
9329 && ! flag_errno_math)
9331 enum built_in_function fcode = builtin_mathfn_code (arg0);
9333 if (fcode != END_BUILTINS)
9335 tem = fold_mathfn_compare (fcode, code, type, arg0, arg1);
9336 if (tem != NULL_TREE)
9342 if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
9343 && CONVERT_EXPR_P (arg0))
9345 /* If we are widening one operand of an integer comparison,
9346 see if the other operand is similarly being widened. Perhaps we
9347 can do the comparison in the narrower type. */
9348 tem = fold_widened_comparison (code, type, arg0, arg1);
9352 /* Or if we are changing signedness. */
9353 tem = fold_sign_changed_comparison (code, type, arg0, arg1);
9358 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
9359 constant, we can simplify it. */
9360 if (TREE_CODE (arg1) == INTEGER_CST
9361 && (TREE_CODE (arg0) == MIN_EXPR
9362 || TREE_CODE (arg0) == MAX_EXPR)
9363 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9365 tem = optimize_minmax_comparison (code, type, op0, op1);
9370 /* Simplify comparison of something with itself. (For IEEE
9371 floating-point, we can only do some of these simplifications.) */
9372 if (operand_equal_p (arg0, arg1, 0))
9377 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9378 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9379 return constant_boolean_node (1, type);
9384 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9385 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9386 return constant_boolean_node (1, type);
9387 return fold_build2 (EQ_EXPR, type, arg0, arg1);
9390 /* For NE, we can only do this simplification if integer
9391 or we don't honor IEEE floating point NaNs. */
9392 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
9393 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9395 /* ... fall through ... */
9398 return constant_boolean_node (0, type);
9404 /* If we are comparing an expression that just has comparisons
9405 of two integer values, arithmetic expressions of those comparisons,
9406 and constants, we can simplify it. There are only three cases
9407 to check: the two values can either be equal, the first can be
9408 greater, or the second can be greater. Fold the expression for
9409 those three values. Since each value must be 0 or 1, we have
9410 eight possibilities, each of which corresponds to the constant 0
9411 or 1 or one of the six possible comparisons.
9413 This handles common cases like (a > b) == 0 but also handles
9414 expressions like ((x > y) - (y > x)) > 0, which supposedly
9415 occur in macroized code. */
9417 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
9419 tree cval1 = 0, cval2 = 0;
9422 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
9423 /* Don't handle degenerate cases here; they should already
9424 have been handled anyway. */
9425 && cval1 != 0 && cval2 != 0
9426 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
9427 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
9428 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
9429 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
9430 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
9431 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
9432 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
9434 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
9435 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
9437 /* We can't just pass T to eval_subst in case cval1 or cval2
9438 was the same as ARG1. */
9441 = fold_build2 (code, type,
9442 eval_subst (arg0, cval1, maxval,
9446 = fold_build2 (code, type,
9447 eval_subst (arg0, cval1, maxval,
9451 = fold_build2 (code, type,
9452 eval_subst (arg0, cval1, minval,
9456 /* All three of these results should be 0 or 1. Confirm they are.
9457 Then use those values to select the proper code to use. */
9459 if (TREE_CODE (high_result) == INTEGER_CST
9460 && TREE_CODE (equal_result) == INTEGER_CST
9461 && TREE_CODE (low_result) == INTEGER_CST)
9463 /* Make a 3-bit mask with the high-order bit being the
9464 value for `>', the next for '=', and the low for '<'. */
9465 switch ((integer_onep (high_result) * 4)
9466 + (integer_onep (equal_result) * 2)
9467 + integer_onep (low_result))
9471 return omit_one_operand (type, integer_zero_node, arg0);
9492 return omit_one_operand (type, integer_one_node, arg0);
9496 return save_expr (build2 (code, type, cval1, cval2));
9497 return fold_build2 (code, type, cval1, cval2);
9502 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
9503 into a single range test. */
9504 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
9505 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
9506 && TREE_CODE (arg1) == INTEGER_CST
9507 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9508 && !integer_zerop (TREE_OPERAND (arg0, 1))
9509 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9510 && !TREE_OVERFLOW (arg1))
9512 tem = fold_div_compare (code, type, arg0, arg1);
9513 if (tem != NULL_TREE)
9517 /* Fold ~X op ~Y as Y op X. */
9518 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9519 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9521 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9522 return fold_build2 (code, type,
9523 fold_convert (cmp_type, TREE_OPERAND (arg1, 0)),
9524 TREE_OPERAND (arg0, 0));
9527 /* Fold ~X op C as X op' ~C, where op' is the swapped comparison. */
9528 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9529 && TREE_CODE (arg1) == INTEGER_CST)
9531 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9532 return fold_build2 (swap_tree_comparison (code), type,
9533 TREE_OPERAND (arg0, 0),
9534 fold_build1 (BIT_NOT_EXPR, cmp_type,
9535 fold_convert (cmp_type, arg1)));
9542 /* Subroutine of fold_binary. Optimize complex multiplications of the
9543 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
9544 argument EXPR represents the expression "z" of type TYPE. */
9547 fold_mult_zconjz (tree type, tree expr)
9549 tree itype = TREE_TYPE (type);
9550 tree rpart, ipart, tem;
9552 if (TREE_CODE (expr) == COMPLEX_EXPR)
9554 rpart = TREE_OPERAND (expr, 0);
9555 ipart = TREE_OPERAND (expr, 1);
9557 else if (TREE_CODE (expr) == COMPLEX_CST)
9559 rpart = TREE_REALPART (expr);
9560 ipart = TREE_IMAGPART (expr);
9564 expr = save_expr (expr);
9565 rpart = fold_build1 (REALPART_EXPR, itype, expr);
9566 ipart = fold_build1 (IMAGPART_EXPR, itype, expr);
9569 rpart = save_expr (rpart);
9570 ipart = save_expr (ipart);
9571 tem = fold_build2 (PLUS_EXPR, itype,
9572 fold_build2 (MULT_EXPR, itype, rpart, rpart),
9573 fold_build2 (MULT_EXPR, itype, ipart, ipart));
9574 return fold_build2 (COMPLEX_EXPR, type, tem,
9575 fold_convert (itype, integer_zero_node));
9579 /* Subroutine of fold_binary. If P is the value of EXPR, computes
9580 power-of-two M and (arbitrary) N such that M divides (P-N). This condition
9581 guarantees that P and N have the same least significant log2(M) bits.
9582 N is not otherwise constrained. In particular, N is not normalized to
9583 0 <= N < M as is common. In general, the precise value of P is unknown.
9584 M is chosen as large as possible such that constant N can be determined.
9586 Returns M and sets *RESIDUE to N.
9588 If ALLOW_FUNC_ALIGN is true, do take functions' DECL_ALIGN_UNIT into
9589 account. This is not always possible due to PR 35705.
9592 static unsigned HOST_WIDE_INT
9593 get_pointer_modulus_and_residue (tree expr, unsigned HOST_WIDE_INT *residue,
9594 bool allow_func_align)
9596 enum tree_code code;
9600 code = TREE_CODE (expr);
9601 if (code == ADDR_EXPR)
9603 expr = TREE_OPERAND (expr, 0);
9604 if (handled_component_p (expr))
9606 HOST_WIDE_INT bitsize, bitpos;
9608 enum machine_mode mode;
9609 int unsignedp, volatilep;
9611 expr = get_inner_reference (expr, &bitsize, &bitpos, &offset,
9612 &mode, &unsignedp, &volatilep, false);
9613 *residue = bitpos / BITS_PER_UNIT;
9616 if (TREE_CODE (offset) == INTEGER_CST)
9617 *residue += TREE_INT_CST_LOW (offset);
9619 /* We don't handle more complicated offset expressions. */
9625 && (allow_func_align || TREE_CODE (expr) != FUNCTION_DECL))
9626 return DECL_ALIGN_UNIT (expr);
9628 else if (code == POINTER_PLUS_EXPR)
9631 unsigned HOST_WIDE_INT modulus;
9632 enum tree_code inner_code;
9634 op0 = TREE_OPERAND (expr, 0);
9636 modulus = get_pointer_modulus_and_residue (op0, residue,
9639 op1 = TREE_OPERAND (expr, 1);
9641 inner_code = TREE_CODE (op1);
9642 if (inner_code == INTEGER_CST)
9644 *residue += TREE_INT_CST_LOW (op1);
9647 else if (inner_code == MULT_EXPR)
9649 op1 = TREE_OPERAND (op1, 1);
9650 if (TREE_CODE (op1) == INTEGER_CST)
9652 unsigned HOST_WIDE_INT align;
9654 /* Compute the greatest power-of-2 divisor of op1. */
9655 align = TREE_INT_CST_LOW (op1);
9658 /* If align is non-zero and less than *modulus, replace
9659 *modulus with align., If align is 0, then either op1 is 0
9660 or the greatest power-of-2 divisor of op1 doesn't fit in an
9661 unsigned HOST_WIDE_INT. In either case, no additional
9662 constraint is imposed. */
9664 modulus = MIN (modulus, align);
9671 /* If we get here, we were unable to determine anything useful about the
9677 /* Fold a binary expression of code CODE and type TYPE with operands
9678 OP0 and OP1. Return the folded expression if folding is
9679 successful. Otherwise, return NULL_TREE. */
9682 fold_binary (enum tree_code code, tree type, tree op0, tree op1)
9684 enum tree_code_class kind = TREE_CODE_CLASS (code);
9685 tree arg0, arg1, tem;
9686 tree t1 = NULL_TREE;
9687 bool strict_overflow_p;
9689 gcc_assert (IS_EXPR_CODE_CLASS (kind)
9690 && TREE_CODE_LENGTH (code) == 2
9692 && op1 != NULL_TREE);
9697 /* Strip any conversions that don't change the mode. This is
9698 safe for every expression, except for a comparison expression
9699 because its signedness is derived from its operands. So, in
9700 the latter case, only strip conversions that don't change the
9701 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
9704 Note that this is done as an internal manipulation within the
9705 constant folder, in order to find the simplest representation
9706 of the arguments so that their form can be studied. In any
9707 cases, the appropriate type conversions should be put back in
9708 the tree that will get out of the constant folder. */
9710 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
9712 STRIP_SIGN_NOPS (arg0);
9713 STRIP_SIGN_NOPS (arg1);
9721 /* Note that TREE_CONSTANT isn't enough: static var addresses are
9722 constant but we can't do arithmetic on them. */
9723 if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9724 || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
9725 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == FIXED_CST)
9726 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == INTEGER_CST)
9727 || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
9728 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST))
9730 if (kind == tcc_binary)
9732 /* Make sure type and arg0 have the same saturating flag. */
9733 gcc_assert (TYPE_SATURATING (type)
9734 == TYPE_SATURATING (TREE_TYPE (arg0)));
9735 tem = const_binop (code, arg0, arg1, 0);
9737 else if (kind == tcc_comparison)
9738 tem = fold_relational_const (code, type, arg0, arg1);
9742 if (tem != NULL_TREE)
9744 if (TREE_TYPE (tem) != type)
9745 tem = fold_convert (type, tem);
9750 /* If this is a commutative operation, and ARG0 is a constant, move it
9751 to ARG1 to reduce the number of tests below. */
9752 if (commutative_tree_code (code)
9753 && tree_swap_operands_p (arg0, arg1, true))
9754 return fold_build2 (code, type, op1, op0);
9756 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
9758 First check for cases where an arithmetic operation is applied to a
9759 compound, conditional, or comparison operation. Push the arithmetic
9760 operation inside the compound or conditional to see if any folding
9761 can then be done. Convert comparison to conditional for this purpose.
9762 The also optimizes non-constant cases that used to be done in
9765 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
9766 one of the operands is a comparison and the other is a comparison, a
9767 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
9768 code below would make the expression more complex. Change it to a
9769 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
9770 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
9772 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
9773 || code == EQ_EXPR || code == NE_EXPR)
9774 && ((truth_value_p (TREE_CODE (arg0))
9775 && (truth_value_p (TREE_CODE (arg1))
9776 || (TREE_CODE (arg1) == BIT_AND_EXPR
9777 && integer_onep (TREE_OPERAND (arg1, 1)))))
9778 || (truth_value_p (TREE_CODE (arg1))
9779 && (truth_value_p (TREE_CODE (arg0))
9780 || (TREE_CODE (arg0) == BIT_AND_EXPR
9781 && integer_onep (TREE_OPERAND (arg0, 1)))))))
9783 tem = fold_build2 (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
9784 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
9787 fold_convert (boolean_type_node, arg0),
9788 fold_convert (boolean_type_node, arg1));
9790 if (code == EQ_EXPR)
9791 tem = invert_truthvalue (tem);
9793 return fold_convert (type, tem);
9796 if (TREE_CODE_CLASS (code) == tcc_binary
9797 || TREE_CODE_CLASS (code) == tcc_comparison)
9799 if (TREE_CODE (arg0) == COMPOUND_EXPR)
9800 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
9801 fold_build2 (code, type,
9802 fold_convert (TREE_TYPE (op0),
9803 TREE_OPERAND (arg0, 1)),
9805 if (TREE_CODE (arg1) == COMPOUND_EXPR
9806 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9807 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
9808 fold_build2 (code, type, op0,
9809 fold_convert (TREE_TYPE (op1),
9810 TREE_OPERAND (arg1, 1))));
9812 if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
9814 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
9816 /*cond_first_p=*/1);
9817 if (tem != NULL_TREE)
9821 if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1))
9823 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
9825 /*cond_first_p=*/0);
9826 if (tem != NULL_TREE)
9833 case POINTER_PLUS_EXPR:
9834 /* 0 +p index -> (type)index */
9835 if (integer_zerop (arg0))
9836 return non_lvalue (fold_convert (type, arg1));
9838 /* PTR +p 0 -> PTR */
9839 if (integer_zerop (arg1))
9840 return non_lvalue (fold_convert (type, arg0));
9842 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
9843 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9844 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9845 return fold_convert (type, fold_build2 (PLUS_EXPR, sizetype,
9846 fold_convert (sizetype, arg1),
9847 fold_convert (sizetype, arg0)));
9849 /* index +p PTR -> PTR +p index */
9850 if (POINTER_TYPE_P (TREE_TYPE (arg1))
9851 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9852 return fold_build2 (POINTER_PLUS_EXPR, type,
9853 fold_convert (type, arg1),
9854 fold_convert (sizetype, arg0));
9856 /* (PTR +p B) +p A -> PTR +p (B + A) */
9857 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
9860 tree arg01 = fold_convert (sizetype, TREE_OPERAND (arg0, 1));
9861 tree arg00 = TREE_OPERAND (arg0, 0);
9862 inner = fold_build2 (PLUS_EXPR, sizetype,
9863 arg01, fold_convert (sizetype, arg1));
9864 return fold_convert (type,
9865 fold_build2 (POINTER_PLUS_EXPR,
9866 TREE_TYPE (arg00), arg00, inner));
9869 /* PTR_CST +p CST -> CST1 */
9870 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9871 return fold_build2 (PLUS_EXPR, type, arg0, fold_convert (type, arg1));
9873 /* Try replacing &a[i1] +p c * i2 with &a[i1 + i2], if c is step
9874 of the array. Loop optimizer sometimes produce this type of
9876 if (TREE_CODE (arg0) == ADDR_EXPR)
9878 tem = try_move_mult_to_index (arg0, fold_convert (sizetype, arg1));
9880 return fold_convert (type, tem);
9886 /* A + (-B) -> A - B */
9887 if (TREE_CODE (arg1) == NEGATE_EXPR)
9888 return fold_build2 (MINUS_EXPR, type,
9889 fold_convert (type, arg0),
9890 fold_convert (type, TREE_OPERAND (arg1, 0)));
9891 /* (-A) + B -> B - A */
9892 if (TREE_CODE (arg0) == NEGATE_EXPR
9893 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
9894 return fold_build2 (MINUS_EXPR, type,
9895 fold_convert (type, arg1),
9896 fold_convert (type, TREE_OPERAND (arg0, 0)));
9898 if (INTEGRAL_TYPE_P (type))
9900 /* Convert ~A + 1 to -A. */
9901 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9902 && integer_onep (arg1))
9903 return fold_build1 (NEGATE_EXPR, type,
9904 fold_convert (type, TREE_OPERAND (arg0, 0)));
9907 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9908 && !TYPE_OVERFLOW_TRAPS (type))
9910 tree tem = TREE_OPERAND (arg0, 0);
9913 if (operand_equal_p (tem, arg1, 0))
9915 t1 = build_int_cst_type (type, -1);
9916 return omit_one_operand (type, t1, arg1);
9921 if (TREE_CODE (arg1) == BIT_NOT_EXPR
9922 && !TYPE_OVERFLOW_TRAPS (type))
9924 tree tem = TREE_OPERAND (arg1, 0);
9927 if (operand_equal_p (arg0, tem, 0))
9929 t1 = build_int_cst_type (type, -1);
9930 return omit_one_operand (type, t1, arg0);
9934 /* X + (X / CST) * -CST is X % CST. */
9935 if (TREE_CODE (arg1) == MULT_EXPR
9936 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
9937 && operand_equal_p (arg0,
9938 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
9940 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
9941 tree cst1 = TREE_OPERAND (arg1, 1);
9942 tree sum = fold_binary (PLUS_EXPR, TREE_TYPE (cst1), cst1, cst0);
9943 if (sum && integer_zerop (sum))
9944 return fold_convert (type,
9945 fold_build2 (TRUNC_MOD_EXPR,
9946 TREE_TYPE (arg0), arg0, cst0));
9950 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the
9951 same or one. Make sure type is not saturating.
9952 fold_plusminus_mult_expr will re-associate. */
9953 if ((TREE_CODE (arg0) == MULT_EXPR
9954 || TREE_CODE (arg1) == MULT_EXPR)
9955 && !TYPE_SATURATING (type)
9956 && (!FLOAT_TYPE_P (type) || flag_associative_math))
9958 tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1);
9963 if (! FLOAT_TYPE_P (type))
9965 if (integer_zerop (arg1))
9966 return non_lvalue (fold_convert (type, arg0));
9968 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
9969 with a constant, and the two constants have no bits in common,
9970 we should treat this as a BIT_IOR_EXPR since this may produce more
9972 if (TREE_CODE (arg0) == BIT_AND_EXPR
9973 && TREE_CODE (arg1) == BIT_AND_EXPR
9974 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9975 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9976 && integer_zerop (const_binop (BIT_AND_EXPR,
9977 TREE_OPERAND (arg0, 1),
9978 TREE_OPERAND (arg1, 1), 0)))
9980 code = BIT_IOR_EXPR;
9984 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
9985 (plus (plus (mult) (mult)) (foo)) so that we can
9986 take advantage of the factoring cases below. */
9987 if (((TREE_CODE (arg0) == PLUS_EXPR
9988 || TREE_CODE (arg0) == MINUS_EXPR)
9989 && TREE_CODE (arg1) == MULT_EXPR)
9990 || ((TREE_CODE (arg1) == PLUS_EXPR
9991 || TREE_CODE (arg1) == MINUS_EXPR)
9992 && TREE_CODE (arg0) == MULT_EXPR))
9994 tree parg0, parg1, parg, marg;
9995 enum tree_code pcode;
9997 if (TREE_CODE (arg1) == MULT_EXPR)
9998 parg = arg0, marg = arg1;
10000 parg = arg1, marg = arg0;
10001 pcode = TREE_CODE (parg);
10002 parg0 = TREE_OPERAND (parg, 0);
10003 parg1 = TREE_OPERAND (parg, 1);
10004 STRIP_NOPS (parg0);
10005 STRIP_NOPS (parg1);
10007 if (TREE_CODE (parg0) == MULT_EXPR
10008 && TREE_CODE (parg1) != MULT_EXPR)
10009 return fold_build2 (pcode, type,
10010 fold_build2 (PLUS_EXPR, type,
10011 fold_convert (type, parg0),
10012 fold_convert (type, marg)),
10013 fold_convert (type, parg1));
10014 if (TREE_CODE (parg0) != MULT_EXPR
10015 && TREE_CODE (parg1) == MULT_EXPR)
10016 return fold_build2 (PLUS_EXPR, type,
10017 fold_convert (type, parg0),
10018 fold_build2 (pcode, type,
10019 fold_convert (type, marg),
10020 fold_convert (type,
10026 /* See if ARG1 is zero and X + ARG1 reduces to X. */
10027 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
10028 return non_lvalue (fold_convert (type, arg0));
10030 /* Likewise if the operands are reversed. */
10031 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10032 return non_lvalue (fold_convert (type, arg1));
10034 /* Convert X + -C into X - C. */
10035 if (TREE_CODE (arg1) == REAL_CST
10036 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
10038 tem = fold_negate_const (arg1, type);
10039 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
10040 return fold_build2 (MINUS_EXPR, type,
10041 fold_convert (type, arg0),
10042 fold_convert (type, tem));
10045 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
10046 to __complex__ ( x, y ). This is not the same for SNaNs or
10047 if signed zeros are involved. */
10048 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10049 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10050 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10052 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10053 tree arg0r = fold_unary (REALPART_EXPR, rtype, arg0);
10054 tree arg0i = fold_unary (IMAGPART_EXPR, rtype, arg0);
10055 bool arg0rz = false, arg0iz = false;
10056 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10057 || (arg0i && (arg0iz = real_zerop (arg0i))))
10059 tree arg1r = fold_unary (REALPART_EXPR, rtype, arg1);
10060 tree arg1i = fold_unary (IMAGPART_EXPR, rtype, arg1);
10061 if (arg0rz && arg1i && real_zerop (arg1i))
10063 tree rp = arg1r ? arg1r
10064 : build1 (REALPART_EXPR, rtype, arg1);
10065 tree ip = arg0i ? arg0i
10066 : build1 (IMAGPART_EXPR, rtype, arg0);
10067 return fold_build2 (COMPLEX_EXPR, type, rp, ip);
10069 else if (arg0iz && arg1r && real_zerop (arg1r))
10071 tree rp = arg0r ? arg0r
10072 : build1 (REALPART_EXPR, rtype, arg0);
10073 tree ip = arg1i ? arg1i
10074 : build1 (IMAGPART_EXPR, rtype, arg1);
10075 return fold_build2 (COMPLEX_EXPR, type, rp, ip);
10080 if (flag_unsafe_math_optimizations
10081 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10082 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10083 && (tem = distribute_real_division (code, type, arg0, arg1)))
10086 /* Convert x+x into x*2.0. */
10087 if (operand_equal_p (arg0, arg1, 0)
10088 && SCALAR_FLOAT_TYPE_P (type))
10089 return fold_build2 (MULT_EXPR, type, arg0,
10090 build_real (type, dconst2));
10092 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
10093 We associate floats only if the user has specified
10094 -fassociative-math. */
10095 if (flag_associative_math
10096 && TREE_CODE (arg1) == PLUS_EXPR
10097 && TREE_CODE (arg0) != MULT_EXPR)
10099 tree tree10 = TREE_OPERAND (arg1, 0);
10100 tree tree11 = TREE_OPERAND (arg1, 1);
10101 if (TREE_CODE (tree11) == MULT_EXPR
10102 && TREE_CODE (tree10) == MULT_EXPR)
10105 tree0 = fold_build2 (PLUS_EXPR, type, arg0, tree10);
10106 return fold_build2 (PLUS_EXPR, type, tree0, tree11);
10109 /* Convert (b*c + d*e) + a into b*c + (d*e +a).
10110 We associate floats only if the user has specified
10111 -fassociative-math. */
10112 if (flag_associative_math
10113 && TREE_CODE (arg0) == PLUS_EXPR
10114 && TREE_CODE (arg1) != MULT_EXPR)
10116 tree tree00 = TREE_OPERAND (arg0, 0);
10117 tree tree01 = TREE_OPERAND (arg0, 1);
10118 if (TREE_CODE (tree01) == MULT_EXPR
10119 && TREE_CODE (tree00) == MULT_EXPR)
10122 tree0 = fold_build2 (PLUS_EXPR, type, tree01, arg1);
10123 return fold_build2 (PLUS_EXPR, type, tree00, tree0);
10129 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
10130 is a rotate of A by C1 bits. */
10131 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
10132 is a rotate of A by B bits. */
10134 enum tree_code code0, code1;
10136 code0 = TREE_CODE (arg0);
10137 code1 = TREE_CODE (arg1);
10138 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
10139 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
10140 && operand_equal_p (TREE_OPERAND (arg0, 0),
10141 TREE_OPERAND (arg1, 0), 0)
10142 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
10143 TYPE_UNSIGNED (rtype))
10144 /* Only create rotates in complete modes. Other cases are not
10145 expanded properly. */
10146 && TYPE_PRECISION (rtype) == GET_MODE_PRECISION (TYPE_MODE (rtype)))
10148 tree tree01, tree11;
10149 enum tree_code code01, code11;
10151 tree01 = TREE_OPERAND (arg0, 1);
10152 tree11 = TREE_OPERAND (arg1, 1);
10153 STRIP_NOPS (tree01);
10154 STRIP_NOPS (tree11);
10155 code01 = TREE_CODE (tree01);
10156 code11 = TREE_CODE (tree11);
10157 if (code01 == INTEGER_CST
10158 && code11 == INTEGER_CST
10159 && TREE_INT_CST_HIGH (tree01) == 0
10160 && TREE_INT_CST_HIGH (tree11) == 0
10161 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
10162 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
10163 return build2 (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
10164 code0 == LSHIFT_EXPR ? tree01 : tree11);
10165 else if (code11 == MINUS_EXPR)
10167 tree tree110, tree111;
10168 tree110 = TREE_OPERAND (tree11, 0);
10169 tree111 = TREE_OPERAND (tree11, 1);
10170 STRIP_NOPS (tree110);
10171 STRIP_NOPS (tree111);
10172 if (TREE_CODE (tree110) == INTEGER_CST
10173 && 0 == compare_tree_int (tree110,
10175 (TREE_TYPE (TREE_OPERAND
10177 && operand_equal_p (tree01, tree111, 0))
10178 return build2 ((code0 == LSHIFT_EXPR
10181 type, TREE_OPERAND (arg0, 0), tree01);
10183 else if (code01 == MINUS_EXPR)
10185 tree tree010, tree011;
10186 tree010 = TREE_OPERAND (tree01, 0);
10187 tree011 = TREE_OPERAND (tree01, 1);
10188 STRIP_NOPS (tree010);
10189 STRIP_NOPS (tree011);
10190 if (TREE_CODE (tree010) == INTEGER_CST
10191 && 0 == compare_tree_int (tree010,
10193 (TREE_TYPE (TREE_OPERAND
10195 && operand_equal_p (tree11, tree011, 0))
10196 return build2 ((code0 != LSHIFT_EXPR
10199 type, TREE_OPERAND (arg0, 0), tree11);
10205 /* In most languages, can't associate operations on floats through
10206 parentheses. Rather than remember where the parentheses were, we
10207 don't associate floats at all, unless the user has specified
10208 -fassociative-math.
10209 And, we need to make sure type is not saturating. */
10211 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
10212 && !TYPE_SATURATING (type))
10214 tree var0, con0, lit0, minus_lit0;
10215 tree var1, con1, lit1, minus_lit1;
10218 /* Split both trees into variables, constants, and literals. Then
10219 associate each group together, the constants with literals,
10220 then the result with variables. This increases the chances of
10221 literals being recombined later and of generating relocatable
10222 expressions for the sum of a constant and literal. */
10223 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
10224 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
10225 code == MINUS_EXPR);
10227 /* With undefined overflow we can only associate constants
10228 with one variable. */
10229 if (((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
10230 || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
10236 if (TREE_CODE (tmp0) == NEGATE_EXPR)
10237 tmp0 = TREE_OPERAND (tmp0, 0);
10238 if (TREE_CODE (tmp1) == NEGATE_EXPR)
10239 tmp1 = TREE_OPERAND (tmp1, 0);
10240 /* The only case we can still associate with two variables
10241 is if they are the same, modulo negation. */
10242 if (!operand_equal_p (tmp0, tmp1, 0))
10246 /* Only do something if we found more than two objects. Otherwise,
10247 nothing has changed and we risk infinite recursion. */
10249 && (2 < ((var0 != 0) + (var1 != 0)
10250 + (con0 != 0) + (con1 != 0)
10251 + (lit0 != 0) + (lit1 != 0)
10252 + (minus_lit0 != 0) + (minus_lit1 != 0))))
10254 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
10255 if (code == MINUS_EXPR)
10258 var0 = associate_trees (var0, var1, code, type);
10259 con0 = associate_trees (con0, con1, code, type);
10260 lit0 = associate_trees (lit0, lit1, code, type);
10261 minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type);
10263 /* Preserve the MINUS_EXPR if the negative part of the literal is
10264 greater than the positive part. Otherwise, the multiplicative
10265 folding code (i.e extract_muldiv) may be fooled in case
10266 unsigned constants are subtracted, like in the following
10267 example: ((X*2 + 4) - 8U)/2. */
10268 if (minus_lit0 && lit0)
10270 if (TREE_CODE (lit0) == INTEGER_CST
10271 && TREE_CODE (minus_lit0) == INTEGER_CST
10272 && tree_int_cst_lt (lit0, minus_lit0))
10274 minus_lit0 = associate_trees (minus_lit0, lit0,
10280 lit0 = associate_trees (lit0, minus_lit0,
10288 return fold_convert (type,
10289 associate_trees (var0, minus_lit0,
10290 MINUS_EXPR, type));
10293 con0 = associate_trees (con0, minus_lit0,
10295 return fold_convert (type,
10296 associate_trees (var0, con0,
10301 con0 = associate_trees (con0, lit0, code, type);
10302 return fold_convert (type, associate_trees (var0, con0,
10310 /* Pointer simplifications for subtraction, simple reassociations. */
10311 if (POINTER_TYPE_P (TREE_TYPE (arg1)) && POINTER_TYPE_P (TREE_TYPE (arg0)))
10313 /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
10314 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
10315 && TREE_CODE (arg1) == POINTER_PLUS_EXPR)
10317 tree arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));
10318 tree arg01 = fold_convert (type, TREE_OPERAND (arg0, 1));
10319 tree arg10 = fold_convert (type, TREE_OPERAND (arg1, 0));
10320 tree arg11 = fold_convert (type, TREE_OPERAND (arg1, 1));
10321 return fold_build2 (PLUS_EXPR, type,
10322 fold_build2 (MINUS_EXPR, type, arg00, arg10),
10323 fold_build2 (MINUS_EXPR, type, arg01, arg11));
10325 /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1 simplifies. */
10326 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10328 tree arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));
10329 tree arg01 = fold_convert (type, TREE_OPERAND (arg0, 1));
10330 tree tmp = fold_binary (MINUS_EXPR, type, arg00, fold_convert (type, arg1));
10332 return fold_build2 (PLUS_EXPR, type, tmp, arg01);
10335 /* A - (-B) -> A + B */
10336 if (TREE_CODE (arg1) == NEGATE_EXPR)
10337 return fold_build2 (PLUS_EXPR, type, op0,
10338 fold_convert (type, TREE_OPERAND (arg1, 0)));
10339 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
10340 if (TREE_CODE (arg0) == NEGATE_EXPR
10341 && (FLOAT_TYPE_P (type)
10342 || INTEGRAL_TYPE_P (type))
10343 && negate_expr_p (arg1)
10344 && reorder_operands_p (arg0, arg1))
10345 return fold_build2 (MINUS_EXPR, type,
10346 fold_convert (type, negate_expr (arg1)),
10347 fold_convert (type, TREE_OPERAND (arg0, 0)));
10348 /* Convert -A - 1 to ~A. */
10349 if (INTEGRAL_TYPE_P (type)
10350 && TREE_CODE (arg0) == NEGATE_EXPR
10351 && integer_onep (arg1)
10352 && !TYPE_OVERFLOW_TRAPS (type))
10353 return fold_build1 (BIT_NOT_EXPR, type,
10354 fold_convert (type, TREE_OPERAND (arg0, 0)));
10356 /* Convert -1 - A to ~A. */
10357 if (INTEGRAL_TYPE_P (type)
10358 && integer_all_onesp (arg0))
10359 return fold_build1 (BIT_NOT_EXPR, type, op1);
10362 /* X - (X / CST) * CST is X % CST. */
10363 if (INTEGRAL_TYPE_P (type)
10364 && TREE_CODE (arg1) == MULT_EXPR
10365 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10366 && operand_equal_p (arg0,
10367 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0)
10368 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg1, 0), 1),
10369 TREE_OPERAND (arg1, 1), 0))
10370 return fold_convert (type,
10371 fold_build2 (TRUNC_MOD_EXPR, TREE_TYPE (arg0),
10372 arg0, TREE_OPERAND (arg1, 1)));
10374 if (! FLOAT_TYPE_P (type))
10376 if (integer_zerop (arg0))
10377 return negate_expr (fold_convert (type, arg1));
10378 if (integer_zerop (arg1))
10379 return non_lvalue (fold_convert (type, arg0));
10381 /* Fold A - (A & B) into ~B & A. */
10382 if (!TREE_SIDE_EFFECTS (arg0)
10383 && TREE_CODE (arg1) == BIT_AND_EXPR)
10385 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
10387 tree arg10 = fold_convert (type, TREE_OPERAND (arg1, 0));
10388 return fold_build2 (BIT_AND_EXPR, type,
10389 fold_build1 (BIT_NOT_EXPR, type, arg10),
10390 fold_convert (type, arg0));
10392 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10394 tree arg11 = fold_convert (type, TREE_OPERAND (arg1, 1));
10395 return fold_build2 (BIT_AND_EXPR, type,
10396 fold_build1 (BIT_NOT_EXPR, type, arg11),
10397 fold_convert (type, arg0));
10401 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
10402 any power of 2 minus 1. */
10403 if (TREE_CODE (arg0) == BIT_AND_EXPR
10404 && TREE_CODE (arg1) == BIT_AND_EXPR
10405 && operand_equal_p (TREE_OPERAND (arg0, 0),
10406 TREE_OPERAND (arg1, 0), 0))
10408 tree mask0 = TREE_OPERAND (arg0, 1);
10409 tree mask1 = TREE_OPERAND (arg1, 1);
10410 tree tem = fold_build1 (BIT_NOT_EXPR, type, mask0);
10412 if (operand_equal_p (tem, mask1, 0))
10414 tem = fold_build2 (BIT_XOR_EXPR, type,
10415 TREE_OPERAND (arg0, 0), mask1);
10416 return fold_build2 (MINUS_EXPR, type, tem, mask1);
10421 /* See if ARG1 is zero and X - ARG1 reduces to X. */
10422 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
10423 return non_lvalue (fold_convert (type, arg0));
10425 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
10426 ARG0 is zero and X + ARG0 reduces to X, since that would mean
10427 (-ARG1 + ARG0) reduces to -ARG1. */
10428 else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10429 return negate_expr (fold_convert (type, arg1));
10431 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
10432 __complex__ ( x, -y ). This is not the same for SNaNs or if
10433 signed zeros are involved. */
10434 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10435 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10436 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10438 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10439 tree arg0r = fold_unary (REALPART_EXPR, rtype, arg0);
10440 tree arg0i = fold_unary (IMAGPART_EXPR, rtype, arg0);
10441 bool arg0rz = false, arg0iz = false;
10442 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10443 || (arg0i && (arg0iz = real_zerop (arg0i))))
10445 tree arg1r = fold_unary (REALPART_EXPR, rtype, arg1);
10446 tree arg1i = fold_unary (IMAGPART_EXPR, rtype, arg1);
10447 if (arg0rz && arg1i && real_zerop (arg1i))
10449 tree rp = fold_build1 (NEGATE_EXPR, rtype,
10451 : build1 (REALPART_EXPR, rtype, arg1));
10452 tree ip = arg0i ? arg0i
10453 : build1 (IMAGPART_EXPR, rtype, arg0);
10454 return fold_build2 (COMPLEX_EXPR, type, rp, ip);
10456 else if (arg0iz && arg1r && real_zerop (arg1r))
10458 tree rp = arg0r ? arg0r
10459 : build1 (REALPART_EXPR, rtype, arg0);
10460 tree ip = fold_build1 (NEGATE_EXPR, rtype,
10462 : build1 (IMAGPART_EXPR, rtype, arg1));
10463 return fold_build2 (COMPLEX_EXPR, type, rp, ip);
10468 /* Fold &x - &x. This can happen from &x.foo - &x.
10469 This is unsafe for certain floats even in non-IEEE formats.
10470 In IEEE, it is unsafe because it does wrong for NaNs.
10471 Also note that operand_equal_p is always false if an operand
10474 if ((!FLOAT_TYPE_P (type) || !HONOR_NANS (TYPE_MODE (type)))
10475 && operand_equal_p (arg0, arg1, 0))
10476 return fold_convert (type, integer_zero_node);
10478 /* A - B -> A + (-B) if B is easily negatable. */
10479 if (negate_expr_p (arg1)
10480 && ((FLOAT_TYPE_P (type)
10481 /* Avoid this transformation if B is a positive REAL_CST. */
10482 && (TREE_CODE (arg1) != REAL_CST
10483 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
10484 || INTEGRAL_TYPE_P (type)))
10485 return fold_build2 (PLUS_EXPR, type,
10486 fold_convert (type, arg0),
10487 fold_convert (type, negate_expr (arg1)));
10489 /* Try folding difference of addresses. */
10491 HOST_WIDE_INT diff;
10493 if ((TREE_CODE (arg0) == ADDR_EXPR
10494 || TREE_CODE (arg1) == ADDR_EXPR)
10495 && ptr_difference_const (arg0, arg1, &diff))
10496 return build_int_cst_type (type, diff);
10499 /* Fold &a[i] - &a[j] to i-j. */
10500 if (TREE_CODE (arg0) == ADDR_EXPR
10501 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
10502 && TREE_CODE (arg1) == ADDR_EXPR
10503 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
10505 tree aref0 = TREE_OPERAND (arg0, 0);
10506 tree aref1 = TREE_OPERAND (arg1, 0);
10507 if (operand_equal_p (TREE_OPERAND (aref0, 0),
10508 TREE_OPERAND (aref1, 0), 0))
10510 tree op0 = fold_convert (type, TREE_OPERAND (aref0, 1));
10511 tree op1 = fold_convert (type, TREE_OPERAND (aref1, 1));
10512 tree esz = array_ref_element_size (aref0);
10513 tree diff = build2 (MINUS_EXPR, type, op0, op1);
10514 return fold_build2 (MULT_EXPR, type, diff,
10515 fold_convert (type, esz));
10520 if (flag_unsafe_math_optimizations
10521 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10522 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10523 && (tem = distribute_real_division (code, type, arg0, arg1)))
10526 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the
10527 same or one. Make sure type is not saturating.
10528 fold_plusminus_mult_expr will re-associate. */
10529 if ((TREE_CODE (arg0) == MULT_EXPR
10530 || TREE_CODE (arg1) == MULT_EXPR)
10531 && !TYPE_SATURATING (type)
10532 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10534 tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1);
10542 /* (-A) * (-B) -> A * B */
10543 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10544 return fold_build2 (MULT_EXPR, type,
10545 fold_convert (type, TREE_OPERAND (arg0, 0)),
10546 fold_convert (type, negate_expr (arg1)));
10547 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10548 return fold_build2 (MULT_EXPR, type,
10549 fold_convert (type, negate_expr (arg0)),
10550 fold_convert (type, TREE_OPERAND (arg1, 0)));
10552 if (! FLOAT_TYPE_P (type))
10554 if (integer_zerop (arg1))
10555 return omit_one_operand (type, arg1, arg0);
10556 if (integer_onep (arg1))
10557 return non_lvalue (fold_convert (type, arg0));
10558 /* Transform x * -1 into -x. Make sure to do the negation
10559 on the original operand with conversions not stripped
10560 because we can only strip non-sign-changing conversions. */
10561 if (integer_all_onesp (arg1))
10562 return fold_convert (type, negate_expr (op0));
10563 /* Transform x * -C into -x * C if x is easily negatable. */
10564 if (TREE_CODE (arg1) == INTEGER_CST
10565 && tree_int_cst_sgn (arg1) == -1
10566 && negate_expr_p (arg0)
10567 && (tem = negate_expr (arg1)) != arg1
10568 && !TREE_OVERFLOW (tem))
10569 return fold_build2 (MULT_EXPR, type,
10570 fold_convert (type, negate_expr (arg0)), tem);
10572 /* (a * (1 << b)) is (a << b) */
10573 if (TREE_CODE (arg1) == LSHIFT_EXPR
10574 && integer_onep (TREE_OPERAND (arg1, 0)))
10575 return fold_build2 (LSHIFT_EXPR, type, op0,
10576 TREE_OPERAND (arg1, 1));
10577 if (TREE_CODE (arg0) == LSHIFT_EXPR
10578 && integer_onep (TREE_OPERAND (arg0, 0)))
10579 return fold_build2 (LSHIFT_EXPR, type, op1,
10580 TREE_OPERAND (arg0, 1));
10582 /* (A + A) * C -> A * 2 * C */
10583 if (TREE_CODE (arg0) == PLUS_EXPR
10584 && TREE_CODE (arg1) == INTEGER_CST
10585 && operand_equal_p (TREE_OPERAND (arg0, 0),
10586 TREE_OPERAND (arg0, 1), 0))
10587 return fold_build2 (MULT_EXPR, type,
10588 omit_one_operand (type, TREE_OPERAND (arg0, 0),
10589 TREE_OPERAND (arg0, 1)),
10590 fold_build2 (MULT_EXPR, type,
10591 build_int_cst (type, 2) , arg1));
10593 strict_overflow_p = false;
10594 if (TREE_CODE (arg1) == INTEGER_CST
10595 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10596 &strict_overflow_p)))
10598 if (strict_overflow_p)
10599 fold_overflow_warning (("assuming signed overflow does not "
10600 "occur when simplifying "
10602 WARN_STRICT_OVERFLOW_MISC);
10603 return fold_convert (type, tem);
10606 /* Optimize z * conj(z) for integer complex numbers. */
10607 if (TREE_CODE (arg0) == CONJ_EXPR
10608 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10609 return fold_mult_zconjz (type, arg1);
10610 if (TREE_CODE (arg1) == CONJ_EXPR
10611 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10612 return fold_mult_zconjz (type, arg0);
10616 /* Maybe fold x * 0 to 0. The expressions aren't the same
10617 when x is NaN, since x * 0 is also NaN. Nor are they the
10618 same in modes with signed zeros, since multiplying a
10619 negative value by 0 gives -0, not +0. */
10620 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10621 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10622 && real_zerop (arg1))
10623 return omit_one_operand (type, arg1, arg0);
10624 /* In IEEE floating point, x*1 is not equivalent to x for snans. */
10625 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10626 && real_onep (arg1))
10627 return non_lvalue (fold_convert (type, arg0));
10629 /* Transform x * -1.0 into -x. */
10630 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10631 && real_minus_onep (arg1))
10632 return fold_convert (type, negate_expr (arg0));
10634 /* Convert (C1/X)*C2 into (C1*C2)/X. This transformation may change
10635 the result for floating point types due to rounding so it is applied
10636 only if -fassociative-math was specify. */
10637 if (flag_associative_math
10638 && TREE_CODE (arg0) == RDIV_EXPR
10639 && TREE_CODE (arg1) == REAL_CST
10640 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
10642 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
10645 return fold_build2 (RDIV_EXPR, type, tem,
10646 TREE_OPERAND (arg0, 1));
10649 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
10650 if (operand_equal_p (arg0, arg1, 0))
10652 tree tem = fold_strip_sign_ops (arg0);
10653 if (tem != NULL_TREE)
10655 tem = fold_convert (type, tem);
10656 return fold_build2 (MULT_EXPR, type, tem, tem);
10660 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
10661 This is not the same for NaNs or if signed zeros are
10663 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10664 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10665 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
10666 && TREE_CODE (arg1) == COMPLEX_CST
10667 && real_zerop (TREE_REALPART (arg1)))
10669 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10670 if (real_onep (TREE_IMAGPART (arg1)))
10671 return fold_build2 (COMPLEX_EXPR, type,
10672 negate_expr (fold_build1 (IMAGPART_EXPR,
10674 fold_build1 (REALPART_EXPR, rtype, arg0));
10675 else if (real_minus_onep (TREE_IMAGPART (arg1)))
10676 return fold_build2 (COMPLEX_EXPR, type,
10677 fold_build1 (IMAGPART_EXPR, rtype, arg0),
10678 negate_expr (fold_build1 (REALPART_EXPR,
10682 /* Optimize z * conj(z) for floating point complex numbers.
10683 Guarded by flag_unsafe_math_optimizations as non-finite
10684 imaginary components don't produce scalar results. */
10685 if (flag_unsafe_math_optimizations
10686 && TREE_CODE (arg0) == CONJ_EXPR
10687 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10688 return fold_mult_zconjz (type, arg1);
10689 if (flag_unsafe_math_optimizations
10690 && TREE_CODE (arg1) == CONJ_EXPR
10691 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10692 return fold_mult_zconjz (type, arg0);
10694 if (flag_unsafe_math_optimizations)
10696 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
10697 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
10699 /* Optimizations of root(...)*root(...). */
10700 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
10703 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10704 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10706 /* Optimize sqrt(x)*sqrt(x) as x. */
10707 if (BUILTIN_SQRT_P (fcode0)
10708 && operand_equal_p (arg00, arg10, 0)
10709 && ! HONOR_SNANS (TYPE_MODE (type)))
10712 /* Optimize root(x)*root(y) as root(x*y). */
10713 rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10714 arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
10715 return build_call_expr (rootfn, 1, arg);
10718 /* Optimize expN(x)*expN(y) as expN(x+y). */
10719 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
10721 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10722 tree arg = fold_build2 (PLUS_EXPR, type,
10723 CALL_EXPR_ARG (arg0, 0),
10724 CALL_EXPR_ARG (arg1, 0));
10725 return build_call_expr (expfn, 1, arg);
10728 /* Optimizations of pow(...)*pow(...). */
10729 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
10730 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
10731 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
10733 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10734 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10735 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10736 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10738 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
10739 if (operand_equal_p (arg01, arg11, 0))
10741 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10742 tree arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
10743 return build_call_expr (powfn, 2, arg, arg01);
10746 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
10747 if (operand_equal_p (arg00, arg10, 0))
10749 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10750 tree arg = fold_build2 (PLUS_EXPR, type, arg01, arg11);
10751 return build_call_expr (powfn, 2, arg00, arg);
10755 /* Optimize tan(x)*cos(x) as sin(x). */
10756 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
10757 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
10758 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
10759 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
10760 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
10761 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
10762 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
10763 CALL_EXPR_ARG (arg1, 0), 0))
10765 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
10767 if (sinfn != NULL_TREE)
10768 return build_call_expr (sinfn, 1, CALL_EXPR_ARG (arg0, 0));
10771 /* Optimize x*pow(x,c) as pow(x,c+1). */
10772 if (fcode1 == BUILT_IN_POW
10773 || fcode1 == BUILT_IN_POWF
10774 || fcode1 == BUILT_IN_POWL)
10776 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10777 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10778 if (TREE_CODE (arg11) == REAL_CST
10779 && !TREE_OVERFLOW (arg11)
10780 && operand_equal_p (arg0, arg10, 0))
10782 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
10786 c = TREE_REAL_CST (arg11);
10787 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
10788 arg = build_real (type, c);
10789 return build_call_expr (powfn, 2, arg0, arg);
10793 /* Optimize pow(x,c)*x as pow(x,c+1). */
10794 if (fcode0 == BUILT_IN_POW
10795 || fcode0 == BUILT_IN_POWF
10796 || fcode0 == BUILT_IN_POWL)
10798 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10799 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10800 if (TREE_CODE (arg01) == REAL_CST
10801 && !TREE_OVERFLOW (arg01)
10802 && operand_equal_p (arg1, arg00, 0))
10804 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10808 c = TREE_REAL_CST (arg01);
10809 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
10810 arg = build_real (type, c);
10811 return build_call_expr (powfn, 2, arg1, arg);
10815 /* Optimize x*x as pow(x,2.0), which is expanded as x*x. */
10816 if (optimize_function_for_speed_p (cfun)
10817 && operand_equal_p (arg0, arg1, 0))
10819 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
10823 tree arg = build_real (type, dconst2);
10824 return build_call_expr (powfn, 2, arg0, arg);
10833 if (integer_all_onesp (arg1))
10834 return omit_one_operand (type, arg1, arg0);
10835 if (integer_zerop (arg1))
10836 return non_lvalue (fold_convert (type, arg0));
10837 if (operand_equal_p (arg0, arg1, 0))
10838 return non_lvalue (fold_convert (type, arg0));
10840 /* ~X | X is -1. */
10841 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10842 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10844 t1 = fold_convert (type, integer_zero_node);
10845 t1 = fold_unary (BIT_NOT_EXPR, type, t1);
10846 return omit_one_operand (type, t1, arg1);
10849 /* X | ~X is -1. */
10850 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10851 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10853 t1 = fold_convert (type, integer_zero_node);
10854 t1 = fold_unary (BIT_NOT_EXPR, type, t1);
10855 return omit_one_operand (type, t1, arg0);
10858 /* Canonicalize (X & C1) | C2. */
10859 if (TREE_CODE (arg0) == BIT_AND_EXPR
10860 && TREE_CODE (arg1) == INTEGER_CST
10861 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10863 unsigned HOST_WIDE_INT hi1, lo1, hi2, lo2, hi3, lo3, mlo, mhi;
10864 int width = TYPE_PRECISION (type), w;
10865 hi1 = TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1));
10866 lo1 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
10867 hi2 = TREE_INT_CST_HIGH (arg1);
10868 lo2 = TREE_INT_CST_LOW (arg1);
10870 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
10871 if ((hi1 & hi2) == hi1 && (lo1 & lo2) == lo1)
10872 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
10874 if (width > HOST_BITS_PER_WIDE_INT)
10876 mhi = (unsigned HOST_WIDE_INT) -1
10877 >> (2 * HOST_BITS_PER_WIDE_INT - width);
10883 mlo = (unsigned HOST_WIDE_INT) -1
10884 >> (HOST_BITS_PER_WIDE_INT - width);
10887 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
10888 if ((~(hi1 | hi2) & mhi) == 0 && (~(lo1 | lo2) & mlo) == 0)
10889 return fold_build2 (BIT_IOR_EXPR, type,
10890 TREE_OPERAND (arg0, 0), arg1);
10892 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
10893 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
10894 mode which allows further optimizations. */
10901 for (w = BITS_PER_UNIT;
10902 w <= width && w <= HOST_BITS_PER_WIDE_INT;
10905 unsigned HOST_WIDE_INT mask
10906 = (unsigned HOST_WIDE_INT) -1 >> (HOST_BITS_PER_WIDE_INT - w);
10907 if (((lo1 | lo2) & mask) == mask
10908 && (lo1 & ~mask) == 0 && hi1 == 0)
10915 if (hi3 != hi1 || lo3 != lo1)
10916 return fold_build2 (BIT_IOR_EXPR, type,
10917 fold_build2 (BIT_AND_EXPR, type,
10918 TREE_OPERAND (arg0, 0),
10919 build_int_cst_wide (type,
10924 /* (X & Y) | Y is (X, Y). */
10925 if (TREE_CODE (arg0) == BIT_AND_EXPR
10926 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10927 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
10928 /* (X & Y) | X is (Y, X). */
10929 if (TREE_CODE (arg0) == BIT_AND_EXPR
10930 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10931 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10932 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 1));
10933 /* X | (X & Y) is (Y, X). */
10934 if (TREE_CODE (arg1) == BIT_AND_EXPR
10935 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
10936 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
10937 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 1));
10938 /* X | (Y & X) is (Y, X). */
10939 if (TREE_CODE (arg1) == BIT_AND_EXPR
10940 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10941 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10942 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 0));
10944 t1 = distribute_bit_expr (code, type, arg0, arg1);
10945 if (t1 != NULL_TREE)
10948 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
10950 This results in more efficient code for machines without a NAND
10951 instruction. Combine will canonicalize to the first form
10952 which will allow use of NAND instructions provided by the
10953 backend if they exist. */
10954 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10955 && TREE_CODE (arg1) == BIT_NOT_EXPR)
10957 return fold_build1 (BIT_NOT_EXPR, type,
10958 build2 (BIT_AND_EXPR, type,
10959 fold_convert (type,
10960 TREE_OPERAND (arg0, 0)),
10961 fold_convert (type,
10962 TREE_OPERAND (arg1, 0))));
10965 /* See if this can be simplified into a rotate first. If that
10966 is unsuccessful continue in the association code. */
10970 if (integer_zerop (arg1))
10971 return non_lvalue (fold_convert (type, arg0));
10972 if (integer_all_onesp (arg1))
10973 return fold_build1 (BIT_NOT_EXPR, type, op0);
10974 if (operand_equal_p (arg0, arg1, 0))
10975 return omit_one_operand (type, integer_zero_node, arg0);
10977 /* ~X ^ X is -1. */
10978 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10979 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10981 t1 = fold_convert (type, integer_zero_node);
10982 t1 = fold_unary (BIT_NOT_EXPR, type, t1);
10983 return omit_one_operand (type, t1, arg1);
10986 /* X ^ ~X is -1. */
10987 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10988 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10990 t1 = fold_convert (type, integer_zero_node);
10991 t1 = fold_unary (BIT_NOT_EXPR, type, t1);
10992 return omit_one_operand (type, t1, arg0);
10995 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
10996 with a constant, and the two constants have no bits in common,
10997 we should treat this as a BIT_IOR_EXPR since this may produce more
10998 simplifications. */
10999 if (TREE_CODE (arg0) == BIT_AND_EXPR
11000 && TREE_CODE (arg1) == BIT_AND_EXPR
11001 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11002 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
11003 && integer_zerop (const_binop (BIT_AND_EXPR,
11004 TREE_OPERAND (arg0, 1),
11005 TREE_OPERAND (arg1, 1), 0)))
11007 code = BIT_IOR_EXPR;
11011 /* (X | Y) ^ X -> Y & ~ X*/
11012 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11013 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11015 tree t2 = TREE_OPERAND (arg0, 1);
11016 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
11018 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
11019 fold_convert (type, t1));
11023 /* (Y | X) ^ X -> Y & ~ X*/
11024 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11025 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11027 tree t2 = TREE_OPERAND (arg0, 0);
11028 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
11030 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
11031 fold_convert (type, t1));
11035 /* X ^ (X | Y) -> Y & ~ X*/
11036 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11037 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
11039 tree t2 = TREE_OPERAND (arg1, 1);
11040 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
11042 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
11043 fold_convert (type, t1));
11047 /* X ^ (Y | X) -> Y & ~ X*/
11048 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11049 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
11051 tree t2 = TREE_OPERAND (arg1, 0);
11052 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
11054 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
11055 fold_convert (type, t1));
11059 /* Convert ~X ^ ~Y to X ^ Y. */
11060 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11061 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11062 return fold_build2 (code, type,
11063 fold_convert (type, TREE_OPERAND (arg0, 0)),
11064 fold_convert (type, TREE_OPERAND (arg1, 0)));
11066 /* Convert ~X ^ C to X ^ ~C. */
11067 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11068 && TREE_CODE (arg1) == INTEGER_CST)
11069 return fold_build2 (code, type,
11070 fold_convert (type, TREE_OPERAND (arg0, 0)),
11071 fold_build1 (BIT_NOT_EXPR, type, arg1));
11073 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
11074 if (TREE_CODE (arg0) == BIT_AND_EXPR
11075 && integer_onep (TREE_OPERAND (arg0, 1))
11076 && integer_onep (arg1))
11077 return fold_build2 (EQ_EXPR, type, arg0,
11078 build_int_cst (TREE_TYPE (arg0), 0));
11080 /* Fold (X & Y) ^ Y as ~X & Y. */
11081 if (TREE_CODE (arg0) == BIT_AND_EXPR
11082 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11084 tem = fold_convert (type, TREE_OPERAND (arg0, 0));
11085 return fold_build2 (BIT_AND_EXPR, type,
11086 fold_build1 (BIT_NOT_EXPR, type, tem),
11087 fold_convert (type, arg1));
11089 /* Fold (X & Y) ^ X as ~Y & X. */
11090 if (TREE_CODE (arg0) == BIT_AND_EXPR
11091 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11092 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11094 tem = fold_convert (type, TREE_OPERAND (arg0, 1));
11095 return fold_build2 (BIT_AND_EXPR, type,
11096 fold_build1 (BIT_NOT_EXPR, type, tem),
11097 fold_convert (type, arg1));
11099 /* Fold X ^ (X & Y) as X & ~Y. */
11100 if (TREE_CODE (arg1) == BIT_AND_EXPR
11101 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11103 tem = fold_convert (type, TREE_OPERAND (arg1, 1));
11104 return fold_build2 (BIT_AND_EXPR, type,
11105 fold_convert (type, arg0),
11106 fold_build1 (BIT_NOT_EXPR, type, tem));
11108 /* Fold X ^ (Y & X) as ~Y & X. */
11109 if (TREE_CODE (arg1) == BIT_AND_EXPR
11110 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11111 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11113 tem = fold_convert (type, TREE_OPERAND (arg1, 0));
11114 return fold_build2 (BIT_AND_EXPR, type,
11115 fold_build1 (BIT_NOT_EXPR, type, tem),
11116 fold_convert (type, arg0));
11119 /* See if this can be simplified into a rotate first. If that
11120 is unsuccessful continue in the association code. */
11124 if (integer_all_onesp (arg1))
11125 return non_lvalue (fold_convert (type, arg0));
11126 if (integer_zerop (arg1))
11127 return omit_one_operand (type, arg1, arg0);
11128 if (operand_equal_p (arg0, arg1, 0))
11129 return non_lvalue (fold_convert (type, arg0));
11131 /* ~X & X is always zero. */
11132 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11133 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11134 return omit_one_operand (type, integer_zero_node, arg1);
11136 /* X & ~X is always zero. */
11137 if (TREE_CODE (arg1) == BIT_NOT_EXPR
11138 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11139 return omit_one_operand (type, integer_zero_node, arg0);
11141 /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2). */
11142 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11143 && TREE_CODE (arg1) == INTEGER_CST
11144 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11146 tree tmp1 = fold_convert (type, arg1);
11147 tree tmp2 = fold_convert (type, TREE_OPERAND (arg0, 0));
11148 tree tmp3 = fold_convert (type, TREE_OPERAND (arg0, 1));
11149 tmp2 = fold_build2 (BIT_AND_EXPR, type, tmp2, tmp1);
11150 tmp3 = fold_build2 (BIT_AND_EXPR, type, tmp3, tmp1);
11151 return fold_convert (type,
11152 fold_build2 (BIT_IOR_EXPR, type, tmp2, tmp3));
11155 /* (X | Y) & Y is (X, Y). */
11156 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11157 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11158 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
11159 /* (X | Y) & X is (Y, X). */
11160 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11161 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11162 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11163 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 1));
11164 /* X & (X | Y) is (Y, X). */
11165 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11166 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
11167 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
11168 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 1));
11169 /* X & (Y | X) is (Y, X). */
11170 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11171 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11172 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11173 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 0));
11175 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
11176 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11177 && integer_onep (TREE_OPERAND (arg0, 1))
11178 && integer_onep (arg1))
11180 tem = TREE_OPERAND (arg0, 0);
11181 return fold_build2 (EQ_EXPR, type,
11182 fold_build2 (BIT_AND_EXPR, TREE_TYPE (tem), tem,
11183 build_int_cst (TREE_TYPE (tem), 1)),
11184 build_int_cst (TREE_TYPE (tem), 0));
11186 /* Fold ~X & 1 as (X & 1) == 0. */
11187 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11188 && integer_onep (arg1))
11190 tem = TREE_OPERAND (arg0, 0);
11191 return fold_build2 (EQ_EXPR, type,
11192 fold_build2 (BIT_AND_EXPR, TREE_TYPE (tem), tem,
11193 build_int_cst (TREE_TYPE (tem), 1)),
11194 build_int_cst (TREE_TYPE (tem), 0));
11197 /* Fold (X ^ Y) & Y as ~X & Y. */
11198 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11199 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11201 tem = fold_convert (type, TREE_OPERAND (arg0, 0));
11202 return fold_build2 (BIT_AND_EXPR, type,
11203 fold_build1 (BIT_NOT_EXPR, type, tem),
11204 fold_convert (type, arg1));
11206 /* Fold (X ^ Y) & X as ~Y & X. */
11207 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11208 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11209 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11211 tem = fold_convert (type, TREE_OPERAND (arg0, 1));
11212 return fold_build2 (BIT_AND_EXPR, type,
11213 fold_build1 (BIT_NOT_EXPR, type, tem),
11214 fold_convert (type, arg1));
11216 /* Fold X & (X ^ Y) as X & ~Y. */
11217 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11218 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11220 tem = fold_convert (type, TREE_OPERAND (arg1, 1));
11221 return fold_build2 (BIT_AND_EXPR, type,
11222 fold_convert (type, arg0),
11223 fold_build1 (BIT_NOT_EXPR, type, tem));
11225 /* Fold X & (Y ^ X) as ~Y & X. */
11226 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11227 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11228 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11230 tem = fold_convert (type, TREE_OPERAND (arg1, 0));
11231 return fold_build2 (BIT_AND_EXPR, type,
11232 fold_build1 (BIT_NOT_EXPR, type, tem),
11233 fold_convert (type, arg0));
11236 t1 = distribute_bit_expr (code, type, arg0, arg1);
11237 if (t1 != NULL_TREE)
11239 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
11240 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
11241 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
11244 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
11246 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
11247 && (~TREE_INT_CST_LOW (arg1)
11248 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
11249 return fold_convert (type, TREE_OPERAND (arg0, 0));
11252 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
11254 This results in more efficient code for machines without a NOR
11255 instruction. Combine will canonicalize to the first form
11256 which will allow use of NOR instructions provided by the
11257 backend if they exist. */
11258 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11259 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11261 return fold_build1 (BIT_NOT_EXPR, type,
11262 build2 (BIT_IOR_EXPR, type,
11263 fold_convert (type,
11264 TREE_OPERAND (arg0, 0)),
11265 fold_convert (type,
11266 TREE_OPERAND (arg1, 0))));
11269 /* If arg0 is derived from the address of an object or function, we may
11270 be able to fold this expression using the object or function's
11272 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && host_integerp (arg1, 1))
11274 unsigned HOST_WIDE_INT modulus, residue;
11275 unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (arg1);
11277 modulus = get_pointer_modulus_and_residue (arg0, &residue,
11278 integer_onep (arg1));
11280 /* This works because modulus is a power of 2. If this weren't the
11281 case, we'd have to replace it by its greatest power-of-2
11282 divisor: modulus & -modulus. */
11284 return build_int_cst (type, residue & low);
11287 /* Fold (X << C1) & C2 into (X << C1) & (C2 | ((1 << C1) - 1))
11288 (X >> C1) & C2 into (X >> C1) & (C2 | ~((type) -1 >> C1))
11289 if the new mask might be further optimized. */
11290 if ((TREE_CODE (arg0) == LSHIFT_EXPR
11291 || TREE_CODE (arg0) == RSHIFT_EXPR)
11292 && host_integerp (TREE_OPERAND (arg0, 1), 1)
11293 && host_integerp (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)))
11294 && tree_low_cst (TREE_OPERAND (arg0, 1), 1)
11295 < TYPE_PRECISION (TREE_TYPE (arg0))
11296 && TYPE_PRECISION (TREE_TYPE (arg0)) <= HOST_BITS_PER_WIDE_INT
11297 && tree_low_cst (TREE_OPERAND (arg0, 1), 1) > 0)
11299 unsigned int shiftc = tree_low_cst (TREE_OPERAND (arg0, 1), 1);
11300 unsigned HOST_WIDE_INT mask
11301 = tree_low_cst (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)));
11302 unsigned HOST_WIDE_INT newmask, zerobits = 0;
11303 tree shift_type = TREE_TYPE (arg0);
11305 if (TREE_CODE (arg0) == LSHIFT_EXPR)
11306 zerobits = ((((unsigned HOST_WIDE_INT) 1) << shiftc) - 1);
11307 else if (TREE_CODE (arg0) == RSHIFT_EXPR
11308 && TYPE_PRECISION (TREE_TYPE (arg0))
11309 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg0))))
11311 unsigned int prec = TYPE_PRECISION (TREE_TYPE (arg0));
11312 tree arg00 = TREE_OPERAND (arg0, 0);
11313 /* See if more bits can be proven as zero because of
11315 if (TREE_CODE (arg00) == NOP_EXPR
11316 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg00, 0))))
11318 tree inner_type = TREE_TYPE (TREE_OPERAND (arg00, 0));
11319 if (TYPE_PRECISION (inner_type)
11320 == GET_MODE_BITSIZE (TYPE_MODE (inner_type))
11321 && TYPE_PRECISION (inner_type) < prec)
11323 prec = TYPE_PRECISION (inner_type);
11324 /* See if we can shorten the right shift. */
11326 shift_type = inner_type;
11329 zerobits = ~(unsigned HOST_WIDE_INT) 0;
11330 zerobits >>= HOST_BITS_PER_WIDE_INT - shiftc;
11331 zerobits <<= prec - shiftc;
11332 /* For arithmetic shift if sign bit could be set, zerobits
11333 can contain actually sign bits, so no transformation is
11334 possible, unless MASK masks them all away. In that
11335 case the shift needs to be converted into logical shift. */
11336 if (!TYPE_UNSIGNED (TREE_TYPE (arg0))
11337 && prec == TYPE_PRECISION (TREE_TYPE (arg0)))
11339 if ((mask & zerobits) == 0)
11340 shift_type = unsigned_type_for (TREE_TYPE (arg0));
11346 /* ((X << 16) & 0xff00) is (X, 0). */
11347 if ((mask & zerobits) == mask)
11348 return omit_one_operand (type, build_int_cst (type, 0), arg0);
11350 newmask = mask | zerobits;
11351 if (newmask != mask && (newmask & (newmask + 1)) == 0)
11355 /* Only do the transformation if NEWMASK is some integer
11357 for (prec = BITS_PER_UNIT;
11358 prec < HOST_BITS_PER_WIDE_INT; prec <<= 1)
11359 if (newmask == (((unsigned HOST_WIDE_INT) 1) << prec) - 1)
11361 if (prec < HOST_BITS_PER_WIDE_INT
11362 || newmask == ~(unsigned HOST_WIDE_INT) 0)
11364 if (shift_type != TREE_TYPE (arg0))
11366 tem = fold_build2 (TREE_CODE (arg0), shift_type,
11367 fold_convert (shift_type,
11368 TREE_OPERAND (arg0, 0)),
11369 TREE_OPERAND (arg0, 1));
11370 tem = fold_convert (type, tem);
11374 return fold_build2 (BIT_AND_EXPR, type, tem,
11375 build_int_cst_type (TREE_TYPE (op1),
11384 /* Don't touch a floating-point divide by zero unless the mode
11385 of the constant can represent infinity. */
11386 if (TREE_CODE (arg1) == REAL_CST
11387 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
11388 && real_zerop (arg1))
11391 /* Optimize A / A to 1.0 if we don't care about
11392 NaNs or Infinities. Skip the transformation
11393 for non-real operands. */
11394 if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
11395 && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
11396 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
11397 && operand_equal_p (arg0, arg1, 0))
11399 tree r = build_real (TREE_TYPE (arg0), dconst1);
11401 return omit_two_operands (type, r, arg0, arg1);
11404 /* The complex version of the above A / A optimization. */
11405 if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
11406 && operand_equal_p (arg0, arg1, 0))
11408 tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
11409 if (! HONOR_NANS (TYPE_MODE (elem_type))
11410 && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
11412 tree r = build_real (elem_type, dconst1);
11413 /* omit_two_operands will call fold_convert for us. */
11414 return omit_two_operands (type, r, arg0, arg1);
11418 /* (-A) / (-B) -> A / B */
11419 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
11420 return fold_build2 (RDIV_EXPR, type,
11421 TREE_OPERAND (arg0, 0),
11422 negate_expr (arg1));
11423 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
11424 return fold_build2 (RDIV_EXPR, type,
11425 negate_expr (arg0),
11426 TREE_OPERAND (arg1, 0));
11428 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
11429 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11430 && real_onep (arg1))
11431 return non_lvalue (fold_convert (type, arg0));
11433 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
11434 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11435 && real_minus_onep (arg1))
11436 return non_lvalue (fold_convert (type, negate_expr (arg0)));
11438 /* If ARG1 is a constant, we can convert this to a multiply by the
11439 reciprocal. This does not have the same rounding properties,
11440 so only do this if -freciprocal-math. We can actually
11441 always safely do it if ARG1 is a power of two, but it's hard to
11442 tell if it is or not in a portable manner. */
11443 if (TREE_CODE (arg1) == REAL_CST)
11445 if (flag_reciprocal_math
11446 && 0 != (tem = const_binop (code, build_real (type, dconst1),
11448 return fold_build2 (MULT_EXPR, type, arg0, tem);
11449 /* Find the reciprocal if optimizing and the result is exact. */
11453 r = TREE_REAL_CST (arg1);
11454 if (exact_real_inverse (TYPE_MODE(TREE_TYPE(arg0)), &r))
11456 tem = build_real (type, r);
11457 return fold_build2 (MULT_EXPR, type,
11458 fold_convert (type, arg0), tem);
11462 /* Convert A/B/C to A/(B*C). */
11463 if (flag_reciprocal_math
11464 && TREE_CODE (arg0) == RDIV_EXPR)
11465 return fold_build2 (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
11466 fold_build2 (MULT_EXPR, type,
11467 TREE_OPERAND (arg0, 1), arg1));
11469 /* Convert A/(B/C) to (A/B)*C. */
11470 if (flag_reciprocal_math
11471 && TREE_CODE (arg1) == RDIV_EXPR)
11472 return fold_build2 (MULT_EXPR, type,
11473 fold_build2 (RDIV_EXPR, type, arg0,
11474 TREE_OPERAND (arg1, 0)),
11475 TREE_OPERAND (arg1, 1));
11477 /* Convert C1/(X*C2) into (C1/C2)/X. */
11478 if (flag_reciprocal_math
11479 && TREE_CODE (arg1) == MULT_EXPR
11480 && TREE_CODE (arg0) == REAL_CST
11481 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
11483 tree tem = const_binop (RDIV_EXPR, arg0,
11484 TREE_OPERAND (arg1, 1), 0);
11486 return fold_build2 (RDIV_EXPR, type, tem,
11487 TREE_OPERAND (arg1, 0));
11490 if (flag_unsafe_math_optimizations)
11492 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
11493 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
11495 /* Optimize sin(x)/cos(x) as tan(x). */
11496 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
11497 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
11498 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
11499 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11500 CALL_EXPR_ARG (arg1, 0), 0))
11502 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11504 if (tanfn != NULL_TREE)
11505 return build_call_expr (tanfn, 1, CALL_EXPR_ARG (arg0, 0));
11508 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
11509 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
11510 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
11511 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
11512 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11513 CALL_EXPR_ARG (arg1, 0), 0))
11515 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11517 if (tanfn != NULL_TREE)
11519 tree tmp = build_call_expr (tanfn, 1, CALL_EXPR_ARG (arg0, 0));
11520 return fold_build2 (RDIV_EXPR, type,
11521 build_real (type, dconst1), tmp);
11525 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
11526 NaNs or Infinities. */
11527 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
11528 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
11529 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
11531 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11532 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11534 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
11535 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
11536 && operand_equal_p (arg00, arg01, 0))
11538 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11540 if (cosfn != NULL_TREE)
11541 return build_call_expr (cosfn, 1, arg00);
11545 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
11546 NaNs or Infinities. */
11547 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
11548 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
11549 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
11551 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11552 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11554 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
11555 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
11556 && operand_equal_p (arg00, arg01, 0))
11558 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11560 if (cosfn != NULL_TREE)
11562 tree tmp = build_call_expr (cosfn, 1, arg00);
11563 return fold_build2 (RDIV_EXPR, type,
11564 build_real (type, dconst1),
11570 /* Optimize pow(x,c)/x as pow(x,c-1). */
11571 if (fcode0 == BUILT_IN_POW
11572 || fcode0 == BUILT_IN_POWF
11573 || fcode0 == BUILT_IN_POWL)
11575 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11576 tree arg01 = CALL_EXPR_ARG (arg0, 1);
11577 if (TREE_CODE (arg01) == REAL_CST
11578 && !TREE_OVERFLOW (arg01)
11579 && operand_equal_p (arg1, arg00, 0))
11581 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11585 c = TREE_REAL_CST (arg01);
11586 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
11587 arg = build_real (type, c);
11588 return build_call_expr (powfn, 2, arg1, arg);
11592 /* Optimize a/root(b/c) into a*root(c/b). */
11593 if (BUILTIN_ROOT_P (fcode1))
11595 tree rootarg = CALL_EXPR_ARG (arg1, 0);
11597 if (TREE_CODE (rootarg) == RDIV_EXPR)
11599 tree rootfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11600 tree b = TREE_OPERAND (rootarg, 0);
11601 tree c = TREE_OPERAND (rootarg, 1);
11603 tree tmp = fold_build2 (RDIV_EXPR, type, c, b);
11605 tmp = build_call_expr (rootfn, 1, tmp);
11606 return fold_build2 (MULT_EXPR, type, arg0, tmp);
11610 /* Optimize x/expN(y) into x*expN(-y). */
11611 if (BUILTIN_EXPONENT_P (fcode1))
11613 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11614 tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
11615 arg1 = build_call_expr (expfn, 1, fold_convert (type, arg));
11616 return fold_build2 (MULT_EXPR, type, arg0, arg1);
11619 /* Optimize x/pow(y,z) into x*pow(y,-z). */
11620 if (fcode1 == BUILT_IN_POW
11621 || fcode1 == BUILT_IN_POWF
11622 || fcode1 == BUILT_IN_POWL)
11624 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11625 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11626 tree arg11 = CALL_EXPR_ARG (arg1, 1);
11627 tree neg11 = fold_convert (type, negate_expr (arg11));
11628 arg1 = build_call_expr (powfn, 2, arg10, neg11);
11629 return fold_build2 (MULT_EXPR, type, arg0, arg1);
11634 case TRUNC_DIV_EXPR:
11635 case FLOOR_DIV_EXPR:
11636 /* Simplify A / (B << N) where A and B are positive and B is
11637 a power of 2, to A >> (N + log2(B)). */
11638 strict_overflow_p = false;
11639 if (TREE_CODE (arg1) == LSHIFT_EXPR
11640 && (TYPE_UNSIGNED (type)
11641 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
11643 tree sval = TREE_OPERAND (arg1, 0);
11644 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
11646 tree sh_cnt = TREE_OPERAND (arg1, 1);
11647 unsigned long pow2 = exact_log2 (TREE_INT_CST_LOW (sval));
11649 if (strict_overflow_p)
11650 fold_overflow_warning (("assuming signed overflow does not "
11651 "occur when simplifying A / (B << N)"),
11652 WARN_STRICT_OVERFLOW_MISC);
11654 sh_cnt = fold_build2 (PLUS_EXPR, TREE_TYPE (sh_cnt),
11655 sh_cnt, build_int_cst (NULL_TREE, pow2));
11656 return fold_build2 (RSHIFT_EXPR, type,
11657 fold_convert (type, arg0), sh_cnt);
11661 /* For unsigned integral types, FLOOR_DIV_EXPR is the same as
11662 TRUNC_DIV_EXPR. Rewrite into the latter in this case. */
11663 if (INTEGRAL_TYPE_P (type)
11664 && TYPE_UNSIGNED (type)
11665 && code == FLOOR_DIV_EXPR)
11666 return fold_build2 (TRUNC_DIV_EXPR, type, op0, op1);
11670 case ROUND_DIV_EXPR:
11671 case CEIL_DIV_EXPR:
11672 case EXACT_DIV_EXPR:
11673 if (integer_onep (arg1))
11674 return non_lvalue (fold_convert (type, arg0));
11675 if (integer_zerop (arg1))
11677 /* X / -1 is -X. */
11678 if (!TYPE_UNSIGNED (type)
11679 && TREE_CODE (arg1) == INTEGER_CST
11680 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
11681 && TREE_INT_CST_HIGH (arg1) == -1)
11682 return fold_convert (type, negate_expr (arg0));
11684 /* Convert -A / -B to A / B when the type is signed and overflow is
11686 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11687 && TREE_CODE (arg0) == NEGATE_EXPR
11688 && negate_expr_p (arg1))
11690 if (INTEGRAL_TYPE_P (type))
11691 fold_overflow_warning (("assuming signed overflow does not occur "
11692 "when distributing negation across "
11694 WARN_STRICT_OVERFLOW_MISC);
11695 return fold_build2 (code, type,
11696 fold_convert (type, TREE_OPERAND (arg0, 0)),
11697 fold_convert (type, negate_expr (arg1)));
11699 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11700 && TREE_CODE (arg1) == NEGATE_EXPR
11701 && negate_expr_p (arg0))
11703 if (INTEGRAL_TYPE_P (type))
11704 fold_overflow_warning (("assuming signed overflow does not occur "
11705 "when distributing negation across "
11707 WARN_STRICT_OVERFLOW_MISC);
11708 return fold_build2 (code, type,
11709 fold_convert (type, negate_expr (arg0)),
11710 fold_convert (type, TREE_OPERAND (arg1, 0)));
11713 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
11714 operation, EXACT_DIV_EXPR.
11716 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
11717 At one time others generated faster code, it's not clear if they do
11718 after the last round to changes to the DIV code in expmed.c. */
11719 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
11720 && multiple_of_p (type, arg0, arg1))
11721 return fold_build2 (EXACT_DIV_EXPR, type, arg0, arg1);
11723 strict_overflow_p = false;
11724 if (TREE_CODE (arg1) == INTEGER_CST
11725 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11726 &strict_overflow_p)))
11728 if (strict_overflow_p)
11729 fold_overflow_warning (("assuming signed overflow does not occur "
11730 "when simplifying division"),
11731 WARN_STRICT_OVERFLOW_MISC);
11732 return fold_convert (type, tem);
11737 case CEIL_MOD_EXPR:
11738 case FLOOR_MOD_EXPR:
11739 case ROUND_MOD_EXPR:
11740 case TRUNC_MOD_EXPR:
11741 /* X % 1 is always zero, but be sure to preserve any side
11743 if (integer_onep (arg1))
11744 return omit_one_operand (type, integer_zero_node, arg0);
11746 /* X % 0, return X % 0 unchanged so that we can get the
11747 proper warnings and errors. */
11748 if (integer_zerop (arg1))
11751 /* 0 % X is always zero, but be sure to preserve any side
11752 effects in X. Place this after checking for X == 0. */
11753 if (integer_zerop (arg0))
11754 return omit_one_operand (type, integer_zero_node, arg1);
11756 /* X % -1 is zero. */
11757 if (!TYPE_UNSIGNED (type)
11758 && TREE_CODE (arg1) == INTEGER_CST
11759 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
11760 && TREE_INT_CST_HIGH (arg1) == -1)
11761 return omit_one_operand (type, integer_zero_node, arg0);
11763 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
11764 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
11765 strict_overflow_p = false;
11766 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
11767 && (TYPE_UNSIGNED (type)
11768 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
11771 /* Also optimize A % (C << N) where C is a power of 2,
11772 to A & ((C << N) - 1). */
11773 if (TREE_CODE (arg1) == LSHIFT_EXPR)
11774 c = TREE_OPERAND (arg1, 0);
11776 if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
11778 tree mask = fold_build2 (MINUS_EXPR, TREE_TYPE (arg1), arg1,
11779 build_int_cst (TREE_TYPE (arg1), 1));
11780 if (strict_overflow_p)
11781 fold_overflow_warning (("assuming signed overflow does not "
11782 "occur when simplifying "
11783 "X % (power of two)"),
11784 WARN_STRICT_OVERFLOW_MISC);
11785 return fold_build2 (BIT_AND_EXPR, type,
11786 fold_convert (type, arg0),
11787 fold_convert (type, mask));
11791 /* X % -C is the same as X % C. */
11792 if (code == TRUNC_MOD_EXPR
11793 && !TYPE_UNSIGNED (type)
11794 && TREE_CODE (arg1) == INTEGER_CST
11795 && !TREE_OVERFLOW (arg1)
11796 && TREE_INT_CST_HIGH (arg1) < 0
11797 && !TYPE_OVERFLOW_TRAPS (type)
11798 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
11799 && !sign_bit_p (arg1, arg1))
11800 return fold_build2 (code, type, fold_convert (type, arg0),
11801 fold_convert (type, negate_expr (arg1)));
11803 /* X % -Y is the same as X % Y. */
11804 if (code == TRUNC_MOD_EXPR
11805 && !TYPE_UNSIGNED (type)
11806 && TREE_CODE (arg1) == NEGATE_EXPR
11807 && !TYPE_OVERFLOW_TRAPS (type))
11808 return fold_build2 (code, type, fold_convert (type, arg0),
11809 fold_convert (type, TREE_OPERAND (arg1, 0)));
11811 if (TREE_CODE (arg1) == INTEGER_CST
11812 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11813 &strict_overflow_p)))
11815 if (strict_overflow_p)
11816 fold_overflow_warning (("assuming signed overflow does not occur "
11817 "when simplifying modulus"),
11818 WARN_STRICT_OVERFLOW_MISC);
11819 return fold_convert (type, tem);
11826 if (integer_all_onesp (arg0))
11827 return omit_one_operand (type, arg0, arg1);
11831 /* Optimize -1 >> x for arithmetic right shifts. */
11832 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type)
11833 && tree_expr_nonnegative_p (arg1))
11834 return omit_one_operand (type, arg0, arg1);
11835 /* ... fall through ... */
11839 if (integer_zerop (arg1))
11840 return non_lvalue (fold_convert (type, arg0));
11841 if (integer_zerop (arg0))
11842 return omit_one_operand (type, arg0, arg1);
11844 /* Since negative shift count is not well-defined,
11845 don't try to compute it in the compiler. */
11846 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
11849 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
11850 if (TREE_CODE (op0) == code && host_integerp (arg1, false)
11851 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
11852 && host_integerp (TREE_OPERAND (arg0, 1), false)
11853 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
11855 HOST_WIDE_INT low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
11856 + TREE_INT_CST_LOW (arg1));
11858 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
11859 being well defined. */
11860 if (low >= TYPE_PRECISION (type))
11862 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
11863 low = low % TYPE_PRECISION (type);
11864 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
11865 return build_int_cst (type, 0);
11867 low = TYPE_PRECISION (type) - 1;
11870 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
11871 build_int_cst (type, low));
11874 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
11875 into x & ((unsigned)-1 >> c) for unsigned types. */
11876 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
11877 || (TYPE_UNSIGNED (type)
11878 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
11879 && host_integerp (arg1, false)
11880 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
11881 && host_integerp (TREE_OPERAND (arg0, 1), false)
11882 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
11884 HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
11885 HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
11891 arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));
11893 lshift = build_int_cst (type, -1);
11894 lshift = int_const_binop (code, lshift, arg1, 0);
11896 return fold_build2 (BIT_AND_EXPR, type, arg00, lshift);
11900 /* Rewrite an LROTATE_EXPR by a constant into an
11901 RROTATE_EXPR by a new constant. */
11902 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
11904 tree tem = build_int_cst (TREE_TYPE (arg1),
11905 TYPE_PRECISION (type));
11906 tem = const_binop (MINUS_EXPR, tem, arg1, 0);
11907 return fold_build2 (RROTATE_EXPR, type, op0, tem);
11910 /* If we have a rotate of a bit operation with the rotate count and
11911 the second operand of the bit operation both constant,
11912 permute the two operations. */
11913 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
11914 && (TREE_CODE (arg0) == BIT_AND_EXPR
11915 || TREE_CODE (arg0) == BIT_IOR_EXPR
11916 || TREE_CODE (arg0) == BIT_XOR_EXPR)
11917 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11918 return fold_build2 (TREE_CODE (arg0), type,
11919 fold_build2 (code, type,
11920 TREE_OPERAND (arg0, 0), arg1),
11921 fold_build2 (code, type,
11922 TREE_OPERAND (arg0, 1), arg1));
11924 /* Two consecutive rotates adding up to the precision of the
11925 type can be ignored. */
11926 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
11927 && TREE_CODE (arg0) == RROTATE_EXPR
11928 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11929 && TREE_INT_CST_HIGH (arg1) == 0
11930 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
11931 && ((TREE_INT_CST_LOW (arg1)
11932 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
11933 == (unsigned int) TYPE_PRECISION (type)))
11934 return TREE_OPERAND (arg0, 0);
11936 /* Fold (X & C2) << C1 into (X << C1) & (C2 << C1)
11937 (X & C2) >> C1 into (X >> C1) & (C2 >> C1)
11938 if the latter can be further optimized. */
11939 if ((code == LSHIFT_EXPR || code == RSHIFT_EXPR)
11940 && TREE_CODE (arg0) == BIT_AND_EXPR
11941 && TREE_CODE (arg1) == INTEGER_CST
11942 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11944 tree mask = fold_build2 (code, type,
11945 fold_convert (type, TREE_OPERAND (arg0, 1)),
11947 tree shift = fold_build2 (code, type,
11948 fold_convert (type, TREE_OPERAND (arg0, 0)),
11950 tem = fold_binary (BIT_AND_EXPR, type, shift, mask);
11958 if (operand_equal_p (arg0, arg1, 0))
11959 return omit_one_operand (type, arg0, arg1);
11960 if (INTEGRAL_TYPE_P (type)
11961 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
11962 return omit_one_operand (type, arg1, arg0);
11963 tem = fold_minmax (MIN_EXPR, type, arg0, arg1);
11969 if (operand_equal_p (arg0, arg1, 0))
11970 return omit_one_operand (type, arg0, arg1);
11971 if (INTEGRAL_TYPE_P (type)
11972 && TYPE_MAX_VALUE (type)
11973 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
11974 return omit_one_operand (type, arg1, arg0);
11975 tem = fold_minmax (MAX_EXPR, type, arg0, arg1);
11980 case TRUTH_ANDIF_EXPR:
11981 /* Note that the operands of this must be ints
11982 and their values must be 0 or 1.
11983 ("true" is a fixed value perhaps depending on the language.) */
11984 /* If first arg is constant zero, return it. */
11985 if (integer_zerop (arg0))
11986 return fold_convert (type, arg0);
11987 case TRUTH_AND_EXPR:
11988 /* If either arg is constant true, drop it. */
11989 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
11990 return non_lvalue (fold_convert (type, arg1));
11991 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
11992 /* Preserve sequence points. */
11993 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
11994 return non_lvalue (fold_convert (type, arg0));
11995 /* If second arg is constant zero, result is zero, but first arg
11996 must be evaluated. */
11997 if (integer_zerop (arg1))
11998 return omit_one_operand (type, arg1, arg0);
11999 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
12000 case will be handled here. */
12001 if (integer_zerop (arg0))
12002 return omit_one_operand (type, arg0, arg1);
12004 /* !X && X is always false. */
12005 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12006 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12007 return omit_one_operand (type, integer_zero_node, arg1);
12008 /* X && !X is always false. */
12009 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12010 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12011 return omit_one_operand (type, integer_zero_node, arg0);
12013 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
12014 means A >= Y && A != MAX, but in this case we know that
12017 if (!TREE_SIDE_EFFECTS (arg0)
12018 && !TREE_SIDE_EFFECTS (arg1))
12020 tem = fold_to_nonsharp_ineq_using_bound (arg0, arg1);
12021 if (tem && !operand_equal_p (tem, arg0, 0))
12022 return fold_build2 (code, type, tem, arg1);
12024 tem = fold_to_nonsharp_ineq_using_bound (arg1, arg0);
12025 if (tem && !operand_equal_p (tem, arg1, 0))
12026 return fold_build2 (code, type, arg0, tem);
12030 /* We only do these simplifications if we are optimizing. */
12034 /* Check for things like (A || B) && (A || C). We can convert this
12035 to A || (B && C). Note that either operator can be any of the four
12036 truth and/or operations and the transformation will still be
12037 valid. Also note that we only care about order for the
12038 ANDIF and ORIF operators. If B contains side effects, this
12039 might change the truth-value of A. */
12040 if (TREE_CODE (arg0) == TREE_CODE (arg1)
12041 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
12042 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
12043 || TREE_CODE (arg0) == TRUTH_AND_EXPR
12044 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
12045 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
12047 tree a00 = TREE_OPERAND (arg0, 0);
12048 tree a01 = TREE_OPERAND (arg0, 1);
12049 tree a10 = TREE_OPERAND (arg1, 0);
12050 tree a11 = TREE_OPERAND (arg1, 1);
12051 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
12052 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
12053 && (code == TRUTH_AND_EXPR
12054 || code == TRUTH_OR_EXPR));
12056 if (operand_equal_p (a00, a10, 0))
12057 return fold_build2 (TREE_CODE (arg0), type, a00,
12058 fold_build2 (code, type, a01, a11));
12059 else if (commutative && operand_equal_p (a00, a11, 0))
12060 return fold_build2 (TREE_CODE (arg0), type, a00,
12061 fold_build2 (code, type, a01, a10));
12062 else if (commutative && operand_equal_p (a01, a10, 0))
12063 return fold_build2 (TREE_CODE (arg0), type, a01,
12064 fold_build2 (code, type, a00, a11));
12066 /* This case if tricky because we must either have commutative
12067 operators or else A10 must not have side-effects. */
12069 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
12070 && operand_equal_p (a01, a11, 0))
12071 return fold_build2 (TREE_CODE (arg0), type,
12072 fold_build2 (code, type, a00, a10),
12076 /* See if we can build a range comparison. */
12077 if (0 != (tem = fold_range_test (code, type, op0, op1)))
12080 /* Check for the possibility of merging component references. If our
12081 lhs is another similar operation, try to merge its rhs with our
12082 rhs. Then try to merge our lhs and rhs. */
12083 if (TREE_CODE (arg0) == code
12084 && 0 != (tem = fold_truthop (code, type,
12085 TREE_OPERAND (arg0, 1), arg1)))
12086 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
12088 if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
12093 case TRUTH_ORIF_EXPR:
12094 /* Note that the operands of this must be ints
12095 and their values must be 0 or true.
12096 ("true" is a fixed value perhaps depending on the language.) */
12097 /* If first arg is constant true, return it. */
12098 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12099 return fold_convert (type, arg0);
12100 case TRUTH_OR_EXPR:
12101 /* If either arg is constant zero, drop it. */
12102 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
12103 return non_lvalue (fold_convert (type, arg1));
12104 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
12105 /* Preserve sequence points. */
12106 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12107 return non_lvalue (fold_convert (type, arg0));
12108 /* If second arg is constant true, result is true, but we must
12109 evaluate first arg. */
12110 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
12111 return omit_one_operand (type, arg1, arg0);
12112 /* Likewise for first arg, but note this only occurs here for
12114 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12115 return omit_one_operand (type, arg0, arg1);
12117 /* !X || X is always true. */
12118 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12119 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12120 return omit_one_operand (type, integer_one_node, arg1);
12121 /* X || !X is always true. */
12122 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12123 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12124 return omit_one_operand (type, integer_one_node, arg0);
12128 case TRUTH_XOR_EXPR:
12129 /* If the second arg is constant zero, drop it. */
12130 if (integer_zerop (arg1))
12131 return non_lvalue (fold_convert (type, arg0));
12132 /* If the second arg is constant true, this is a logical inversion. */
12133 if (integer_onep (arg1))
12135 /* Only call invert_truthvalue if operand is a truth value. */
12136 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
12137 tem = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg0), arg0);
12139 tem = invert_truthvalue (arg0);
12140 return non_lvalue (fold_convert (type, tem));
12142 /* Identical arguments cancel to zero. */
12143 if (operand_equal_p (arg0, arg1, 0))
12144 return omit_one_operand (type, integer_zero_node, arg0);
12146 /* !X ^ X is always true. */
12147 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12148 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12149 return omit_one_operand (type, integer_one_node, arg1);
12151 /* X ^ !X is always true. */
12152 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12153 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12154 return omit_one_operand (type, integer_one_node, arg0);
12160 tem = fold_comparison (code, type, op0, op1);
12161 if (tem != NULL_TREE)
12164 /* bool_var != 0 becomes bool_var. */
12165 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12166 && code == NE_EXPR)
12167 return non_lvalue (fold_convert (type, arg0));
12169 /* bool_var == 1 becomes bool_var. */
12170 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12171 && code == EQ_EXPR)
12172 return non_lvalue (fold_convert (type, arg0));
12174 /* bool_var != 1 becomes !bool_var. */
12175 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12176 && code == NE_EXPR)
12177 return fold_build1 (TRUTH_NOT_EXPR, type, fold_convert (type, arg0));
12179 /* bool_var == 0 becomes !bool_var. */
12180 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12181 && code == EQ_EXPR)
12182 return fold_build1 (TRUTH_NOT_EXPR, type, fold_convert (type, arg0));
12184 /* If this is an equality comparison of the address of two non-weak,
12185 unaliased symbols neither of which are extern (since we do not
12186 have access to attributes for externs), then we know the result. */
12187 if (TREE_CODE (arg0) == ADDR_EXPR
12188 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
12189 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
12190 && ! lookup_attribute ("alias",
12191 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
12192 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
12193 && TREE_CODE (arg1) == ADDR_EXPR
12194 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
12195 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
12196 && ! lookup_attribute ("alias",
12197 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
12198 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
12200 /* We know that we're looking at the address of two
12201 non-weak, unaliased, static _DECL nodes.
12203 It is both wasteful and incorrect to call operand_equal_p
12204 to compare the two ADDR_EXPR nodes. It is wasteful in that
12205 all we need to do is test pointer equality for the arguments
12206 to the two ADDR_EXPR nodes. It is incorrect to use
12207 operand_equal_p as that function is NOT equivalent to a
12208 C equality test. It can in fact return false for two
12209 objects which would test as equal using the C equality
12211 bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
12212 return constant_boolean_node (equal
12213 ? code == EQ_EXPR : code != EQ_EXPR,
12217 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
12218 a MINUS_EXPR of a constant, we can convert it into a comparison with
12219 a revised constant as long as no overflow occurs. */
12220 if (TREE_CODE (arg1) == INTEGER_CST
12221 && (TREE_CODE (arg0) == PLUS_EXPR
12222 || TREE_CODE (arg0) == MINUS_EXPR)
12223 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12224 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
12225 ? MINUS_EXPR : PLUS_EXPR,
12226 fold_convert (TREE_TYPE (arg0), arg1),
12227 TREE_OPERAND (arg0, 1), 0))
12228 && !TREE_OVERFLOW (tem))
12229 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
12231 /* Similarly for a NEGATE_EXPR. */
12232 if (TREE_CODE (arg0) == NEGATE_EXPR
12233 && TREE_CODE (arg1) == INTEGER_CST
12234 && 0 != (tem = negate_expr (arg1))
12235 && TREE_CODE (tem) == INTEGER_CST
12236 && !TREE_OVERFLOW (tem))
12237 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
12239 /* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2). */
12240 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12241 && TREE_CODE (arg1) == INTEGER_CST
12242 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12243 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
12244 fold_build2 (BIT_XOR_EXPR, TREE_TYPE (arg0),
12245 fold_convert (TREE_TYPE (arg0), arg1),
12246 TREE_OPERAND (arg0, 1)));
12248 /* Transform comparisons of the form X +- Y CMP X to Y CMP 0. */
12249 if ((TREE_CODE (arg0) == PLUS_EXPR
12250 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
12251 || TREE_CODE (arg0) == MINUS_EXPR)
12252 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12253 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
12254 || POINTER_TYPE_P (TREE_TYPE (arg0))))
12256 tree val = TREE_OPERAND (arg0, 1);
12257 return omit_two_operands (type,
12258 fold_build2 (code, type,
12260 build_int_cst (TREE_TYPE (val),
12262 TREE_OPERAND (arg0, 0), arg1);
12265 /* Transform comparisons of the form C - X CMP X if C % 2 == 1. */
12266 if (TREE_CODE (arg0) == MINUS_EXPR
12267 && TREE_CODE (TREE_OPERAND (arg0, 0)) == INTEGER_CST
12268 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0)
12269 && (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 0)) & 1) == 1)
12271 return omit_two_operands (type,
12273 ? boolean_true_node : boolean_false_node,
12274 TREE_OPERAND (arg0, 1), arg1);
12277 /* If we have X - Y == 0, we can convert that to X == Y and similarly
12278 for !=. Don't do this for ordered comparisons due to overflow. */
12279 if (TREE_CODE (arg0) == MINUS_EXPR
12280 && integer_zerop (arg1))
12281 return fold_build2 (code, type,
12282 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
12284 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
12285 if (TREE_CODE (arg0) == ABS_EXPR
12286 && (integer_zerop (arg1) || real_zerop (arg1)))
12287 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), arg1);
12289 /* If this is an EQ or NE comparison with zero and ARG0 is
12290 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
12291 two operations, but the latter can be done in one less insn
12292 on machines that have only two-operand insns or on which a
12293 constant cannot be the first operand. */
12294 if (TREE_CODE (arg0) == BIT_AND_EXPR
12295 && integer_zerop (arg1))
12297 tree arg00 = TREE_OPERAND (arg0, 0);
12298 tree arg01 = TREE_OPERAND (arg0, 1);
12299 if (TREE_CODE (arg00) == LSHIFT_EXPR
12300 && integer_onep (TREE_OPERAND (arg00, 0)))
12302 tree tem = fold_build2 (RSHIFT_EXPR, TREE_TYPE (arg00),
12303 arg01, TREE_OPERAND (arg00, 1));
12304 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12305 build_int_cst (TREE_TYPE (arg0), 1));
12306 return fold_build2 (code, type,
12307 fold_convert (TREE_TYPE (arg1), tem), arg1);
12309 else if (TREE_CODE (arg01) == LSHIFT_EXPR
12310 && integer_onep (TREE_OPERAND (arg01, 0)))
12312 tree tem = fold_build2 (RSHIFT_EXPR, TREE_TYPE (arg01),
12313 arg00, TREE_OPERAND (arg01, 1));
12314 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12315 build_int_cst (TREE_TYPE (arg0), 1));
12316 return fold_build2 (code, type,
12317 fold_convert (TREE_TYPE (arg1), tem), arg1);
12321 /* If this is an NE or EQ comparison of zero against the result of a
12322 signed MOD operation whose second operand is a power of 2, make
12323 the MOD operation unsigned since it is simpler and equivalent. */
12324 if (integer_zerop (arg1)
12325 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
12326 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
12327 || TREE_CODE (arg0) == CEIL_MOD_EXPR
12328 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
12329 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
12330 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12332 tree newtype = unsigned_type_for (TREE_TYPE (arg0));
12333 tree newmod = fold_build2 (TREE_CODE (arg0), newtype,
12334 fold_convert (newtype,
12335 TREE_OPERAND (arg0, 0)),
12336 fold_convert (newtype,
12337 TREE_OPERAND (arg0, 1)));
12339 return fold_build2 (code, type, newmod,
12340 fold_convert (newtype, arg1));
12343 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
12344 C1 is a valid shift constant, and C2 is a power of two, i.e.
12346 if (TREE_CODE (arg0) == BIT_AND_EXPR
12347 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
12348 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
12350 && integer_pow2p (TREE_OPERAND (arg0, 1))
12351 && integer_zerop (arg1))
12353 tree itype = TREE_TYPE (arg0);
12354 unsigned HOST_WIDE_INT prec = TYPE_PRECISION (itype);
12355 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
12357 /* Check for a valid shift count. */
12358 if (TREE_INT_CST_HIGH (arg001) == 0
12359 && TREE_INT_CST_LOW (arg001) < prec)
12361 tree arg01 = TREE_OPERAND (arg0, 1);
12362 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12363 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
12364 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
12365 can be rewritten as (X & (C2 << C1)) != 0. */
12366 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
12368 tem = fold_build2 (LSHIFT_EXPR, itype, arg01, arg001);
12369 tem = fold_build2 (BIT_AND_EXPR, itype, arg000, tem);
12370 return fold_build2 (code, type, tem, arg1);
12372 /* Otherwise, for signed (arithmetic) shifts,
12373 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
12374 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
12375 else if (!TYPE_UNSIGNED (itype))
12376 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
12377 arg000, build_int_cst (itype, 0));
12378 /* Otherwise, of unsigned (logical) shifts,
12379 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
12380 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
12382 return omit_one_operand (type,
12383 code == EQ_EXPR ? integer_one_node
12384 : integer_zero_node,
12389 /* If this is an NE comparison of zero with an AND of one, remove the
12390 comparison since the AND will give the correct value. */
12391 if (code == NE_EXPR
12392 && integer_zerop (arg1)
12393 && TREE_CODE (arg0) == BIT_AND_EXPR
12394 && integer_onep (TREE_OPERAND (arg0, 1)))
12395 return fold_convert (type, arg0);
12397 /* If we have (A & C) == C where C is a power of 2, convert this into
12398 (A & C) != 0. Similarly for NE_EXPR. */
12399 if (TREE_CODE (arg0) == BIT_AND_EXPR
12400 && integer_pow2p (TREE_OPERAND (arg0, 1))
12401 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12402 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12403 arg0, fold_convert (TREE_TYPE (arg0),
12404 integer_zero_node));
12406 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
12407 bit, then fold the expression into A < 0 or A >= 0. */
12408 tem = fold_single_bit_test_into_sign_test (code, arg0, arg1, type);
12412 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
12413 Similarly for NE_EXPR. */
12414 if (TREE_CODE (arg0) == BIT_AND_EXPR
12415 && TREE_CODE (arg1) == INTEGER_CST
12416 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12418 tree notc = fold_build1 (BIT_NOT_EXPR,
12419 TREE_TYPE (TREE_OPERAND (arg0, 1)),
12420 TREE_OPERAND (arg0, 1));
12421 tree dandnotc = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
12423 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12424 if (integer_nonzerop (dandnotc))
12425 return omit_one_operand (type, rslt, arg0);
12428 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
12429 Similarly for NE_EXPR. */
12430 if (TREE_CODE (arg0) == BIT_IOR_EXPR
12431 && TREE_CODE (arg1) == INTEGER_CST
12432 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12434 tree notd = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
12435 tree candnotd = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
12436 TREE_OPERAND (arg0, 1), notd);
12437 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12438 if (integer_nonzerop (candnotd))
12439 return omit_one_operand (type, rslt, arg0);
12442 /* If this is a comparison of a field, we may be able to simplify it. */
12443 if ((TREE_CODE (arg0) == COMPONENT_REF
12444 || TREE_CODE (arg0) == BIT_FIELD_REF)
12445 /* Handle the constant case even without -O
12446 to make sure the warnings are given. */
12447 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
12449 t1 = optimize_bit_field_compare (code, type, arg0, arg1);
12454 /* Optimize comparisons of strlen vs zero to a compare of the
12455 first character of the string vs zero. To wit,
12456 strlen(ptr) == 0 => *ptr == 0
12457 strlen(ptr) != 0 => *ptr != 0
12458 Other cases should reduce to one of these two (or a constant)
12459 due to the return value of strlen being unsigned. */
12460 if (TREE_CODE (arg0) == CALL_EXPR
12461 && integer_zerop (arg1))
12463 tree fndecl = get_callee_fndecl (arg0);
12466 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
12467 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
12468 && call_expr_nargs (arg0) == 1
12469 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
12471 tree iref = build_fold_indirect_ref (CALL_EXPR_ARG (arg0, 0));
12472 return fold_build2 (code, type, iref,
12473 build_int_cst (TREE_TYPE (iref), 0));
12477 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
12478 of X. Similarly fold (X >> C) == 0 into X >= 0. */
12479 if (TREE_CODE (arg0) == RSHIFT_EXPR
12480 && integer_zerop (arg1)
12481 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12483 tree arg00 = TREE_OPERAND (arg0, 0);
12484 tree arg01 = TREE_OPERAND (arg0, 1);
12485 tree itype = TREE_TYPE (arg00);
12486 if (TREE_INT_CST_HIGH (arg01) == 0
12487 && TREE_INT_CST_LOW (arg01)
12488 == (unsigned HOST_WIDE_INT) (TYPE_PRECISION (itype) - 1))
12490 if (TYPE_UNSIGNED (itype))
12492 itype = signed_type_for (itype);
12493 arg00 = fold_convert (itype, arg00);
12495 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
12496 type, arg00, build_int_cst (itype, 0));
12500 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
12501 if (integer_zerop (arg1)
12502 && TREE_CODE (arg0) == BIT_XOR_EXPR)
12503 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
12504 TREE_OPERAND (arg0, 1));
12506 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
12507 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12508 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12509 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
12510 build_int_cst (TREE_TYPE (arg1), 0));
12511 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
12512 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12513 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12514 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
12515 return fold_build2 (code, type, TREE_OPERAND (arg0, 1),
12516 build_int_cst (TREE_TYPE (arg1), 0));
12518 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
12519 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12520 && TREE_CODE (arg1) == INTEGER_CST
12521 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12522 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
12523 fold_build2 (BIT_XOR_EXPR, TREE_TYPE (arg1),
12524 TREE_OPERAND (arg0, 1), arg1));
12526 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
12527 (X & C) == 0 when C is a single bit. */
12528 if (TREE_CODE (arg0) == BIT_AND_EXPR
12529 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
12530 && integer_zerop (arg1)
12531 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12533 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
12534 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
12535 TREE_OPERAND (arg0, 1));
12536 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
12540 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
12541 constant C is a power of two, i.e. a single bit. */
12542 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12543 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
12544 && integer_zerop (arg1)
12545 && integer_pow2p (TREE_OPERAND (arg0, 1))
12546 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12547 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12549 tree arg00 = TREE_OPERAND (arg0, 0);
12550 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12551 arg00, build_int_cst (TREE_TYPE (arg00), 0));
12554 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
12555 when is C is a power of two, i.e. a single bit. */
12556 if (TREE_CODE (arg0) == BIT_AND_EXPR
12557 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
12558 && integer_zerop (arg1)
12559 && integer_pow2p (TREE_OPERAND (arg0, 1))
12560 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12561 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12563 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12564 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg000),
12565 arg000, TREE_OPERAND (arg0, 1));
12566 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12567 tem, build_int_cst (TREE_TYPE (tem), 0));
12570 if (integer_zerop (arg1)
12571 && tree_expr_nonzero_p (arg0))
12573 tree res = constant_boolean_node (code==NE_EXPR, type);
12574 return omit_one_operand (type, res, arg0);
12577 /* Fold -X op -Y as X op Y, where op is eq/ne. */
12578 if (TREE_CODE (arg0) == NEGATE_EXPR
12579 && TREE_CODE (arg1) == NEGATE_EXPR)
12580 return fold_build2 (code, type,
12581 TREE_OPERAND (arg0, 0),
12582 TREE_OPERAND (arg1, 0));
12584 /* Fold (X & C) op (Y & C) as (X ^ Y) & C op 0", and symmetries. */
12585 if (TREE_CODE (arg0) == BIT_AND_EXPR
12586 && TREE_CODE (arg1) == BIT_AND_EXPR)
12588 tree arg00 = TREE_OPERAND (arg0, 0);
12589 tree arg01 = TREE_OPERAND (arg0, 1);
12590 tree arg10 = TREE_OPERAND (arg1, 0);
12591 tree arg11 = TREE_OPERAND (arg1, 1);
12592 tree itype = TREE_TYPE (arg0);
12594 if (operand_equal_p (arg01, arg11, 0))
12595 return fold_build2 (code, type,
12596 fold_build2 (BIT_AND_EXPR, itype,
12597 fold_build2 (BIT_XOR_EXPR, itype,
12600 build_int_cst (itype, 0));
12602 if (operand_equal_p (arg01, arg10, 0))
12603 return fold_build2 (code, type,
12604 fold_build2 (BIT_AND_EXPR, itype,
12605 fold_build2 (BIT_XOR_EXPR, itype,
12608 build_int_cst (itype, 0));
12610 if (operand_equal_p (arg00, arg11, 0))
12611 return fold_build2 (code, type,
12612 fold_build2 (BIT_AND_EXPR, itype,
12613 fold_build2 (BIT_XOR_EXPR, itype,
12616 build_int_cst (itype, 0));
12618 if (operand_equal_p (arg00, arg10, 0))
12619 return fold_build2 (code, type,
12620 fold_build2 (BIT_AND_EXPR, itype,
12621 fold_build2 (BIT_XOR_EXPR, itype,
12624 build_int_cst (itype, 0));
12627 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12628 && TREE_CODE (arg1) == BIT_XOR_EXPR)
12630 tree arg00 = TREE_OPERAND (arg0, 0);
12631 tree arg01 = TREE_OPERAND (arg0, 1);
12632 tree arg10 = TREE_OPERAND (arg1, 0);
12633 tree arg11 = TREE_OPERAND (arg1, 1);
12634 tree itype = TREE_TYPE (arg0);
12636 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
12637 operand_equal_p guarantees no side-effects so we don't need
12638 to use omit_one_operand on Z. */
12639 if (operand_equal_p (arg01, arg11, 0))
12640 return fold_build2 (code, type, arg00, arg10);
12641 if (operand_equal_p (arg01, arg10, 0))
12642 return fold_build2 (code, type, arg00, arg11);
12643 if (operand_equal_p (arg00, arg11, 0))
12644 return fold_build2 (code, type, arg01, arg10);
12645 if (operand_equal_p (arg00, arg10, 0))
12646 return fold_build2 (code, type, arg01, arg11);
12648 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
12649 if (TREE_CODE (arg01) == INTEGER_CST
12650 && TREE_CODE (arg11) == INTEGER_CST)
12651 return fold_build2 (code, type,
12652 fold_build2 (BIT_XOR_EXPR, itype, arg00,
12653 fold_build2 (BIT_XOR_EXPR, itype,
12658 /* Attempt to simplify equality/inequality comparisons of complex
12659 values. Only lower the comparison if the result is known or
12660 can be simplified to a single scalar comparison. */
12661 if ((TREE_CODE (arg0) == COMPLEX_EXPR
12662 || TREE_CODE (arg0) == COMPLEX_CST)
12663 && (TREE_CODE (arg1) == COMPLEX_EXPR
12664 || TREE_CODE (arg1) == COMPLEX_CST))
12666 tree real0, imag0, real1, imag1;
12669 if (TREE_CODE (arg0) == COMPLEX_EXPR)
12671 real0 = TREE_OPERAND (arg0, 0);
12672 imag0 = TREE_OPERAND (arg0, 1);
12676 real0 = TREE_REALPART (arg0);
12677 imag0 = TREE_IMAGPART (arg0);
12680 if (TREE_CODE (arg1) == COMPLEX_EXPR)
12682 real1 = TREE_OPERAND (arg1, 0);
12683 imag1 = TREE_OPERAND (arg1, 1);
12687 real1 = TREE_REALPART (arg1);
12688 imag1 = TREE_IMAGPART (arg1);
12691 rcond = fold_binary (code, type, real0, real1);
12692 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
12694 if (integer_zerop (rcond))
12696 if (code == EQ_EXPR)
12697 return omit_two_operands (type, boolean_false_node,
12699 return fold_build2 (NE_EXPR, type, imag0, imag1);
12703 if (code == NE_EXPR)
12704 return omit_two_operands (type, boolean_true_node,
12706 return fold_build2 (EQ_EXPR, type, imag0, imag1);
12710 icond = fold_binary (code, type, imag0, imag1);
12711 if (icond && TREE_CODE (icond) == INTEGER_CST)
12713 if (integer_zerop (icond))
12715 if (code == EQ_EXPR)
12716 return omit_two_operands (type, boolean_false_node,
12718 return fold_build2 (NE_EXPR, type, real0, real1);
12722 if (code == NE_EXPR)
12723 return omit_two_operands (type, boolean_true_node,
12725 return fold_build2 (EQ_EXPR, type, real0, real1);
12736 tem = fold_comparison (code, type, op0, op1);
12737 if (tem != NULL_TREE)
12740 /* Transform comparisons of the form X +- C CMP X. */
12741 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
12742 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12743 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
12744 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
12745 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12746 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
12748 tree arg01 = TREE_OPERAND (arg0, 1);
12749 enum tree_code code0 = TREE_CODE (arg0);
12752 if (TREE_CODE (arg01) == REAL_CST)
12753 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
12755 is_positive = tree_int_cst_sgn (arg01);
12757 /* (X - c) > X becomes false. */
12758 if (code == GT_EXPR
12759 && ((code0 == MINUS_EXPR && is_positive >= 0)
12760 || (code0 == PLUS_EXPR && is_positive <= 0)))
12762 if (TREE_CODE (arg01) == INTEGER_CST
12763 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12764 fold_overflow_warning (("assuming signed overflow does not "
12765 "occur when assuming that (X - c) > X "
12766 "is always false"),
12767 WARN_STRICT_OVERFLOW_ALL);
12768 return constant_boolean_node (0, type);
12771 /* Likewise (X + c) < X becomes false. */
12772 if (code == LT_EXPR
12773 && ((code0 == PLUS_EXPR && is_positive >= 0)
12774 || (code0 == MINUS_EXPR && is_positive <= 0)))
12776 if (TREE_CODE (arg01) == INTEGER_CST
12777 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12778 fold_overflow_warning (("assuming signed overflow does not "
12779 "occur when assuming that "
12780 "(X + c) < X is always false"),
12781 WARN_STRICT_OVERFLOW_ALL);
12782 return constant_boolean_node (0, type);
12785 /* Convert (X - c) <= X to true. */
12786 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
12788 && ((code0 == MINUS_EXPR && is_positive >= 0)
12789 || (code0 == PLUS_EXPR && is_positive <= 0)))
12791 if (TREE_CODE (arg01) == INTEGER_CST
12792 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12793 fold_overflow_warning (("assuming signed overflow does not "
12794 "occur when assuming that "
12795 "(X - c) <= X is always true"),
12796 WARN_STRICT_OVERFLOW_ALL);
12797 return constant_boolean_node (1, type);
12800 /* Convert (X + c) >= X to true. */
12801 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
12803 && ((code0 == PLUS_EXPR && is_positive >= 0)
12804 || (code0 == MINUS_EXPR && is_positive <= 0)))
12806 if (TREE_CODE (arg01) == INTEGER_CST
12807 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12808 fold_overflow_warning (("assuming signed overflow does not "
12809 "occur when assuming that "
12810 "(X + c) >= X is always true"),
12811 WARN_STRICT_OVERFLOW_ALL);
12812 return constant_boolean_node (1, type);
12815 if (TREE_CODE (arg01) == INTEGER_CST)
12817 /* Convert X + c > X and X - c < X to true for integers. */
12818 if (code == GT_EXPR
12819 && ((code0 == PLUS_EXPR && is_positive > 0)
12820 || (code0 == MINUS_EXPR && is_positive < 0)))
12822 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12823 fold_overflow_warning (("assuming signed overflow does "
12824 "not occur when assuming that "
12825 "(X + c) > X is always true"),
12826 WARN_STRICT_OVERFLOW_ALL);
12827 return constant_boolean_node (1, type);
12830 if (code == LT_EXPR
12831 && ((code0 == MINUS_EXPR && is_positive > 0)
12832 || (code0 == PLUS_EXPR && is_positive < 0)))
12834 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12835 fold_overflow_warning (("assuming signed overflow does "
12836 "not occur when assuming that "
12837 "(X - c) < X is always true"),
12838 WARN_STRICT_OVERFLOW_ALL);
12839 return constant_boolean_node (1, type);
12842 /* Convert X + c <= X and X - c >= X to false for integers. */
12843 if (code == LE_EXPR
12844 && ((code0 == PLUS_EXPR && is_positive > 0)
12845 || (code0 == MINUS_EXPR && is_positive < 0)))
12847 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12848 fold_overflow_warning (("assuming signed overflow does "
12849 "not occur when assuming that "
12850 "(X + c) <= X is always false"),
12851 WARN_STRICT_OVERFLOW_ALL);
12852 return constant_boolean_node (0, type);
12855 if (code == GE_EXPR
12856 && ((code0 == MINUS_EXPR && is_positive > 0)
12857 || (code0 == PLUS_EXPR && is_positive < 0)))
12859 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12860 fold_overflow_warning (("assuming signed overflow does "
12861 "not occur when assuming that "
12862 "(X - c) >= X is always false"),
12863 WARN_STRICT_OVERFLOW_ALL);
12864 return constant_boolean_node (0, type);
12869 /* Comparisons with the highest or lowest possible integer of
12870 the specified precision will have known values. */
12872 tree arg1_type = TREE_TYPE (arg1);
12873 unsigned int width = TYPE_PRECISION (arg1_type);
12875 if (TREE_CODE (arg1) == INTEGER_CST
12876 && width <= 2 * HOST_BITS_PER_WIDE_INT
12877 && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
12879 HOST_WIDE_INT signed_max_hi;
12880 unsigned HOST_WIDE_INT signed_max_lo;
12881 unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;
12883 if (width <= HOST_BITS_PER_WIDE_INT)
12885 signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
12890 if (TYPE_UNSIGNED (arg1_type))
12892 max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
12898 max_lo = signed_max_lo;
12899 min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
12905 width -= HOST_BITS_PER_WIDE_INT;
12906 signed_max_lo = -1;
12907 signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
12912 if (TYPE_UNSIGNED (arg1_type))
12914 max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
12919 max_hi = signed_max_hi;
12920 min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
12924 if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
12925 && TREE_INT_CST_LOW (arg1) == max_lo)
12929 return omit_one_operand (type, integer_zero_node, arg0);
12932 return fold_build2 (EQ_EXPR, type, op0, op1);
12935 return omit_one_operand (type, integer_one_node, arg0);
12938 return fold_build2 (NE_EXPR, type, op0, op1);
12940 /* The GE_EXPR and LT_EXPR cases above are not normally
12941 reached because of previous transformations. */
12946 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
12948 && TREE_INT_CST_LOW (arg1) == max_lo - 1)
12952 arg1 = const_binop (PLUS_EXPR, arg1,
12953 build_int_cst (TREE_TYPE (arg1), 1), 0);
12954 return fold_build2 (EQ_EXPR, type,
12955 fold_convert (TREE_TYPE (arg1), arg0),
12958 arg1 = const_binop (PLUS_EXPR, arg1,
12959 build_int_cst (TREE_TYPE (arg1), 1), 0);
12960 return fold_build2 (NE_EXPR, type,
12961 fold_convert (TREE_TYPE (arg1), arg0),
12966 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
12968 && TREE_INT_CST_LOW (arg1) == min_lo)
12972 return omit_one_operand (type, integer_zero_node, arg0);
12975 return fold_build2 (EQ_EXPR, type, op0, op1);
12978 return omit_one_operand (type, integer_one_node, arg0);
12981 return fold_build2 (NE_EXPR, type, op0, op1);
12986 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
12988 && TREE_INT_CST_LOW (arg1) == min_lo + 1)
12992 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
12993 return fold_build2 (NE_EXPR, type,
12994 fold_convert (TREE_TYPE (arg1), arg0),
12997 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
12998 return fold_build2 (EQ_EXPR, type,
12999 fold_convert (TREE_TYPE (arg1), arg0),
13005 else if (TREE_INT_CST_HIGH (arg1) == signed_max_hi
13006 && TREE_INT_CST_LOW (arg1) == signed_max_lo
13007 && TYPE_UNSIGNED (arg1_type)
13008 /* We will flip the signedness of the comparison operator
13009 associated with the mode of arg1, so the sign bit is
13010 specified by this mode. Check that arg1 is the signed
13011 max associated with this sign bit. */
13012 && width == GET_MODE_BITSIZE (TYPE_MODE (arg1_type))
13013 /* signed_type does not work on pointer types. */
13014 && INTEGRAL_TYPE_P (arg1_type))
13016 /* The following case also applies to X < signed_max+1
13017 and X >= signed_max+1 because previous transformations. */
13018 if (code == LE_EXPR || code == GT_EXPR)
13021 st = signed_type_for (TREE_TYPE (arg1));
13022 return fold_build2 (code == LE_EXPR ? GE_EXPR : LT_EXPR,
13023 type, fold_convert (st, arg0),
13024 build_int_cst (st, 0));
13030 /* If we are comparing an ABS_EXPR with a constant, we can
13031 convert all the cases into explicit comparisons, but they may
13032 well not be faster than doing the ABS and one comparison.
13033 But ABS (X) <= C is a range comparison, which becomes a subtraction
13034 and a comparison, and is probably faster. */
13035 if (code == LE_EXPR
13036 && TREE_CODE (arg1) == INTEGER_CST
13037 && TREE_CODE (arg0) == ABS_EXPR
13038 && ! TREE_SIDE_EFFECTS (arg0)
13039 && (0 != (tem = negate_expr (arg1)))
13040 && TREE_CODE (tem) == INTEGER_CST
13041 && !TREE_OVERFLOW (tem))
13042 return fold_build2 (TRUTH_ANDIF_EXPR, type,
13043 build2 (GE_EXPR, type,
13044 TREE_OPERAND (arg0, 0), tem),
13045 build2 (LE_EXPR, type,
13046 TREE_OPERAND (arg0, 0), arg1));
13048 /* Convert ABS_EXPR<x> >= 0 to true. */
13049 strict_overflow_p = false;
13050 if (code == GE_EXPR
13051 && (integer_zerop (arg1)
13052 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
13053 && real_zerop (arg1)))
13054 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13056 if (strict_overflow_p)
13057 fold_overflow_warning (("assuming signed overflow does not occur "
13058 "when simplifying comparison of "
13059 "absolute value and zero"),
13060 WARN_STRICT_OVERFLOW_CONDITIONAL);
13061 return omit_one_operand (type, integer_one_node, arg0);
13064 /* Convert ABS_EXPR<x> < 0 to false. */
13065 strict_overflow_p = false;
13066 if (code == LT_EXPR
13067 && (integer_zerop (arg1) || real_zerop (arg1))
13068 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13070 if (strict_overflow_p)
13071 fold_overflow_warning (("assuming signed overflow does not occur "
13072 "when simplifying comparison of "
13073 "absolute value and zero"),
13074 WARN_STRICT_OVERFLOW_CONDITIONAL);
13075 return omit_one_operand (type, integer_zero_node, arg0);
13078 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
13079 and similarly for >= into !=. */
13080 if ((code == LT_EXPR || code == GE_EXPR)
13081 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13082 && TREE_CODE (arg1) == LSHIFT_EXPR
13083 && integer_onep (TREE_OPERAND (arg1, 0)))
13084 return build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13085 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13086 TREE_OPERAND (arg1, 1)),
13087 build_int_cst (TREE_TYPE (arg0), 0));
13089 if ((code == LT_EXPR || code == GE_EXPR)
13090 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13091 && CONVERT_EXPR_P (arg1)
13092 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
13093 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
13095 build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13096 fold_convert (TREE_TYPE (arg0),
13097 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13098 TREE_OPERAND (TREE_OPERAND (arg1, 0),
13100 build_int_cst (TREE_TYPE (arg0), 0));
13104 case UNORDERED_EXPR:
13112 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
13114 t1 = fold_relational_const (code, type, arg0, arg1);
13115 if (t1 != NULL_TREE)
13119 /* If the first operand is NaN, the result is constant. */
13120 if (TREE_CODE (arg0) == REAL_CST
13121 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
13122 && (code != LTGT_EXPR || ! flag_trapping_math))
13124 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13125 ? integer_zero_node
13126 : integer_one_node;
13127 return omit_one_operand (type, t1, arg1);
13130 /* If the second operand is NaN, the result is constant. */
13131 if (TREE_CODE (arg1) == REAL_CST
13132 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
13133 && (code != LTGT_EXPR || ! flag_trapping_math))
13135 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13136 ? integer_zero_node
13137 : integer_one_node;
13138 return omit_one_operand (type, t1, arg0);
13141 /* Simplify unordered comparison of something with itself. */
13142 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
13143 && operand_equal_p (arg0, arg1, 0))
13144 return constant_boolean_node (1, type);
13146 if (code == LTGT_EXPR
13147 && !flag_trapping_math
13148 && operand_equal_p (arg0, arg1, 0))
13149 return constant_boolean_node (0, type);
13151 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
13153 tree targ0 = strip_float_extensions (arg0);
13154 tree targ1 = strip_float_extensions (arg1);
13155 tree newtype = TREE_TYPE (targ0);
13157 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
13158 newtype = TREE_TYPE (targ1);
13160 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
13161 return fold_build2 (code, type, fold_convert (newtype, targ0),
13162 fold_convert (newtype, targ1));
13167 case COMPOUND_EXPR:
13168 /* When pedantic, a compound expression can be neither an lvalue
13169 nor an integer constant expression. */
13170 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
13172 /* Don't let (0, 0) be null pointer constant. */
13173 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
13174 : fold_convert (type, arg1);
13175 return pedantic_non_lvalue (tem);
13178 if ((TREE_CODE (arg0) == REAL_CST
13179 && TREE_CODE (arg1) == REAL_CST)
13180 || (TREE_CODE (arg0) == INTEGER_CST
13181 && TREE_CODE (arg1) == INTEGER_CST))
13182 return build_complex (type, arg0, arg1);
13186 /* An ASSERT_EXPR should never be passed to fold_binary. */
13187 gcc_unreachable ();
13191 } /* switch (code) */
13194 /* Callback for walk_tree, looking for LABEL_EXPR.
13195 Returns tree TP if it is LABEL_EXPR. Otherwise it returns NULL_TREE.
13196 Do not check the sub-tree of GOTO_EXPR. */
13199 contains_label_1 (tree *tp,
13200 int *walk_subtrees,
13201 void *data ATTRIBUTE_UNUSED)
/* NOTE(review): the return-type line, braces and the LABEL_EXPR/GOTO_EXPR
   case labels are elided in this extract; only the dispatch on the tree
   code and the subtree-pruning assignment are visible.  */
13203 switch (TREE_CODE (*tp))
/* Setting *walk_subtrees to 0 tells walk_tree not to descend into the
   children of the current node (per the header comment, this is how the
   sub-tree of a GOTO_EXPR is skipped).  */
13208 *walk_subtrees = 0;
13215 /* Checks whether the sub-tree ST contains a label LABEL_EXPR which is
13216 accessible from outside the sub-tree. Returns NULL_TREE if no
13217 addressable label is found. */
13220 contains_label_p (tree st)
/* True iff walk_tree, using contains_label_1 as its callback, found a
   LABEL_EXPR anywhere in ST (excluding GOTO_EXPR sub-trees).  */
13222 return (walk_tree (&st, contains_label_1 , NULL, NULL) != NULL_TREE);
13225 /* Fold a ternary expression of code CODE and type TYPE with operands
13226 OP0, OP1, and OP2. Return the folded expression if folding is
13227 successful. Otherwise, return NULL_TREE. */
13230 fold_ternary (enum tree_code code, tree type, tree op0, tree op1, tree op2)
13233 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
13234 enum tree_code_class kind = TREE_CODE_CLASS (code);
/* Sanity check: this routine handles only expression codes with exactly
   three operands.  */
13236 gcc_assert (IS_EXPR_CODE_CLASS (kind)
13237 && TREE_CODE_LENGTH (code) == 3);
13239 /* Strip any conversions that don't change the mode. This is safe
13240 for every expression, except for a comparison expression because
13241 its signedness is derived from its operands. So, in the latter
13242 case, only strip conversions that don't change the signedness.
13244 Note that this is done as an internal manipulation within the
13245 constant folder, in order to find the simplest representation of
13246 the arguments so that their form can be studied. In any cases,
13247 the appropriate type conversions should be put back in the tree
13248 that will get out of the constant folder. */
/* NOTE(review): the switch opener and several case labels/braces are
   elided in this extract; code below is kept byte-identical.  */
13263 case COMPONENT_REF:
/* Reading a field of a constant CONSTRUCTOR: look the field up directly.  */
13264 if (TREE_CODE (arg0) == CONSTRUCTOR
13265 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
13267 unsigned HOST_WIDE_INT idx;
13269 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
/* COND_EXPR simplifications follow (case label elided in this extract).  */
13276 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
13277 so all simple results must be passed through pedantic_non_lvalue. */
13278 if (TREE_CODE (arg0) == INTEGER_CST)
13280 tree unused_op = integer_zerop (arg0) ? op1 : op2;
13281 tem = integer_zerop (arg0) ? op2 : op1;
13282 /* Only optimize constant conditions when the selected branch
13283 has the same type as the COND_EXPR. This avoids optimizing
13284 away "c ? x : throw", where the throw has a void type.
13285 Avoid throwing away that operand which contains label. */
13286 if ((!TREE_SIDE_EFFECTS (unused_op)
13287 || !contains_label_p (unused_op))
13288 && (! VOID_TYPE_P (TREE_TYPE (tem))
13289 || VOID_TYPE_P (type)))
13290 return pedantic_non_lvalue (tem);
/* cond ? X : X  ==>  X (evaluating cond only for its side effects).  */
13293 if (operand_equal_p (arg1, op2, 0))
13294 return pedantic_omit_one_operand (type, arg1, arg0);
13296 /* If we have A op B ? A : C, we may be able to convert this to a
13297 simpler expression, depending on the operation and the values
13298 of B and C. Signed zeros prevent all of these transformations,
13299 for reasons given above each one.
13301 Also try swapping the arguments and inverting the conditional. */
13302 if (COMPARISON_CLASS_P (arg0)
13303 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13304 arg1, TREE_OPERAND (arg0, 1))
13305 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
13307 tem = fold_cond_expr_with_comparison (type, arg0, op1, op2);
/* Same transformation, but matching against the third operand, so the
   comparison must be inverted before fold_cond_expr_with_comparison.  */
13312 if (COMPARISON_CLASS_P (arg0)
13313 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13315 TREE_OPERAND (arg0, 1))
13316 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
13318 tem = fold_truth_not_expr (arg0);
13319 if (tem && COMPARISON_CLASS_P (tem))
13321 tem = fold_cond_expr_with_comparison (type, tem, op2, op1);
13327 /* If the second operand is simpler than the third, swap them
13328 since that produces better jump optimization results. */
13329 if (truth_value_p (TREE_CODE (arg0))
13330 && tree_swap_operands_p (op1, op2, false))
13332 /* See if this can be inverted. If it can't, possibly because
13333 it was a floating-point inequality comparison, don't do
13335 tem = fold_truth_not_expr (arg0);
13337 return fold_build3 (code, type, tem, op2, op1);
13340 /* Convert A ? 1 : 0 to simply A. */
13341 if (integer_onep (op1)
13342 && integer_zerop (op2)
13343 /* If we try to convert OP0 to our type, the
13344 call to fold will try to move the conversion inside
13345 a COND, which will recurse. In that case, the COND_EXPR
13346 is probably the best choice, so leave it alone. */
13347 && type == TREE_TYPE (arg0))
13348 return pedantic_non_lvalue (arg0);
13350 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
13351 over COND_EXPR in cases such as floating point comparisons. */
13352 if (integer_zerop (op1)
13353 && integer_onep (op2)
13354 && truth_value_p (TREE_CODE (arg0)))
13355 return pedantic_non_lvalue (fold_convert (type,
13356 invert_truthvalue (arg0)));
13358 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
13359 if (TREE_CODE (arg0) == LT_EXPR
13360 && integer_zerop (TREE_OPERAND (arg0, 1))
13361 && integer_zerop (op2)
13362 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
13364 /* sign_bit_p only checks ARG1 bits within A's precision.
13365 If <sign bit of A> has wider type than A, bits outside
13366 of A's precision in <sign bit of A> need to be checked.
13367 If they are all 0, this optimization needs to be done
13368 in unsigned A's type, if they are all 1 in signed A's type,
13369 otherwise this can't be done. */
13370 if (TYPE_PRECISION (TREE_TYPE (tem))
13371 < TYPE_PRECISION (TREE_TYPE (arg1))
13372 && TYPE_PRECISION (TREE_TYPE (tem))
13373 < TYPE_PRECISION (type))
/* Build a double-word (hi/lo) mask covering the bits of ARG1 that lie
   outside A's precision but inside the outer precision.  */
13375 unsigned HOST_WIDE_INT mask_lo;
13376 HOST_WIDE_INT mask_hi;
13377 int inner_width, outer_width;
13380 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
13381 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
13382 if (outer_width > TYPE_PRECISION (type))
13383 outer_width = TYPE_PRECISION (type);
13385 if (outer_width > HOST_BITS_PER_WIDE_INT)
13387 mask_hi = ((unsigned HOST_WIDE_INT) -1
13388 >> (2 * HOST_BITS_PER_WIDE_INT - outer_width));
13394 mask_lo = ((unsigned HOST_WIDE_INT) -1
13395 >> (HOST_BITS_PER_WIDE_INT - outer_width));
13397 if (inner_width > HOST_BITS_PER_WIDE_INT)
13399 mask_hi &= ~((unsigned HOST_WIDE_INT) -1
13400 >> (HOST_BITS_PER_WIDE_INT - inner_width));
13404 mask_lo &= ~((unsigned HOST_WIDE_INT) -1
13405 >> (HOST_BITS_PER_WIDE_INT - inner_width));
/* All masked bits set: safe in A's signed type; all clear: unsigned.  */
13407 if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == mask_hi
13408 && (TREE_INT_CST_LOW (arg1) & mask_lo) == mask_lo)
13410 tem_type = signed_type_for (TREE_TYPE (tem));
13411 tem = fold_convert (tem_type, tem);
13413 else if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == 0
13414 && (TREE_INT_CST_LOW (arg1) & mask_lo) == 0)
13416 tem_type = unsigned_type_for (TREE_TYPE (tem));
13417 tem = fold_convert (tem_type, tem);
13424 return fold_convert (type,
13425 fold_build2 (BIT_AND_EXPR,
13426 TREE_TYPE (tem), tem,
13427 fold_convert (TREE_TYPE (tem),
13431 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
13432 already handled above. */
13433 if (TREE_CODE (arg0) == BIT_AND_EXPR
13434 && integer_onep (TREE_OPERAND (arg0, 1))
13435 && integer_zerop (op2)
13436 && integer_pow2p (arg1))
13438 tree tem = TREE_OPERAND (arg0, 0);
/* Shift count must match the set bit of ARG1 for the rewrite to hold.  */
13440 if (TREE_CODE (tem) == RSHIFT_EXPR
13441 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
13442 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
13443 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
13444 return fold_build2 (BIT_AND_EXPR, type,
13445 TREE_OPERAND (tem, 0), arg1);
13448 /* A & N ? N : 0 is simply A & N if N is a power of two. This
13449 is probably obsolete because the first operand should be a
13450 truth value (that's why we have the two cases above), but let's
13451 leave it in until we can confirm this for all front-ends. */
13452 if (integer_zerop (op2)
13453 && TREE_CODE (arg0) == NE_EXPR
13454 && integer_zerop (TREE_OPERAND (arg0, 1))
13455 && integer_pow2p (arg1)
13456 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
13457 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13458 arg1, OEP_ONLY_CONST))
13459 return pedantic_non_lvalue (fold_convert (type,
13460 TREE_OPERAND (arg0, 0)));
13462 /* Convert A ? B : 0 into A && B if A and B are truth values. */
13463 if (integer_zerop (op2)
13464 && truth_value_p (TREE_CODE (arg0))
13465 && truth_value_p (TREE_CODE (arg1)))
13466 return fold_build2 (TRUTH_ANDIF_EXPR, type,
13467 fold_convert (type, arg0),
13470 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
13471 if (integer_onep (op2)
13472 && truth_value_p (TREE_CODE (arg0))
13473 && truth_value_p (TREE_CODE (arg1)))
13475 /* Only perform transformation if ARG0 is easily inverted. */
13476 tem = fold_truth_not_expr (arg0);
13478 return fold_build2 (TRUTH_ORIF_EXPR, type,
13479 fold_convert (type, tem),
13483 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
13484 if (integer_zerop (arg1)
13485 && truth_value_p (TREE_CODE (arg0))
13486 && truth_value_p (TREE_CODE (op2)))
13488 /* Only perform transformation if ARG0 is easily inverted. */
13489 tem = fold_truth_not_expr (arg0);
13491 return fold_build2 (TRUTH_ANDIF_EXPR, type,
13492 fold_convert (type, tem),
13496 /* Convert A ? 1 : B into A || B if A and B are truth values. */
13497 if (integer_onep (arg1)
13498 && truth_value_p (TREE_CODE (arg0))
13499 && truth_value_p (TREE_CODE (op2)))
13500 return fold_build2 (TRUTH_ORIF_EXPR, type,
13501 fold_convert (type, arg0),
13507 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
13508 of fold_ternary on them. */
13509 gcc_unreachable ();
13511 case BIT_FIELD_REF:
/* Extracting a single element from a constant vector: return that
   element directly.  */
13512 if ((TREE_CODE (arg0) == VECTOR_CST
13513 || (TREE_CODE (arg0) == CONSTRUCTOR && TREE_CONSTANT (arg0)))
13514 && type == TREE_TYPE (TREE_TYPE (arg0)))
13516 unsigned HOST_WIDE_INT width = tree_low_cst (arg1, 1);
13517 unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);
13520 && simple_cst_equal (arg1, TYPE_SIZE (type)) == 1
13521 && (idx % width) == 0
13522 && (idx = idx / width)
13523 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
13525 tree elements = NULL_TREE;
13527 if (TREE_CODE (arg0) == VECTOR_CST)
13528 elements = TREE_VECTOR_CST_ELTS (arg0);
13531 unsigned HOST_WIDE_INT idx;
/* Constructor values are consed in reverse, then the list is walked
   idx times to reach the requested element.  */
13534 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (arg0), idx, value)
13535 elements = tree_cons (NULL_TREE, value, elements);
13537 while (idx-- > 0 && elements)
13538 elements = TREE_CHAIN (elements);
13540 return TREE_VALUE (elements);
/* Missing trailing elements of a CONSTRUCTOR read as zero.  */
13542 return fold_convert (type, integer_zero_node);
13546 /* A bit-field-ref that referenced the full argument can be stripped. */
13547 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
13548 && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_low_cst (arg1, 1)
13549 && integer_zerop (op2))
13550 return fold_convert (type, arg0);
13556 } /* switch (code) */
13559 /* Perform constant folding and related simplification of EXPR.
13560 The related simplifications include x*1 => x, x*0 => 0, etc.,
13561 and application of the associative law.
13562 NOP_EXPR conversions may be removed freely (as long as we
13563 are careful not to change the type of the overall expression).
13564 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
13565 but we can constant-fold them if they have constant operands. */
13567 #ifdef ENABLE_FOLD_CHECKING
13568 # define fold(x) fold_1 (x)
13569 static tree fold_1 (tree);
/* NOTE(review): the function header line is elided in this extract; the
   body below dispatches on the number of operands of EXPR's code.  */
13575 const tree t = expr;
13576 enum tree_code code = TREE_CODE (t);
13577 enum tree_code_class kind = TREE_CODE_CLASS (code);
13580 /* Return right away if a constant. */
13581 if (kind == tcc_constant)
13584 /* CALL_EXPR-like objects with variable numbers of operands are
13585 treated specially. */
13586 if (kind == tcc_vl_exp)
13588 if (code == CALL_EXPR)
13590 tem = fold_call_expr (expr, false);
13591 return tem ? tem : expr;
/* Fixed-arity expressions: delegate to fold_unary / fold_binary /
   fold_ternary, returning EXPR unchanged when folding fails.  */
13596 if (IS_EXPR_CODE_CLASS (kind))
13598 tree type = TREE_TYPE (t);
13599 tree op0, op1, op2;
13601 switch (TREE_CODE_LENGTH (code))
13604 op0 = TREE_OPERAND (t, 0);
13605 tem = fold_unary (code, type, op0);
13606 return tem ? tem : expr;
13608 op0 = TREE_OPERAND (t, 0);
13609 op1 = TREE_OPERAND (t, 1);
13610 tem = fold_binary (code, type, op0, op1);
13611 return tem ? tem : expr;
13613 op0 = TREE_OPERAND (t, 0);
13614 op1 = TREE_OPERAND (t, 1);
13615 op2 = TREE_OPERAND (t, 2);
13616 tem = fold_ternary (code, type, op0, op1, op2);
13617 return tem ? tem : expr;
/* ARRAY_REF-style lookup into a constant CONSTRUCTOR (case label elided
   in this extract): find the matching element by binary search.  */
13627 tree op0 = TREE_OPERAND (t, 0);
13628 tree op1 = TREE_OPERAND (t, 1);
13630 if (TREE_CODE (op1) == INTEGER_CST
13631 && TREE_CODE (op0) == CONSTRUCTOR
13632 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
13634 VEC(constructor_elt,gc) *elts = CONSTRUCTOR_ELTS (op0);
13635 unsigned HOST_WIDE_INT end = VEC_length (constructor_elt, elts);
13636 unsigned HOST_WIDE_INT begin = 0;
13638 /* Find a matching index by means of a binary search. */
13639 while (begin != end)
13641 unsigned HOST_WIDE_INT middle = (begin + end) / 2;
13642 tree index = VEC_index (constructor_elt, elts, middle)->index;
/* Indices may be single INTEGER_CSTs or RANGE_EXPRs; both are ordered,
   so either form supports the bisection below.  */
13644 if (TREE_CODE (index) == INTEGER_CST
13645 && tree_int_cst_lt (index, op1))
13646 begin = middle + 1;
13647 else if (TREE_CODE (index) == INTEGER_CST
13648 && tree_int_cst_lt (op1, index))
13650 else if (TREE_CODE (index) == RANGE_EXPR
13651 && tree_int_cst_lt (TREE_OPERAND (index, 1), op1))
13652 begin = middle + 1;
13653 else if (TREE_CODE (index) == RANGE_EXPR
13654 && tree_int_cst_lt (op1, TREE_OPERAND (index, 0)))
13657 return VEC_index (constructor_elt, elts, middle)->value;
/* CONST_DECL (case label elided): fold its initializer instead.  */
13665 return fold (DECL_INITIAL (t));
13669 } /* switch (code) */
13672 #ifdef ENABLE_FOLD_CHECKING
13675 static void fold_checksum_tree (const_tree, struct md5_ctx *, htab_t);
13676 static void fold_check_failed (const_tree, const_tree);
13677 void print_fold_checksum (const_tree);
13679 /* When --enable-checking=fold, compute a digest of expr before
13680 and after actual fold call to see if fold did not accidentally
13681 change original expr. */
/* NOTE(review): the `tree fold (tree expr)` header line is elided in
   this extract.  The body MD5-checksums EXPR, calls the real fold_1,
   re-checksums, and aborts if the input tree was mutated.  */
13687 struct md5_ctx ctx;
13688 unsigned char checksum_before[16], checksum_after[16];
/* Pointer-identity hash table prevents re-hashing shared subtrees.  */
13691 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13692 md5_init_ctx (&ctx);
13693 fold_checksum_tree (expr, &ctx, ht);
13694 md5_finish_ctx (&ctx, checksum_before);
13697 ret = fold_1 (expr);
13699 md5_init_ctx (&ctx);
13700 fold_checksum_tree (expr, &ctx, ht);
13701 md5_finish_ctx (&ctx, checksum_after);
/* Any difference means fold_1 modified its input in place — a bug.  */
13704 if (memcmp (checksum_before, checksum_after, 16))
13705 fold_check_failed (expr, ret);
/* Debug helper: print the MD5 checksum of EXPR as hex on stderr.  */
13711 print_fold_checksum (const_tree expr)
13713 struct md5_ctx ctx;
13714 unsigned char checksum[16], cnt;
13717 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13718 md5_init_ctx (&ctx);
13719 fold_checksum_tree (expr, &ctx, ht);
13720 md5_finish_ctx (&ctx, checksum);
13722 for (cnt = 0; cnt < 16; ++cnt)
13723 fprintf (stderr, "%02x", checksum[cnt]);
13724 putc ('\n', stderr);
/* Abort compilation when fold is detected to have mutated its input
   tree (see the checking wrapper above).  Arguments are unused; they
   exist so a debugger can inspect the offending trees.  */
13728 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
13730 internal_error ("fold check: original tree changed by fold");
/* Recursively feed the bytes of EXPR and everything it references into
   the MD5 context CTX.  HT records already-visited nodes (by pointer)
   so shared subtrees and cycles are hashed only once.  Fields that fold
   is allowed to modify (assembler names, type caches) are masked out by
   hashing a scrubbed stack copy instead of the original node.  */
13734 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx, htab_t ht)
13737 enum tree_code code;
13738 union tree_node buf;
/* Guard: BUF must be large enough to hold a copy of any node we scrub.  */
13743 gcc_assert ((sizeof (struct tree_exp) + 5 * sizeof (tree)
13744 <= sizeof (struct tree_function_decl))
13745 && sizeof (struct tree_type) <= sizeof (struct tree_function_decl));
13748 slot = (const void **) htab_find_slot (ht, expr, INSERT);
13752 code = TREE_CODE (expr);
13753 if (TREE_CODE_CLASS (code) == tcc_declaration
13754 && DECL_ASSEMBLER_NAME_SET_P (expr))
13756 /* Allow DECL_ASSEMBLER_NAME to be modified. */
13757 memcpy ((char *) &buf, expr, tree_size (expr))
13758 SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
13759 expr = (tree) &buf;
13761 else if (TREE_CODE_CLASS (code) == tcc_type
13762 && (TYPE_POINTER_TO (expr)
13763 || TYPE_REFERENCE_TO (expr)
13764 || TYPE_CACHED_VALUES_P (expr)
13765 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
13766 || TYPE_NEXT_VARIANT (expr)))
13768 /* Allow these fields to be modified. */
13770 memcpy ((char *) &buf, expr, tree_size (expr));
13771 expr = tmp = (tree) &buf;
13772 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
13773 TYPE_POINTER_TO (tmp) = NULL;
13774 TYPE_REFERENCE_TO (tmp) = NULL;
13775 TYPE_NEXT_VARIANT (tmp) = NULL;
13776 if (TYPE_CACHED_VALUES_P (tmp))
13778 TYPE_CACHED_VALUES_P (tmp) = 0;
13779 TYPE_CACHED_VALUES (tmp) = NULL;
/* Hash the raw bytes of the (possibly scrubbed) node, then recurse into
   every tree it points at.  */
13782 md5_process_bytes (expr, tree_size (expr), ctx);
13783 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
13784 if (TREE_CODE_CLASS (code) != tcc_type
13785 && TREE_CODE_CLASS (code) != tcc_declaration
13786 && code != TREE_LIST
13787 && code != SSA_NAME)
13788 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
13789 switch (TREE_CODE_CLASS (code))
13795 md5_process_bytes (TREE_STRING_POINTER (expr),
13796 TREE_STRING_LENGTH (expr), ctx);
13799 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
13800 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
13803 fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
13809 case tcc_exceptional:
/* TREE_LIST walks its chain iteratively via goto to bound recursion.  */
13813 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
13814 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
13815 expr = TREE_CHAIN (expr);
13816 goto recursive_label;
13819 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
13820 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
13826 case tcc_expression:
13827 case tcc_reference:
13828 case tcc_comparison:
13831 case tcc_statement:
13833 len = TREE_OPERAND_LENGTH (expr);
13834 for (i = 0; i < len; ++i)
13835 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
13837 case tcc_declaration:
13838 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
13839 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
13840 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
13842 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
13843 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
13844 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
13845 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
13846 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
13848 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
13849 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
13851 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
13853 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
13854 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
13855 fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
/* tcc_type (case label elided in this extract): hash the type's own
   referenced trees.  */
13859 if (TREE_CODE (expr) == ENUMERAL_TYPE)
13860 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
13861 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
13862 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
13863 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
13864 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
13865 if (INTEGRAL_TYPE_P (expr)
13866 || SCALAR_FLOAT_TYPE_P (expr))
13868 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
13869 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
13871 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
13872 if (TREE_CODE (expr) == RECORD_TYPE
13873 || TREE_CODE (expr) == UNION_TYPE
13874 || TREE_CODE (expr) == QUAL_UNION_TYPE)
13875 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
13876 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
13883 /* Helper function for outputting the checksum of a tree T. When
13884 debugging with gdb, you can "define mynext" to be "next" followed
13885 by "call debug_fold_checksum (op0)", then just trace down till the
13889 debug_fold_checksum (const_tree t)
13892 unsigned char checksum[16];
13893 struct md5_ctx ctx;
13894 htab_t ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13896 md5_init_ctx (&ctx);
13897 fold_checksum_tree (t, &ctx, ht);
13898 md5_finish_ctx (&ctx, checksum);
/* Note: prints bytes in decimal ("%d "), unlike print_fold_checksum's
   hex output.  */
13901 for (i = 0; i < 16; i++)
13902 fprintf (stderr, "%d ", checksum[i]);
13904 fprintf (stderr, "\n");
13909 /* Fold a unary tree expression with code CODE of type TYPE with an
13910 operand OP0. Return a folded expression if successful. Otherwise,
13911 return a tree expression with code CODE of type TYPE with an
13915 fold_build1_stat (enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
13918 #ifdef ENABLE_FOLD_CHECKING
13919 unsigned char checksum_before[16], checksum_after[16];
13920 struct md5_ctx ctx;
/* Checking builds checksum OP0 around the fold to detect in-place
   mutation of the operand, mirroring the fold() wrapper above.  */
13923 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13924 md5_init_ctx (&ctx);
13925 fold_checksum_tree (op0, &ctx, ht);
13926 md5_finish_ctx (&ctx, checksum_before);
/* Try to fold; fall back to building a plain CODE node when fold_unary
   returns NULL.  */
13930 tem = fold_unary (code, type, op0);
13932 tem = build1_stat (code, type, op0 PASS_MEM_STAT);
13934 #ifdef ENABLE_FOLD_CHECKING
13935 md5_init_ctx (&ctx);
13936 fold_checksum_tree (op0, &ctx, ht);
13937 md5_finish_ctx (&ctx, checksum_after);
13940 if (memcmp (checksum_before, checksum_after, 16))
13941 fold_check_failed (op0, tem);
13946 /* Fold a binary tree expression with code CODE of type TYPE with
13947 operands OP0 and OP1. Return a folded expression if successful.
13948 Otherwise, return a tree expression with code CODE of type TYPE
13949 with operands OP0 and OP1. */
13952 fold_build2_stat (enum tree_code code, tree type, tree op0, tree op1
13956 #ifdef ENABLE_FOLD_CHECKING
13957 unsigned char checksum_before_op0[16],
13958 checksum_before_op1[16],
13959 checksum_after_op0[16],
13960 checksum_after_op1[16];
13961 struct md5_ctx ctx;
/* Checksum both operands before the fold so in-place mutation of
   either one can be detected afterwards.  */
13964 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
13965 md5_init_ctx (&ctx);
13966 fold_checksum_tree (op0, &ctx, ht);
13967 md5_finish_ctx (&ctx, checksum_before_op0);
13970 md5_init_ctx (&ctx);
13971 fold_checksum_tree (op1, &ctx, ht);
13972 md5_finish_ctx (&ctx, checksum_before_op1);
/* Try to fold; otherwise build the plain binary node.  */
13976 tem = fold_binary (code, type, op0, op1);
13978 tem = build2_stat (code, type, op0, op1 PASS_MEM_STAT);
13980 #ifdef ENABLE_FOLD_CHECKING
13981 md5_init_ctx (&ctx);
13982 fold_checksum_tree (op0, &ctx, ht);
13983 md5_finish_ctx (&ctx, checksum_after_op0);
13986 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
13987 fold_check_failed (op0, tem);
13989 md5_init_ctx (&ctx);
13990 fold_checksum_tree (op1, &ctx, ht);
13991 md5_finish_ctx (&ctx, checksum_after_op1);
13994 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
13995 fold_check_failed (op1, tem);
14000 /* Fold a ternary tree expression with code CODE of type TYPE with
14001 operands OP0, OP1, and OP2. Return a folded expression if
14002 successful. Otherwise, return a tree expression with code CODE of
14003 type TYPE with operands OP0, OP1, and OP2. */
14006 fold_build3_stat (enum tree_code code, tree type, tree op0, tree op1, tree op2
14010 #ifdef ENABLE_FOLD_CHECKING
14011 unsigned char checksum_before_op0[16],
14012 checksum_before_op1[16],
14013 checksum_before_op2[16],
14014 checksum_after_op0[16],
14015 checksum_after_op1[16],
14016 checksum_after_op2[16];
14017 struct md5_ctx ctx;
/* Checksum all three operands before the fold (same scheme as
   fold_build1_stat/fold_build2_stat).  */
14020 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14021 md5_init_ctx (&ctx);
14022 fold_checksum_tree (op0, &ctx, ht);
14023 md5_finish_ctx (&ctx, checksum_before_op0);
14026 md5_init_ctx (&ctx);
14027 fold_checksum_tree (op1, &ctx, ht);
14028 md5_finish_ctx (&ctx, checksum_before_op1);
14031 md5_init_ctx (&ctx);
14032 fold_checksum_tree (op2, &ctx, ht);
14033 md5_finish_ctx (&ctx, checksum_before_op2);
/* Variable-arity codes (CALL_EXPR) must not come through here.  */
14037 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
14038 tem = fold_ternary (code, type, op0, op1, op2);
14040 tem = build3_stat (code, type, op0, op1, op2 PASS_MEM_STAT);
14042 #ifdef ENABLE_FOLD_CHECKING
14043 md5_init_ctx (&ctx);
14044 fold_checksum_tree (op0, &ctx, ht);
14045 md5_finish_ctx (&ctx, checksum_after_op0);
14048 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
14049 fold_check_failed (op0, tem);
14051 md5_init_ctx (&ctx);
14052 fold_checksum_tree (op1, &ctx, ht);
14053 md5_finish_ctx (&ctx, checksum_after_op1);
14056 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14057 fold_check_failed (op1, tem);
14059 md5_init_ctx (&ctx);
14060 fold_checksum_tree (op2, &ctx, ht);
14061 md5_finish_ctx (&ctx, checksum_after_op2);
14064 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
14065 fold_check_failed (op2, tem);
14070 /* Fold a CALL_EXPR expression of type TYPE with operands FN and NARGS
14071 arguments in ARGARRAY, and a null static chain.
14072 Return a folded expression if successful. Otherwise, return a CALL_EXPR
14073 of type TYPE from the given operands as constructed by build_call_array. */
14076 fold_build_call_array (tree type, tree fn, int nargs, tree *argarray)
14079 #ifdef ENABLE_FOLD_CHECKING
14080 unsigned char checksum_before_fn[16],
14081 checksum_before_arglist[16],
14082 checksum_after_fn[16],
14083 checksum_after_arglist[16];
14084 struct md5_ctx ctx;
/* Checksum the callee and the whole argument list (one combined digest
   for all arguments) before folding.  */
14088 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14089 md5_init_ctx (&ctx);
14090 fold_checksum_tree (fn, &ctx, ht);
14091 md5_finish_ctx (&ctx, checksum_before_fn);
14094 md5_init_ctx (&ctx);
14095 for (i = 0; i < nargs; i++)
14096 fold_checksum_tree (argarray[i], &ctx, ht);
14097 md5_finish_ctx (&ctx, checksum_before_arglist);
14101 tem = fold_builtin_call_array (type, fn, nargs, argarray);
14103 #ifdef ENABLE_FOLD_CHECKING
14104 md5_init_ctx (&ctx);
14105 fold_checksum_tree (fn, &ctx, ht);
14106 md5_finish_ctx (&ctx, checksum_after_fn);
14109 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
14110 fold_check_failed (fn, tem);
14112 md5_init_ctx (&ctx);
14113 for (i = 0; i < nargs; i++)
14114 fold_checksum_tree (argarray[i], &ctx, ht);
14115 md5_finish_ctx (&ctx, checksum_after_arglist);
/* NULL_TREE here: the failure is in some argument, not a single tree.  */
14118 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
14119 fold_check_failed (NULL_TREE, tem);
14124 /* Perform constant folding and related simplification of initializer
14125 expression EXPR. These behave identically to "fold_buildN" but ignore
14126 potential run-time traps and exceptions that fold must preserve. */
/* START_FOLD_INIT saves and clears the FP/trap flags so folding of
   static initializers is unconstrained; END_FOLD_INIT restores them.
   Each wrapper below brackets one fold_buildN call with this pair.  */
14128 #define START_FOLD_INIT \
14129 int saved_signaling_nans = flag_signaling_nans;\
14130 int saved_trapping_math = flag_trapping_math;\
14131 int saved_rounding_math = flag_rounding_math;\
14132 int saved_trapv = flag_trapv;\
14133 int saved_folding_initializer = folding_initializer;\
14134 flag_signaling_nans = 0;\
14135 flag_trapping_math = 0;\
14136 flag_rounding_math = 0;\
14138 folding_initializer = 1;
14140 #define END_FOLD_INIT \
14141 flag_signaling_nans = saved_signaling_nans;\
14142 flag_trapping_math = saved_trapping_math;\
14143 flag_rounding_math = saved_rounding_math;\
14144 flag_trapv = saved_trapv;\
14145 folding_initializer = saved_folding_initializer;
14148 fold_build1_initializer (enum tree_code code, tree type, tree op)
14153 result = fold_build1 (code, type, op);
14160 fold_build2_initializer (enum tree_code code, tree type, tree op0, tree op1)
14165 result = fold_build2 (code, type, op0, op1);
14172 fold_build3_initializer (enum tree_code code, tree type, tree op0, tree op1,
14178 result = fold_build3 (code, type, op0, op1, op2);
14185 fold_build_call_array_initializer (tree type, tree fn,
14186 int nargs, tree *argarray)
14191 result = fold_build_call_array (type, fn, nargs, argarray);
/* The macros are local to this section; undefine to avoid leakage.  */
14197 #undef START_FOLD_INIT
14198 #undef END_FOLD_INIT
14200 /* Determine if first argument is a multiple of second argument. Return 0 if
14201 it is not, or we cannot easily determined it to be.
14203 An example of the sort of thing we care about (at this point; this routine
14204 could surely be made more general, and expanded to do what the *_DIV_EXPR's
14205 fold cases do now) is discovering that
14207 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14213 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
14215 This code also handles discovering that
14217 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14219 is a multiple of 8 so we don't have to worry about dealing with a
14220 possible remainder.
14222 Note that we *look* inside a SAVE_EXPR only to determine how it was
14223 calculated; it is not safe for fold to do much of anything else with the
14224 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
14225 at run time. For example, the latter example above *cannot* be implemented
14226 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
14227 evaluation time of the original SAVE_EXPR is not necessarily the same at
14228 the time the new expression is evaluated. The only optimization of this
14229 sort that would be valid is changing
14231 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
14235 SAVE_EXPR (I) * SAVE_EXPR (J)
14237 (where the same SAVE_EXPR (J) is used in the original and the
14238 transformed version). */
14241 multiple_of_p (tree type, const_tree top, const_tree bottom)
/* Identical trees are trivially multiples of each other.  */
14243 if (operand_equal_p (top, bottom, 0))
14246 if (TREE_CODE (type) != INTEGER_TYPE)
/* Dispatch on the form of TOP (several case labels are elided in this
   extract).  */
14249 switch (TREE_CODE (top))
14252 /* Bitwise and provides a power of two multiple. If the mask is
14253 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
14254 if (!integer_pow2p (bottom))
/* MULT_EXPR: either factor being a multiple suffices.  */
14259 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
14260 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
/* PLUS/MINUS: both operands must be multiples.  */
14264 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
14265 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
/* LSHIFT_EXPR by a constant: rewrite as a multiplication by 1<<N and
   recurse, provided the shifted constant does not overflow.  */
14268 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
14272 op1 = TREE_OPERAND (top, 1);
14273 /* const_binop may not detect overflow correctly,
14274 so check for it explicitly here. */
14275 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
14276 > TREE_INT_CST_LOW (op1)
14277 && TREE_INT_CST_HIGH (op1) == 0
14278 && 0 != (t1 = fold_convert (type,
14279 const_binop (LSHIFT_EXPR,
14282 && !TREE_OVERFLOW (t1))
14283 return multiple_of_p (type, t1, bottom);
14288 /* Can't handle conversions from non-integral or wider integral type. */
14289 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
14290 || (TYPE_PRECISION (type)
14291 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
14294 /* .. fall through ... */
14297 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
/* INTEGER_CST: decide by exact division; refuse negative operands in
   unsigned types where the sign test is meaningless.  */
14300 if (TREE_CODE (bottom) != INTEGER_CST
14301 || integer_zerop (bottom)
14302 || (TYPE_UNSIGNED (type)
14303 && (tree_int_cst_sgn (top) < 0
14304 || tree_int_cst_sgn (bottom) < 0)))
14306 return integer_zerop (int_const_binop (TRUNC_MOD_EXPR,
14314 /* Return true if CODE or TYPE is known to be non-negative. */
14317 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
14319 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
14320 && truth_value_p (code))
14321 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
14322 have a signed:1 type (where the value is -1 and 0). */
14327 /* Return true if (CODE OP0) is known to be non-negative. If the return
14328 value is based on the assumption that signed overflow is undefined,
14329 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14330 *STRICT_OVERFLOW_P. */
14333 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
14334 bool *strict_overflow_p)
/* Unsigned results are non-negative by construction.  */
14336 if (TYPE_UNSIGNED (type))
/* ABS_EXPR handling (case label elided in this extract):  */
14342 /* We can't return 1 if flag_wrapv is set because
14343 ABS_EXPR<INT_MIN> = INT_MIN. */
14344 if (!INTEGRAL_TYPE_P (type))
14346 if (TYPE_OVERFLOW_UNDEFINED (type))
/* Record that this answer relies on undefined signed overflow.  */
14348 *strict_overflow_p = true;
14353 case NON_LVALUE_EXPR:
14355 case FIX_TRUNC_EXPR:
/* Value-preserving wrappers: non-negative iff the operand is.  */
14356 return tree_expr_nonnegative_warnv_p (op0,
14357 strict_overflow_p);
/* Conversions (case label elided): reason by inner/outer type pair.  */
14361 tree inner_type = TREE_TYPE (op0);
14362 tree outer_type = type;
14364 if (TREE_CODE (outer_type) == REAL_TYPE)
14366 if (TREE_CODE (inner_type) == REAL_TYPE)
14367 return tree_expr_nonnegative_warnv_p (op0,
14368 strict_overflow_p);
14369 if (TREE_CODE (inner_type) == INTEGER_TYPE)
14371 if (TYPE_UNSIGNED (inner_type))
14373 return tree_expr_nonnegative_warnv_p (op0,
14374 strict_overflow_p);
14377 else if (TREE_CODE (outer_type) == INTEGER_TYPE)
14379 if (TREE_CODE (inner_type) == REAL_TYPE)
14380 return tree_expr_nonnegative_warnv_p (op0,
14381 strict_overflow_p);
/* Zero-extension from a strictly narrower unsigned type cannot set
   the sign bit of the result.  */
14382 if (TREE_CODE (inner_type) == INTEGER_TYPE)
14383 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
14384 && TYPE_UNSIGNED (inner_type);
14390 return tree_simple_nonnegative_warnv_p (code, type);
14393 /* We don't know sign of `t', so be conservative and return false. */
/* NOTE(review): extraction has dropped interior lines (case labels for
   PLUS_EXPR/MULT_EXPR, braces, returns — original numbering jumps
   14404 -> 14406, 14432 -> 14438, ...); verify against upstream GCC
   fold-const.c before editing.  */
14397 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
14398 value is based on the assumption that signed overflow is undefined,
14399 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14400 *STRICT_OVERFLOW_P. */
14403 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
14404 tree op1, bool *strict_overflow_p)
/* Unsigned result types are trivially nonnegative.  */
14406 if (TYPE_UNSIGNED (type))
14411 case POINTER_PLUS_EXPR:
14413 if (FLOAT_TYPE_P (type))
14414 return (tree_expr_nonnegative_warnv_p (op0,
14416 && tree_expr_nonnegative_warnv_p (op1,
14417 strict_overflow_p));
14419 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
14420 both unsigned and at least 2 bits shorter than the result. */
14421 if (TREE_CODE (type) == INTEGER_TYPE
14422 && TREE_CODE (op0) == NOP_EXPR
14423 && TREE_CODE (op1) == NOP_EXPR)
14425 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
14426 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
14427 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
14428 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
/* prec is the widest operand precision plus one bit for the carry;
   the sum fits (stays nonnegative) iff that is still narrower than
   the result type.  */
14430 unsigned int prec = MAX (TYPE_PRECISION (inner1),
14431 TYPE_PRECISION (inner2)) + 1;
14432 return prec < TYPE_PRECISION (type);
14438 if (FLOAT_TYPE_P (type))
14440 /* x * x for floating point x is always non-negative. */
14441 if (operand_equal_p (op0, op1, 0))
14443 return (tree_expr_nonnegative_warnv_p (op0,
14445 && tree_expr_nonnegative_warnv_p (op1,
14446 strict_overflow_p));
14449 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
14450 both unsigned and their total bits is shorter than the result. */
14451 if (TREE_CODE (type) == INTEGER_TYPE
14452 && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
14453 && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
14455 tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
14456 ? TREE_TYPE (TREE_OPERAND (op0, 0))
14458 tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
14459 ? TREE_TYPE (TREE_OPERAND (op1, 0))
14462 bool unsigned0 = TYPE_UNSIGNED (inner0);
14463 bool unsigned1 = TYPE_UNSIGNED (inner1);
/* A nonnegative INTEGER_CST operand counts as "unsigned" for this
   analysis even if its type is signed.  */
14465 if (TREE_CODE (op0) == INTEGER_CST)
14466 unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
14468 if (TREE_CODE (op1) == INTEGER_CST)
14469 unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
14471 if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
14472 && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
/* For constants use the minimal precision that holds the value,
   not the (possibly much wider) declared type precision.  */
14474 unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
14475 ? tree_int_cst_min_precision (op0, /*unsignedp=*/true)
14476 : TYPE_PRECISION (inner0);
14478 unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
14479 ? tree_int_cst_min_precision (op1, /*unsignedp=*/true)
14480 : TYPE_PRECISION (inner1);
14482 return precision0 + precision1 < TYPE_PRECISION (type);
/* Presumably the MIN/MAX-style case: nonnegative if either operand is
   (case labels lost in extraction) — TODO confirm against upstream.  */
14489 return (tree_expr_nonnegative_warnv_p (op0,
14491 || tree_expr_nonnegative_warnv_p (op1,
14492 strict_overflow_p));
14498 case TRUNC_DIV_EXPR:
14499 case CEIL_DIV_EXPR:
14500 case FLOOR_DIV_EXPR:
14501 case ROUND_DIV_EXPR:
14502 return (tree_expr_nonnegative_warnv_p (op0,
14504 && tree_expr_nonnegative_warnv_p (op1,
14505 strict_overflow_p));
/* Modulus takes the sign of the dividend, so only op0 matters.  */
14507 case TRUNC_MOD_EXPR:
14508 case CEIL_MOD_EXPR:
14509 case FLOOR_MOD_EXPR:
14510 case ROUND_MOD_EXPR:
14511 return tree_expr_nonnegative_warnv_p (op0,
14512 strict_overflow_p);
14514 return tree_simple_nonnegative_warnv_p (code, type);
14517 /* We don't know sign of `t', so be conservative and return false. */
/* NOTE(review): case labels (INTEGER_CST, REAL_CST, FIXED_CST and the
   three-operand node handled at 14544) were dropped by the extraction —
   original numbering jumps 14532 -> 14535, 14541 -> 14544; verify against
   upstream GCC fold-const.c before editing.  */
14521 /* Return true if T is known to be non-negative. If the return
14522 value is based on the assumption that signed overflow is undefined,
14523 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14524 *STRICT_OVERFLOW_P. */
14527 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
14529 if (TYPE_UNSIGNED (TREE_TYPE (t)))
14532 switch (TREE_CODE (t))
/* Integer constant: sign is directly computable.  */
14535 return tree_int_cst_sgn (t) >= 0;
14538 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
14541 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
/* Presumably COND_EXPR: nonnegative iff both arms are — TODO confirm.  */
14544 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
14546 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
14547 strict_overflow_p));
14549 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
14552 /* We don't know sign of `t', so be conservative and return false. */
/* NOTE(review): extraction dropped interior lines (e.g. the "return true"
   after the always-nonnegative builtin group, switch braces, defaults —
   numbering jumps 14584 -> 14588, 14653 -> 14655, ...); verify against
   upstream GCC fold-const.c before editing.  */
14556 /* Return true if T is known to be non-negative. If the return
14557 value is based on the assumption that signed overflow is undefined,
14558 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14559 *STRICT_OVERFLOW_P. */
14562 tree_call_nonnegative_warnv_p (tree type, tree fndecl,
14563 tree arg0, tree arg1, bool *strict_overflow_p)
14565 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
14566 switch (DECL_FUNCTION_CODE (fndecl))
/* Builtins whose result is always nonnegative regardless of args.  */
14568 CASE_FLT_FN (BUILT_IN_ACOS):
14569 CASE_FLT_FN (BUILT_IN_ACOSH):
14570 CASE_FLT_FN (BUILT_IN_CABS):
14571 CASE_FLT_FN (BUILT_IN_COSH):
14572 CASE_FLT_FN (BUILT_IN_ERFC):
14573 CASE_FLT_FN (BUILT_IN_EXP):
14574 CASE_FLT_FN (BUILT_IN_EXP10):
14575 CASE_FLT_FN (BUILT_IN_EXP2):
14576 CASE_FLT_FN (BUILT_IN_FABS):
14577 CASE_FLT_FN (BUILT_IN_FDIM):
14578 CASE_FLT_FN (BUILT_IN_HYPOT):
14579 CASE_FLT_FN (BUILT_IN_POW10):
14580 CASE_INT_FN (BUILT_IN_FFS):
14581 CASE_INT_FN (BUILT_IN_PARITY):
14582 CASE_INT_FN (BUILT_IN_POPCOUNT):
14583 case BUILT_IN_BSWAP32:
14584 case BUILT_IN_BSWAP64:
14588 CASE_FLT_FN (BUILT_IN_SQRT):
14589 /* sqrt(-0.0) is -0.0. */
14590 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
14592 return tree_expr_nonnegative_warnv_p (arg0,
14593 strict_overflow_p);
/* Builtins that preserve the sign of their first argument.  */
14595 CASE_FLT_FN (BUILT_IN_ASINH):
14596 CASE_FLT_FN (BUILT_IN_ATAN):
14597 CASE_FLT_FN (BUILT_IN_ATANH):
14598 CASE_FLT_FN (BUILT_IN_CBRT):
14599 CASE_FLT_FN (BUILT_IN_CEIL):
14600 CASE_FLT_FN (BUILT_IN_ERF):
14601 CASE_FLT_FN (BUILT_IN_EXPM1):
14602 CASE_FLT_FN (BUILT_IN_FLOOR):
14603 CASE_FLT_FN (BUILT_IN_FMOD):
14604 CASE_FLT_FN (BUILT_IN_FREXP):
14605 CASE_FLT_FN (BUILT_IN_LCEIL):
14606 CASE_FLT_FN (BUILT_IN_LDEXP):
14607 CASE_FLT_FN (BUILT_IN_LFLOOR):
14608 CASE_FLT_FN (BUILT_IN_LLCEIL):
14609 CASE_FLT_FN (BUILT_IN_LLFLOOR):
14610 CASE_FLT_FN (BUILT_IN_LLRINT):
14611 CASE_FLT_FN (BUILT_IN_LLROUND):
14612 CASE_FLT_FN (BUILT_IN_LRINT):
14613 CASE_FLT_FN (BUILT_IN_LROUND):
14614 CASE_FLT_FN (BUILT_IN_MODF):
14615 CASE_FLT_FN (BUILT_IN_NEARBYINT):
14616 CASE_FLT_FN (BUILT_IN_RINT):
14617 CASE_FLT_FN (BUILT_IN_ROUND):
14618 CASE_FLT_FN (BUILT_IN_SCALB):
14619 CASE_FLT_FN (BUILT_IN_SCALBLN):
14620 CASE_FLT_FN (BUILT_IN_SCALBN):
14621 CASE_FLT_FN (BUILT_IN_SIGNBIT):
14622 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
14623 CASE_FLT_FN (BUILT_IN_SINH):
14624 CASE_FLT_FN (BUILT_IN_TANH):
14625 CASE_FLT_FN (BUILT_IN_TRUNC):
14626 /* True if the 1st argument is nonnegative. */
14627 return tree_expr_nonnegative_warnv_p (arg0,
14628 strict_overflow_p);
14630 CASE_FLT_FN (BUILT_IN_FMAX):
14631 /* True if the 1st OR 2nd arguments are nonnegative. */
14632 return (tree_expr_nonnegative_warnv_p (arg0,
14634 || (tree_expr_nonnegative_warnv_p (arg1,
14635 strict_overflow_p)));
14637 CASE_FLT_FN (BUILT_IN_FMIN):
14638 /* True if the 1st AND 2nd arguments are nonnegative. */
14639 return (tree_expr_nonnegative_warnv_p (arg0,
14641 && (tree_expr_nonnegative_warnv_p (arg1,
14642 strict_overflow_p)));
14644 CASE_FLT_FN (BUILT_IN_COPYSIGN):
14645 /* True if the 2nd argument is nonnegative. */
14646 return tree_expr_nonnegative_warnv_p (arg1,
14647 strict_overflow_p);
14649 CASE_FLT_FN (BUILT_IN_POWI):
14650 /* True if the 1st argument is nonnegative or the second
14651 argument is an even integer. */
14652 if (TREE_CODE (arg1) == INTEGER_CST
14653 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
14655 return tree_expr_nonnegative_warnv_p (arg0,
14656 strict_overflow_p);
14658 CASE_FLT_FN (BUILT_IN_POW):
14659 /* True if the 1st argument is nonnegative or the second
14660 argument is an even integer valued real. */
14661 if (TREE_CODE (arg1) == REAL_CST)
14666 c = TREE_REAL_CST (arg1);
14667 n = real_to_integer (&c);
/* Round-trip through an integer to test that the exponent is an
   exact integer value (real_identical check below).  */
14670 REAL_VALUE_TYPE cint;
14671 real_from_integer (&cint, VOIDmode, n,
14672 n < 0 ? -1 : 0, 0);
14673 if (real_identical (&c, &cint))
14677 return tree_expr_nonnegative_warnv_p (arg0,
14678 strict_overflow_p);
14683 return tree_simple_nonnegative_warnv_p (CALL_EXPR,
/* NOTE(review): case labels (TARGET_EXPR, CALL_EXPR, the statement-walking
   loop header at 14720, ...) were dropped by the extraction — numbering
   jumps 14696 -> 14703, 14728 -> 14735; verify against upstream GCC
   fold-const.c before editing.  */
14687 /* Return true if T is known to be non-negative. If the return
14688 value is based on the assumption that signed overflow is undefined,
14689 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14690 *STRICT_OVERFLOW_P. */
14693 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
14695 enum tree_code code = TREE_CODE (t);
14696 if (TYPE_UNSIGNED (TREE_TYPE (t)))
/* TARGET_EXPR handling: analyse the initializer that fills the slot.  */
14703 tree temp = TARGET_EXPR_SLOT (t);
14704 t = TARGET_EXPR_INITIAL (t);
14706 /* If the initializer is non-void, then it's a normal expression
14707 that will be assigned to the slot. */
14708 if (!VOID_TYPE_P (t))
14709 return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);
14711 /* Otherwise, the initializer sets the slot in some way. One common
14712 way is an assignment statement at the end of the initializer. */
14715 if (TREE_CODE (t) == BIND_EXPR)
14716 t = expr_last (BIND_EXPR_BODY (t));
14717 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
14718 || TREE_CODE (t) == TRY_CATCH_EXPR)
14719 t = expr_last (TREE_OPERAND (t, 0));
14720 else if (TREE_CODE (t) == STATEMENT_LIST)
/* Found "temp = <expr>" as the final statement: the slot's value is
   the RHS of that assignment.  */
14725 if (TREE_CODE (t) == MODIFY_EXPR
14726 && TREE_OPERAND (t, 0) == temp)
14727 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
14728 strict_overflow_p);
/* CALL_EXPR handling: delegate to the builtin-aware helper above.  */
14735 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
14736 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
14738 return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
14739 get_callee_fndecl (t),
14742 strict_overflow_p);
14744 case COMPOUND_EXPR:
14746 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
14747 strict_overflow_p);
14749 return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
14750 strict_overflow_p);
14752 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
14753 strict_overflow_p);
14756 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
14760 /* We don't know sign of `t', so be conservative and return false. */
/* NOTE(review): top-level dispatcher over TREE_CODE_CLASS; several class
   and case labels were dropped by the extraction (numbering jumps
   14785 -> 14788, 14796 -> 14804, 14816 -> 14823); verify against
   upstream GCC fold-const.c before editing.  */
14764 /* Return true if T is known to be non-negative. If the return
14765 value is based on the assumption that signed overflow is undefined,
14766 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14767 *STRICT_OVERFLOW_P. */
14770 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
14772 enum tree_code code;
14773 if (t == error_mark_node)
14776 code = TREE_CODE (t);
14777 switch (TREE_CODE_CLASS (code))
14780 case tcc_comparison:
14781 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
14783 TREE_OPERAND (t, 0),
14784 TREE_OPERAND (t, 1),
14785 strict_overflow_p);
14788 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
14790 TREE_OPERAND (t, 0),
14791 strict_overflow_p);
14794 case tcc_declaration:
14795 case tcc_reference:
14796 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
/* Expression-class codes routed by specific TREE_CODE.  */
14804 case TRUTH_AND_EXPR:
14805 case TRUTH_OR_EXPR:
14806 case TRUTH_XOR_EXPR:
14807 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
14809 TREE_OPERAND (t, 0),
14810 TREE_OPERAND (t, 1),
14811 strict_overflow_p);
14812 case TRUTH_NOT_EXPR:
14813 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
14815 TREE_OPERAND (t, 0),
14816 strict_overflow_p);
14823 case WITH_SIZE_EXPR:
14827 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
/* Everything else falls through to the conservative handler.  */
14830 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p);
14834 /* Return true if `t' is known to be non-negative. Handle warnings
14835 about undefined signed overflow. */
14838 tree_expr_nonnegative_p (tree t)
14840 bool ret, strict_overflow_p;
14842 strict_overflow_p = false;
14843 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
14844 if (strict_overflow_p)
14845 fold_overflow_warning (("assuming signed overflow does not occur when "
14846 "determining that expression is always "
14848 WARN_STRICT_OVERFLOW_MISC);
/* NOTE(review): switch skeleton (case labels such as the conversion case
   at 14873, default, returns) partially dropped by the extraction —
   numbering jumps 14869 -> 14873, 14884 -> 14893; verify against upstream
   GCC fold-const.c before editing.  */
14853 /* Return true when (CODE OP0) is an address and is known to be nonzero.
14854 For floating point we further ensure that T is not denormal.
14855 Similar logic is present in nonzero_address in rtlanal.h.
14857 If the return value is based on the assumption that signed overflow
14858 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
14859 change *STRICT_OVERFLOW_P. */
14862 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
14863 bool *strict_overflow_p)
14868 return tree_expr_nonzero_warnv_p (op0,
14869 strict_overflow_p);
/* Conversion: nonzero survives a widening (non-truncating) cast.  */
14873 tree inner_type = TREE_TYPE (op0);
14874 tree outer_type = type;
14876 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
14877 && tree_expr_nonzero_warnv_p (op0,
14878 strict_overflow_p));
14882 case NON_LVALUE_EXPR:
14883 return tree_expr_nonzero_warnv_p (op0,
14884 strict_overflow_p);
/* NOTE(review): many interior lines dropped (PLUS/MULT/MIN/MAX case
   labels, "return true;" bodies, braces — numbering jumps 14905 -> 14907,
   14929 -> 14934, 14941 -> 14948, ...); verify against upstream GCC
   fold-const.c before editing.  */
14893 /* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
14894 For floating point we further ensure that T is not denormal.
14895 Similar logic is present in nonzero_address in rtlanal.h.
14897 If the return value is based on the assumption that signed overflow
14898 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
14899 change *STRICT_OVERFLOW_P. */
14902 tree_binary_nonzero_warnv_p (enum tree_code code,
14905 tree op1, bool *strict_overflow_p)
14907 bool sub_strict_overflow_p;
14910 case POINTER_PLUS_EXPR:
14912 if (TYPE_OVERFLOW_UNDEFINED (type))
14914 /* With the presence of negative values it is hard
14915 to say something. */
14916 sub_strict_overflow_p = false;
14917 if (!tree_expr_nonnegative_warnv_p (op0,
14918 &sub_strict_overflow_p)
14919 || !tree_expr_nonnegative_warnv_p (op1,
14920 &sub_strict_overflow_p))
14922 /* One of operands must be positive and the other non-negative. */
14923 /* We don't set *STRICT_OVERFLOW_P here: even if this value
14924 overflows, on a twos-complement machine the sum of two
14925 nonnegative numbers can never be zero. */
14926 return (tree_expr_nonzero_warnv_p (op0,
14928 || tree_expr_nonzero_warnv_p (op1,
14929 strict_overflow_p));
/* Presumably MULT_EXPR: nonzero * nonzero is nonzero only when signed
   overflow is undefined — TODO confirm (case label lost).  */
14934 if (TYPE_OVERFLOW_UNDEFINED (type))
14936 if (tree_expr_nonzero_warnv_p (op0,
14938 && tree_expr_nonzero_warnv_p (op1,
14939 strict_overflow_p))
14941 *strict_overflow_p = true;
/* Both operands nonzero: propagate any sub-assumption upward.  */
14948 sub_strict_overflow_p = false;
14949 if (tree_expr_nonzero_warnv_p (op0,
14950 &sub_strict_overflow_p)
14951 && tree_expr_nonzero_warnv_p (op1,
14952 &sub_strict_overflow_p))
14954 if (sub_strict_overflow_p)
14955 *strict_overflow_p = true;
/* MIN/MAX-style case (label lost in extraction).  */
14960 sub_strict_overflow_p = false;
14961 if (tree_expr_nonzero_warnv_p (op0,
14962 &sub_strict_overflow_p))
14964 if (sub_strict_overflow_p)
14965 *strict_overflow_p = true;
14967 /* When both operands are nonzero, then MAX must be too. */
14968 if (tree_expr_nonzero_warnv_p (op1,
14969 strict_overflow_p))
14972 /* MAX where operand 0 is positive is positive. */
14973 return tree_expr_nonnegative_warnv_p (op0,
14974 strict_overflow_p);
14976 /* MAX where operand 1 is positive is positive. */
14977 else if (tree_expr_nonzero_warnv_p (op1,
14978 &sub_strict_overflow_p)
14979 && tree_expr_nonnegative_warnv_p (op1,
14980 &sub_strict_overflow_p))
14982 if (sub_strict_overflow_p)
14983 *strict_overflow_p = true;
14989 return (tree_expr_nonzero_warnv_p (op1,
14991 || tree_expr_nonzero_warnv_p (op0,
14992 strict_overflow_p));
/* NOTE(review): case labels (INTEGER_CST, ADDR_EXPR, COND_EXPR) and
   closing braces were dropped by the extraction — numbering jumps
   15016 -> 15020, 15034 -> 15041; verify against upstream GCC
   fold-const.c before editing.  */
15001 /* Return true when T is an address and is known to be nonzero.
15002 For floating point we further ensure that T is not denormal.
15003 Similar logic is present in nonzero_address in rtlanal.h.
15005 If the return value is based on the assumption that signed overflow
15006 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15007 change *STRICT_OVERFLOW_P. */
15010 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
15012 bool sub_strict_overflow_p;
15013 switch (TREE_CODE (t))
15016 return !integer_zerop (t);
/* Address case: look at what the address is taken of.  */
15020 tree base = get_base_address (TREE_OPERAND (t, 0));
15025 /* Weak declarations may link to NULL. Other things may also be NULL
15026 so protect with -fdelete-null-pointer-checks; but not variables
15027 allocated on the stack. */
15029 && (flag_delete_null_pointer_checks
15030 || (TREE_CODE (base) == VAR_DECL && !TREE_STATIC (base)))
15031 return !VAR_OR_FUNCTION_DECL_P (base) || !DECL_WEAK (base);
15033 /* Constants are never weak. */
15034 if (CONSTANT_CLASS_P (base))
/* Presumably COND_EXPR: nonzero iff both arms are — TODO confirm.  */
15041 sub_strict_overflow_p = false;
15042 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
15043 &sub_strict_overflow_p)
15044 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
15045 &sub_strict_overflow_p))
15047 if (sub_strict_overflow_p)
15048 *strict_overflow_p = true;
/* NOTE(review): dispatcher mirroring tree_expr_nonnegative_warnv_p; class
   labels and some case labels dropped by the extraction (numbering jumps
   15078 -> 15081, 15092 -> 15100, 15110 -> 15117, 15131 -> 15134);
   verify against upstream GCC fold-const.c before editing.  */
15059 /* Return true when T is an address and is known to be nonzero.
15060 For floating point we further ensure that T is not denormal.
15061 Similar logic is present in nonzero_address in rtlanal.h.
15063 If the return value is based on the assumption that signed overflow
15064 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15065 change *STRICT_OVERFLOW_P. */
15068 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
15070 tree type = TREE_TYPE (t);
15071 enum tree_code code;
15073 /* Doing something useful for floating point would need more work. */
15074 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
15077 code = TREE_CODE (t);
15078 switch (TREE_CODE_CLASS (code))
15081 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
15082 strict_overflow_p);
15084 case tcc_comparison:
15085 return tree_binary_nonzero_warnv_p (code, type,
15086 TREE_OPERAND (t, 0),
15087 TREE_OPERAND (t, 1),
15088 strict_overflow_p);
15090 case tcc_declaration:
15091 case tcc_reference:
15092 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
15100 case TRUTH_NOT_EXPR:
15101 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
15102 strict_overflow_p);
15104 case TRUTH_AND_EXPR:
15105 case TRUTH_OR_EXPR:
15106 case TRUTH_XOR_EXPR:
15107 return tree_binary_nonzero_warnv_p (code, type,
15108 TREE_OPERAND (t, 0),
15109 TREE_OPERAND (t, 1),
15110 strict_overflow_p);
15117 case WITH_SIZE_EXPR:
15121 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
15123 case COMPOUND_EXPR:
/* Value of a compound/modify-style node comes from operand 1.  */
15126 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
15127 strict_overflow_p);
15130 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
15131 strict_overflow_p);
/* alloca never returns NULL (case label lost in extraction).  */
15134 return alloca_call_p (t);
/* NOTE(review): wrapper parallel to tree_expr_nonnegative_p; extraction
   dropped lines (numbering jumps 15154 -> 15156 inside the warning string
   and the trailing "return ret;") — verify against upstream.  */
15142 /* Return true when T is an address and is known to be nonzero.
15143 Handle warnings about undefined signed overflow. */
15146 tree_expr_nonzero_p (tree t)
15148 bool ret, strict_overflow_p;
15150 strict_overflow_p = false;
15151 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
/* Emit -Wstrict-overflow note if the answer assumed undefined overflow.  */
15152 if (strict_overflow_p)
15153 fold_overflow_warning (("assuming signed overflow does not occur when "
15154 "determining that expression is always "
15156 WARN_STRICT_OVERFLOW_MISC);
/* NOTE(review): numbering jumps 15161 -> 15164 and 15169 -> 15171 — the
   extraction dropped lines (likely the "TYPE" continuation and a brace);
   verify against upstream GCC fold-const.c.  */
15160 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
15161 attempt to fold the expression to a constant without modifying TYPE,
15164 If the expression could be simplified to a constant, then return
15165 the constant. If the expression would not be simplified to a
15166 constant, then return NULL_TREE. */
15169 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
/* Only accept a fully-constant folding result; otherwise report NULL.  */
15171 tree tem = fold_binary (code, type, op0, op1);
15172 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
/* NOTE(review): unary counterpart of fold_binary_to_constant; extraction
   dropped lines (numbering jumps 15176 -> 15179, 15184 -> 15186) —
   verify against upstream GCC fold-const.c.  */
15175 /* Given the components of a unary expression CODE, TYPE and OP0,
15176 attempt to fold the expression to a constant without modifying
15179 If the expression could be simplified to a constant, then return
15180 the constant. If the expression would not be simplified to a
15181 constant, then return NULL_TREE. */
15184 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
/* Only accept a fully-constant folding result; otherwise report NULL.  */
15186 tree tem = fold_unary (code, type, op0);
15187 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
/* NOTE(review): extraction dropped interior lines (declaration of `string'
   and `index', the ARRAY_REF else-branch header, parts of the final
   condition — numbering jumps 15201 -> 15205, 15206 -> 15209,
   15220 -> 15226); verify against upstream GCC fold-const.c.  */
15190 /* If EXP represents referencing an element in a constant string
15191 (either via pointer arithmetic or array indexing), return the
15192 tree representing the value accessed, otherwise return NULL. */
15195 fold_read_from_constant_string (tree exp)
15197 if ((TREE_CODE (exp) == INDIRECT_REF
15198 || TREE_CODE (exp) == ARRAY_REF)
15199 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
15201 tree exp1 = TREE_OPERAND (exp, 0);
15205 if (TREE_CODE (exp) == INDIRECT_REF)
15206 string = string_constant (exp1, &index);
/* ARRAY_REF path: normalize the index against the array's lower bound.  */
15209 tree low_bound = array_ref_low_bound (exp);
15210 index = fold_convert (sizetype, TREE_OPERAND (exp, 1));
15212 /* Optimize the special-case of a zero lower bound.
15214 We convert the low_bound to sizetype to avoid some problems
15215 with constant folding. (E.g. suppose the lower bound is 1,
15216 and its mode is QI. Without the conversion,l (ARRAY
15217 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
15218 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
15219 if (! integer_zerop (low_bound))
15220 index = size_diffop (index, fold_convert (sizetype, low_bound))
/* Final guard: a constant in-bounds index into a STRING_CST whose
   element mode is a 1-byte integer — then read the byte directly.  */
15226 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
15227 && TREE_CODE (string) == STRING_CST
15228 && TREE_CODE (index) == INTEGER_CST
15229 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
15230 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
15232 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
15233 return build_int_cst_type (TREE_TYPE (exp),
15234 (TREE_STRING_POINTER (string)
15235 [TREE_INT_CST_LOW (index)]));
/* NOTE(review): case labels (INTEGER_CST, REAL_CST, FIXED_CST), the &low/
   &high out-arguments of neg_double, and break/return lines were dropped
   by the extraction (numbering jumps 15250 -> 15254, 15261 -> 15266);
   verify against upstream GCC fold-const.c.  */
15240 /* Return the tree for neg (ARG0) when ARG0 is known to be either
15241 an integer constant, real, or fixed-point constant.
15243 TYPE is the type of the result. */
15246 fold_negate_const (tree arg0, tree type)
15248 tree t = NULL_TREE;
15250 switch (TREE_CODE (arg0))
/* Integer case: two-word negate, then refit into TYPE, flagging
   overflow only for signed types.  */
15254 unsigned HOST_WIDE_INT low;
15255 HOST_WIDE_INT high;
15256 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
15257 TREE_INT_CST_HIGH (arg0),
15259 t = force_fit_type_double (type, low, high, 1,
15260 (overflow | TREE_OVERFLOW (arg0))
15261 && !TYPE_UNSIGNED (type));
15266 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
/* Fixed-point case: saturating-aware negate via fixed_arithmetic.  */
15271 FIXED_VALUE_TYPE f;
15272 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
15273 &(TREE_FIXED_CST (arg0)), NULL,
15274 TYPE_SATURATING (type));
15275 t = build_fixed (type, f);
15276 /* Propagate overflow flags. */
15277 if (overflow_p | TREE_OVERFLOW (arg0))
15278 TREE_OVERFLOW (t) = 1;
15283 gcc_unreachable ();
/* NOTE(review): case labels, the "t = arg0"-style early outs, the &low/
   &high out-arguments of neg_double, and the trailing "return t;" were
   dropped by the extraction (numbering jumps 15299 -> 15302,
   15309 -> 15313, 15319 -> 15324); verify against upstream.  */
15289 /* Return the tree for abs (ARG0) when ARG0 is known to be either
15290 an integer constant or real constant.
15292 TYPE is the type of the result. */
15295 fold_abs_const (tree arg0, tree type)
15297 tree t = NULL_TREE;
15299 switch (TREE_CODE (arg0))
15302 /* If the value is unsigned, then the absolute value is
15303 the same as the ordinary value. */
15304 if (TYPE_UNSIGNED (type))
15306 /* Similarly, if the value is non-negative. */
15307 else if (INT_CST_LT (integer_minus_one_node, arg0))
15309 /* If the value is negative, then the absolute value is
/* Negative integer: negate via neg_double and refit (sign bit -1).  */
15313 unsigned HOST_WIDE_INT low;
15314 HOST_WIDE_INT high;
15315 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
15316 TREE_INT_CST_HIGH (arg0),
15318 t = force_fit_type_double (type, low, high, -1,
15319 overflow | TREE_OVERFLOW (arg0));
15324 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
15325 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
15331 gcc_unreachable ();
/* NOTE(review): the trailing "return t;" was dropped by the extraction
   (numbering jumps 15349 -> 15354); otherwise a straight two-word
   bitwise-NOT refitted into TYPE.  */
15337 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
15338 constant. TYPE is the type of the result. */
15341 fold_not_const (tree arg0, tree type)
15343 tree t = NULL_TREE;
15345 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
15347 t = force_fit_type_double (type, ~TREE_INT_CST_LOW (arg0),
15348 ~TREE_INT_CST_HIGH (arg0), 0,
15349 TREE_OVERFLOW (arg0));
/* NOTE(review): the NaN-handling switch lost most of its case labels and
   result assignments (numbering jumps 15373 -> 15383, 15383 -> 15397),
   and the swap/invert bookkeeping lost lines (15445 -> 15450,
   15457 -> 15460); verify against upstream GCC fold-const.c.  */
15354 /* Given CODE, a relational operator, the target type, TYPE and two
15355 constant operands OP0 and OP1, return the result of the
15356 relational operation. If the result is not a compile time
15357 constant, then return NULL_TREE. */
15360 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
15362 int result, invert;
15364 /* From here on, the only cases we handle are when the result is
15365 known to be a constant. */
15367 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
15369 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
15370 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
15372 /* Handle the cases where either operand is a NaN. */
15373 if (real_isnan (c0) || real_isnan (c1))
15383 case UNORDERED_EXPR:
/* Trapping comparisons against NaN cannot be folded away.  */
15397 if (flag_trapping_math)
15403 gcc_unreachable ();
15406 return constant_boolean_node (result, type);
15409 return constant_boolean_node (real_compare (code, c0, c1), type);
15412 if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
15414 const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
15415 const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
15416 return constant_boolean_node (fixed_compare (code, c0, c1), type);
15419 /* Handle equality/inequality of complex constants. */
15420 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
15422 tree rcond = fold_relational_const (code, type,
15423 TREE_REALPART (op0),
15424 TREE_REALPART (op1));
15425 tree icond = fold_relational_const (code, type,
15426 TREE_IMAGPART (op0),
15427 TREE_IMAGPART (op1));
15428 if (code == EQ_EXPR)
15429 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
15430 else if (code == NE_EXPR)
15431 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
15436 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
15438 To compute GT, swap the arguments and do LT.
15439 To compute GE, do LT and invert the result.
15440 To compute LE, swap the arguments, do LT and invert the result.
15441 To compute NE, do EQ and invert the result.
15443 Therefore, the code below must handle only EQ and LT. */
15445 if (code == LE_EXPR || code == GT_EXPR)
15450 code = swap_tree_comparison (code);
15453 /* Note that it is safe to invert for real values here because we
15454 have already handled the one case that it matters. */
15457 if (code == NE_EXPR || code == GE_EXPR)
15460 code = invert_tree_comparison (code, false);
15463 /* Compute a result for LT or EQ if args permit;
15464 Otherwise return T. */
15465 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
15467 if (code == EQ_EXPR)
15468 result = tree_int_cst_equal (op0, op1);
15469 else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
15470 result = INT_CST_LT_UNSIGNED (op0, op1);
15472 result = INT_CST_LT (op0, op1);
15479 return constant_boolean_node (result, type);
/* NOTE(review): extraction dropped lines (end of the header comment at
   15484-15486, "return expr;" early outs after the side-effect checks —
   numbering jumps 15483 -> 15487, 15491 -> 15494, 15502 -> 15504);
   verify against upstream GCC fold-const.c.  */
15482 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
15483 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
15487 fold_build_cleanup_point_expr (tree type, tree expr)
15489 /* If the expression does not have side effects then we don't have to wrap
15490 it with a cleanup point expression. */
15491 if (!TREE_SIDE_EFFECTS (expr))
15494 /* If the expression is a return, check to see if the expression inside the
15495 return has no side effects or the right hand side of the modify expression
15496 inside the return. If either don't have side effects set we don't need to
15497 wrap the expression in a cleanup point expression. Note we don't check the
15498 left hand side of the modify because it should always be a return decl. */
15499 if (TREE_CODE (expr) == RETURN_EXPR)
15501 tree op = TREE_OPERAND (expr, 0);
15502 if (!op || !TREE_SIDE_EFFECTS (op))
15504 op = TREE_OPERAND (op, 1);
15505 if (!TREE_SIDE_EFFECTS (op))
/* Side effects present: wrap so cleanups run at the right point.  */
15509 return build1 (CLEANUP_POINT_EXPR, type, expr);
/* NOTE(review): extraction dropped lines (the STRIP_NOPS/`sub' setup
   before 15523, several returns and closing braces — numbering jumps
   15517 -> 15523, 15537 -> 15543, 15572 -> 15576); verify against
   upstream GCC fold-const.c before editing.  */
15512 /* Given a pointer value OP0 and a type TYPE, return a simplified version
15513 of an indirection through OP0, or NULL_TREE if no simplification is
15517 fold_indirect_ref_1 (tree type, tree op0)
15523 subtype = TREE_TYPE (sub);
15524 if (!POINTER_TYPE_P (subtype))
15527 if (TREE_CODE (sub) == ADDR_EXPR)
15529 tree op = TREE_OPERAND (sub, 0);
15530 tree optype = TREE_TYPE (op);
15531 /* *&CONST_DECL -> to the value of the const decl. */
15532 if (TREE_CODE (op) == CONST_DECL)
15533 return DECL_INITIAL (op);
15534 /* *&p => p; make sure to handle *&"str"[cst] here. */
15535 if (type == optype)
15537 tree fop = fold_read_from_constant_string (op);
15543 /* *(foo *)&fooarray => fooarray[0] */
15544 else if (TREE_CODE (optype) == ARRAY_TYPE
15545 && type == TREE_TYPE (optype))
15547 tree type_domain = TYPE_DOMAIN (optype);
15548 tree min_val = size_zero_node;
15549 if (type_domain && TYPE_MIN_VALUE (type_domain))
15550 min_val = TYPE_MIN_VALUE (type_domain);
15551 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
15553 /* *(foo *)&complexfoo => __real__ complexfoo */
15554 else if (TREE_CODE (optype) == COMPLEX_TYPE
15555 && type == TREE_TYPE (optype))
15556 return fold_build1 (REALPART_EXPR, type, op);
15557 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
15558 else if (TREE_CODE (optype) == VECTOR_TYPE
15559 && type == TREE_TYPE (optype))
15561 tree part_width = TYPE_SIZE (type);
15562 tree index = bitsize_int (0);
15563 return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
15567 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
15568 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
15569 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
15571 tree op00 = TREE_OPERAND (sub, 0);
15572 tree op01 = TREE_OPERAND (sub, 1);
15576 op00type = TREE_TYPE (op00);
15577 if (TREE_CODE (op00) == ADDR_EXPR
15578 && TREE_CODE (TREE_TYPE (op00type)) == VECTOR_TYPE
15579 && type == TREE_TYPE (TREE_TYPE (op00type)))
/* Translate the byte offset into a bit index for BIT_FIELD_REF.  */
15581 HOST_WIDE_INT offset = tree_low_cst (op01, 0);
15582 tree part_width = TYPE_SIZE (type);
15583 unsigned HOST_WIDE_INT part_widthi = tree_low_cst (part_width, 0)/BITS_PER_UNIT;
15584 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
15585 tree index = bitsize_int (indexi);
15587 if (offset/part_widthi <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (op00type)))
15588 return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (op00, 0),
15589 part_width, index);
15595 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
15596 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
15597 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
15599 tree op00 = TREE_OPERAND (sub, 0);
15600 tree op01 = TREE_OPERAND (sub, 1);
15604 op00type = TREE_TYPE (op00);
15605 if (TREE_CODE (op00) == ADDR_EXPR
15606 && TREE_CODE (TREE_TYPE (op00type)) == COMPLEX_TYPE
15607 && type == TREE_TYPE (TREE_TYPE (op00type)))
/* Offset equal to the element size selects the imaginary part.  */
15609 tree size = TYPE_SIZE_UNIT (type);
15610 if (tree_int_cst_equal (size, op01))
15611 return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (op00, 0));
15615 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
15616 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
15617 && type == TREE_TYPE (TREE_TYPE (subtype)))
15620 tree min_val = size_zero_node;
15621 sub = build_fold_indirect_ref (sub);
15622 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
15623 if (type_domain && TYPE_MIN_VALUE (type_domain))
15624 min_val = TYPE_MIN_VALUE (type_domain);
15625 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
/* NOTE(review): the "if (sub) return sub;" between 15638 and 15643 was
   dropped by the extraction; falls back to an explicit INDIRECT_REF when
   fold_indirect_ref_1 finds no simplification.  */
15631 /* Builds an expression for an indirection through T, simplifying some
15635 build_fold_indirect_ref (tree t)
15637 tree type = TREE_TYPE (TREE_TYPE (t));
15638 tree sub = fold_indirect_ref_1 (type, t);
15643 return build1 (INDIRECT_REF, type, t);
/* NOTE(review): only the call into fold_indirect_ref_1 is visible; the
   result handling (lines 15652-15658) was dropped by the extraction.  */
15646 /* Given an INDIRECT_REF T, return either T or a simplified version. */
15649 fold_indirect_ref (tree t)
15651 tree sub = fold_indirect_ref_1 (TREE_TYPE (t), TREE_OPERAND (t, 0));
/* NOTE(review): loop header, several case labels (COND_EXPR at 15696)
   and break/default lines were dropped by the extraction (numbering
   jumps 15667 -> 15670, 15692 -> 15696); verify against upstream.  */
15659 /* Strip non-trapping, non-side-effecting tree nodes from an expression
15660 whose result is ignored. The type of the returned tree need not be
15661 the same as the original expression. */
15664 fold_ignored_result (tree t)
15666 if (!TREE_SIDE_EFFECTS (t))
15667 return integer_zero_node;
15670 switch (TREE_CODE_CLASS (TREE_CODE (t)))
/* Unary-class node: its only interesting content is operand 0.  */
15673 t = TREE_OPERAND (t, 0);
15677 case tcc_comparison:
/* Keep whichever operand still has side effects.  */
15678 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
15679 t = TREE_OPERAND (t, 0);
15680 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
15681 t = TREE_OPERAND (t, 1);
15686 case tcc_expression:
15687 switch (TREE_CODE (t))
15689 case COMPOUND_EXPR:
15690 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
15692 t = TREE_OPERAND (t, 0);
/* Presumably COND_EXPR: cannot strip if either arm has effects —
   TODO confirm (case label lost in extraction).  */
15696 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
15697 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
15699 t = TREE_OPERAND (t, 0);
15712 /* Return the value of VALUE, rounded up to a multiple of DIVISOR.
15713 This can only be applied to objects of a sizetype. */
15716 round_up (tree value, int divisor)
15718 tree div = NULL_TREE;
/* Rounding by a non-positive divisor is meaningless.  */
15720 gcc_assert (divisor > 0);
15724 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
15725 have to do anything. Only do this when we are not given a const,
15726 because in that case, this check is more expensive than just
/* (Comment continues in a listing gap at line 15727.)  */
15728 if (TREE_CODE (value) != INTEGER_CST)
15730 div = build_int_cst (TREE_TYPE (value), divisor);
/* Already a multiple — nothing to do.  NOTE(review): the early-return body
   (line 15733 area) falls in a gap.  */
15732 if (multiple_of_p (TREE_TYPE (value), value, div))
15736 /* If divisor is a power of two, simplify this to bit manipulation. */
/* (divisor & -divisor) isolates the lowest set bit; equality with DIVISOR
   means exactly one bit is set, i.e. a power of two.  */
15737 if (divisor == (divisor & -divisor))
/* Constant operand: do the rounding on the double-word (low/high) pair
   directly rather than emitting size_binop trees.  */
15739 if (TREE_CODE (value) == INTEGER_CST)
15741 unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (value);
15742 unsigned HOST_WIDE_INT high;
/* Low bits already clear → VALUE is already aligned.  NOTE(review): the
   return and the add-then-mask steps around lines 15746-15758 fall in
   listing gaps — confirm the carry handling against the full source.  */
15745 if ((low & (divisor - 1)) == 0)
15748 overflow_p = TREE_OVERFLOW (value);
15749 high = TREE_INT_CST_HIGH (value);
/* Clear the low bits to land on the rounded boundary.  */
15750 low &= ~(divisor - 1);
/* Re-canonicalize the double-word result into VALUE's type, preserving the
   overflow indication.  (Trailing arguments are in a gap.)  */
15759 return force_fit_type_double (TREE_TYPE (value), low, high,
/* Non-constant, power-of-two divisor: (value + divisor-1) & -divisor.  */
15766 t = build_int_cst (TREE_TYPE (value), divisor - 1);
15767 value = size_binop (PLUS_EXPR, value, t);
15768 t = build_int_cst (TREE_TYPE (value), -divisor);
15769 value = size_binop (BIT_AND_EXPR, value, t);
/* General divisor: ceil-divide then multiply back.  */
15775 div = build_int_cst (TREE_TYPE (value), divisor);
15776 value = size_binop (CEIL_DIV_EXPR, value, div);
15777 value = size_binop (MULT_EXPR, value, div);
15783 /* Likewise, but round down. */
15786 round_down (tree value, int divisor)
15788 tree div = NULL_TREE;
/* Rounding by a non-positive divisor is meaningless.  */
15790 gcc_assert (divisor > 0);
15794 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
15795 have to do anything. Only do this when we are not given a const,
15796 because in that case, this check is more expensive than just
/* (Comment continues in a listing gap at line 15797.)  */
15798 if (TREE_CODE (value) != INTEGER_CST)
15800 div = build_int_cst (TREE_TYPE (value), divisor);
/* Already a multiple — nothing to do.  NOTE(review): the early-return body
   falls in a listing gap.  */
15802 if (multiple_of_p (TREE_TYPE (value), value, div))
15806 /* If divisor is a power of two, simplify this to bit manipulation. */
/* (divisor & -divisor) isolates the lowest set bit; equality with DIVISOR
   means exactly one bit is set, i.e. a power of two.  */
15807 if (divisor == (divisor & -divisor))
/* Rounding down to a power of two is simply masking: value & -divisor.  */
15811 t = build_int_cst (TREE_TYPE (value), -divisor);
15812 value = size_binop (BIT_AND_EXPR, value, t);
/* General divisor: floor-divide then multiply back.  */
15817 div = build_int_cst (TREE_TYPE (value), divisor);
15818 value = size_binop (FLOOR_DIV_EXPR, value, div);
15819 value = size_binop (MULT_EXPR, value, div);
15825 /* Returns the pointer to the base of the object addressed by EXP and
15826 extracts the information about the offset of the access, storing it
15827 to PBITPOS and POFFSET. */
15830 split_address_to_core_and_offset (tree exp,
15831 HOST_WIDE_INT *pbitpos, tree *poffset)
15834 enum machine_mode mode;
/* Scratch outputs for get_inner_reference that this caller ignores.  */
15835 int unsignedp, volatilep;
15836 HOST_WIDE_INT bitsize;
/* &expr: peel the reference apart so the base object and the constant /
   variable offsets come back separately.  */
15838 if (TREE_CODE (exp) == ADDR_EXPR)
15840 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
15841 poffset, &mode, &unsignedp, &volatilep,
/* (Trailing argument of the call falls in a listing gap at line 15842.)  */
/* Re-take the address of the stripped base so CORE stays a pointer.  */
15843 core = build_fold_addr_expr (core);
/* Non-ADDR_EXPR fallback (the branch header and CORE/PBITPOS assignments
   around lines 15845-15848 fall in a gap): no separate tree offset.  */
15849 *poffset = NULL_TREE;
15855 /* Returns true if addresses of E1 and E2 differ by a constant, false
15856 otherwise. If they do, E1 - E2 is stored in *DIFF. */
15859 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
15862 HOST_WIDE_INT bitpos1, bitpos2;
15863 tree toffset1, toffset2, tdiff, type;
/* Decompose both addresses into base + bit position + tree offset.  */
15865 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
15866 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
/* Sub-byte positions or different base objects: the difference is not a
   known byte constant.  NOTE(review): the failure return falls in a listing
   gap (line 15871 area).  */
15868 if (bitpos1 % BITS_PER_UNIT != 0
15869 || bitpos2 % BITS_PER_UNIT != 0
15870 || !operand_equal_p (core1, core2, 0))
/* Both have variable offsets: they must fold to a host-representable
   constant difference.  */
15873 if (toffset1 && toffset2)
15875 type = TREE_TYPE (toffset1);
15876 if (type != TREE_TYPE (toffset2))
15877 toffset2 = fold_convert (type, toffset2);
15879 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
/* Difference did not fold to a fitting constant → fail.  (Return statement
   falls in a gap.)  */
15880 if (!cst_and_fits_in_hwi (tdiff))
15883 *diff = int_cst_value (tdiff);
/* Exactly one side has a variable offset: difference cannot be constant.  */
15885 else if (toffset1 || toffset2)
15887 /* If only one of the offsets is non-constant, the difference cannot
/* (Rest of this comment and the neither-offset branch fall in a gap.)  */
/* Fold the constant bit-position delta into the byte difference.  */
15894 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
15898 /* Simplify the floating point expression EXP when the sign of the
15899 result is not significant. Return NULL_TREE if no simplification
15903 fold_strip_sign_ops (tree exp)
15907 switch (TREE_CODE (exp))
15911 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
15912 return arg0 ? arg0 : TREE_OPERAND (exp, 0);
15916 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
15918 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
15919 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
15920 if (arg0 != NULL_TREE || arg1 != NULL_TREE)
15921 return fold_build2 (TREE_CODE (exp), TREE_TYPE (exp),
15922 arg0 ? arg0 : TREE_OPERAND (exp, 0),
15923 arg1 ? arg1 : TREE_OPERAND (exp, 1));
15926 case COMPOUND_EXPR:
15927 arg0 = TREE_OPERAND (exp, 0);
15928 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
15930 return fold_build2 (COMPOUND_EXPR, TREE_TYPE (exp), arg0, arg1);
15934 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
15935 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
15937 return fold_build3 (COND_EXPR, TREE_TYPE (exp), TREE_OPERAND (exp, 0),
15938 arg0 ? arg0 : TREE_OPERAND (exp, 1),
15939 arg1 ? arg1 : TREE_OPERAND (exp, 2));
15944 const enum built_in_function fcode = builtin_mathfn_code (exp);
15947 CASE_FLT_FN (BUILT_IN_COPYSIGN):
15948 /* Strip copysign function call, return the 1st argument. */
15949 arg0 = CALL_EXPR_ARG (exp, 0);
15950 arg1 = CALL_EXPR_ARG (exp, 1);
15951 return omit_one_operand (TREE_TYPE (exp), arg0, arg1);
15954 /* Strip sign ops from the argument of "odd" math functions. */
15955 if (negate_mathfn_p (fcode))
15957 arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
15959 return build_call_expr (get_callee_fndecl (exp), 1, arg0);